Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-31 20:03:37 +00:00)
feat: implement structured logging across server components
- Integrated a centralized logging system using createLogger from @automaker/utils, replacing console.log and console.error statements with logger methods for consistent log formatting and improved readability.
- Updated various modules, including auth, events, and services, to utilize the new logging system, enhancing error tracking and operational visibility.
- Refactored logging messages to provide clearer context and information, ensuring better maintainability and debugging capabilities.

This update significantly enhances the observability of the server components, facilitating easier troubleshooting and monitoring.
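The change follows one pattern in every touched file: create a module-scoped logger named after the component, then replace each console.* call with the matching logger method and drop the hand-written [Component] prefix from the message. A minimal sketch of that usage follows; the real createLogger ships in @automaker/utils and its internals are not part of this diff, so the factory shown here is only an illustrative stand-in.

// Illustrative stand-in for createLogger from @automaker/utils. Only its call
// sites appear in this diff, so the factory body below is an assumption; the
// method names (debug/info/warn/error) and the scope argument match the diff.
type Logger = {
  debug: (...args: unknown[]) => void;
  info: (...args: unknown[]) => void;
  warn: (...args: unknown[]) => void;
  error: (...args: unknown[]) => void;
};

function createLogger(scope: string): Logger {
  const prefix = `[${scope}]`; // assumed: the scope supplies the old hand-written prefix
  return {
    debug: (...args) => console.debug(prefix, ...args),
    info: (...args) => console.info(prefix, ...args),
    warn: (...args) => console.warn(prefix, ...args),
    error: (...args) => console.error(prefix, ...args),
  };
}

// Before (removed in this commit):
//   console.log('[Server] Agent service initialized');
// After (added in this commit):
const logger = createLogger('Server');
logger.info('Agent service initialized');

Each module creates its own scope ('Server', 'Auth', 'Events', and so on), which is why the bracketed prefixes disappear from the individual messages in the hunks below.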
@@ -17,6 +17,9 @@ import dotenv from 'dotenv';

import { createEventEmitter, type EventEmitter } from './lib/events.js';
import { initAllowedPaths } from '@automaker/platform';
import { createLogger } from '@automaker/utils';

const logger = createLogger('Server');
import { authMiddleware, validateWsConnectionToken, checkRawAuthentication } from './lib/auth.js';
import { requireJsonContentType } from './middleware/require-json-content-type.js';
import { createAuthRoutes } from './routes/auth/index.js';
@@ -70,7 +73,7 @@ const ENABLE_REQUEST_LOGGING = process.env.ENABLE_REQUEST_LOGGING !== 'false'; /
const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;

if (!hasAnthropicKey) {
console.warn(`
logger.warn(`
╔═══════════════════════════════════════════════════════════════════════╗
║ ⚠️ WARNING: No Claude authentication configured ║
║ ║
@@ -83,7 +86,7 @@ if (!hasAnthropicKey) {
╚═══════════════════════════════════════════════════════════════════════╝
`);
} else {
console.log('[Server] ✓ ANTHROPIC_API_KEY detected (API key auth)');
logger.info('✓ ANTHROPIC_API_KEY detected (API key auth)');
}

// Initialize security
@@ -166,7 +169,7 @@ const mcpTestService = new MCPTestService(settingsService);
// Initialize services
(async () => {
await agentService.initialize();
console.log('[Server] Agent service initialized');
logger.info('Agent service initialized');
})();

// Run stale validation cleanup every hour to prevent memory leaks from crashed validations
@@ -174,7 +177,7 @@ const VALIDATION_CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
setInterval(() => {
const cleaned = cleanupStaleValidations();
if (cleaned > 0) {
console.log(`[Server] Cleaned up ${cleaned} stale validation entries`);
logger.info(`Cleaned up ${cleaned} stale validation entries`);
}
}, VALIDATION_CLEANUP_INTERVAL_MS);

@@ -267,7 +270,7 @@ server.on('upgrade', (request, socket, head) => {

// Authenticate all WebSocket connections
if (!authenticateWebSocket(request)) {
console.log('[WebSocket] Authentication failed, rejecting connection');
logger.info('Authentication failed, rejecting connection');
socket.write('HTTP/1.1 401 Unauthorized\r\n\r\n');
socket.destroy();
return;
@@ -288,11 +291,11 @@ server.on('upgrade', (request, socket, head) => {

// Events WebSocket connection handler
wss.on('connection', (ws: WebSocket) => {
console.log('[WebSocket] Client connected, ready state:', ws.readyState);
logger.info('Client connected, ready state:', ws.readyState);

// Subscribe to all events and forward to this client
const unsubscribe = events.subscribe((type, payload) => {
console.log('[WebSocket] Event received:', {
logger.info('Event received:', {
type,
hasPayload: !!payload,
payloadKeys: payload ? Object.keys(payload) : [],
@@ -302,27 +305,24 @@ wss.on('connection', (ws: WebSocket) => {

if (ws.readyState === WebSocket.OPEN) {
const message = JSON.stringify({ type, payload });
console.log('[WebSocket] Sending event to client:', {
logger.info('Sending event to client:', {
type,
messageLength: message.length,
sessionId: (payload as any)?.sessionId,
});
ws.send(message);
} else {
console.log(
'[WebSocket] WARNING: Cannot send event, WebSocket not open. ReadyState:',
ws.readyState
);
logger.info('WARNING: Cannot send event, WebSocket not open. ReadyState:', ws.readyState);
}
});

ws.on('close', () => {
console.log('[WebSocket] Client disconnected');
logger.info('Client disconnected');
unsubscribe();
});

ws.on('error', (error) => {
console.error('[WebSocket] ERROR:', error);
logger.error('ERROR:', error);
unsubscribe();
});
});
@@ -349,24 +349,24 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
const sessionId = url.searchParams.get('sessionId');
const token = url.searchParams.get('token');

console.log(`[Terminal WS] Connection attempt for session: ${sessionId}`);
logger.info(`Connection attempt for session: ${sessionId}`);

// Check if terminal is enabled
if (!isTerminalEnabled()) {
console.log('[Terminal WS] Terminal is disabled');
logger.info('Terminal is disabled');
ws.close(4003, 'Terminal access is disabled');
return;
}

// Validate token if password is required
if (isTerminalPasswordRequired() && !validateTerminalToken(token || undefined)) {
console.log('[Terminal WS] Invalid or missing token');
logger.info('Invalid or missing token');
ws.close(4001, 'Authentication required');
return;
}

if (!sessionId) {
console.log('[Terminal WS] No session ID provided');
logger.info('No session ID provided');
ws.close(4002, 'Session ID required');
return;
}
@@ -374,12 +374,12 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
// Check if session exists
const session = terminalService.getSession(sessionId);
if (!session) {
console.log(`[Terminal WS] Session ${sessionId} not found`);
logger.info(`Session ${sessionId} not found`);
ws.close(4004, 'Session not found');
return;
}

console.log(`[Terminal WS] Client connected to session ${sessionId}`);
logger.info(`Client connected to session ${sessionId}`);

// Track this connection
if (!terminalConnections.has(sessionId)) {
@@ -495,15 +495,15 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
break;

default:
console.warn(`[Terminal WS] Unknown message type: ${msg.type}`);
logger.warn(`Unknown message type: ${msg.type}`);
}
} catch (error) {
console.error('[Terminal WS] Error processing message:', error);
logger.error('Error processing message:', error);
}
});

ws.on('close', () => {
console.log(`[Terminal WS] Client disconnected from session ${sessionId}`);
logger.info(`Client disconnected from session ${sessionId}`);
unsubscribeData();
unsubscribeExit();

@@ -522,7 +522,7 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
});

ws.on('error', (error) => {
console.error(`[Terminal WS] Error on session ${sessionId}:`, error);
logger.error(`Error on session ${sessionId}:`, error);
unsubscribeData();
unsubscribeExit();
});
@@ -537,7 +537,7 @@ const startServer = (port: number) => {
: 'enabled'
: 'disabled';
const portStr = port.toString().padEnd(4);
console.log(`
logger.info(`
╔═══════════════════════════════════════════════════════╗
║ Automaker Backend Server ║
╠═══════════════════════════════════════════════════════╣
@@ -552,7 +552,7 @@ const startServer = (port: number) => {

server.on('error', (error: NodeJS.ErrnoException) => {
if (error.code === 'EADDRINUSE') {
console.error(`
logger.error(`
╔═══════════════════════════════════════════════════════╗
║ ❌ ERROR: Port ${port} is already in use ║
╠═══════════════════════════════════════════════════════╣
@@ -572,7 +572,7 @@ const startServer = (port: number) => {
`);
process.exit(1);
} else {
console.error('[Server] Error starting server:', error);
logger.error('Error starting server:', error);
process.exit(1);
}
});
@@ -582,19 +582,19 @@ startServer(PORT);

// Graceful shutdown
process.on('SIGTERM', () => {
console.log('SIGTERM received, shutting down...');
logger.info('SIGTERM received, shutting down...');
terminalService.cleanup();
server.close(() => {
console.log('Server closed');
logger.info('Server closed');
process.exit(0);
});
});

process.on('SIGINT', () => {
console.log('SIGINT received, shutting down...');
logger.info('SIGINT received, shutting down...');
terminalService.cleanup();
server.close(() => {
console.log('Server closed');
logger.info('Server closed');
process.exit(0);
});
});

@@ -12,6 +12,9 @@ import type { Request, Response, NextFunction } from 'express';
|
||||
import crypto from 'crypto';
|
||||
import path from 'path';
|
||||
import * as secureFs from './secure-fs.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('Auth');
|
||||
|
||||
const DATA_DIR = process.env.DATA_DIR || './data';
|
||||
const API_KEY_FILE = path.join(DATA_DIR, '.api-key');
|
||||
@@ -61,11 +64,11 @@ function loadSessions(): void {
|
||||
}
|
||||
|
||||
if (loadedCount > 0 || expiredCount > 0) {
|
||||
console.log(`[Auth] Loaded ${loadedCount} sessions (${expiredCount} expired)`);
|
||||
logger.info(`Loaded ${loadedCount} sessions (${expiredCount} expired)`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('[Auth] Error loading sessions:', error);
|
||||
logger.warn('Error loading sessions:', error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,7 +84,7 @@ async function saveSessions(): Promise<void> {
|
||||
mode: 0o600,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Auth] Failed to save sessions:', error);
|
||||
logger.error('Failed to save sessions:', error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -95,7 +98,7 @@ loadSessions();
|
||||
function ensureApiKey(): string {
|
||||
// First check environment variable (Electron passes it this way)
|
||||
if (process.env.AUTOMAKER_API_KEY) {
|
||||
console.log('[Auth] Using API key from environment variable');
|
||||
logger.info('Using API key from environment variable');
|
||||
return process.env.AUTOMAKER_API_KEY;
|
||||
}
|
||||
|
||||
@@ -104,12 +107,12 @@ function ensureApiKey(): string {
|
||||
if (secureFs.existsSync(API_KEY_FILE)) {
|
||||
const key = (secureFs.readFileSync(API_KEY_FILE, 'utf-8') as string).trim();
|
||||
if (key) {
|
||||
console.log('[Auth] Loaded API key from file');
|
||||
logger.info('Loaded API key from file');
|
||||
return key;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('[Auth] Error reading API key file:', error);
|
||||
logger.warn('Error reading API key file:', error);
|
||||
}
|
||||
|
||||
// Generate new key
|
||||
@@ -117,9 +120,9 @@ function ensureApiKey(): string {
|
||||
try {
|
||||
secureFs.mkdirSync(path.dirname(API_KEY_FILE), { recursive: true });
|
||||
secureFs.writeFileSync(API_KEY_FILE, newKey, { encoding: 'utf-8', mode: 0o600 });
|
||||
console.log('[Auth] Generated new API key');
|
||||
logger.info('Generated new API key');
|
||||
} catch (error) {
|
||||
console.error('[Auth] Failed to save API key:', error);
|
||||
logger.error('Failed to save API key:', error);
|
||||
}
|
||||
return newKey;
|
||||
}
|
||||
@@ -129,7 +132,7 @@ const API_KEY = ensureApiKey();
|
||||
|
||||
// Print API key to console for web mode users (unless suppressed for production logging)
|
||||
if (process.env.AUTOMAKER_HIDE_API_KEY !== 'true') {
|
||||
console.log(`
|
||||
logger.info(`
|
||||
╔═══════════════════════════════════════════════════════════════════════╗
|
||||
║ 🔐 API Key for Web Mode Authentication ║
|
||||
╠═══════════════════════════════════════════════════════════════════════╣
|
||||
@@ -142,7 +145,7 @@ if (process.env.AUTOMAKER_HIDE_API_KEY !== 'true') {
|
||||
╚═══════════════════════════════════════════════════════════════════════╝
|
||||
`);
|
||||
} else {
|
||||
console.log('[Auth] API key banner hidden (AUTOMAKER_HIDE_API_KEY=true)');
|
||||
logger.info('API key banner hidden (AUTOMAKER_HIDE_API_KEY=true)');
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -177,7 +180,7 @@ export function validateSession(token: string): boolean {
|
||||
if (Date.now() > session.expiresAt) {
|
||||
validSessions.delete(token);
|
||||
// Fire-and-forget: persist removal asynchronously
|
||||
saveSessions().catch((err) => console.error('[Auth] Error saving sessions:', err));
|
||||
saveSessions().catch((err) => logger.error('Error saving sessions:', err));
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
*/
|
||||
|
||||
import type { EventType, EventCallback } from '@automaker/types';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('Events');
|
||||
|
||||
// Re-export event types from shared package
|
||||
export type { EventType, EventCallback };
|
||||
@@ -21,7 +24,7 @@ export function createEventEmitter(): EventEmitter {
|
||||
try {
|
||||
callback(type, payload);
|
||||
} catch (error) {
|
||||
console.error('Error in event subscriber:', error);
|
||||
logger.error('Error in event subscriber:', error);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -19,6 +19,9 @@ import type { Options } from '@anthropic-ai/claude-agent-sdk';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { resolveModelString } from '@automaker/model-resolver';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('SdkOptions');
|
||||
import {
|
||||
DEFAULT_MODELS,
|
||||
CLAUDE_MODEL_MAP,
|
||||
@@ -332,8 +335,8 @@ function buildMcpOptions(config: CreateSdkOptionsConfig): McpPermissionOptions {
|
||||
*/
|
||||
function buildThinkingOptions(thinkingLevel?: ThinkingLevel): Partial<Options> {
|
||||
const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel);
|
||||
console.log(
|
||||
`[SDK-Options] buildThinkingOptions: thinkingLevel="${thinkingLevel}" -> maxThinkingTokens=${maxThinkingTokens}`
|
||||
logger.debug(
|
||||
`buildThinkingOptions: thinkingLevel="${thinkingLevel}" -> maxThinkingTokens=${maxThinkingTokens}`
|
||||
);
|
||||
return maxThinkingTokens ? { maxThinkingTokens } : {};
|
||||
}
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, join } from 'path';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('Version');
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
@@ -27,7 +30,7 @@ export function getVersion(): string {
|
||||
cachedVersion = version;
|
||||
return version;
|
||||
} catch (error) {
|
||||
console.warn('Failed to read version from package.json:', error);
|
||||
logger.warn('Failed to read version from package.json:', error);
|
||||
return '0.0.0';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,9 @@
|
||||
|
||||
import { query, type Options } from '@anthropic-ai/claude-agent-sdk';
|
||||
import { BaseProvider } from './base-provider.js';
|
||||
import { classifyError, getUserFriendlyErrorMessage } from '@automaker/utils';
|
||||
import { classifyError, getUserFriendlyErrorMessage, createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('ClaudeProvider');
|
||||
import { getThinkingTokenBudget } from '@automaker/types';
|
||||
import type {
|
||||
ExecuteOptions,
|
||||
@@ -147,7 +149,7 @@ export class ClaudeProvider extends BaseProvider {
|
||||
const errorInfo = classifyError(error);
|
||||
const userMessage = getUserFriendlyErrorMessage(error);
|
||||
|
||||
console.error('[ClaudeProvider] executeQuery() error during execution:', {
|
||||
logger.error('executeQuery() error during execution:', {
|
||||
type: errorInfo.type,
|
||||
message: errorInfo.message,
|
||||
isRateLimit: errorInfo.isRateLimit,
|
||||
|
||||
@@ -22,7 +22,7 @@ export function createSendHandler(agentService: AgentService) {
|
||||
thinkingLevel?: ThinkingLevel;
|
||||
};
|
||||
|
||||
console.log('[Send Handler] Received request:', {
|
||||
logger.debug('Received request:', {
|
||||
sessionId,
|
||||
messageLength: message?.length,
|
||||
workingDirectory,
|
||||
@@ -32,7 +32,7 @@ export function createSendHandler(agentService: AgentService) {
|
||||
});
|
||||
|
||||
if (!sessionId || !message) {
|
||||
console.log('[Send Handler] ERROR: Validation failed - missing sessionId or message');
|
||||
logger.warn('Validation failed - missing sessionId or message');
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'sessionId and message are required',
|
||||
@@ -40,7 +40,7 @@ export function createSendHandler(agentService: AgentService) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('[Send Handler] Validation passed, calling agentService.sendMessage()');
|
||||
logger.debug('Validation passed, calling agentService.sendMessage()');
|
||||
|
||||
// Start the message processing (don't await - it streams via WebSocket)
|
||||
agentService
|
||||
@@ -53,16 +53,16 @@ export function createSendHandler(agentService: AgentService) {
|
||||
thinkingLevel,
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('[Send Handler] ERROR: Background error in sendMessage():', error);
|
||||
logger.error('Background error in sendMessage():', error);
|
||||
logError(error, 'Send message failed (background)');
|
||||
});
|
||||
|
||||
console.log('[Send Handler] Returning immediate response to client');
|
||||
logger.debug('Returning immediate response to client');
|
||||
|
||||
// Return immediately - responses come via WebSocket
|
||||
res.json({ success: true, message: 'Message sent' });
|
||||
} catch (error) {
|
||||
console.error('[Send Handler] ERROR: Synchronous error:', error);
|
||||
logger.error('Synchronous error:', error);
|
||||
logError(error, 'Send message failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ export function createResumeFeatureHandler(autoModeService: AutoModeService) {
|
||||
autoModeService
|
||||
.resumeFeature(projectPath, featureId, useWorktrees ?? false)
|
||||
.catch((error) => {
|
||||
logger.error(`[AutoMode] Resume feature ${featureId} error:`, error);
|
||||
logger.error(`Resume feature ${featureId} error:`, error);
|
||||
});
|
||||
|
||||
res.json({ success: true });
|
||||
|
||||
@@ -31,7 +31,7 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {
|
||||
autoModeService
|
||||
.executeFeature(projectPath, featureId, useWorktrees ?? false, false)
|
||||
.catch((error) => {
|
||||
logger.error(`[AutoMode] Feature ${featureId} error:`, error);
|
||||
logger.error(`Feature ${featureId} error:`, error);
|
||||
})
|
||||
.finally(() => {
|
||||
// Release the starting slot when execution completes (success or error)
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { ClaudeUsageService } from '../../services/claude-usage-service.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('Claude');
|
||||
|
||||
export function createClaudeRoutes(service: ClaudeUsageService): Router {
|
||||
const router = Router();
|
||||
@@ -33,7 +36,7 @@ export function createClaudeRoutes(service: ClaudeUsageService): Router {
|
||||
message: 'The Claude CLI took too long to respond',
|
||||
});
|
||||
} else {
|
||||
console.error('Error fetching usage:', error);
|
||||
logger.error('Error fetching usage:', error);
|
||||
res.status(500).json({ error: message });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,7 +97,7 @@ export function createDescribeFileHandler(
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`[DescribeFile] Starting description generation for: ${filePath}`);
|
||||
logger.info(`Starting description generation for: ${filePath}`);
|
||||
|
||||
// Resolve the path for logging and cwd derivation
|
||||
const resolvedPath = secureFs.resolvePath(filePath);
|
||||
@@ -112,7 +112,7 @@ export function createDescribeFileHandler(
|
||||
} catch (readError) {
|
||||
// Path not allowed - return 403 Forbidden
|
||||
if (readError instanceof PathNotAllowedError) {
|
||||
logger.warn(`[DescribeFile] Path not allowed: ${filePath}`);
|
||||
logger.warn(`Path not allowed: ${filePath}`);
|
||||
const response: DescribeFileErrorResponse = {
|
||||
success: false,
|
||||
error: 'File path is not within the allowed directory',
|
||||
@@ -128,7 +128,7 @@ export function createDescribeFileHandler(
|
||||
'code' in readError &&
|
||||
readError.code === 'ENOENT'
|
||||
) {
|
||||
logger.warn(`[DescribeFile] File not found: ${resolvedPath}`);
|
||||
logger.warn(`File not found: ${resolvedPath}`);
|
||||
const response: DescribeFileErrorResponse = {
|
||||
success: false,
|
||||
error: `File not found: ${filePath}`,
|
||||
@@ -138,7 +138,7 @@ export function createDescribeFileHandler(
|
||||
}
|
||||
|
||||
const errorMessage = readError instanceof Error ? readError.message : 'Unknown error';
|
||||
logger.error(`[DescribeFile] Failed to read file: ${errorMessage}`);
|
||||
logger.error(`Failed to read file: ${errorMessage}`);
|
||||
const response: DescribeFileErrorResponse = {
|
||||
success: false,
|
||||
error: `Failed to read file: ${errorMessage}`,
|
||||
@@ -182,23 +182,20 @@ File: ${fileName}${truncated ? ' (truncated)' : ''}`;
|
||||
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
logger.info(
|
||||
`[DescribeFile] Raw phaseModels from settings:`,
|
||||
JSON.stringify(settings?.phaseModels, null, 2)
|
||||
);
|
||||
logger.info(`Raw phaseModels from settings:`, JSON.stringify(settings?.phaseModels, null, 2));
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.fileDescriptionModel || DEFAULT_PHASE_MODELS.fileDescriptionModel;
|
||||
logger.info(`[DescribeFile] fileDescriptionModel entry:`, JSON.stringify(phaseModelEntry));
|
||||
logger.info(`fileDescriptionModel entry:`, JSON.stringify(phaseModelEntry));
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info(`[DescribeFile] Resolved model: ${model}, thinkingLevel: ${thinkingLevel}`);
|
||||
logger.info(`Resolved model: ${model}, thinkingLevel: ${thinkingLevel}`);
|
||||
|
||||
let description: string;
|
||||
|
||||
// Route to appropriate provider based on model type
|
||||
if (isCursorModel(model)) {
|
||||
// Use Cursor provider for Cursor models
|
||||
logger.info(`[DescribeFile] Using Cursor provider for model: ${model}`);
|
||||
logger.info(`Using Cursor provider for model: ${model}`);
|
||||
|
||||
const provider = ProviderFactory.getProviderForModel(model);
|
||||
|
||||
@@ -225,7 +222,7 @@ File: ${fileName}${truncated ? ' (truncated)' : ''}`;
|
||||
description = responseText;
|
||||
} else {
|
||||
// Use Claude SDK for Claude models
|
||||
logger.info(`[DescribeFile] Using Claude SDK for model: ${model}`);
|
||||
logger.info(`Using Claude SDK for model: ${model}`);
|
||||
|
||||
// Use centralized SDK options with proper cwd validation
|
||||
// No tools needed since we're passing file content directly
|
||||
|
||||
@@ -4,6 +4,9 @@
|
||||
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('GitHub');
|
||||
|
||||
export const execAsync = promisify(exec);
|
||||
|
||||
@@ -31,5 +34,5 @@ export function getErrorMessage(error: unknown): string {
|
||||
}
|
||||
|
||||
export function logError(error: unknown, context: string): void {
|
||||
console.error(`[GitHub] ${context}:`, error);
|
||||
logger.error(`${context}:`, error);
|
||||
}
|
||||
|
||||
@@ -6,6 +6,9 @@ import { spawn } from 'child_process';
|
||||
import type { Request, Response } from 'express';
|
||||
import { execAsync, execEnv, getErrorMessage, logError } from './common.js';
|
||||
import { checkGitHubRemote } from './check-github-remote.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('ListIssues');
|
||||
|
||||
export interface GitHubLabel {
|
||||
name: string;
|
||||
@@ -179,7 +182,7 @@ async function fetchLinkedPRs(
|
||||
}
|
||||
} catch (error) {
|
||||
// If GraphQL fails, continue without linked PRs
|
||||
console.warn(
|
||||
logger.warn(
|
||||
'Failed to fetch linked PRs via GraphQL:',
|
||||
error instanceof Error ? error.message : error
|
||||
);
|
||||
|
||||
@@ -2,6 +2,10 @@
|
||||
* Common utilities for MCP routes
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('MCP');
|
||||
|
||||
/**
|
||||
* Extract error message from unknown error
|
||||
*/
|
||||
@@ -16,5 +20,5 @@ export function getErrorMessage(error: unknown): string {
|
||||
* Log error with prefix
|
||||
*/
|
||||
export function logError(error: unknown, message: string): void {
|
||||
console.error(`[MCP] ${message}:`, error);
|
||||
logger.error(`${message}:`, error);
|
||||
}
|
||||
|
||||
@@ -8,6 +8,9 @@
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
import path from 'path';
|
||||
import { getBranchTrackingPath, ensureAutomakerDir } from '@automaker/platform';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('BranchTracking');
|
||||
|
||||
export interface TrackedBranch {
|
||||
name: string;
|
||||
@@ -32,7 +35,7 @@ export async function getTrackedBranches(projectPath: string): Promise<TrackedBr
|
||||
if (error.code === 'ENOENT') {
|
||||
return [];
|
||||
}
|
||||
console.warn('[branch-tracking] Failed to read tracked branches:', error);
|
||||
logger.warn('Failed to read tracked branches:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
@@ -65,7 +68,7 @@ export async function trackBranch(projectPath: string, branchName: string): Prom
|
||||
});
|
||||
|
||||
await saveTrackedBranches(projectPath, branches);
|
||||
console.log(`[branch-tracking] Now tracking branch: ${branchName}`);
|
||||
logger.info(`Now tracking branch: ${branchName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -77,7 +80,7 @@ export async function untrackBranch(projectPath: string, branchName: string): Pr
|
||||
|
||||
if (filtered.length !== branches.length) {
|
||||
await saveTrackedBranches(projectPath, filtered);
|
||||
console.log(`[branch-tracking] Stopped tracking branch: ${branchName}`);
|
||||
logger.info(`Stopped tracking branch: ${branchName}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -12,6 +12,9 @@ import {
|
||||
isGhCliAvailable,
|
||||
} from '../common.js';
|
||||
import { updateWorktreePRInfo } from '../../../lib/worktree-metadata.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('CreatePR');
|
||||
|
||||
export function createCreatePRHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -56,15 +59,15 @@ export function createCreatePRHandler() {
|
||||
}
|
||||
|
||||
// Check for uncommitted changes
|
||||
console.log(`[CreatePR] Checking for uncommitted changes in: ${worktreePath}`);
|
||||
logger.debug(`Checking for uncommitted changes in: ${worktreePath}`);
|
||||
const { stdout: status } = await execAsync('git status --porcelain', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
const hasChanges = status.trim().length > 0;
|
||||
console.log(`[CreatePR] Has uncommitted changes: ${hasChanges}`);
|
||||
logger.debug(`Has uncommitted changes: ${hasChanges}`);
|
||||
if (hasChanges) {
|
||||
console.log(`[CreatePR] Changed files:\n${status}`);
|
||||
logger.debug(`Changed files:\n${status}`);
|
||||
}
|
||||
|
||||
// If there are changes, commit them
|
||||
@@ -72,15 +75,15 @@ export function createCreatePRHandler() {
|
||||
let commitError: string | null = null;
|
||||
if (hasChanges) {
|
||||
const message = commitMessage || `Changes from ${branchName}`;
|
||||
console.log(`[CreatePR] Committing changes with message: ${message}`);
|
||||
logger.debug(`Committing changes with message: ${message}`);
|
||||
|
||||
try {
|
||||
// Stage all changes
|
||||
console.log(`[CreatePR] Running: git add -A`);
|
||||
logger.debug(`Running: git add -A`);
|
||||
await execAsync('git add -A', { cwd: worktreePath, env: execEnv });
|
||||
|
||||
// Create commit
|
||||
console.log(`[CreatePR] Running: git commit`);
|
||||
logger.debug(`Running: git commit`);
|
||||
await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
@@ -92,11 +95,11 @@ export function createCreatePRHandler() {
|
||||
env: execEnv,
|
||||
});
|
||||
commitHash = hashOutput.trim().substring(0, 8);
|
||||
console.log(`[CreatePR] Commit successful: ${commitHash}`);
|
||||
logger.info(`Commit successful: ${commitHash}`);
|
||||
} catch (commitErr: unknown) {
|
||||
const err = commitErr as { stderr?: string; message?: string };
|
||||
commitError = err.stderr || err.message || 'Commit failed';
|
||||
console.error(`[CreatePR] Commit failed: ${commitError}`);
|
||||
logger.error(`Commit failed: ${commitError}`);
|
||||
|
||||
// Return error immediately - don't proceed with push/PR if commit fails
|
||||
res.status(500).json({
|
||||
@@ -126,7 +129,7 @@ export function createCreatePRHandler() {
|
||||
// Capture push error for reporting
|
||||
const err = error2 as { stderr?: string; message?: string };
|
||||
pushError = err.stderr || err.message || 'Push failed';
|
||||
console.error('[CreatePR] Push failed:', pushError);
|
||||
logger.error('Push failed:', pushError);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -246,26 +249,22 @@ export function createCreatePRHandler() {
|
||||
const headRef = upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
|
||||
const repoArg = upstreamRepo ? ` --repo "${upstreamRepo}"` : '';
|
||||
|
||||
console.log(
|
||||
`[CreatePR] Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`
|
||||
);
|
||||
logger.debug(`Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`);
|
||||
try {
|
||||
const listCmd = `gh pr list${repoArg} --head "${headRef}" --json number,title,url,state --limit 1`;
|
||||
console.log(`[CreatePR] Running: ${listCmd}`);
|
||||
logger.debug(`Running: ${listCmd}`);
|
||||
const { stdout: existingPrOutput } = await execAsync(listCmd, {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
console.log(`[CreatePR] gh pr list output: ${existingPrOutput}`);
|
||||
logger.debug(`gh pr list output: ${existingPrOutput}`);
|
||||
|
||||
const existingPrs = JSON.parse(existingPrOutput);
|
||||
|
||||
if (Array.isArray(existingPrs) && existingPrs.length > 0) {
|
||||
const existingPr = existingPrs[0];
|
||||
// PR already exists - use it and store metadata
|
||||
console.log(
|
||||
`[CreatePR] PR already exists for branch ${branchName}: PR #${existingPr.number}`
|
||||
);
|
||||
logger.info(`PR already exists for branch ${branchName}: PR #${existingPr.number}`);
|
||||
prUrl = existingPr.url;
|
||||
prNumber = existingPr.number;
|
||||
prAlreadyExisted = true;
|
||||
@@ -278,15 +277,15 @@ export function createCreatePRHandler() {
|
||||
state: existingPr.state || 'open',
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
console.log(
|
||||
`[CreatePR] Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`
|
||||
logger.debug(
|
||||
`Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`
|
||||
);
|
||||
} else {
|
||||
console.log(`[CreatePR] No existing PR found for branch ${branchName}`);
|
||||
logger.debug(`No existing PR found for branch ${branchName}`);
|
||||
}
|
||||
} catch (listError) {
|
||||
// gh pr list failed - log but continue to try creating
|
||||
console.log(`[CreatePR] gh pr list failed (this is ok, will try to create):`, listError);
|
||||
logger.debug(`gh pr list failed (this is ok, will try to create):`, listError);
|
||||
}
|
||||
|
||||
// Only create a new PR if one doesn't already exist
|
||||
@@ -307,13 +306,13 @@ export function createCreatePRHandler() {
|
||||
prCmd += ` --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"')}" ${draftFlag}`;
|
||||
prCmd = prCmd.trim();
|
||||
|
||||
console.log(`[CreatePR] Creating PR with command: ${prCmd}`);
|
||||
logger.debug(`Creating PR with command: ${prCmd}`);
|
||||
const { stdout: prOutput } = await execAsync(prCmd, {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
prUrl = prOutput.trim();
|
||||
console.log(`[CreatePR] PR created: ${prUrl}`);
|
||||
logger.info(`PR created: ${prUrl}`);
|
||||
|
||||
// Extract PR number and store metadata for newly created PR
|
||||
if (prUrl) {
|
||||
@@ -329,11 +328,9 @@ export function createCreatePRHandler() {
|
||||
state: draft ? 'draft' : 'open',
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
console.log(
|
||||
`[CreatePR] Stored PR info for branch ${branchName}: PR #${prNumber}`
|
||||
);
|
||||
logger.debug(`Stored PR info for branch ${branchName}: PR #${prNumber}`);
|
||||
} catch (metadataError) {
|
||||
console.error('[CreatePR] Failed to store PR metadata:', metadataError);
|
||||
logger.error('Failed to store PR metadata:', metadataError);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -341,11 +338,11 @@ export function createCreatePRHandler() {
|
||||
// gh CLI failed - check if it's "already exists" error and try to fetch the PR
|
||||
const err = ghError as { stderr?: string; message?: string };
|
||||
const errorMessage = err.stderr || err.message || 'PR creation failed';
|
||||
console.log(`[CreatePR] gh pr create failed: ${errorMessage}`);
|
||||
logger.debug(`gh pr create failed: ${errorMessage}`);
|
||||
|
||||
// If error indicates PR already exists, try to fetch it
|
||||
if (errorMessage.toLowerCase().includes('already exists')) {
|
||||
console.log(`[CreatePR] PR already exists error - trying to fetch existing PR`);
|
||||
logger.debug(`PR already exists error - trying to fetch existing PR`);
|
||||
try {
|
||||
const { stdout: viewOutput } = await execAsync(
|
||||
`gh pr view --json number,title,url,state`,
|
||||
@@ -364,10 +361,10 @@ export function createCreatePRHandler() {
|
||||
state: existingPr.state || 'open',
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
console.log(`[CreatePR] Fetched and stored existing PR: #${existingPr.number}`);
|
||||
logger.debug(`Fetched and stored existing PR: #${existingPr.number}`);
|
||||
}
|
||||
} catch (viewError) {
|
||||
console.error('[CreatePR] Failed to fetch existing PR:', viewError);
|
||||
logger.error('Failed to fetch existing PR:', viewError);
|
||||
prError = errorMessage;
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -20,6 +20,9 @@ import {
|
||||
ensureInitialCommit,
|
||||
} from '../common.js';
|
||||
import { trackBranch } from './branch-tracking.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('Worktree');
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -114,8 +117,8 @@ export function createCreateHandler() {
|
||||
if (existingWorktree) {
|
||||
// Worktree already exists, return it as success (not an error)
|
||||
// This handles manually created worktrees or worktrees from previous runs
|
||||
console.log(
|
||||
`[Worktree] Found existing worktree for branch "${branchName}" at: ${existingWorktree.path}`
|
||||
logger.info(
|
||||
`Found existing worktree for branch "${branchName}" at: ${existingWorktree.path}`
|
||||
);
|
||||
|
||||
// Track the branch so it persists in the UI
|
||||
|
||||
@@ -11,6 +11,9 @@ import {
|
||||
isValidBranchName,
|
||||
isGhCliAvailable,
|
||||
} from '../common.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('PRInfo');
|
||||
|
||||
export interface PRComment {
|
||||
id: number;
|
||||
@@ -174,7 +177,7 @@ export function createPRInfoHandler() {
|
||||
})
|
||||
);
|
||||
} catch (error) {
|
||||
console.warn('[PRInfo] Failed to fetch PR comments:', error);
|
||||
logger.warn('Failed to fetch PR comments:', error);
|
||||
}
|
||||
|
||||
// Get review comments (inline code comments)
|
||||
@@ -209,10 +212,10 @@ export function createPRInfoHandler() {
|
||||
})
|
||||
);
|
||||
} catch (error) {
|
||||
console.warn('[PRInfo] Failed to fetch review comments:', error);
|
||||
logger.warn('Failed to fetch review comments:', error);
|
||||
}
|
||||
} else {
|
||||
console.warn('[PRInfo] Cannot fetch review comments: repository info not available');
|
||||
logger.warn('Cannot fetch review comments: repository info not available');
|
||||
}
|
||||
|
||||
const prInfo: PRInfo = {
|
||||
|
||||
@@ -25,7 +25,10 @@ import {
|
||||
isAbortError,
|
||||
classifyError,
|
||||
loadContextFiles,
|
||||
createLogger,
|
||||
} from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('AutoMode');
|
||||
import { resolveModelString, resolvePhaseModel, DEFAULT_MODELS } from '@automaker/model-resolver';
|
||||
import { resolveDependencies, areDependenciesSatisfied } from '@automaker/dependency-resolver';
|
||||
import { getFeatureDir, getAutomakerDir, getFeaturesDir } from '@automaker/platform';
|
||||
@@ -262,8 +265,8 @@ export class AutoModeService {
|
||||
|
||||
this.pausedDueToFailures = true;
|
||||
const failureCount = this.consecutiveFailures.length;
|
||||
console.log(
|
||||
`[AutoMode] Pausing auto loop after ${failureCount} consecutive failures. Last error: ${errorInfo.type}`
|
||||
logger.info(
|
||||
`Pausing auto loop after ${failureCount} consecutive failures. Last error: ${errorInfo.type}`
|
||||
);
|
||||
|
||||
// Emit event to notify UI
|
||||
@@ -323,7 +326,7 @@ export class AutoModeService {
|
||||
|
||||
// Run the loop in the background
|
||||
this.runAutoLoop().catch((error) => {
|
||||
console.error('[AutoMode] Loop error:', error);
|
||||
logger.error('Loop error:', error);
|
||||
const errorInfo = classifyError(error);
|
||||
this.emitAutoModeEvent('auto_mode_error', {
|
||||
error: errorInfo.message,
|
||||
@@ -368,13 +371,13 @@ export class AutoModeService {
|
||||
this.config!.useWorktrees,
|
||||
true
|
||||
).catch((error) => {
|
||||
console.error(`[AutoMode] Feature ${nextFeature.id} error:`, error);
|
||||
logger.error(`Feature ${nextFeature.id} error:`, error);
|
||||
});
|
||||
}
|
||||
|
||||
await this.sleep(2000);
|
||||
} catch (error) {
|
||||
console.error('[AutoMode] Loop iteration error:', error);
|
||||
logger.error('Loop iteration error:', error);
|
||||
await this.sleep(5000);
|
||||
}
|
||||
}
|
||||
@@ -447,8 +450,8 @@ export class AutoModeService {
|
||||
if (!options?.continuationPrompt) {
|
||||
const hasExistingContext = await this.contextExists(projectPath, featureId);
|
||||
if (hasExistingContext) {
|
||||
console.log(
|
||||
`[AutoMode] Feature ${featureId} has existing context, resuming instead of starting fresh`
|
||||
logger.info(
|
||||
`Feature ${featureId} has existing context, resuming instead of starting fresh`
|
||||
);
|
||||
// Remove from running features temporarily, resumeFeature will add it back
|
||||
this.runningFeatures.delete(featureId);
|
||||
@@ -483,12 +486,10 @@ export class AutoModeService {
|
||||
worktreePath = await this.findExistingWorktreeForBranch(projectPath, branchName);
|
||||
|
||||
if (worktreePath) {
|
||||
console.log(`[AutoMode] Using worktree for branch "${branchName}": ${worktreePath}`);
|
||||
logger.info(`Using worktree for branch "${branchName}": ${worktreePath}`);
|
||||
} else {
|
||||
// Worktree doesn't exist - log warning and continue with project path
|
||||
console.warn(
|
||||
`[AutoMode] Worktree for branch "${branchName}" not found, using project path`
|
||||
);
|
||||
logger.warn(`Worktree for branch "${branchName}" not found, using project path`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -528,7 +529,7 @@ export class AutoModeService {
|
||||
// Continuation prompt is used when recovering from a plan approval
|
||||
// The plan was already approved, so skip the planning phase
|
||||
prompt = options.continuationPrompt;
|
||||
console.log(`[AutoMode] Using continuation prompt for feature ${featureId}`);
|
||||
logger.info(`Using continuation prompt for feature ${featureId}`);
|
||||
} else {
|
||||
// Normal flow: build prompt with planning phase
|
||||
const featurePrompt = this.buildFeaturePrompt(feature);
|
||||
@@ -553,8 +554,8 @@ export class AutoModeService {
|
||||
// Get model from feature and determine provider
|
||||
const model = resolveModelString(feature.model, DEFAULT_MODELS.claude);
|
||||
const provider = ProviderFactory.getProviderNameForModel(model);
|
||||
console.log(
|
||||
`[AutoMode] Executing feature ${featureId} with model: ${model}, provider: ${provider} in ${workDir}`
|
||||
logger.info(
|
||||
`Executing feature ${featureId} with model: ${model}, provider: ${provider} in ${workDir}`
|
||||
);
|
||||
|
||||
// Store model and provider in running feature for tracking
|
||||
@@ -628,7 +629,7 @@ export class AutoModeService {
|
||||
projectPath,
|
||||
});
|
||||
} else {
|
||||
console.error(`[AutoMode] Feature ${featureId} failed:`, error);
|
||||
logger.error(`Feature ${featureId} failed:`, error);
|
||||
await this.updateFeatureStatus(projectPath, featureId, 'backlog');
|
||||
this.emitAutoModeEvent('auto_mode_error', {
|
||||
featureId,
|
||||
@@ -653,9 +654,9 @@ export class AutoModeService {
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
console.log(`[AutoMode] Feature ${featureId} execution ended, cleaning up runningFeatures`);
|
||||
console.log(
|
||||
`[AutoMode] Pending approvals at cleanup: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
logger.info(`Feature ${featureId} execution ended, cleaning up runningFeatures`);
|
||||
logger.info(
|
||||
`Pending approvals at cleanup: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
);
|
||||
this.runningFeatures.delete(featureId);
|
||||
}
|
||||
@@ -673,7 +674,7 @@ export class AutoModeService {
|
||||
abortController: AbortController,
|
||||
autoLoadClaudeMd: boolean
|
||||
): Promise<void> {
|
||||
console.log(`[AutoMode] Executing ${steps.length} pipeline step(s) for feature ${featureId}`);
|
||||
logger.info(`Executing ${steps.length} pipeline step(s) for feature ${featureId}`);
|
||||
|
||||
// Load context files once
|
||||
const contextResult = await loadContextFiles({
|
||||
@@ -756,12 +757,12 @@ export class AutoModeService {
|
||||
projectPath,
|
||||
});
|
||||
|
||||
console.log(
|
||||
`[AutoMode] Pipeline step ${i + 1}/${steps.length} (${step.name}) completed for feature ${featureId}`
|
||||
logger.info(
|
||||
`Pipeline step ${i + 1}/${steps.length} (${step.name}) completed for feature ${featureId}`
|
||||
);
|
||||
}
|
||||
|
||||
console.log(`[AutoMode] All pipeline steps completed for feature ${featureId}`);
|
||||
logger.info(`All pipeline steps completed for feature ${featureId}`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -886,7 +887,7 @@ Complete the pipeline step instructions above. Review the previous work and appl
|
||||
|
||||
if (worktreePath) {
|
||||
workDir = worktreePath;
|
||||
console.log(`[AutoMode] Follow-up using worktree for branch "${branchName}": ${workDir}`);
|
||||
logger.info(`Follow-up using worktree for branch "${branchName}": ${workDir}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -942,9 +943,7 @@ Address the follow-up instructions above. Review the previous work and make the
|
||||
// Get model from feature and determine provider early for tracking
|
||||
const model = resolveModelString(feature?.model, DEFAULT_MODELS.claude);
|
||||
const provider = ProviderFactory.getProviderNameForModel(model);
|
||||
console.log(
|
||||
`[AutoMode] Follow-up for feature ${featureId} using model: ${model}, provider: ${provider}`
|
||||
);
|
||||
logger.info(`Follow-up for feature ${featureId} using model: ${model}, provider: ${provider}`);
|
||||
|
||||
this.runningFeatures.set(featureId, {
|
||||
featureId,
|
||||
@@ -994,7 +993,7 @@ Address the follow-up instructions above. Review the previous work and make the
|
||||
// Store the absolute path (external storage uses absolute paths)
|
||||
copiedImagePaths.push(destPath);
|
||||
} catch (error) {
|
||||
console.error(`[AutoMode] Failed to copy follow-up image ${imagePath}:`, error);
|
||||
logger.error(`Failed to copy follow-up image ${imagePath}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1030,7 +1029,7 @@ Address the follow-up instructions above. Review the previous work and make the
|
||||
try {
|
||||
await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
|
||||
} catch (error) {
|
||||
console.error(`[AutoMode] Failed to save feature.json:`, error);
|
||||
logger.error(`Failed to save feature.json:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1178,10 +1177,10 @@ Address the follow-up instructions above. Review the previous work and make the
|
||||
try {
|
||||
await secureFs.access(providedWorktreePath);
|
||||
workDir = providedWorktreePath;
|
||||
console.log(`[AutoMode] Committing in provided worktree: ${workDir}`);
|
||||
logger.info(`Committing in provided worktree: ${workDir}`);
|
||||
} catch {
|
||||
console.log(
|
||||
`[AutoMode] Provided worktree path doesn't exist: ${providedWorktreePath}, using project path`
|
||||
logger.info(
|
||||
`Provided worktree path doesn't exist: ${providedWorktreePath}, using project path`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
@@ -1190,9 +1189,9 @@ Address the follow-up instructions above. Review the previous work and make the
|
||||
try {
|
||||
await secureFs.access(legacyWorktreePath);
|
||||
workDir = legacyWorktreePath;
|
||||
console.log(`[AutoMode] Committing in legacy worktree: ${workDir}`);
|
||||
logger.info(`Committing in legacy worktree: ${workDir}`);
|
||||
} catch {
|
||||
console.log(`[AutoMode] No worktree found, committing in project path: ${workDir}`);
|
||||
logger.info(`No worktree found, committing in project path: ${workDir}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1232,7 +1231,7 @@ Address the follow-up instructions above. Review the previous work and make the
|
||||
|
||||
return hash.trim();
|
||||
} catch (error) {
|
||||
console.error(`[AutoMode] Commit failed for ${featureId}:`, error);
|
||||
logger.error(`Commit failed for ${featureId}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -1286,7 +1285,7 @@ Format your response as a structured markdown document.`;
|
||||
settings?.phaseModels?.projectAnalysisModel || DEFAULT_PHASE_MODELS.projectAnalysisModel;
|
||||
const { model: analysisModel, thinkingLevel: analysisThinkingLevel } =
|
||||
resolvePhaseModel(phaseModelEntry);
|
||||
console.log('[AutoMode] Using model for project analysis:', analysisModel);
|
||||
logger.info('Using model for project analysis:', analysisModel);
|
||||
|
||||
const provider = ProviderFactory.getProviderForModel(analysisModel);
|
||||
|
||||
@@ -1432,9 +1431,9 @@ Format your response as a structured markdown document.`;
|
||||
featureId: string,
|
||||
projectPath: string
|
||||
): Promise<{ approved: boolean; editedPlan?: string; feedback?: string }> {
|
||||
console.log(`[AutoMode] Registering pending approval for feature ${featureId}`);
|
||||
console.log(
|
||||
`[AutoMode] Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
logger.info(`Registering pending approval for feature ${featureId}`);
|
||||
logger.info(
|
||||
`Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
);
|
||||
return new Promise((resolve, reject) => {
|
||||
this.pendingApprovals.set(featureId, {
|
||||
@@ -1443,7 +1442,7 @@ Format your response as a structured markdown document.`;
|
||||
featureId,
|
||||
projectPath,
|
||||
});
|
||||
console.log(`[AutoMode] Pending approval registered for feature ${featureId}`);
|
||||
logger.info(`Pending approval registered for feature ${featureId}`);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1458,27 +1457,23 @@ Format your response as a structured markdown document.`;
|
||||
feedback?: string,
|
||||
projectPathFromClient?: string
|
||||
): Promise<{ success: boolean; error?: string }> {
|
||||
console.log(
|
||||
`[AutoMode] resolvePlanApproval called for feature ${featureId}, approved=${approved}`
|
||||
);
|
||||
console.log(
|
||||
`[AutoMode] Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
logger.info(`resolvePlanApproval called for feature ${featureId}, approved=${approved}`);
|
||||
logger.info(
|
||||
`Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
);
|
||||
const pending = this.pendingApprovals.get(featureId);
|
||||
|
||||
if (!pending) {
|
||||
console.log(`[AutoMode] No pending approval in Map for feature ${featureId}`);
|
||||
logger.info(`No pending approval in Map for feature ${featureId}`);
|
||||
|
||||
// RECOVERY: If no pending approval but we have projectPath from client,
|
||||
// check if feature's planSpec.status is 'generated' and handle recovery
|
||||
if (projectPathFromClient) {
|
||||
console.log(`[AutoMode] Attempting recovery with projectPath: ${projectPathFromClient}`);
|
||||
logger.info(`Attempting recovery with projectPath: ${projectPathFromClient}`);
|
||||
const feature = await this.loadFeature(projectPathFromClient, featureId);
|
||||
|
||||
if (feature?.planSpec?.status === 'generated') {
|
||||
console.log(
|
||||
`[AutoMode] Feature ${featureId} has planSpec.status='generated', performing recovery`
|
||||
);
|
||||
logger.info(`Feature ${featureId} has planSpec.status='generated', performing recovery`);
|
||||
|
||||
if (approved) {
|
||||
// Update planSpec to approved
|
||||
@@ -1497,17 +1492,14 @@ Format your response as a structured markdown document.`;
|
||||
}
|
||||
continuationPrompt += `Now proceed with the implementation as specified in the plan:\n\n${planContent}\n\nImplement the feature now.`;
|
||||
|
||||
console.log(`[AutoMode] Starting recovery execution for feature ${featureId}`);
|
||||
logger.info(`Starting recovery execution for feature ${featureId}`);
|
||||
|
||||
// Start feature execution with the continuation prompt (async, don't await)
|
||||
// Pass undefined for providedWorktreePath, use options for continuation prompt
|
||||
this.executeFeature(projectPathFromClient, featureId, true, false, undefined, {
|
||||
continuationPrompt,
|
||||
}).catch((error) => {
|
||||
console.error(
|
||||
`[AutoMode] Recovery execution failed for feature ${featureId}:`,
|
||||
error
|
||||
);
|
||||
logger.error(`Recovery execution failed for feature ${featureId}:`, error);
|
||||
});
|
||||
|
||||
return { success: true };
|
||||
@@ -1531,15 +1523,15 @@ Format your response as a structured markdown document.`;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
`[AutoMode] ERROR: No pending approval found for feature ${featureId} and recovery not possible`
|
||||
logger.info(
|
||||
`ERROR: No pending approval found for feature ${featureId} and recovery not possible`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
error: `No pending approval for feature ${featureId}`,
|
||||
};
|
||||
}
|
||||
console.log(`[AutoMode] Found pending approval for feature ${featureId}, proceeding...`);
|
||||
logger.info(`Found pending approval for feature ${featureId}, proceeding...`);
|
||||
|
||||
const { projectPath } = pending;
|
||||
|
||||
@@ -1572,17 +1564,17 @@ Format your response as a structured markdown document.`;
|
||||
* Cancel a pending plan approval (e.g., when feature is stopped).
|
||||
*/
|
||||
cancelPlanApproval(featureId: string): void {
|
||||
console.log(`[AutoMode] cancelPlanApproval called for feature ${featureId}`);
|
||||
console.log(
|
||||
`[AutoMode] Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
logger.info(`cancelPlanApproval called for feature ${featureId}`);
|
||||
logger.info(
|
||||
`Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
|
||||
);
|
||||
const pending = this.pendingApprovals.get(featureId);
|
||||
if (pending) {
|
||||
console.log(`[AutoMode] Found and cancelling pending approval for feature ${featureId}`);
|
||||
logger.info(`Found and cancelling pending approval for feature ${featureId}`);
|
||||
pending.reject(new Error('Plan approval cancelled - feature was stopped'));
|
||||
this.pendingApprovals.delete(featureId);
|
||||
} else {
|
||||
console.log(`[AutoMode] No pending approval to cancel for feature ${featureId}`);
|
||||
logger.info(`No pending approval to cancel for feature ${featureId}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1722,7 +1714,7 @@ Format your response as a structured markdown document.`;
|
||||
feature.updatedAt = new Date().toISOString();
|
||||
await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
|
||||
} catch (error) {
|
||||
console.error(`[AutoMode] Failed to update planSpec for ${featureId}:`, error);
|
||||
logger.error(`Failed to update planSpec for ${featureId}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1980,7 +1972,7 @@ This helps parse your summary correctly in the output logs.`;
|
||||
// CI/CD Mock Mode: Return early with mock response when AUTOMAKER_MOCK_AGENT is set
|
||||
// This prevents actual API calls during automated testing
|
||||
if (process.env.AUTOMAKER_MOCK_AGENT === 'true') {
|
||||
console.log(`[AutoMode] MOCK MODE: Skipping real agent execution for feature ${featureId}`);
|
||||
logger.info(`MOCK MODE: Skipping real agent execution for feature ${featureId}`);
|
||||
|
||||
// Simulate some work being done
|
||||
await this.sleep(500);
|
||||
@@ -2030,7 +2022,7 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
|
||||
await secureFs.mkdir(path.dirname(outputPath), { recursive: true });
|
||||
await secureFs.writeFile(outputPath, mockOutput);
|
||||
|
||||
console.log(`[AutoMode] MOCK MODE: Completed mock execution for feature ${featureId}`);
|
||||
logger.info(`MOCK MODE: Completed mock execution for feature ${featureId}`);
|
||||
return;
|
||||
}
|
||||
|
@@ -2068,14 +2060,14 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
const maxTurns = sdkOptions.maxTurns;
const allowedTools = sdkOptions.allowedTools as string[] | undefined;

console.log(
`[AutoMode] runAgent called for feature ${featureId} with model: ${finalModel}, planningMode: ${planningMode}, requiresApproval: ${requiresApproval}`
logger.info(
`runAgent called for feature ${featureId} with model: ${finalModel}, planningMode: ${planningMode}, requiresApproval: ${requiresApproval}`
);

// Get provider for this model
const provider = ProviderFactory.getProviderForModel(finalModel);

console.log(`[AutoMode] Using provider "${provider.getName()}" for model "${finalModel}"`);
logger.info(`Using provider "${provider.getName()}" for model "${finalModel}"`);

// Build prompt content with images using utility
const { content: promptContent } = await buildPromptWithImages(
@@ -2087,8 +2079,8 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.

// Debug: Log if system prompt is provided
if (options?.systemPrompt) {
console.log(
`[AutoMode] System prompt provided (${options.systemPrompt.length} chars), first 200 chars:\n${options.systemPrompt.substring(0, 200)}...`
logger.info(
`System prompt provided (${options.systemPrompt.length} chars), first 200 chars:\n${options.systemPrompt.substring(0, 200)}...`
);
}

@@ -2109,9 +2101,9 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
};

// Execute via provider
console.log(`[AutoMode] Starting stream for feature ${featureId}...`);
logger.info(`Starting stream for feature ${featureId}...`);
const stream = provider.executeQuery(executeOptions);
console.log(`[AutoMode] Stream created, starting to iterate...`);
logger.info(`Stream created, starting to iterate...`);
// Initialize with previous content if this is a follow-up, with a separator
let responseText = previousContent
? `${previousContent}\n\n---\n\n## Follow-up Session\n\n`
@@ -2157,7 +2149,7 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
await secureFs.appendFile(rawOutputPath, rawOutputLines.join('\n') + '\n');
rawOutputLines = []; // Clear after writing
} catch (error) {
console.error(`[AutoMode] Failed to write raw output for ${featureId}:`, error);
logger.error(`Failed to write raw output for ${featureId}:`, error);
}
}, WRITE_DEBOUNCE_MS);
} catch {
@@ -2172,7 +2164,7 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
await secureFs.writeFile(outputPath, responseText);
} catch (error) {
// Log but don't crash - file write errors shouldn't stop execution
console.error(`[AutoMode] Failed to write agent output for ${featureId}:`, error);
logger.error(`Failed to write agent output for ${featureId}:`, error);
}
};

@@ -2190,7 +2182,7 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
// Log raw stream event for debugging
appendRawEvent(msg);

console.log(`[AutoMode] Stream message received:`, msg.type, msg.subtype || '');
logger.info(`Stream message received:`, msg.type, msg.subtype || '');
if (msg.type === 'assistant' && msg.message?.content) {
for (const block of msg.message.content) {
if (block.type === 'text') {
@@ -2255,11 +2247,9 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
let parsedTasks = parseTasksFromSpec(planContent);
const tasksTotal = parsedTasks.length;

console.log(
`[AutoMode] Parsed ${tasksTotal} tasks from spec for feature ${featureId}`
);
logger.info(`Parsed ${tasksTotal} tasks from spec for feature ${featureId}`);
if (parsedTasks.length > 0) {
console.log(`[AutoMode] Tasks: ${parsedTasks.map((t) => t.id).join(', ')}`);
logger.info(`Tasks: ${parsedTasks.map((t) => t.id).join(', ')}`);
}

// Update planSpec status to 'generated' and save content with parsed tasks
@@ -2288,8 +2278,8 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
let planApproved = false;

while (!planApproved) {
console.log(
`[AutoMode] Spec v${planVersion} generated for feature ${featureId}, waiting for approval`
logger.info(
`Spec v${planVersion} generated for feature ${featureId}, waiting for approval`
);

// CRITICAL: Register pending approval BEFORE emitting event
@@ -2310,9 +2300,7 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.

if (approvalResult.approved) {
// User approved the plan
console.log(
`[AutoMode] Plan v${planVersion} approved for feature ${featureId}`
);
logger.info(`Plan v${planVersion} approved for feature ${featureId}`);
planApproved = true;

// If user provided edits, use the edited version
@@ -2344,15 +2332,15 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.

if (!hasFeedback && !hasEdits) {
// No feedback or edits = explicit cancel
console.log(
`[AutoMode] Plan rejected without feedback for feature ${featureId}, cancelling`
logger.info(
`Plan rejected without feedback for feature ${featureId}, cancelling`
);
throw new Error('Plan cancelled by user');
}

// User wants revisions - regenerate the plan
console.log(
`[AutoMode] Plan v${planVersion} rejected with feedback for feature ${featureId}, regenerating...`
logger.info(
`Plan v${planVersion} rejected with feedback for feature ${featureId}, regenerating...`
);
planVersion++;

@@ -2429,7 +2417,7 @@ After generating the revised spec, output:

// Re-parse tasks from revised plan
const revisedTasks = parseTasksFromSpec(currentPlanContent);
console.log(`[AutoMode] Revised plan has ${revisedTasks.length} tasks`);
logger.info(`Revised plan has ${revisedTasks.length} tasks`);

// Update planSpec with revised content
await this.updateFeaturePlanSpec(projectPath, featureId, {
@@ -2455,8 +2443,8 @@ After generating the revised spec, output:
}
} else {
// Auto-approve: requirePlanApproval is false, just continue without pausing
console.log(
`[AutoMode] Spec generated for feature ${featureId}, auto-approving (requirePlanApproval=false)`
logger.info(
`Spec generated for feature ${featureId}, auto-approving (requirePlanApproval=false)`
);

// Emit info event for frontend
@@ -2472,9 +2460,7 @@ After generating the revised spec, output:

// CRITICAL: After approval, we need to make a second call to continue implementation
// The agent is waiting for "approved" - we need to send it and continue
console.log(
`[AutoMode] Making continuation call after plan approval for feature ${featureId}`
);
logger.info(`Making continuation call after plan approval for feature ${featureId}`);

// Update planSpec status to approved (handles both manual and auto-approval paths)
await this.updateFeaturePlanSpec(projectPath, featureId, {
@@ -2489,8 +2475,8 @@ After generating the revised spec, output:
// ========================================

if (parsedTasks.length > 0) {
console.log(
`[AutoMode] Starting multi-agent execution: ${parsedTasks.length} tasks for feature ${featureId}`
logger.info(
`Starting multi-agent execution: ${parsedTasks.length} tasks for feature ${featureId}`
);

// Execute each task with a separate agent
@@ -2503,7 +2489,7 @@ After generating the revised spec, output:
}

// Emit task started
console.log(`[AutoMode] Starting task ${task.id}: ${task.description}`);
logger.info(`Starting task ${task.id}: ${task.description}`);
this.emitAutoModeEvent('auto_mode_task_started', {
featureId,
projectPath,
@@ -2570,7 +2556,7 @@ After generating the revised spec, output:
}

// Emit task completed
console.log(`[AutoMode] Task ${task.id} completed for feature ${featureId}`);
logger.info(`Task ${task.id} completed for feature ${featureId}`);
this.emitAutoModeEvent('auto_mode_task_complete', {
featureId,
projectPath,
@@ -2601,13 +2587,11 @@ After generating the revised spec, output:
}
}

console.log(
`[AutoMode] All ${parsedTasks.length} tasks completed for feature ${featureId}`
);
logger.info(`All ${parsedTasks.length} tasks completed for feature ${featureId}`);
} else {
// No parsed tasks - fall back to single-agent execution
console.log(
`[AutoMode] No parsed tasks, using single-agent execution for feature ${featureId}`
logger.info(
`No parsed tasks, using single-agent execution for feature ${featureId}`
);

const continuationPrompt = `The plan/specification has been approved. Now implement it.
@@ -2657,15 +2641,15 @@ Implement all the changes described in the plan above.`;
}
}

console.log(`[AutoMode] Implementation completed for feature ${featureId}`);
logger.info(`Implementation completed for feature ${featureId}`);
// Exit the original stream loop since continuation is done
break streamLoop;
}

// Only emit progress for non-marker text (marker was already handled above)
if (!specDetected) {
console.log(
`[AutoMode] Emitting progress event for ${featureId}, content length: ${block.text?.length || 0}`
logger.info(
`Emitting progress event for ${featureId}, content length: ${block.text?.length || 0}`
);
this.emitAutoModeEvent('auto_mode_progress', {
featureId,
@@ -2719,7 +2703,7 @@ Implement all the changes described in the plan above.`;
await secureFs.mkdir(path.dirname(rawOutputPath), { recursive: true });
await secureFs.appendFile(rawOutputPath, rawOutputLines.join('\n') + '\n');
} catch (error) {
console.error(`[AutoMode] Failed to write final raw output for ${featureId}:`, error);
logger.error(`Failed to write final raw output for ${featureId}:`, error);
}
}
}

@@ -11,6 +11,9 @@ import { spawn, execSync, type ChildProcess } from 'child_process';
import * as secureFs from '../lib/secure-fs.js';
import path from 'path';
import net from 'net';
import { createLogger } from '@automaker/utils';

const logger = createLogger('DevServerService');

export interface DevServerInfo {
worktreePath: string;
@@ -69,7 +72,7 @@ class DevServerService {
for (const pid of pids) {
try {
execSync(`taskkill /F /PID ${pid}`, { stdio: 'ignore' });
console.log(`[DevServerService] Killed process ${pid} on port ${port}`);
logger.debug(`Killed process ${pid} on port ${port}`);
} catch {
// Process may have already exited
}
@@ -82,7 +85,7 @@ class DevServerService {
for (const pid of pids) {
try {
execSync(`kill -9 ${pid}`, { stdio: 'ignore' });
console.log(`[DevServerService] Killed process ${pid} on port ${port}`);
logger.debug(`Killed process ${pid} on port ${port}`);
} catch {
// Process may have already exited
}
@@ -93,7 +96,7 @@ class DevServerService {
}
} catch (error) {
// Ignore errors - port might not have any process
console.log(`[DevServerService] No process to kill on port ${port}`);
logger.debug(`No process to kill on port ${port}`);
}
}

@@ -251,11 +254,9 @@ class DevServerService {
// Small delay to ensure related ports are freed
await new Promise((resolve) => setTimeout(resolve, 100));

console.log(`[DevServerService] Starting dev server on port ${port}`);
console.log(`[DevServerService] Working directory (cwd): ${worktreePath}`);
console.log(
`[DevServerService] Command: ${devCommand.cmd} ${devCommand.args.join(' ')} with PORT=${port}`
);
logger.info(`Starting dev server on port ${port}`);
logger.debug(`Working directory (cwd): ${worktreePath}`);
logger.debug(`Command: ${devCommand.cmd} ${devCommand.args.join(' ')} with PORT=${port}`);

// Spawn the dev process with PORT environment variable
const env = {
@@ -276,26 +277,26 @@ class DevServerService {
// Log output for debugging
if (devProcess.stdout) {
devProcess.stdout.on('data', (data: Buffer) => {
console.log(`[DevServer:${port}] ${data.toString().trim()}`);
logger.debug(`[Port${port}] ${data.toString().trim()}`);
});
}

if (devProcess.stderr) {
devProcess.stderr.on('data', (data: Buffer) => {
const msg = data.toString().trim();
console.error(`[DevServer:${port}] ${msg}`);
logger.debug(`[Port${port}] ${msg}`);
});
}

devProcess.on('error', (error) => {
console.error(`[DevServerService] Process error:`, error);
logger.error(`Process error:`, error);
status.error = error.message;
this.allocatedPorts.delete(port);
this.runningServers.delete(worktreePath);
});

devProcess.on('exit', (code) => {
console.log(`[DevServerService] Process for ${worktreePath} exited with code ${code}`);
logger.info(`Process for ${worktreePath} exited with code ${code}`);
status.exited = true;
this.allocatedPorts.delete(port);
this.runningServers.delete(worktreePath);

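In the spawn hunk above, raw dev-server stdout/stderr moves from console.log/console.error to logger.debug so it can be filtered out of normal logs, while lifecycle events (spawn errors, exit codes) stay at info/error. A condensed sketch of that wiring, reusing the assumed createLogger API from earlier; the `[Port${port}]` tag comes from the diff, the helper itself is illustrative:

import { spawn } from 'child_process';
import { createLogger } from '@automaker/utils';

const logger = createLogger('DevServerService');

// Illustrative helper; the real service resolves the command, args and env elsewhere.
function spawnDevServer(cmd: string, args: string[], port: number, cwd: string) {
  const child = spawn(cmd, args, { cwd, env: { ...process.env, PORT: String(port) } });

  // Raw tool output is noisy, so it is demoted to debug and tagged with the port.
  child.stdout?.on('data', (data: Buffer) => logger.debug(`[Port${port}] ${data.toString().trim()}`));
  child.stderr?.on('data', (data: Buffer) => logger.debug(`[Port${port}] ${data.toString().trim()}`));

  // Lifecycle events remain at info/error because they matter operationally.
  child.on('error', (error) => logger.error('Process error:', error));
  child.on('exit', (code) => logger.info(`Process exited with code ${code}`));

  return child;
}
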
@@ -352,9 +353,7 @@ class DevServerService {
// If we don't have a record of this server, it may have crashed/exited on its own
// Return success so the frontend can clear its state
if (!server) {
console.log(
`[DevServerService] No server record for ${worktreePath}, may have already stopped`
);
logger.debug(`No server record for ${worktreePath}, may have already stopped`);
return {
success: true,
result: {
@@ -364,7 +363,7 @@ class DevServerService {
};
}

console.log(`[DevServerService] Stopping dev server for ${worktreePath}`);
logger.info(`Stopping dev server for ${worktreePath}`);

// Kill the process
if (server.process && !server.process.killed) {
@@ -434,7 +433,7 @@ class DevServerService {
* Stop all running dev servers (for cleanup)
*/
async stopAll(): Promise<void> {
console.log(`[DevServerService] Stopping all ${this.runningServers.size} dev servers`);
logger.info(`Stopping all ${this.runningServers.size} dev servers`);

for (const [worktreePath] of this.runningServers) {
await this.stopDevServer(worktreePath);

@@ -56,10 +56,10 @@ export class FeatureLoader {
try {
// Paths are now absolute
await secureFs.unlink(oldPath);
console.log(`[FeatureLoader] Deleted orphaned image: ${oldPath}`);
logger.info(`Deleted orphaned image: ${oldPath}`);
} catch (error) {
// Ignore errors when deleting (file may already be gone)
logger.warn(`[FeatureLoader] Failed to delete image: ${oldPath}`, error);
logger.warn(`Failed to delete image: ${oldPath}`, error);
}
}
}
@@ -101,7 +101,7 @@ export class FeatureLoader {
try {
await secureFs.access(fullOriginalPath);
} catch {
logger.warn(`[FeatureLoader] Image not found, skipping: ${fullOriginalPath}`);
logger.warn(`Image not found, skipping: ${fullOriginalPath}`);
continue;
}

@@ -111,7 +111,7 @@ export class FeatureLoader {

// Copy the file
await secureFs.copyFile(fullOriginalPath, newPath);
console.log(`[FeatureLoader] Copied image: ${originalPath} -> ${newPath}`);
logger.info(`Copied image: ${originalPath} -> ${newPath}`);

// Try to delete the original temp file
try {
@@ -202,9 +202,7 @@ export class FeatureLoader {
const feature = JSON.parse(content);

if (!feature.id) {
logger.warn(
`[FeatureLoader] Feature ${featureId} missing required 'id' field, skipping`
);
logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
return null;
}

@@ -213,14 +211,9 @@ export class FeatureLoader {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
} else if (error instanceof SyntaxError) {
logger.warn(
`[FeatureLoader] Failed to parse feature.json for ${featureId}: ${error.message}`
);
logger.warn(`Failed to parse feature.json for ${featureId}: ${error.message}`);
} else {
logger.error(
`[FeatureLoader] Failed to load feature ${featureId}:`,
(error as Error).message
);
logger.error(`Failed to load feature ${featureId}:`, (error as Error).message);
}
return null;
}
@@ -255,7 +248,7 @@ export class FeatureLoader {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
}
logger.error(`[FeatureLoader] Failed to get feature ${featureId}:`, error);
logger.error(`Failed to get feature ${featureId}:`, error);
throw error;
}
}
@@ -342,10 +335,10 @@ export class FeatureLoader {
try {
const featureDir = this.getFeatureDir(projectPath, featureId);
await secureFs.rm(featureDir, { recursive: true, force: true });
console.log(`[FeatureLoader] Deleted feature ${featureId}`);
logger.info(`Deleted feature ${featureId}`);
return true;
} catch (error) {
logger.error(`[FeatureLoader] Failed to delete feature ${featureId}:`, error);
logger.error(`Failed to delete feature ${featureId}:`, error);
return false;
}
}
@@ -362,7 +355,7 @@ export class FeatureLoader {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
}
logger.error(`[FeatureLoader] Failed to get agent output for ${featureId}:`, error);
logger.error(`Failed to get agent output for ${featureId}:`, error);
throw error;
}
}
@@ -379,7 +372,7 @@ export class FeatureLoader {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
}
logger.error(`[FeatureLoader] Failed to get raw output for ${featureId}:`, error);
logger.error(`Failed to get raw output for ${featureId}:`, error);
throw error;
}
}

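A pattern worth noting in the FeatureLoader hunks: several call sites were already on `logger.*` but still hard-coded a `[FeatureLoader]` prefix in the message. If createLogger prepends its context tag the way the earlier sketch assumes, those messages would have come out double-prefixed; the diff simply drops the redundant prefix. Roughly (placeholder value used for illustration):

import { createLogger } from '@automaker/utils';

const logger = createLogger('FeatureLoader');
const fullOriginalPath = '/tmp/example.png'; // placeholder value for illustration

// Before: prefix baked into the message (likely rendered as "[FeatureLoader] [FeatureLoader] ...")
logger.warn(`[FeatureLoader] Image not found, skipping: ${fullOriginalPath}`);

// After: the context tag comes from the logger itself
logger.warn(`Image not found, skipping: ${fullOriginalPath}`);
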
@@ -12,6 +12,9 @@ import * as path from 'path';
// secureFs is used for user-controllable paths (working directory validation)
// to enforce ALLOWED_ROOT_DIRECTORY security boundary
import * as secureFs from '../lib/secure-fs.js';
import { createLogger } from '@automaker/utils';

const logger = createLogger('Terminal');
// System paths module handles shell binary checks and WSL detection
// These are system paths outside ALLOWED_ROOT_DIRECTORY, centralized for security auditing
import {
@@ -219,7 +222,7 @@ export class TerminalService extends EventEmitter {

// Reject paths with null bytes (could bypass path checks)
if (cwd.includes('\0')) {
console.warn(`[Terminal] Rejecting path with null byte: ${cwd.replace(/\0/g, '\\0')}`);
logger.warn(`Rejecting path with null byte: ${cwd.replace(/\0/g, '\\0')}`);
return homeDir;
}

@@ -242,12 +245,10 @@ export class TerminalService extends EventEmitter {
if (statResult.isDirectory()) {
return cwd;
}
console.warn(`[Terminal] Path exists but is not a directory: ${cwd}, falling back to home`);
logger.warn(`Path exists but is not a directory: ${cwd}, falling back to home`);
return homeDir;
} catch {
console.warn(
`[Terminal] Working directory does not exist or not allowed: ${cwd}, falling back to home`
);
logger.warn(`Working directory does not exist or not allowed: ${cwd}, falling back to home`);
return homeDir;
}
}
@@ -272,7 +273,7 @@ export class TerminalService extends EventEmitter {
setMaxSessions(limit: number): void {
if (limit >= MIN_MAX_SESSIONS && limit <= MAX_MAX_SESSIONS) {
maxSessions = limit;
console.log(`[Terminal] Max sessions limit updated to ${limit}`);
logger.info(`Max sessions limit updated to ${limit}`);
}
}

@@ -283,7 +284,7 @@ export class TerminalService extends EventEmitter {
async createSession(options: TerminalOptions = {}): Promise<TerminalSession | null> {
// Check session limit
if (this.sessions.size >= maxSessions) {
console.error(`[Terminal] Max sessions (${maxSessions}) reached, refusing new session`);
logger.error(`Max sessions (${maxSessions}) reached, refusing new session`);
return null;
}

@@ -319,7 +320,7 @@ export class TerminalService extends EventEmitter {
...options.env,
};

console.log(`[Terminal] Creating session ${id} with shell: ${shell} in ${cwd}`);
logger.info(`Creating session ${id} with shell: ${shell} in ${cwd}`);

const ptyProcess = pty.spawn(shell, shellArgs, {
name: 'xterm-256color',
@@ -391,13 +392,13 @@ export class TerminalService extends EventEmitter {

// Handle exit
ptyProcess.onExit(({ exitCode }) => {
console.log(`[Terminal] Session ${id} exited with code ${exitCode}`);
logger.info(`Session ${id} exited with code ${exitCode}`);
this.sessions.delete(id);
this.exitCallbacks.forEach((cb) => cb(id, exitCode));
this.emit('exit', id, exitCode);
});

console.log(`[Terminal] Session ${id} created successfully`);
logger.info(`Session ${id} created successfully`);
return session;
}

@@ -407,7 +408,7 @@ export class TerminalService extends EventEmitter {
write(sessionId: string, data: string): boolean {
const session = this.sessions.get(sessionId);
if (!session) {
console.warn(`[Terminal] Session ${sessionId} not found`);
logger.warn(`Session ${sessionId} not found`);
return false;
}
session.pty.write(data);
@@ -422,7 +423,7 @@ export class TerminalService extends EventEmitter {
resize(sessionId: string, cols: number, rows: number, suppressOutput: boolean = true): boolean {
const session = this.sessions.get(sessionId);
if (!session) {
console.warn(`[Terminal] Session ${sessionId} not found for resize`);
logger.warn(`Session ${sessionId} not found for resize`);
return false;
}
try {
@@ -448,7 +449,7 @@ export class TerminalService extends EventEmitter {

return true;
} catch (error) {
console.error(`[Terminal] Error resizing session ${sessionId}:`, error);
logger.error(`Error resizing session ${sessionId}:`, error);
session.resizeInProgress = false; // Clear flag on error
return false;
}
@@ -476,14 +477,14 @@ export class TerminalService extends EventEmitter {
}

// First try graceful SIGTERM to allow process cleanup
console.log(`[Terminal] Session ${sessionId} sending SIGTERM`);
logger.info(`Session ${sessionId} sending SIGTERM`);
session.pty.kill('SIGTERM');

// Schedule SIGKILL fallback if process doesn't exit gracefully
// The onExit handler will remove session from map when it actually exits
setTimeout(() => {
if (this.sessions.has(sessionId)) {
console.log(`[Terminal] Session ${sessionId} still alive after SIGTERM, sending SIGKILL`);
logger.info(`Session ${sessionId} still alive after SIGTERM, sending SIGKILL`);
try {
session.pty.kill('SIGKILL');
} catch {
@@ -494,10 +495,10 @@ export class TerminalService extends EventEmitter {
}
}, 1000);

console.log(`[Terminal] Session ${sessionId} kill initiated`);
logger.info(`Session ${sessionId} kill initiated`);
return true;
} catch (error) {
console.error(`[Terminal] Error killing session ${sessionId}:`, error);
logger.error(`Error killing session ${sessionId}:`, error);
// Still try to remove from map even if kill fails
this.sessions.delete(sessionId);
return false;
@@ -580,7 +581,7 @@ export class TerminalService extends EventEmitter {
* Clean up all sessions
*/
cleanup(): void {
console.log(`[Terminal] Cleaning up ${this.sessions.size} sessions`);
logger.info(`Cleaning up ${this.sessions.size} sessions`);
this.sessions.forEach((session, id) => {
try {
// Clean up flush timeout

@@ -247,10 +247,12 @@ describe('claude-provider.ts', () => {

await expect(collectAsyncGenerator(generator)).rejects.toThrow('SDK execution failed');

// Should log error with classification info (after refactoring)
// Should log error with classification info (via logger)
// Logger format: [Context] message, data
const errorCall = consoleErrorSpy.mock.calls[0];
expect(errorCall[0]).toBe('[ClaudeProvider] executeQuery() error during execution:');
expect(errorCall[1]).toMatchObject({
expect(errorCall[0]).toBe('[ClaudeProvider]');
expect(errorCall[1]).toBe('executeQuery() error during execution:');
expect(errorCall[2]).toMatchObject({
type: expect.any(String),
message: 'SDK execution failed',
isRateLimit: false,

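The test update above pins down the logger's observable behaviour: the context tag, the message, and the structured error data now reach console.error as three separate arguments rather than one concatenated string. If createLogger forwards to console.error in that shape (as the earlier sketch assumes), a spy-based check would look roughly like the following; this is a vitest-style illustration, and the project's actual test file and runner may differ:

import { describe, expect, it, vi } from 'vitest';
import { createLogger } from '@automaker/utils';

describe('logger call shape (illustrative)', () => {
  it('passes context, message and data as separate console.error arguments', () => {
    const spy = vi.spyOn(console, 'error').mockImplementation(() => {});
    const logger = createLogger('ClaudeProvider');

    // Payload fields mirror the assertion in the claude-provider test above.
    logger.error('executeQuery() error during execution:', { message: 'SDK execution failed' });

    expect(spy.mock.calls[0][0]).toBe('[ClaudeProvider]');
    expect(spy.mock.calls[0][1]).toBe('executeQuery() error during execution:');
    expect(spy.mock.calls[0][2]).toMatchObject({ message: 'SDK execution failed' });

    spy.mockRestore();
  });
});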