Merge branch 'main' into feat/claude-usage-clean

Mohamad Yahia
2025-12-21 11:15:41 +04:00
committed by GitHub
63 changed files with 4984 additions and 3517 deletions

View File

@@ -9,45 +9,59 @@
* - Chat: Full tool access for interactive coding
*
* Uses model-resolver for consistent model handling across the application.
*
* SECURITY: All factory functions validate the working directory (cwd) against
* ALLOWED_ROOT_DIRECTORY before returning options. This provides a centralized
* security check that applies to ALL AI model invocations, regardless of provider.
*/
import type { Options } from "@anthropic-ai/claude-agent-sdk";
import { resolveModelString } from "@automaker/model-resolver";
import { DEFAULT_MODELS, CLAUDE_MODEL_MAP } from "@automaker/types";
import type { Options } from '@anthropic-ai/claude-agent-sdk';
import path from 'path';
import { resolveModelString } from '@automaker/model-resolver';
import { DEFAULT_MODELS, CLAUDE_MODEL_MAP } from '@automaker/types';
import { isPathAllowed, PathNotAllowedError, getAllowedRootDirectory } from '@automaker/platform';
/**
* Validate that a working directory is allowed by ALLOWED_ROOT_DIRECTORY.
* This is the centralized security check for ALL AI model invocations.
*
* @param cwd - The working directory to validate
* @throws PathNotAllowedError if the directory is not within ALLOWED_ROOT_DIRECTORY
*
* This function is called by all create*Options() factory functions to ensure
* that AI models can only operate within allowed directories. This applies to:
* - All models (Claude today, and any future providers)
* - All invocation types (chat, auto-mode, spec generation, etc.)
*/
export function validateWorkingDirectory(cwd: string): void {
const resolvedCwd = path.resolve(cwd);
if (!isPathAllowed(resolvedCwd)) {
const allowedRoot = getAllowedRootDirectory();
throw new PathNotAllowedError(
`Working directory "${cwd}" (resolved: ${resolvedCwd}) is not allowed. ` +
(allowedRoot
? `Must be within ALLOWED_ROOT_DIRECTORY: ${allowedRoot}`
: 'Path is not within the allowed directories.')
);
}
}
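
Because every create*Options() factory below routes through this guard, callers can treat PathNotAllowedError as the single rejection signal. A minimal usage sketch, assuming cwd is the only required field of CreateSdkOptionsConfig, with an invented path (the relative import path mirrors the ones used elsewhere in this diff):

import { PathNotAllowedError } from '@automaker/platform';
import { createChatOptions } from '../../lib/sdk-options.js';

try {
  // Hypothetical cwd outside ALLOWED_ROOT_DIRECTORY: the factory throws
  // before any SDK call is made.
  const options = createChatOptions({ cwd: '/tmp/not-inside-allowed-root' });
  // ...pass `options` to query({ prompt, options }) as usual
} catch (error) {
  if (error instanceof PathNotAllowedError) {
    console.error('Rejected working directory:', error.message);
  } else {
    throw error;
  }
}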
/**
* Tool presets for different use cases
*/
export const TOOL_PRESETS = {
/** Read-only tools for analysis */
readOnly: ["Read", "Glob", "Grep"] as const,
readOnly: ['Read', 'Glob', 'Grep'] as const,
/** Tools for spec generation that needs to read the codebase */
specGeneration: ["Read", "Glob", "Grep"] as const,
specGeneration: ['Read', 'Glob', 'Grep'] as const,
/** Full tool access for feature implementation */
fullAccess: [
"Read",
"Write",
"Edit",
"Glob",
"Grep",
"Bash",
"WebSearch",
"WebFetch",
] as const,
fullAccess: ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'Bash', 'WebSearch', 'WebFetch'] as const,
/** Tools for chat/interactive mode */
chat: [
"Read",
"Write",
"Edit",
"Glob",
"Grep",
"Bash",
"WebSearch",
"WebFetch",
] as const,
chat: ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'Bash', 'WebSearch', 'WebFetch'] as const,
} as const;
/**
@@ -78,7 +92,7 @@ export const MAX_TURNS = {
* - AUTOMAKER_MODEL_DEFAULT: Fallback model for all operations
*/
export function getModelForUseCase(
useCase: "spec" | "features" | "suggestions" | "chat" | "auto" | "default",
useCase: 'spec' | 'features' | 'suggestions' | 'chat' | 'auto' | 'default',
explicitModel?: string
): string {
// Explicit model takes precedence
@@ -102,12 +116,12 @@ export function getModelForUseCase(
}
const defaultModels: Record<string, string> = {
spec: CLAUDE_MODEL_MAP["haiku"], // used to generate app specs
features: CLAUDE_MODEL_MAP["haiku"], // used to generate features from app specs
suggestions: CLAUDE_MODEL_MAP["haiku"], // used for suggestions
chat: CLAUDE_MODEL_MAP["haiku"], // used for chat
auto: CLAUDE_MODEL_MAP["opus"], // used to implement kanban cards
default: CLAUDE_MODEL_MAP["opus"],
spec: CLAUDE_MODEL_MAP['haiku'], // used to generate app specs
features: CLAUDE_MODEL_MAP['haiku'], // used to generate features from app specs
suggestions: CLAUDE_MODEL_MAP['haiku'], // used for suggestions
chat: CLAUDE_MODEL_MAP['haiku'], // used for chat
auto: CLAUDE_MODEL_MAP['opus'], // used to implement kanban cards
default: CLAUDE_MODEL_MAP['opus'],
};
return resolveModelString(defaultModels[useCase] || DEFAULT_MODELS.claude);
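
Resolution order is: explicit model argument, then any environment override, then the per-use-case defaults above, all passed through resolveModelString(). An indicative sketch (comments describe expected behavior rather than exact resolved ids; 'my-custom-model-id' is a placeholder):

// Indicative only: exact ids come from CLAUDE_MODEL_MAP and resolveModelString().
getModelForUseCase('chat');                        // per-use-case default (haiku-class)
getModelForUseCase('auto');                        // per-use-case default (opus-class)
getModelForUseCase('spec', 'my-custom-model-id');  // explicit model wins over the 'spec' default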
@@ -118,7 +132,7 @@ export function getModelForUseCase(
*/
function getBaseOptions(): Partial<Options> {
return {
permissionMode: "acceptEdits",
permissionMode: 'acceptEdits',
};
}
@@ -143,7 +157,7 @@ export interface CreateSdkOptionsConfig {
/** Optional output format for structured outputs */
outputFormat?: {
type: "json_schema";
type: 'json_schema';
schema: Record<string, unknown>;
};
}
@@ -156,16 +170,17 @@ export interface CreateSdkOptionsConfig {
* - Maximum turns for thorough exploration
* - Haiku model by default (can be overridden)
*/
export function createSpecGenerationOptions(
config: CreateSdkOptionsConfig
): Options {
export function createSpecGenerationOptions(config: CreateSdkOptionsConfig): Options {
// Validate working directory before creating options
validateWorkingDirectory(config.cwd);
return {
...getBaseOptions(),
// Override permissionMode - spec generation only needs read-only tools
// Using "acceptEdits" can cause Claude to write files to unexpected locations
// See: https://github.com/AutoMaker-Org/automaker/issues/149
permissionMode: "default",
model: getModelForUseCase("spec", config.model),
permissionMode: 'default',
model: getModelForUseCase('spec', config.model),
maxTurns: MAX_TURNS.maximum,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.specGeneration],
@@ -183,14 +198,15 @@ export function createSpecGenerationOptions(
* - Quick turns since it's mostly JSON generation
* - Haiku model by default for speed
*/
export function createFeatureGenerationOptions(
config: CreateSdkOptionsConfig
): Options {
export function createFeatureGenerationOptions(config: CreateSdkOptionsConfig): Options {
// Validate working directory before creating options
validateWorkingDirectory(config.cwd);
return {
...getBaseOptions(),
// Override permissionMode - feature generation only needs read-only tools
permissionMode: "default",
model: getModelForUseCase("features", config.model),
permissionMode: 'default',
model: getModelForUseCase('features', config.model),
maxTurns: MAX_TURNS.quick,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.readOnly],
@@ -207,12 +223,13 @@ export function createFeatureGenerationOptions(
* - Extended turns to allow thorough codebase exploration and structured output generation
* - Haiku model by default
*/
export function createSuggestionsOptions(
config: CreateSdkOptionsConfig
): Options {
export function createSuggestionsOptions(config: CreateSdkOptionsConfig): Options {
// Validate working directory before creating options
validateWorkingDirectory(config.cwd);
return {
...getBaseOptions(),
model: getModelForUseCase("suggestions", config.model),
model: getModelForUseCase('suggestions', config.model),
maxTurns: MAX_TURNS.extended,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.readOnly],
@@ -232,12 +249,15 @@ export function createSuggestionsOptions(
* - Sandbox enabled for bash safety
*/
export function createChatOptions(config: CreateSdkOptionsConfig): Options {
// Validate working directory before creating options
validateWorkingDirectory(config.cwd);
// Model priority: explicit model > session model > chat default
const effectiveModel = config.model || config.sessionModel;
return {
...getBaseOptions(),
model: getModelForUseCase("chat", effectiveModel),
model: getModelForUseCase('chat', effectiveModel),
maxTurns: MAX_TURNS.standard,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.chat],
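
Spelled out, the precedence in the comment above looks like this (cwd and the model ids are placeholders; resolution still goes through getModelForUseCase('chat', ...)):

const cwd = '/path/inside/allowed-root'; // hypothetical, must satisfy validateWorkingDirectory()

createChatOptions({ cwd, model: 'explicit-id', sessionModel: 'session-id' }); // uses 'explicit-id'
createChatOptions({ cwd, sessionModel: 'session-id' });                       // falls back to 'session-id'
createChatOptions({ cwd });                                                   // falls back to the chat default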
@@ -260,9 +280,12 @@ export function createChatOptions(config: CreateSdkOptionsConfig): Options {
* - Sandbox enabled for bash safety
*/
export function createAutoModeOptions(config: CreateSdkOptionsConfig): Options {
// Validate working directory before creating options
validateWorkingDirectory(config.cwd);
return {
...getBaseOptions(),
model: getModelForUseCase("auto", config.model),
model: getModelForUseCase('auto', config.model),
maxTurns: MAX_TURNS.maximum,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.fullAccess],
@@ -287,14 +310,15 @@ export function createCustomOptions(
sandbox?: { enabled: boolean; autoAllowBashIfSandboxed?: boolean };
}
): Options {
// Validate working directory before creating options
validateWorkingDirectory(config.cwd);
return {
...getBaseOptions(),
model: getModelForUseCase("default", config.model),
model: getModelForUseCase('default', config.model),
maxTurns: config.maxTurns ?? MAX_TURNS.maximum,
cwd: config.cwd,
allowedTools: config.allowedTools
? [...config.allowedTools]
: [...TOOL_PRESETS.readOnly],
allowedTools: config.allowedTools ? [...config.allowedTools] : [...TOOL_PRESETS.readOnly],
...(config.sandbox && { sandbox: config.sandbox }),
...(config.systemPrompt && { systemPrompt: config.systemPrompt }),
...(config.abortController && { abortController: config.abortController }),

View File

@@ -3,8 +3,8 @@
* Stores worktree-specific data in .automaker/worktrees/:branch/worktree.json
*/
import * as fs from "fs/promises";
import * as path from "path";
import * as secureFs from './secure-fs.js';
import * as path from 'path';
/** Maximum length for sanitized branch names in filesystem paths */
const MAX_SANITIZED_BRANCH_PATH_LENGTH = 200;
@@ -32,11 +32,11 @@ function sanitizeBranchName(branch: string): string {
// - Windows invalid chars: : * ? " < > |
// - Other potentially problematic chars
let safeBranch = branch
.replace(/[/\\:*?"<>|]/g, "-") // Replace invalid chars with dash
.replace(/\s+/g, "_") // Replace spaces with underscores
.replace(/\.+$/g, "") // Remove trailing dots (Windows issue)
.replace(/-+/g, "-") // Collapse multiple dashes
.replace(/^-|-$/g, ""); // Remove leading/trailing dashes
.replace(/[/\\:*?"<>|]/g, '-') // Replace invalid chars with dash
.replace(/\s+/g, '_') // Replace spaces with underscores
.replace(/\.+$/g, '') // Remove trailing dots (Windows issue)
.replace(/-+/g, '-') // Collapse multiple dashes
.replace(/^-|-$/g, ''); // Remove leading/trailing dashes
// Truncate to safe length (leave room for path components)
safeBranch = safeBranch.substring(0, MAX_SANITIZED_BRANCH_PATH_LENGTH);
@@ -44,7 +44,7 @@ function sanitizeBranchName(branch: string): string {
// Handle Windows reserved names (CON, PRN, AUX, NUL, COM1-9, LPT1-9)
const windowsReserved = /^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i;
if (windowsReserved.test(safeBranch) || safeBranch.length === 0) {
safeBranch = `_${safeBranch || "branch"}`;
safeBranch = `_${safeBranch || 'branch'}`;
}
return safeBranch;
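
Worked examples of the rules above (inputs invented for illustration):

// sanitizeBranchName('feature/add:user*login') -> 'feature-add-user-login'
// sanitizeBranchName('hotfix\\urgent fix')     -> 'hotfix-urgent_fix'
// sanitizeBranchName('release/v2.0.')          -> 'release-v2.0'
// sanitizeBranchName('CON')                    -> '_CON'     (Windows reserved name)
// sanitizeBranchName('///')                    -> '_branch'  (everything stripped, falls back)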
@@ -55,14 +55,14 @@ function sanitizeBranchName(branch: string): string {
*/
function getWorktreeMetadataDir(projectPath: string, branch: string): string {
const safeBranch = sanitizeBranchName(branch);
return path.join(projectPath, ".automaker", "worktrees", safeBranch);
return path.join(projectPath, '.automaker', 'worktrees', safeBranch);
}
/**
* Get the path to the worktree metadata file
*/
function getWorktreeMetadataPath(projectPath: string, branch: string): string {
return path.join(getWorktreeMetadataDir(projectPath, branch), "worktree.json");
return path.join(getWorktreeMetadataDir(projectPath, branch), 'worktree.json');
}
/**
@@ -74,7 +74,7 @@ export async function readWorktreeMetadata(
): Promise<WorktreeMetadata | null> {
try {
const metadataPath = getWorktreeMetadataPath(projectPath, branch);
const content = await fs.readFile(metadataPath, "utf-8");
const content = (await secureFs.readFile(metadataPath, 'utf-8')) as string;
return JSON.parse(content) as WorktreeMetadata;
} catch (error) {
// File doesn't exist or can't be read
@@ -94,10 +94,10 @@ export async function writeWorktreeMetadata(
const metadataPath = getWorktreeMetadataPath(projectPath, branch);
// Ensure directory exists
await fs.mkdir(metadataDir, { recursive: true });
await secureFs.mkdir(metadataDir, { recursive: true });
// Write metadata
await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2), "utf-8");
await secureFs.writeFile(metadataPath, JSON.stringify(metadata, null, 2), 'utf-8');
}
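
The secure-fs module itself is not part of this diff; the sketch below is only an assumption about its shape, inferred from the call sites here: thin wrappers that validate each path with isPathAllowed before delegating to fs/promises, throwing PathNotAllowedError otherwise (with similar wrappers for the other fs calls used above).

// Hypothetical secure-fs.ts sketch (not the repository's actual implementation).
import * as fs from 'fs/promises';
import * as path from 'path';
import { isPathAllowed, PathNotAllowedError } from '@automaker/platform';

function assertAllowed(p: string): string {
  const resolved = path.resolve(p);
  if (!isPathAllowed(resolved)) {
    throw new PathNotAllowedError(p);
  }
  return resolved;
}

export async function readFile(p: string, encoding?: BufferEncoding) {
  return fs.readFile(assertAllowed(p), encoding);
}

export async function writeFile(p: string, data: string, encoding?: BufferEncoding) {
  return fs.writeFile(assertAllowed(p), data, encoding);
}

export async function mkdir(p: string, opts?: { recursive?: boolean }) {
  return fs.mkdir(assertAllowed(p), opts);
}

export async function rm(p: string, opts?: { recursive?: boolean; force?: boolean }) {
  return fs.rm(assertAllowed(p), opts);
}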
/**
@@ -143,16 +143,16 @@ export async function readAllWorktreeMetadata(
projectPath: string
): Promise<Map<string, WorktreeMetadata>> {
const result = new Map<string, WorktreeMetadata>();
const worktreesDir = path.join(projectPath, ".automaker", "worktrees");
const worktreesDir = path.join(projectPath, '.automaker', 'worktrees');
try {
const dirs = await fs.readdir(worktreesDir, { withFileTypes: true });
const dirs = await secureFs.readdir(worktreesDir, { withFileTypes: true });
for (const dir of dirs) {
if (dir.isDirectory()) {
const metadataPath = path.join(worktreesDir, dir.name, "worktree.json");
const metadataPath = path.join(worktreesDir, dir.name, 'worktree.json');
try {
const content = await fs.readFile(metadataPath, "utf-8");
const content = (await secureFs.readFile(metadataPath, 'utf-8')) as string;
const metadata = JSON.parse(content) as WorktreeMetadata;
result.set(metadata.branch, metadata);
} catch {
@@ -170,13 +170,10 @@ export async function readAllWorktreeMetadata(
/**
* Delete worktree metadata for a branch
*/
export async function deleteWorktreeMetadata(
projectPath: string,
branch: string
): Promise<void> {
export async function deleteWorktreeMetadata(projectPath: string, branch: string): Promise<void> {
const metadataDir = getWorktreeMetadataDir(projectPath, branch);
try {
await fs.rm(metadataDir, { recursive: true, force: true });
await secureFs.rm(metadataDir, { recursive: true, force: true });
} catch {
// Ignore errors if directory doesn't exist
}

View File

@@ -2,16 +2,16 @@
* Generate features from existing app_spec.txt
*/
import { query } from "@anthropic-ai/claude-agent-sdk";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { createLogger } from "@automaker/utils";
import { createFeatureGenerationOptions } from "../../lib/sdk-options.js";
import { logAuthStatus } from "./common.js";
import { parseAndCreateFeatures } from "./parse-and-create-features.js";
import { getAppSpecPath } from "@automaker/platform";
import { query } from '@anthropic-ai/claude-agent-sdk';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
import { createLogger } from '@automaker/utils';
import { createFeatureGenerationOptions } from '../../lib/sdk-options.js';
import { logAuthStatus } from './common.js';
import { parseAndCreateFeatures } from './parse-and-create-features.js';
import { getAppSpecPath } from '@automaker/platform';
const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');
const DEFAULT_MAX_FEATURES = 50;
@@ -22,28 +22,26 @@ export async function generateFeaturesFromSpec(
maxFeatures?: number
): Promise<void> {
const featureCount = maxFeatures ?? DEFAULT_MAX_FEATURES;
logger.debug("========== generateFeaturesFromSpec() started ==========");
logger.debug("projectPath:", projectPath);
logger.debug("maxFeatures:", featureCount);
logger.debug('========== generateFeaturesFromSpec() started ==========');
logger.debug('projectPath:', projectPath);
logger.debug('maxFeatures:', featureCount);
// Read existing spec from .automaker directory
const specPath = getAppSpecPath(projectPath);
let spec: string;
logger.debug("Reading spec from:", specPath);
logger.debug('Reading spec from:', specPath);
try {
spec = await fs.readFile(specPath, "utf-8");
spec = (await secureFs.readFile(specPath, 'utf-8')) as string;
logger.info(`Spec loaded successfully (${spec.length} chars)`);
logger.info(`Spec preview (first 500 chars): ${spec.substring(0, 500)}`);
logger.info(
`Spec preview (last 500 chars): ${spec.substring(spec.length - 500)}`
);
logger.info(`Spec preview (last 500 chars): ${spec.substring(spec.length - 500)}`);
} catch (readError) {
logger.error("❌ Failed to read spec file:", readError);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
error: "No project spec found. Generate spec first.",
logger.error('❌ Failed to read spec file:', readError);
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: 'No project spec found. Generate spec first.',
projectPath: projectPath,
});
return;
@@ -82,16 +80,14 @@ Generate ${featureCount} features that build on each other logically.
IMPORTANT: Do not ask for clarification. The specification is provided above. Generate the JSON immediately.`;
logger.info("========== PROMPT BEING SENT ==========");
logger.info('========== PROMPT BEING SENT ==========');
logger.info(`Prompt length: ${prompt.length} chars`);
logger.info(
`Prompt preview (first 1000 chars):\n${prompt.substring(0, 1000)}`
);
logger.info("========== END PROMPT PREVIEW ==========");
logger.info(`Prompt preview (first 1000 chars):\n${prompt.substring(0, 1000)}`);
logger.info('========== END PROMPT PREVIEW ==========');
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
content: "Analyzing spec and generating features...\n",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: 'Analyzing spec and generating features...\n',
projectPath: projectPath,
});
@@ -100,73 +96,67 @@ IMPORTANT: Do not ask for clarification. The specification is provided above. Ge
abortController,
});
logger.debug("SDK Options:", JSON.stringify(options, null, 2));
logger.info("Calling Claude Agent SDK query() for features...");
logger.debug('SDK Options:', JSON.stringify(options, null, 2));
logger.info('Calling Claude Agent SDK query() for features...');
logAuthStatus("Right before SDK query() for features");
logAuthStatus('Right before SDK query() for features');
let stream;
try {
stream = query({ prompt, options });
logger.debug("query() returned stream successfully");
logger.debug('query() returned stream successfully');
} catch (queryError) {
logger.error("❌ query() threw an exception:");
logger.error("Error:", queryError);
logger.error('❌ query() threw an exception:');
logger.error('Error:', queryError);
throw queryError;
}
let responseText = "";
let responseText = '';
let messageCount = 0;
logger.debug("Starting to iterate over feature stream...");
logger.debug('Starting to iterate over feature stream...');
try {
for await (const msg of stream) {
messageCount++;
logger.debug(
`Feature stream message #${messageCount}:`,
JSON.stringify(
{ type: msg.type, subtype: (msg as any).subtype },
null,
2
)
JSON.stringify({ type: msg.type, subtype: (msg as any).subtype }, null, 2)
);
if (msg.type === "assistant" && msg.message.content) {
if (msg.type === 'assistant' && msg.message.content) {
for (const block of msg.message.content) {
if (block.type === "text") {
if (block.type === 'text') {
responseText += block.text;
logger.debug(
`Feature text block received (${block.text.length} chars)`
);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
logger.debug(`Feature text block received (${block.text.length} chars)`);
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: block.text,
projectPath: projectPath,
});
}
}
} else if (msg.type === "result" && (msg as any).subtype === "success") {
logger.debug("Received success result for features");
} else if (msg.type === 'result' && (msg as any).subtype === 'success') {
logger.debug('Received success result for features');
responseText = (msg as any).result || responseText;
} else if ((msg as { type: string }).type === "error") {
logger.error("❌ Received error message from feature stream:");
logger.error("Error message:", JSON.stringify(msg, null, 2));
} else if ((msg as { type: string }).type === 'error') {
logger.error('❌ Received error message from feature stream:');
logger.error('Error message:', JSON.stringify(msg, null, 2));
}
}
} catch (streamError) {
logger.error("❌ Error while iterating feature stream:");
logger.error("Stream error:", streamError);
logger.error('❌ Error while iterating feature stream:');
logger.error('Stream error:', streamError);
throw streamError;
}
logger.info(`Feature stream complete. Total messages: ${messageCount}`);
logger.info(`Feature response length: ${responseText.length} chars`);
logger.info("========== FULL RESPONSE TEXT ==========");
logger.info('========== FULL RESPONSE TEXT ==========');
logger.info(responseText);
logger.info("========== END RESPONSE TEXT ==========");
logger.info('========== END RESPONSE TEXT ==========');
await parseAndCreateFeatures(projectPath, responseText, events);
logger.debug("========== generateFeaturesFromSpec() completed ==========");
logger.debug('========== generateFeaturesFromSpec() completed ==========');
}

View File

@@ -2,23 +2,23 @@
* Generate app_spec.txt from project overview
*/
import { query } from "@anthropic-ai/claude-agent-sdk";
import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { query } from '@anthropic-ai/claude-agent-sdk';
import path from 'path';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
import {
specOutputSchema,
specToXml,
getStructuredSpecPromptInstruction,
type SpecOutput,
} from "../../lib/app-spec-format.js";
import { createLogger } from "@automaker/utils";
import { createSpecGenerationOptions } from "../../lib/sdk-options.js";
import { logAuthStatus } from "./common.js";
import { generateFeaturesFromSpec } from "./generate-features-from-spec.js";
import { ensureAutomakerDir, getAppSpecPath } from "@automaker/platform";
} from '../../lib/app-spec-format.js';
import { createLogger } from '@automaker/utils';
import { createSpecGenerationOptions } from '../../lib/sdk-options.js';
import { logAuthStatus } from './common.js';
import { generateFeaturesFromSpec } from './generate-features-from-spec.js';
import { ensureAutomakerDir, getAppSpecPath } from '@automaker/platform';
const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');
export async function generateSpec(
projectPath: string,
@@ -29,17 +29,17 @@ export async function generateSpec(
analyzeProject?: boolean,
maxFeatures?: number
): Promise<void> {
logger.info("========== generateSpec() started ==========");
logger.info("projectPath:", projectPath);
logger.info("projectOverview length:", `${projectOverview.length} chars`);
logger.info("projectOverview preview:", projectOverview.substring(0, 300));
logger.info("generateFeatures:", generateFeatures);
logger.info("analyzeProject:", analyzeProject);
logger.info("maxFeatures:", maxFeatures);
logger.info('========== generateSpec() started ==========');
logger.info('projectPath:', projectPath);
logger.info('projectOverview length:', `${projectOverview.length} chars`);
logger.info('projectOverview preview:', projectOverview.substring(0, 300));
logger.info('generateFeatures:', generateFeatures);
logger.info('analyzeProject:', analyzeProject);
logger.info('maxFeatures:', maxFeatures);
// Build the prompt based on whether we should analyze the project
let analysisInstructions = "";
let techStackDefaults = "";
let analysisInstructions = '';
let techStackDefaults = '';
if (analyzeProject !== false) {
// Default to true - analyze the project
@@ -73,114 +73,110 @@ ${analysisInstructions}
${getStructuredSpecPromptInstruction()}`;
logger.info("========== PROMPT BEING SENT ==========");
logger.info('========== PROMPT BEING SENT ==========');
logger.info(`Prompt length: ${prompt.length} chars`);
logger.info(`Prompt preview (first 500 chars):\n${prompt.substring(0, 500)}`);
logger.info("========== END PROMPT PREVIEW ==========");
logger.info('========== END PROMPT PREVIEW ==========');
events.emit("spec-regeneration:event", {
type: "spec_progress",
content: "Starting spec generation...\n",
events.emit('spec-regeneration:event', {
type: 'spec_progress',
content: 'Starting spec generation...\n',
});
const options = createSpecGenerationOptions({
cwd: projectPath,
abortController,
outputFormat: {
type: "json_schema",
type: 'json_schema',
schema: specOutputSchema,
},
});
logger.debug("SDK Options:", JSON.stringify(options, null, 2));
logger.info("Calling Claude Agent SDK query()...");
logger.debug('SDK Options:', JSON.stringify(options, null, 2));
logger.info('Calling Claude Agent SDK query()...');
// Log auth status right before the SDK call
logAuthStatus("Right before SDK query()");
logAuthStatus('Right before SDK query()');
let stream;
try {
stream = query({ prompt, options });
logger.debug("query() returned stream successfully");
logger.debug('query() returned stream successfully');
} catch (queryError) {
logger.error("❌ query() threw an exception:");
logger.error("Error:", queryError);
logger.error('❌ query() threw an exception:');
logger.error('Error:', queryError);
throw queryError;
}
let responseText = "";
let responseText = '';
let messageCount = 0;
let structuredOutput: SpecOutput | null = null;
logger.info("Starting to iterate over stream...");
logger.info('Starting to iterate over stream...');
try {
for await (const msg of stream) {
messageCount++;
logger.info(
`Stream message #${messageCount}: type=${msg.type}, subtype=${
(msg as any).subtype
}`
`Stream message #${messageCount}: type=${msg.type}, subtype=${(msg as any).subtype}`
);
if (msg.type === "assistant") {
if (msg.type === 'assistant') {
const msgAny = msg as any;
if (msgAny.message?.content) {
for (const block of msgAny.message.content) {
if (block.type === "text") {
if (block.type === 'text') {
responseText += block.text;
logger.info(
`Text block received (${block.text.length} chars), total now: ${responseText.length} chars`
);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: block.text,
projectPath: projectPath,
});
} else if (block.type === "tool_use") {
logger.info("Tool use:", block.name);
events.emit("spec-regeneration:event", {
type: "spec_tool",
} else if (block.type === 'tool_use') {
logger.info('Tool use:', block.name);
events.emit('spec-regeneration:event', {
type: 'spec_tool',
tool: block.name,
input: block.input,
});
}
}
}
} else if (msg.type === "result" && (msg as any).subtype === "success") {
logger.info("Received success result");
} else if (msg.type === 'result' && (msg as any).subtype === 'success') {
logger.info('Received success result');
// Check for structured output - this is the reliable way to get spec data
const resultMsg = msg as any;
if (resultMsg.structured_output) {
structuredOutput = resultMsg.structured_output as SpecOutput;
logger.info("✅ Received structured output");
logger.debug("Structured output:", JSON.stringify(structuredOutput, null, 2));
logger.info('✅ Received structured output');
logger.debug('Structured output:', JSON.stringify(structuredOutput, null, 2));
} else {
logger.warn("⚠️ No structured output in result, will fall back to text parsing");
logger.warn('⚠️ No structured output in result, will fall back to text parsing');
}
} else if (msg.type === "result") {
} else if (msg.type === 'result') {
// Handle error result types
const subtype = (msg as any).subtype;
logger.info(`Result message: subtype=${subtype}`);
if (subtype === "error_max_turns") {
logger.error("❌ Hit max turns limit!");
} else if (subtype === "error_max_structured_output_retries") {
logger.error("❌ Failed to produce valid structured output after retries");
throw new Error("Could not produce valid spec output");
if (subtype === 'error_max_turns') {
logger.error('❌ Hit max turns limit!');
} else if (subtype === 'error_max_structured_output_retries') {
logger.error('❌ Failed to produce valid structured output after retries');
throw new Error('Could not produce valid spec output');
}
} else if ((msg as { type: string }).type === "error") {
logger.error("❌ Received error message from stream:");
logger.error("Error message:", JSON.stringify(msg, null, 2));
} else if (msg.type === "user") {
} else if ((msg as { type: string }).type === 'error') {
logger.error('❌ Received error message from stream:');
logger.error('Error message:', JSON.stringify(msg, null, 2));
} else if (msg.type === 'user') {
// Log user messages (tool results)
logger.info(
`User message (tool result): ${JSON.stringify(msg).substring(0, 500)}`
);
logger.info(`User message (tool result): ${JSON.stringify(msg).substring(0, 500)}`);
}
}
} catch (streamError) {
logger.error("❌ Error while iterating stream:");
logger.error("Stream error:", streamError);
logger.error('❌ Error while iterating stream:');
logger.error('Stream error:', streamError);
throw streamError;
}
@@ -192,40 +188,42 @@ ${getStructuredSpecPromptInstruction()}`;
if (structuredOutput) {
// Use structured output - convert JSON to XML
logger.info("✅ Using structured output for XML generation");
logger.info('✅ Using structured output for XML generation');
xmlContent = specToXml(structuredOutput);
logger.info(`Generated XML from structured output: ${xmlContent.length} chars`);
} else {
// Fallback: Extract XML content from response text
// Claude might include conversational text before/after
// See: https://github.com/AutoMaker-Org/automaker/issues/149
logger.warn("⚠️ No structured output, falling back to text parsing");
logger.info("========== FINAL RESPONSE TEXT ==========");
logger.info(responseText || "(empty)");
logger.info("========== END RESPONSE TEXT ==========");
logger.warn('⚠️ No structured output, falling back to text parsing');
logger.info('========== FINAL RESPONSE TEXT ==========');
logger.info(responseText || '(empty)');
logger.info('========== END RESPONSE TEXT ==========');
if (!responseText || responseText.trim().length === 0) {
throw new Error("No response text and no structured output - cannot generate spec");
throw new Error('No response text and no structured output - cannot generate spec');
}
const xmlStart = responseText.indexOf("<project_specification>");
const xmlEnd = responseText.lastIndexOf("</project_specification>");
const xmlStart = responseText.indexOf('<project_specification>');
const xmlEnd = responseText.lastIndexOf('</project_specification>');
if (xmlStart !== -1 && xmlEnd !== -1) {
// Extract just the XML content, discarding any conversational text before/after
xmlContent = responseText.substring(xmlStart, xmlEnd + "</project_specification>".length);
xmlContent = responseText.substring(xmlStart, xmlEnd + '</project_specification>'.length);
logger.info(`Extracted XML content: ${xmlContent.length} chars (from position ${xmlStart})`);
} else {
// No valid XML structure found in the response text
// This happens when structured output was expected but not received, and the agent
// output conversational text instead of XML (e.g., "The project directory appears to be empty...")
// We should NOT save this conversational text as it's not a valid spec
logger.error("❌ Response does not contain valid <project_specification> XML structure");
logger.error("This typically happens when structured output failed and the agent produced conversational text instead of XML");
logger.error('❌ Response does not contain valid <project_specification> XML structure');
logger.error(
'This typically happens when structured output failed and the agent produced conversational text instead of XML'
);
throw new Error(
"Failed to generate spec: No valid XML structure found in response. " +
"The response contained conversational text but no <project_specification> tags. " +
"Please try again."
'Failed to generate spec: No valid XML structure found in response. ' +
'The response contained conversational text but no <project_specification> tags. ' +
'Please try again.'
);
}
}
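
To make the fallback concrete, a hedged illustration of what the extraction does (the response text is invented and the inner tags are placeholders):

// Invented example response, illustrating the fallback extraction only.
const exampleResponse =
  'Sure! Here is the spec you asked for:\n' +
  '<project_specification>\n  <overview>…</overview>\n</project_specification>\n' +
  'Let me know if you need changes.';
const start = exampleResponse.indexOf('<project_specification>');
const end = exampleResponse.lastIndexOf('</project_specification>');
// Keeps only the XML envelope and drops the conversational wrapper.
const extracted = exampleResponse.substring(start, end + '</project_specification>'.length);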
@@ -234,60 +232,55 @@ ${getStructuredSpecPromptInstruction()}`;
await ensureAutomakerDir(projectPath);
const specPath = getAppSpecPath(projectPath);
logger.info("Saving spec to:", specPath);
logger.info('Saving spec to:', specPath);
logger.info(`Content to save (${xmlContent.length} chars)`);
await fs.writeFile(specPath, xmlContent);
await secureFs.writeFile(specPath, xmlContent);
// Verify the file was written
const savedContent = await fs.readFile(specPath, "utf-8");
const savedContent = await secureFs.readFile(specPath, 'utf-8');
logger.info(`Verified saved file: ${savedContent.length} chars`);
if (savedContent.length === 0) {
logger.error("❌ File was saved but is empty!");
logger.error('❌ File was saved but is empty!');
}
logger.info("Spec saved successfully");
logger.info('Spec saved successfully');
// Emit spec completion event
if (generateFeatures) {
// If features will be generated, emit intermediate completion
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
content: "[Phase: spec_complete] Spec created! Generating features...\n",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: '[Phase: spec_complete] Spec created! Generating features...\n',
projectPath: projectPath,
});
} else {
// If no features, emit final completion
events.emit("spec-regeneration:event", {
type: "spec_regeneration_complete",
message: "Spec regeneration complete!",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_complete',
message: 'Spec regeneration complete!',
projectPath: projectPath,
});
}
// If generate features was requested, generate them from the spec
if (generateFeatures) {
logger.info("Starting feature generation from spec...");
logger.info('Starting feature generation from spec...');
// Create a new abort controller for feature generation
const featureAbortController = new AbortController();
try {
await generateFeaturesFromSpec(
projectPath,
events,
featureAbortController,
maxFeatures
);
await generateFeaturesFromSpec(projectPath, events, featureAbortController, maxFeatures);
// Final completion will be emitted by generateFeaturesFromSpec -> parseAndCreateFeatures
} catch (featureError) {
logger.error("Feature generation failed:", featureError);
logger.error('Feature generation failed:', featureError);
// Don't throw - spec generation succeeded, feature generation is optional
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
error: (featureError as Error).message || "Feature generation failed",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: (featureError as Error).message || 'Feature generation failed',
projectPath: projectPath,
});
}
}
logger.debug("========== generateSpec() completed ==========");
logger.debug('========== generateSpec() completed ==========');
}

View File

@@ -2,71 +2,71 @@
* Parse agent response and create feature files
*/
import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { createLogger } from "@automaker/utils";
import { getFeaturesDir } from "@automaker/platform";
import path from 'path';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
import { createLogger } from '@automaker/utils';
import { getFeaturesDir } from '@automaker/platform';
const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');
export async function parseAndCreateFeatures(
projectPath: string,
content: string,
events: EventEmitter
): Promise<void> {
logger.info("========== parseAndCreateFeatures() started ==========");
logger.info('========== parseAndCreateFeatures() started ==========');
logger.info(`Content length: ${content.length} chars`);
logger.info("========== CONTENT RECEIVED FOR PARSING ==========");
logger.info('========== CONTENT RECEIVED FOR PARSING ==========');
logger.info(content);
logger.info("========== END CONTENT ==========");
logger.info('========== END CONTENT ==========');
try {
// Extract JSON from response
logger.info("Extracting JSON from response...");
logger.info('Extracting JSON from response...');
logger.info(`Looking for pattern: /{[\\s\\S]*"features"[\\s\\S]*}/`);
const jsonMatch = content.match(/\{[\s\S]*"features"[\s\S]*\}/);
if (!jsonMatch) {
logger.error("❌ No valid JSON found in response");
logger.error("Full content received:");
logger.error('❌ No valid JSON found in response');
logger.error('Full content received:');
logger.error(content);
throw new Error("No valid JSON found in response");
throw new Error('No valid JSON found in response');
}
logger.info(`JSON match found (${jsonMatch[0].length} chars)`);
logger.info("========== MATCHED JSON ==========");
logger.info('========== MATCHED JSON ==========');
logger.info(jsonMatch[0]);
logger.info("========== END MATCHED JSON ==========");
logger.info('========== END MATCHED JSON ==========');
const parsed = JSON.parse(jsonMatch[0]);
logger.info(`Parsed ${parsed.features?.length || 0} features`);
logger.info("Parsed features:", JSON.stringify(parsed.features, null, 2));
logger.info('Parsed features:', JSON.stringify(parsed.features, null, 2));
const featuresDir = getFeaturesDir(projectPath);
await fs.mkdir(featuresDir, { recursive: true });
await secureFs.mkdir(featuresDir, { recursive: true });
const createdFeatures: Array<{ id: string; title: string }> = [];
for (const feature of parsed.features) {
logger.debug("Creating feature:", feature.id);
logger.debug('Creating feature:', feature.id);
const featureDir = path.join(featuresDir, feature.id);
await fs.mkdir(featureDir, { recursive: true });
await secureFs.mkdir(featureDir, { recursive: true });
const featureData = {
id: feature.id,
category: feature.category || "Uncategorized",
category: feature.category || 'Uncategorized',
title: feature.title,
description: feature.description,
status: "backlog", // Features go to backlog - user must manually start them
status: 'backlog', // Features go to backlog - user must manually start them
priority: feature.priority || 2,
complexity: feature.complexity || "moderate",
complexity: feature.complexity || 'moderate',
dependencies: feature.dependencies || [],
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
await fs.writeFile(
path.join(featureDir, "feature.json"),
await secureFs.writeFile(
path.join(featureDir, 'feature.json'),
JSON.stringify(featureData, null, 2)
);
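
The parser above only relies on a top-level "features" array; a hedged example of the minimal JSON shape it accepts, built from the fields read in the loop (everything beyond those fields is optional or defaulted):

// Illustrative input only; ids, titles and descriptions are invented.
const exampleAgentOutput = JSON.stringify({
  features: [
    {
      id: 'feature-001',
      title: 'User login',
      description: 'Allow users to sign in with email and password.',
      category: 'Auth',     // optional, defaults to 'Uncategorized'
      priority: 1,          // optional, defaults to 2
      complexity: 'simple', // optional, defaults to 'moderate'
      dependencies: [],     // optional, defaults to []
    },
  ],
});
// parseAndCreateFeatures(projectPath, exampleAgentOutput, events) would write
// feature.json under getFeaturesDir(projectPath)/feature-001/ with status 'backlog'.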
@@ -75,20 +75,20 @@ export async function parseAndCreateFeatures(
logger.info(`✓ Created ${createdFeatures.length} features successfully`);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_complete",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_complete',
message: `Spec regeneration complete! Created ${createdFeatures.length} features.`,
projectPath: projectPath,
});
} catch (error) {
logger.error("❌ parseAndCreateFeatures() failed:");
logger.error("Error:", error);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
logger.error('❌ parseAndCreateFeatures() failed:');
logger.error('Error:', error);
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: (error as Error).message,
projectPath: projectPath,
});
}
logger.debug("========== parseAndCreateFeatures() completed ==========");
logger.debug('========== parseAndCreateFeatures() completed ==========');
}

View File

@@ -2,16 +2,12 @@
* POST /browse endpoint - Browse directories for file browser UI
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import os from "os";
import path from "path";
import {
getAllowedRootDirectory,
isPathAllowed,
PathNotAllowedError,
} from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import os from 'os';
import path from 'path';
import { getAllowedRootDirectory, PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createBrowseHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -22,24 +18,19 @@ export function createBrowseHandler() {
const defaultPath = getAllowedRootDirectory() || os.homedir();
const targetPath = dirPath ? path.resolve(dirPath) : defaultPath;
// Validate that the path is allowed
if (!isPathAllowed(targetPath)) {
throw new PathNotAllowedError(dirPath || targetPath);
}
// Detect available drives on Windows
const detectDrives = async (): Promise<string[]> => {
if (os.platform() !== "win32") {
if (os.platform() !== 'win32') {
return [];
}
const drives: string[] = [];
const letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
const letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
for (const letter of letters) {
const drivePath = `${letter}:\\`;
try {
await fs.access(drivePath);
await secureFs.access(drivePath);
drives.push(drivePath);
} catch {
// Drive doesn't exist, skip it
@@ -57,21 +48,19 @@ export function createBrowseHandler() {
const drives = await detectDrives();
try {
const stats = await fs.stat(targetPath);
const stats = await secureFs.stat(targetPath);
if (!stats.isDirectory()) {
res
.status(400)
.json({ success: false, error: "Path is not a directory" });
res.status(400).json({ success: false, error: 'Path is not a directory' });
return;
}
// Read directory contents
const entries = await fs.readdir(targetPath, { withFileTypes: true });
const entries = await secureFs.readdir(targetPath, { withFileTypes: true });
// Filter for directories only and add parent directory option
const directories = entries
.filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
.filter((entry) => entry.isDirectory() && !entry.name.startsWith('.'))
.map((entry) => ({
name: entry.name,
path: path.join(targetPath, entry.name),
@@ -87,10 +76,8 @@ export function createBrowseHandler() {
});
} catch (error) {
// Handle permission errors gracefully - still return path info so user can navigate away
const errorMessage =
error instanceof Error ? error.message : "Failed to read directory";
const isPermissionError =
errorMessage.includes("EPERM") || errorMessage.includes("EACCES");
const errorMessage = error instanceof Error ? error.message : 'Failed to read directory';
const isPermissionError = errorMessage.includes('EPERM') || errorMessage.includes('EACCES');
if (isPermissionError) {
// Return success with empty directories so user can still navigate to parent
@@ -101,7 +88,7 @@ export function createBrowseHandler() {
directories: [],
drives,
warning:
"Permission denied - grant Full Disk Access to Terminal in System Preferences > Privacy & Security",
'Permission denied - grant Full Disk Access to Terminal in System Preferences > Privacy & Security',
});
} else {
res.status(400).json({
@@ -117,7 +104,7 @@ export function createBrowseHandler() {
return;
}
logError(error, "Browse directories failed");
logError(error, 'Browse directories failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,11 +2,11 @@
* POST /delete-board-background endpoint - Delete board background image
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import { getBoardDir } from "@automaker/platform";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
import { getBoardDir } from '@automaker/platform';
export function createDeleteBoardBackgroundHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -16,7 +16,7 @@ export function createDeleteBoardBackgroundHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
error: "projectPath is required",
error: 'projectPath is required',
});
return;
}
@@ -26,10 +26,10 @@ export function createDeleteBoardBackgroundHandler() {
try {
// Try to remove all background files in the board directory
const files = await fs.readdir(boardDir);
const files = await secureFs.readdir(boardDir);
for (const file of files) {
if (file.startsWith("background")) {
await fs.unlink(path.join(boardDir, file));
if (file.startsWith('background')) {
await secureFs.unlink(path.join(boardDir, file));
}
}
} catch {
@@ -38,7 +38,7 @@ export function createDeleteBoardBackgroundHandler() {
res.json({ success: true });
} catch (error) {
logError(error, "Delete board background failed");
logError(error, 'Delete board background failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,10 +2,10 @@
* POST /delete endpoint - Delete file
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createDeleteHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,12 +13,11 @@ export function createDeleteHandler() {
const { filePath } = req.body as { filePath: string };
if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}
const resolvedPath = validatePath(filePath);
await fs.rm(resolvedPath, { recursive: true });
await secureFs.rm(filePath, { recursive: true });
res.json({ success: true });
} catch (error) {
@@ -28,7 +27,7 @@ export function createDeleteHandler() {
return;
}
logError(error, "Delete file failed");
logError(error, 'Delete file failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,11 +2,10 @@
* POST /exists endpoint - Check if file/directory exists
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { isPathAllowed, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createExistsHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -14,21 +13,18 @@ export function createExistsHandler() {
const { filePath } = req.body as { filePath: string };
if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}
const resolvedPath = path.resolve(filePath);
// Validate that the path is allowed
if (!isPathAllowed(resolvedPath)) {
throw new PathNotAllowedError(filePath);
}
try {
await fs.access(resolvedPath);
await secureFs.access(filePath);
res.json({ success: true, exists: true });
} catch {
} catch (accessError) {
// Check if it's a path not allowed error vs file not existing
if (accessError instanceof PathNotAllowedError) {
throw accessError;
}
res.json({ success: true, exists: false });
}
} catch (error) {
@@ -38,7 +34,7 @@ export function createExistsHandler() {
return;
}
logError(error, "Check exists failed");
logError(error, 'Check exists failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,10 +2,11 @@
* GET /image endpoint - Serve image files
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createImageHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -16,7 +17,7 @@ export function createImageHandler() {
};
if (!imagePath) {
res.status(400).json({ success: false, error: "path is required" });
res.status(400).json({ success: false, error: 'path is required' });
return;
}
@@ -24,40 +25,41 @@ export function createImageHandler() {
const fullPath = path.isAbsolute(imagePath)
? imagePath
: projectPath
? path.join(projectPath, imagePath)
: imagePath;
? path.join(projectPath, imagePath)
: imagePath;
// Check if file exists
try {
await fs.access(fullPath);
} catch {
res.status(404).json({ success: false, error: "Image not found" });
await secureFs.access(fullPath);
} catch (accessError) {
if (accessError instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: 'Path not allowed' });
return;
}
res.status(404).json({ success: false, error: 'Image not found' });
return;
}
// Read the file
const buffer = await fs.readFile(fullPath);
const buffer = await secureFs.readFile(fullPath);
// Determine MIME type from extension
const ext = path.extname(fullPath).toLowerCase();
const mimeTypes: Record<string, string> = {
".png": "image/png",
".jpg": "image/jpeg",
".jpeg": "image/jpeg",
".gif": "image/gif",
".webp": "image/webp",
".svg": "image/svg+xml",
".bmp": "image/bmp",
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.webp': 'image/webp',
'.svg': 'image/svg+xml',
'.bmp': 'image/bmp',
};
res.setHeader(
"Content-Type",
mimeTypes[ext] || "application/octet-stream"
);
res.setHeader("Cache-Control", "public, max-age=3600");
res.setHeader('Content-Type', mimeTypes[ext] || 'application/octet-stream');
res.setHeader('Cache-Control', 'public, max-age=3600');
res.send(buffer);
} catch (error) {
logError(error, "Serve image failed");
logError(error, 'Serve image failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -3,11 +3,11 @@
* Handles symlinks safely to avoid ELOOP errors
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { isPathAllowed, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createMkdirHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -15,20 +15,15 @@ export function createMkdirHandler() {
const { dirPath } = req.body as { dirPath: string };
if (!dirPath) {
res.status(400).json({ success: false, error: "dirPath is required" });
res.status(400).json({ success: false, error: 'dirPath is required' });
return;
}
const resolvedPath = path.resolve(dirPath);
// Validate that the path is allowed
if (!isPathAllowed(resolvedPath)) {
throw new PathNotAllowedError(dirPath);
}
// Check if path already exists using lstat (doesn't follow symlinks)
try {
const stats = await fs.lstat(resolvedPath);
const stats = await secureFs.lstat(resolvedPath);
// Path exists - if it's a directory or symlink, consider it success
if (stats.isDirectory() || stats.isSymbolicLink()) {
res.json({ success: true });
@@ -37,19 +32,19 @@ export function createMkdirHandler() {
// It's a file - can't create directory
res.status(400).json({
success: false,
error: "Path exists and is not a directory",
error: 'Path exists and is not a directory',
});
return;
} catch (statError: any) {
// ENOENT means path doesn't exist - we should create it
if (statError.code !== "ENOENT") {
if (statError.code !== 'ENOENT') {
// Some other error (could be ELOOP in parent path)
throw statError;
}
}
// Path doesn't exist, create it
await fs.mkdir(resolvedPath, { recursive: true });
await secureFs.mkdir(resolvedPath, { recursive: true });
res.json({ success: true });
} catch (error: any) {
@@ -60,15 +55,15 @@ export function createMkdirHandler() {
}
// Handle ELOOP specifically
if (error.code === "ELOOP") {
logError(error, "Create directory failed - symlink loop detected");
if (error.code === 'ELOOP') {
logError(error, 'Create directory failed - symlink loop detected');
res.status(400).json({
success: false,
error: "Cannot create directory: symlink loop detected in path",
error: 'Cannot create directory: symlink loop detected in path',
});
return;
}
logError(error, "Create directory failed");
logError(error, 'Create directory failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,26 +2,21 @@
* POST /read endpoint - Read file
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
// Optional files that are expected to not exist in new projects
// Don't log ENOENT errors for these to reduce noise
const OPTIONAL_FILES = ["categories.json", "app_spec.txt"];
const OPTIONAL_FILES = ['categories.json', 'app_spec.txt'];
function isOptionalFile(filePath: string): boolean {
return OPTIONAL_FILES.some((optionalFile) => filePath.endsWith(optionalFile));
}
function isENOENT(error: unknown): boolean {
return (
error !== null &&
typeof error === "object" &&
"code" in error &&
error.code === "ENOENT"
);
return error !== null && typeof error === 'object' && 'code' in error && error.code === 'ENOENT';
}
export function createReadHandler() {
@@ -30,12 +25,11 @@ export function createReadHandler() {
const { filePath } = req.body as { filePath: string };
if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}
const resolvedPath = validatePath(filePath);
const content = await fs.readFile(resolvedPath, "utf-8");
const content = await secureFs.readFile(filePath, 'utf-8');
res.json({ success: true, content });
} catch (error) {
@@ -46,9 +40,9 @@ export function createReadHandler() {
}
// Don't log ENOENT errors for optional files (expected to be missing in new projects)
const shouldLog = !(isENOENT(error) && isOptionalFile(req.body?.filePath || ""));
const shouldLog = !(isENOENT(error) && isOptionalFile(req.body?.filePath || ''));
if (shouldLog) {
logError(error, "Read file failed");
logError(error, 'Read file failed');
}
res.status(500).json({ success: false, error: getErrorMessage(error) });
}

View File

@@ -2,10 +2,10 @@
* POST /readdir endpoint - Read directory
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createReaddirHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,12 +13,11 @@ export function createReaddirHandler() {
const { dirPath } = req.body as { dirPath: string };
if (!dirPath) {
res.status(400).json({ success: false, error: "dirPath is required" });
res.status(400).json({ success: false, error: 'dirPath is required' });
return;
}
const resolvedPath = validatePath(dirPath);
const entries = await fs.readdir(resolvedPath, { withFileTypes: true });
const entries = await secureFs.readdir(dirPath, { withFileTypes: true });
const result = entries.map((entry) => ({
name: entry.name,
@@ -34,7 +33,7 @@ export function createReaddirHandler() {
return;
}
logError(error, "Read directory failed");
logError(error, 'Read directory failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,10 +2,10 @@
* POST /resolve-directory endpoint - Resolve directory path from directory name
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
export function createResolveDirectoryHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -17,9 +17,7 @@ export function createResolveDirectoryHandler() {
};
if (!directoryName) {
res
.status(400)
.json({ success: false, error: "directoryName is required" });
res.status(400).json({ success: false, error: 'directoryName is required' });
return;
}
@@ -27,7 +25,7 @@ export function createResolveDirectoryHandler() {
if (path.isAbsolute(directoryName) || directoryName.includes(path.sep)) {
try {
const resolvedPath = path.resolve(directoryName);
const stats = await fs.stat(resolvedPath);
const stats = await secureFs.stat(resolvedPath);
if (stats.isDirectory()) {
res.json({
success: true,
@@ -43,17 +41,11 @@ export function createResolveDirectoryHandler() {
// Search for directory in common locations
const searchPaths: string[] = [
process.cwd(), // Current working directory
process.env.HOME || process.env.USERPROFILE || "", // User home
path.join(
process.env.HOME || process.env.USERPROFILE || "",
"Documents"
),
path.join(process.env.HOME || process.env.USERPROFILE || "", "Desktop"),
process.env.HOME || process.env.USERPROFILE || '', // User home
path.join(process.env.HOME || process.env.USERPROFILE || '', 'Documents'),
path.join(process.env.HOME || process.env.USERPROFILE || '', 'Desktop'),
// Common project locations
path.join(
process.env.HOME || process.env.USERPROFILE || "",
"Projects"
),
path.join(process.env.HOME || process.env.USERPROFILE || '', 'Projects'),
].filter(Boolean);
// Also check parent of current working directory
@@ -70,7 +62,7 @@ export function createResolveDirectoryHandler() {
for (const searchPath of searchPaths) {
try {
const candidatePath = path.join(searchPath, directoryName);
const stats = await fs.stat(candidatePath);
const stats = await secureFs.stat(candidatePath);
if (stats.isDirectory()) {
// Verify it matches by checking for sample files
@@ -78,15 +70,15 @@ export function createResolveDirectoryHandler() {
let matches = 0;
for (const sampleFile of sampleFiles.slice(0, 5)) {
// Remove directory name prefix from sample file path
const relativeFile = sampleFile.startsWith(directoryName + "/")
const relativeFile = sampleFile.startsWith(directoryName + '/')
? sampleFile.substring(directoryName.length + 1)
: sampleFile.split("/").slice(1).join("/") ||
sampleFile.split("/").pop() ||
: sampleFile.split('/').slice(1).join('/') ||
sampleFile.split('/').pop() ||
sampleFile;
try {
const filePath = path.join(candidatePath, relativeFile);
await fs.access(filePath);
await secureFs.access(filePath);
matches++;
} catch {
// File doesn't exist, continue checking
@@ -118,7 +110,7 @@ export function createResolveDirectoryHandler() {
error: `Directory "${directoryName}" not found in common locations. Please ensure the directory exists.`,
});
} catch (error) {
logError(error, "Resolve directory failed");
logError(error, 'Resolve directory failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,11 +2,11 @@
* POST /save-board-background endpoint - Save board background image
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import { getBoardDir } from "@automaker/platform";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
import { getBoardDir } from '@automaker/platform';
export function createSaveBoardBackgroundHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -21,31 +21,31 @@ export function createSaveBoardBackgroundHandler() {
if (!data || !filename || !projectPath) {
res.status(400).json({
success: false,
error: "data, filename, and projectPath are required",
error: 'data, filename, and projectPath are required',
});
return;
}
// Get board directory
const boardDir = getBoardDir(projectPath);
await fs.mkdir(boardDir, { recursive: true });
await secureFs.mkdir(boardDir, { recursive: true });
// Decode base64 data (remove data URL prefix if present)
const base64Data = data.replace(/^data:image\/\w+;base64,/, "");
const buffer = Buffer.from(base64Data, "base64");
const base64Data = data.replace(/^data:image\/\w+;base64,/, '');
const buffer = Buffer.from(base64Data, 'base64');
// Use a fixed filename for the board background (overwrite previous)
const ext = path.extname(filename) || ".png";
const ext = path.extname(filename) || '.png';
const uniqueFilename = `background${ext}`;
const filePath = path.join(boardDir, uniqueFilename);
// Write file
await fs.writeFile(filePath, buffer);
await secureFs.writeFile(filePath, buffer);
// Return the absolute path
res.json({ success: true, path: filePath });
} catch (error) {
logError(error, "Save board background failed");
logError(error, 'Save board background failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,11 +2,11 @@
* POST /save-image endpoint - Save image to .automaker images directory
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import { getImagesDir } from "@automaker/platform";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
import { getImagesDir } from '@automaker/platform';
export function createSaveImageHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -21,33 +21,33 @@ export function createSaveImageHandler() {
if (!data || !filename || !projectPath) {
res.status(400).json({
success: false,
error: "data, filename, and projectPath are required",
error: 'data, filename, and projectPath are required',
});
return;
}
// Get images directory
const imagesDir = getImagesDir(projectPath);
await fs.mkdir(imagesDir, { recursive: true });
await secureFs.mkdir(imagesDir, { recursive: true });
// Decode base64 data (remove data URL prefix if present)
const base64Data = data.replace(/^data:image\/\w+;base64,/, "");
const buffer = Buffer.from(base64Data, "base64");
const base64Data = data.replace(/^data:image\/\w+;base64,/, '');
const buffer = Buffer.from(base64Data, 'base64');
// Generate unique filename with timestamp
const timestamp = Date.now();
const ext = path.extname(filename) || ".png";
const ext = path.extname(filename) || '.png';
const baseName = path.basename(filename, ext);
const uniqueFilename = `${baseName}-${timestamp}${ext}`;
const filePath = path.join(imagesDir, uniqueFilename);
// Write file
await fs.writeFile(filePath, buffer);
await secureFs.writeFile(filePath, buffer);
// Return the absolute path
res.json({ success: true, path: filePath });
} catch (error) {
logError(error, "Save image failed");
logError(error, 'Save image failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,10 +2,10 @@
* POST /stat endpoint - Get file stats
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createStatHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,12 +13,11 @@ export function createStatHandler() {
const { filePath } = req.body as { filePath: string };
if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}
const resolvedPath = validatePath(filePath);
const stats = await fs.stat(resolvedPath);
const stats = await secureFs.stat(filePath);
res.json({
success: true,
@@ -36,7 +35,7 @@ export function createStatHandler() {
return;
}
logError(error, "Get file stats failed");
logError(error, 'Get file stats failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,11 +2,11 @@
* POST /validate-path endpoint - Validate and add path to allowed list
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { isPathAllowed } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { isPathAllowed } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createValidatePathHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -14,7 +14,7 @@ export function createValidatePathHandler() {
const { filePath } = req.body as { filePath: string };
if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}
@@ -22,12 +22,10 @@ export function createValidatePathHandler() {
// Check if path exists
try {
const stats = await fs.stat(resolvedPath);
const stats = await secureFs.stat(resolvedPath);
if (!stats.isDirectory()) {
res
.status(400)
.json({ success: false, error: "Path is not a directory" });
res.status(400).json({ success: false, error: 'Path is not a directory' });
return;
}
@@ -37,10 +35,10 @@ export function createValidatePathHandler() {
isAllowed: isPathAllowed(resolvedPath),
});
} catch {
res.status(400).json({ success: false, error: "Path does not exist" });
res.status(400).json({ success: false, error: 'Path does not exist' });
}
} catch (error) {
logError(error, "Validate path failed");
logError(error, 'Validate path failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,12 +2,12 @@
* POST /write endpoint - Write file
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { mkdirSafe } from "@automaker/utils";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { PathNotAllowedError } from '@automaker/platform';
import { mkdirSafe } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';
export function createWriteHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -18,15 +18,13 @@ export function createWriteHandler() {
};
if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}
const resolvedPath = validatePath(filePath);
// Ensure parent directory exists (symlink-safe)
await mkdirSafe(path.dirname(resolvedPath));
await fs.writeFile(resolvedPath, content, "utf-8");
await mkdirSafe(path.dirname(path.resolve(filePath)));
await secureFs.writeFile(filePath, content, 'utf-8');
res.json({ success: true });
} catch (error) {
@@ -36,7 +34,7 @@ export function createWriteHandler() {
return;
}
logError(error, "Write file failed");
logError(error, 'Write file failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,12 +2,12 @@
* POST /clone endpoint - Clone a GitHub template to a new project directory
*/
import type { Request, Response } from "express";
import { spawn } from "child_process";
import path from "path";
import fs from "fs/promises";
import { isPathAllowed } from "@automaker/platform";
import { logger, getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { spawn } from 'child_process';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { logger, getErrorMessage, logError } from '../common.js';
export function createCloneHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -22,7 +22,7 @@ export function createCloneHandler() {
if (!repoUrl || !projectName || !parentDir) {
res.status(400).json({
success: false,
error: "repoUrl, projectName, and parentDir are required",
error: 'repoUrl, projectName, and parentDir are required',
});
return;
}
@@ -36,17 +36,15 @@ export function createCloneHandler() {
if (!githubUrlPattern.test(repoUrl)) {
res.status(400).json({
success: false,
error: "Invalid GitHub repository URL",
error: 'Invalid GitHub repository URL',
});
return;
}
// Sanitize project name (allow alphanumeric, dash, underscore)
const sanitizedName = projectName.replace(/[^a-zA-Z0-9-_]/g, "-");
const sanitizedName = projectName.replace(/[^a-zA-Z0-9-_]/g, '-');
if (sanitizedName !== projectName) {
logger.info(
`[Templates] Sanitized project name: ${projectName} -> ${sanitizedName}`
);
logger.info(`[Templates] Sanitized project name: ${projectName} -> ${sanitizedName}`);
}
// Build full project path
@@ -55,41 +53,30 @@ export function createCloneHandler() {
const resolvedParent = path.resolve(parentDir);
const resolvedProject = path.resolve(projectPath);
const relativePath = path.relative(resolvedParent, resolvedProject);
if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
if (relativePath.startsWith('..') || path.isAbsolute(relativePath)) {
res.status(400).json({
success: false,
error: "Invalid project name; potential path traversal attempt.",
error: 'Invalid project name; potential path traversal attempt.',
});
return;
}
// Validate that parent directory is within allowed root directory
if (!isPathAllowed(resolvedParent)) {
res.status(403).json({
success: false,
error: `Parent directory not allowed: ${parentDir}. Must be within ALLOWED_ROOT_DIRECTORY.`,
});
return;
}
// Validate that project path will be within allowed root directory
if (!isPathAllowed(resolvedProject)) {
res.status(403).json({
success: false,
error: `Project path not allowed: ${projectPath}. Must be within ALLOWED_ROOT_DIRECTORY.`,
});
return;
}
// Check if directory already exists
// Check if directory already exists (secureFs.access also validates path is allowed)
try {
await fs.access(projectPath);
await secureFs.access(projectPath);
res.status(400).json({
success: false,
error: `Directory "${sanitizedName}" already exists in ${parentDir}`,
});
return;
} catch {
} catch (accessError) {
if (accessError instanceof PathNotAllowedError) {
res.status(403).json({
success: false,
error: `Project path not allowed: ${projectPath}. Must be within ALLOWED_ROOT_DIRECTORY.`,
});
return;
}
// Directory doesn't exist, which is what we want
}
@@ -97,35 +84,33 @@ export function createCloneHandler() {
try {
// Check if parentDir is a root path (Windows: C:\, D:\, etc. or Unix: /)
const isWindowsRoot = /^[A-Za-z]:\\?$/.test(parentDir);
const isUnixRoot = parentDir === "/" || parentDir === "";
const isUnixRoot = parentDir === '/' || parentDir === '';
const isRoot = isWindowsRoot || isUnixRoot;
if (isRoot) {
// Root paths always exist, just verify access
logger.info(`[Templates] Using root path: ${parentDir}`);
await fs.access(parentDir);
await secureFs.access(parentDir);
} else {
// Check if parent directory exists
const parentExists = await fs
.access(parentDir)
.then(() => true)
.catch(() => false);
let parentExists = false;
try {
await secureFs.access(parentDir);
parentExists = true;
} catch {
parentExists = false;
}
if (!parentExists) {
logger.info(`[Templates] Creating parent directory: ${parentDir}`);
await fs.mkdir(parentDir, { recursive: true });
await secureFs.mkdir(parentDir, { recursive: true });
} else {
logger.info(`[Templates] Parent directory exists: ${parentDir}`);
}
}
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
logger.error(
"[Templates] Failed to access parent directory:",
parentDir,
error
);
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('[Templates] Failed to access parent directory:', parentDir, error);
res.status(500).json({
success: false,
error: `Failed to access parent directory: ${errorMessage}`,
@@ -140,17 +125,17 @@ export function createCloneHandler() {
success: boolean;
error?: string;
}>((resolve) => {
const gitProcess = spawn("git", ["clone", repoUrl, projectPath], {
const gitProcess = spawn('git', ['clone', repoUrl, projectPath], {
cwd: parentDir,
});
let stderr = "";
let stderr = '';
gitProcess.stderr.on("data", (data) => {
gitProcess.stderr.on('data', (data) => {
stderr += data.toString();
});
gitProcess.on("close", (code) => {
gitProcess.on('close', (code) => {
if (code === 0) {
resolve({ success: true });
} else {
@@ -161,7 +146,7 @@ export function createCloneHandler() {
}
});
gitProcess.on("error", (error) => {
gitProcess.on('error', (error) => {
resolve({
success: false,
error: `Failed to spawn git: ${error.message}`,
@@ -172,34 +157,34 @@ export function createCloneHandler() {
if (!cloneResult.success) {
res.status(500).json({
success: false,
error: cloneResult.error || "Failed to clone repository",
error: cloneResult.error || 'Failed to clone repository',
});
return;
}
// Remove .git directory to start fresh
try {
const gitDir = path.join(projectPath, ".git");
await fs.rm(gitDir, { recursive: true, force: true });
logger.info("[Templates] Removed .git directory");
const gitDir = path.join(projectPath, '.git');
await secureFs.rm(gitDir, { recursive: true, force: true });
logger.info('[Templates] Removed .git directory');
} catch (error) {
logger.warn("[Templates] Could not remove .git directory:", error);
logger.warn('[Templates] Could not remove .git directory:', error);
// Continue anyway - not critical
}
// Initialize a fresh git repository
await new Promise<void>((resolve) => {
const gitInit = spawn("git", ["init"], {
const gitInit = spawn('git', ['init'], {
cwd: projectPath,
});
gitInit.on("close", () => {
logger.info("[Templates] Initialized fresh git repository");
gitInit.on('close', () => {
logger.info('[Templates] Initialized fresh git repository');
resolve();
});
gitInit.on("error", () => {
logger.warn("[Templates] Could not initialize git");
gitInit.on('error', () => {
logger.warn('[Templates] Could not initialize git');
resolve();
});
});
@@ -212,7 +197,7 @@ export function createCloneHandler() {
projectName: sanitizedName,
});
} catch (error) {
logError(error, "Clone template failed");
logError(error, 'Clone template failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
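The traversal guard in the clone handler hinges on path.relative: once parentDir and the joined project path are resolved, any escape shows up as a relative path that starts with '..' or comes back absolute. A standalone restatement of that check, with made-up example paths:

import path from 'path';

// Mirrors the clone handler's guard: true when candidate stays inside parent.
function staysInside(parent: string, candidate: string): boolean {
  const rel = path.relative(path.resolve(parent), path.resolve(candidate));
  return !rel.startsWith('..') && !path.isAbsolute(rel);
}

// Example paths (illustrative only):
// staysInside('/workspace', '/workspace/my-app')  -> true  (rel === 'my-app')
// staysInside('/workspace', '/workspace/../etc')  -> false (rel === '../etc')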

View File

@@ -2,14 +2,11 @@
* GET /config endpoint - Get workspace configuration status
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import {
getAllowedRootDirectory,
getDataDirectory,
} from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getAllowedRootDirectory, getDataDirectory } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createConfigHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -30,12 +27,12 @@ export function createConfigHandler() {
// Check if the directory exists
try {
const resolvedWorkspaceDir = path.resolve(allowedRootDirectory);
const stats = await fs.stat(resolvedWorkspaceDir);
const stats = await secureFs.stat(resolvedWorkspaceDir);
if (!stats.isDirectory()) {
res.json({
success: true,
configured: false,
error: "ALLOWED_ROOT_DIRECTORY is not a valid directory",
error: 'ALLOWED_ROOT_DIRECTORY is not a valid directory',
});
return;
}
@@ -50,11 +47,11 @@ export function createConfigHandler() {
res.json({
success: true,
configured: false,
error: "ALLOWED_ROOT_DIRECTORY path does not exist",
error: 'ALLOWED_ROOT_DIRECTORY path does not exist',
});
}
} catch (error) {
logError(error, "Get workspace config failed");
logError(error, 'Get workspace config failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,11 +2,11 @@
* GET /directories endpoint - List directories in workspace
*/
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getAllowedRootDirectory } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getAllowedRootDirectory } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createDirectoriesHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -16,7 +16,7 @@ export function createDirectoriesHandler() {
if (!allowedRootDirectory) {
res.status(400).json({
success: false,
error: "ALLOWED_ROOT_DIRECTORY is not configured",
error: 'ALLOWED_ROOT_DIRECTORY is not configured',
});
return;
}
@@ -25,23 +25,23 @@ export function createDirectoriesHandler() {
// Check if directory exists
try {
await fs.stat(resolvedWorkspaceDir);
await secureFs.stat(resolvedWorkspaceDir);
} catch {
res.status(400).json({
success: false,
error: "Workspace directory path does not exist",
error: 'Workspace directory path does not exist',
});
return;
}
// Read directory contents
const entries = await fs.readdir(resolvedWorkspaceDir, {
const entries = await secureFs.readdir(resolvedWorkspaceDir, {
withFileTypes: true,
});
// Filter to directories only and map to result format
const directories = entries
.filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
.filter((entry) => entry.isDirectory() && !entry.name.startsWith('.'))
.map((entry) => ({
name: entry.name,
path: path.join(resolvedWorkspaceDir, entry.name),
@@ -53,7 +53,7 @@ export function createDirectoriesHandler() {
directories,
});
} catch (error) {
logError(error, "List workspace directories failed");
logError(error, 'List workspace directories failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,18 +2,14 @@
* Common utilities for worktree routes
*/
import { createLogger } from "@automaker/utils";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { FeatureLoader } from "../../services/feature-loader.js";
import { createLogger } from '@automaker/utils';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
import { FeatureLoader } from '../../services/feature-loader.js';
const logger = createLogger("Worktree");
const logger = createLogger('Worktree');
export const execAsync = promisify(exec);
const featureLoader = new FeatureLoader();
@@ -28,10 +24,10 @@ export const MAX_BRANCH_NAME_LENGTH = 250;
// Extended PATH configuration for Electron apps
// ============================================================================
const pathSeparator = process.platform === "win32" ? ";" : ":";
const pathSeparator = process.platform === 'win32' ? ';' : ':';
const additionalPaths: string[] = [];
if (process.platform === "win32") {
if (process.platform === 'win32') {
// Windows paths
if (process.env.LOCALAPPDATA) {
additionalPaths.push(`${process.env.LOCALAPPDATA}\\Programs\\Git\\cmd`);
@@ -39,23 +35,22 @@ if (process.platform === "win32") {
if (process.env.PROGRAMFILES) {
additionalPaths.push(`${process.env.PROGRAMFILES}\\Git\\cmd`);
}
if (process.env["ProgramFiles(x86)"]) {
additionalPaths.push(`${process.env["ProgramFiles(x86)"]}\\Git\\cmd`);
if (process.env['ProgramFiles(x86)']) {
additionalPaths.push(`${process.env['ProgramFiles(x86)']}\\Git\\cmd`);
}
} else {
// Unix/Mac paths
additionalPaths.push(
"/opt/homebrew/bin", // Homebrew on Apple Silicon
"/usr/local/bin", // Homebrew on Intel Mac, common Linux location
"/home/linuxbrew/.linuxbrew/bin", // Linuxbrew
`${process.env.HOME}/.local/bin`, // pipx, other user installs
'/opt/homebrew/bin', // Homebrew on Apple Silicon
'/usr/local/bin', // Homebrew on Intel Mac, common Linux location
'/home/linuxbrew/.linuxbrew/bin', // Linuxbrew
`${process.env.HOME}/.local/bin` // pipx, other user installs
);
}
const extendedPath = [
process.env.PATH,
...additionalPaths.filter(Boolean),
].filter(Boolean).join(pathSeparator);
const extendedPath = [process.env.PATH, ...additionalPaths.filter(Boolean)]
.filter(Boolean)
.join(pathSeparator);
/**
* Environment variables with extended PATH for executing shell commands.
@@ -85,9 +80,7 @@ export function isValidBranchName(name: string): boolean {
*/
export async function isGhCliAvailable(): Promise<boolean> {
try {
const checkCommand = process.platform === "win32"
? "where gh"
: "command -v gh";
const checkCommand = process.platform === 'win32' ? 'where gh' : 'command -v gh';
await execAsync(checkCommand, { env: execEnv });
return true;
} catch {
@@ -95,8 +88,7 @@ export async function isGhCliAvailable(): Promise<boolean> {
}
}
export const AUTOMAKER_INITIAL_COMMIT_MESSAGE =
"chore: automaker initial commit";
export const AUTOMAKER_INITIAL_COMMIT_MESSAGE = 'chore: automaker initial commit';
/**
* Normalize path separators to forward slashes for cross-platform consistency.
@@ -104,7 +96,7 @@ export const AUTOMAKER_INITIAL_COMMIT_MESSAGE =
* from git commands (which may use forward slashes).
*/
export function normalizePath(p: string): string {
return p.replace(/\\/g, "/");
return p.replace(/\\/g, '/');
}
/**
@@ -112,7 +104,7 @@ export function normalizePath(p: string): string {
*/
export async function isGitRepo(repoPath: string): Promise<boolean> {
try {
await execAsync("git rev-parse --is-inside-work-tree", { cwd: repoPath });
await execAsync('git rev-parse --is-inside-work-tree', { cwd: repoPath });
return true;
} catch {
return false;
@@ -124,30 +116,21 @@ export async function isGitRepo(repoPath: string): Promise<boolean> {
* These are expected in test environments with mock paths
*/
export function isENOENT(error: unknown): boolean {
return (
error !== null &&
typeof error === "object" &&
"code" in error &&
error.code === "ENOENT"
);
return error !== null && typeof error === 'object' && 'code' in error && error.code === 'ENOENT';
}
/**
* Check if a path is a mock/test path that doesn't exist
*/
export function isMockPath(worktreePath: string): boolean {
return worktreePath.startsWith("/mock/") || worktreePath.includes("/mock/");
return worktreePath.startsWith('/mock/') || worktreePath.includes('/mock/');
}
/**
* Conditionally log worktree errors - suppress ENOENT for mock paths
* to reduce noise in test output
*/
export function logWorktreeError(
error: unknown,
message: string,
worktreePath?: string
): void {
export function logWorktreeError(error: unknown, message: string, worktreePath?: string): void {
// Don't log ENOENT errors for mock paths (expected in tests)
if (isENOENT(error) && worktreePath && isMockPath(worktreePath)) {
return;
@@ -165,17 +148,14 @@ export const logError = createLogError(logger);
*/
export async function ensureInitialCommit(repoPath: string): Promise<boolean> {
try {
await execAsync("git rev-parse --verify HEAD", { cwd: repoPath });
await execAsync('git rev-parse --verify HEAD', { cwd: repoPath });
return false;
} catch {
try {
await execAsync(
`git commit --allow-empty -m "${AUTOMAKER_INITIAL_COMMIT_MESSAGE}"`,
{ cwd: repoPath }
);
logger.info(
`[Worktree] Created initial empty commit to enable worktrees in ${repoPath}`
);
await execAsync(`git commit --allow-empty -m "${AUTOMAKER_INITIAL_COMMIT_MESSAGE}"`, {
cwd: repoPath,
});
logger.info(`[Worktree] Created initial empty commit to enable worktrees in ${repoPath}`);
return true;
} catch (error) {
const reason = getErrorMessageShared(error);

View File

@@ -5,12 +5,9 @@
* can switch between branches even after worktrees are removed.
*/
import { readFile, writeFile } from "fs/promises";
import path from "path";
import {
getBranchTrackingPath,
ensureAutomakerDir,
} from "@automaker/platform";
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getBranchTrackingPath, ensureAutomakerDir } from '@automaker/platform';
export interface TrackedBranch {
name: string;
@@ -25,19 +22,17 @@ interface BranchTrackingData {
/**
* Read tracked branches from file
*/
export async function getTrackedBranches(
projectPath: string
): Promise<TrackedBranch[]> {
export async function getTrackedBranches(projectPath: string): Promise<TrackedBranch[]> {
try {
const filePath = getBranchTrackingPath(projectPath);
const content = await readFile(filePath, "utf-8");
const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
const data: BranchTrackingData = JSON.parse(content);
return data.branches || [];
} catch (error: any) {
if (error.code === "ENOENT") {
if (error.code === 'ENOENT') {
return [];
}
console.warn("[branch-tracking] Failed to read tracked branches:", error);
console.warn('[branch-tracking] Failed to read tracked branches:', error);
return [];
}
}
@@ -45,23 +40,17 @@ export async function getTrackedBranches(
/**
* Save tracked branches to file
*/
async function saveTrackedBranches(
projectPath: string,
branches: TrackedBranch[]
): Promise<void> {
async function saveTrackedBranches(projectPath: string, branches: TrackedBranch[]): Promise<void> {
const automakerDir = await ensureAutomakerDir(projectPath);
const filePath = path.join(automakerDir, "active-branches.json");
const filePath = path.join(automakerDir, 'active-branches.json');
const data: BranchTrackingData = { branches };
await writeFile(filePath, JSON.stringify(data, null, 2), "utf-8");
await secureFs.writeFile(filePath, JSON.stringify(data, null, 2), 'utf-8');
}
/**
* Add a branch to tracking
*/
export async function trackBranch(
projectPath: string,
branchName: string
): Promise<void> {
export async function trackBranch(projectPath: string, branchName: string): Promise<void> {
const branches = await getTrackedBranches(projectPath);
// Check if already tracked
@@ -82,10 +71,7 @@ export async function trackBranch(
/**
* Remove a branch from tracking
*/
export async function untrackBranch(
projectPath: string,
branchName: string
): Promise<void> {
export async function untrackBranch(projectPath: string, branchName: string): Promise<void> {
const branches = await getTrackedBranches(projectPath);
const filtered = branches.filter((b) => b.name !== branchName);
@@ -114,10 +100,7 @@ export async function updateBranchActivation(
/**
* Check if a branch is tracked
*/
export async function isBranchTracked(
projectPath: string,
branchName: string
): Promise<boolean> {
export async function isBranchTracked(projectPath: string, branchName: string): Promise<boolean> {
const branches = await getTrackedBranches(projectPath);
return branches.some((b) => b.name === branchName);
}
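A brief usage sketch of the tracking helpers above; the project path and branch name are made-up examples:

// Hypothetical caller of the branch-tracking helpers (illustrative values only).
import { trackBranch, getTrackedBranches, untrackBranch } from './branch-tracking.js';

async function demo(): Promise<void> {
  const projectPath = '/workspace/my-app'; // example path
  await trackBranch(projectPath, 'feature/login'); // persisted to the tracking file
  const branches = await getTrackedBranches(projectPath);
  console.log(branches.map((b) => b.name)); // includes 'feature/login'
  await untrackBranch(projectPath, 'feature/login'); // removed again
}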

View File

@@ -7,19 +7,19 @@
* 3. Only creates a new worktree if none exists for the branch
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import { mkdir } from "fs/promises";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import {
isGitRepo,
getErrorMessage,
logError,
normalizePath,
ensureInitialCommit,
} from "../common.js";
import { trackBranch } from "./branch-tracking.js";
} from '../common.js';
import { trackBranch } from './branch-tracking.js';
const execAsync = promisify(exec);
@@ -31,20 +31,20 @@ async function findExistingWorktreeForBranch(
branchName: string
): Promise<{ path: string; branch: string } | null> {
try {
const { stdout } = await execAsync("git worktree list --porcelain", {
const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: projectPath,
});
const lines = stdout.split("\n");
const lines = stdout.split('\n');
let currentPath: string | null = null;
let currentBranch: string | null = null;
for (const line of lines) {
if (line.startsWith("worktree ")) {
if (line.startsWith('worktree ')) {
currentPath = line.slice(9);
} else if (line.startsWith("branch ")) {
currentBranch = line.slice(7).replace("refs/heads/", "");
} else if (line === "" && currentPath && currentBranch) {
} else if (line.startsWith('branch ')) {
currentBranch = line.slice(7).replace('refs/heads/', '');
} else if (line === '' && currentPath && currentBranch) {
// End of a worktree entry
if (currentBranch === branchName) {
// Resolve to absolute path - git may return relative paths
@@ -86,7 +86,7 @@ export function createCreateHandler() {
if (!projectPath || !branchName) {
res.status(400).json({
success: false,
error: "projectPath and branchName required",
error: 'projectPath and branchName required',
});
return;
}
@@ -94,7 +94,7 @@ export function createCreateHandler() {
if (!(await isGitRepo(projectPath))) {
res.status(400).json({
success: false,
error: "Not a git repository",
error: 'Not a git repository',
});
return;
}
@@ -107,7 +107,9 @@ export function createCreateHandler() {
if (existingWorktree) {
// Worktree already exists, return it as success (not an error)
// This handles manually created worktrees or worktrees from previous runs
console.log(`[Worktree] Found existing worktree for branch "${branchName}" at: ${existingWorktree.path}`);
console.log(
`[Worktree] Found existing worktree for branch "${branchName}" at: ${existingWorktree.path}`
);
// Track the branch so it persists in the UI
await trackBranch(projectPath, branchName);
@@ -124,12 +126,12 @@ export function createCreateHandler() {
}
// Sanitize branch name for directory usage
const sanitizedName = branchName.replace(/[^a-zA-Z0-9_-]/g, "-");
const worktreesDir = path.join(projectPath, ".worktrees");
const sanitizedName = branchName.replace(/[^a-zA-Z0-9_-]/g, '-');
const worktreesDir = path.join(projectPath, '.worktrees');
const worktreePath = path.join(worktreesDir, sanitizedName);
// Create worktrees directory if it doesn't exist
await mkdir(worktreesDir, { recursive: true });
await secureFs.mkdir(worktreesDir, { recursive: true });
// Check if branch exists
let branchExists = false;
@@ -149,7 +151,7 @@ export function createCreateHandler() {
createCmd = `git worktree add "${worktreePath}" ${branchName}`;
} else {
// Create new branch from base or HEAD
const base = baseBranch || "HEAD";
const base = baseBranch || 'HEAD';
createCmd = `git worktree add -b ${branchName} "${worktreePath}" ${base}`;
}
@@ -174,7 +176,7 @@ export function createCreateHandler() {
},
});
} catch (error) {
logError(error, "Create worktree failed");
logError(error, 'Create worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
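Both findExistingWorktreeForBranch above and the list handler later in this diff parse `git worktree list --porcelain`, whose output is a series of blank-line-separated blocks; that is why the parsers flush the current path/branch pair when they hit an empty line. For reference, the output looks roughly like this (paths and SHAs are made-up examples):

worktree /workspace/my-app
HEAD 1a2b3c4d5e6f...
branch refs/heads/main

worktree /workspace/my-app/.worktrees/feature-login
HEAD 0f9e8d7c6b5a...
branch refs/heads/feature/login

A detached worktree shows a `detached` line in place of the `branch` line.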

View File

@@ -2,11 +2,11 @@
* POST /diffs endpoint - Get diffs for a worktree
*/
import type { Request, Response } from "express";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError } from "../common.js";
import { getGitRepositoryDiffs } from "../../common.js";
import type { Request, Response } from 'express';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError } from '../common.js';
import { getGitRepositoryDiffs } from '../../common.js';
export function createDiffsHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -17,21 +17,19 @@ export function createDiffsHandler() {
};
if (!projectPath || !featureId) {
res
.status(400)
.json({
success: false,
error: "projectPath and featureId required",
});
res.status(400).json({
success: false,
error: 'projectPath and featureId required',
});
return;
}
// Git worktrees are stored in project directory
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);
try {
// Check if worktree exists
await fs.access(worktreePath);
await secureFs.access(worktreePath);
// Get diffs from worktree
const result = await getGitRepositoryDiffs(worktreePath);
@@ -43,7 +41,7 @@ export function createDiffsHandler() {
});
} catch (innerError) {
// Worktree doesn't exist - fallback to main project path
logError(innerError, "Worktree access failed, falling back to main project");
logError(innerError, 'Worktree access failed, falling back to main project');
try {
const result = await getGitRepositoryDiffs(projectPath);
@@ -54,12 +52,12 @@ export function createDiffsHandler() {
hasChanges: result.hasChanges,
});
} catch (fallbackError) {
logError(fallbackError, "Fallback to main project also failed");
res.json({ success: true, diff: "", files: [], hasChanges: false });
logError(fallbackError, 'Fallback to main project also failed');
res.json({ success: true, diff: '', files: [], hasChanges: false });
}
}
} catch (error) {
logError(error, "Get worktree diffs failed");
logError(error, 'Get worktree diffs failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
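The fallback above follows a pattern used throughout these handlers: probe for existence with secureFs.access inside try/catch (instead of existsSync), so the allowed-root check runs on the probe itself. A small helper capturing that pattern, sketched here for illustration only:

// Illustrative helper, not part of the diff: existence check routed through secure-fs.
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';

async function pathExists(target: string): Promise<boolean> {
  try {
    await secureFs.access(target);
    return true;
  } catch (error) {
    // A disallowed path is a policy violation, not a missing file.
    if (error instanceof PathNotAllowedError) throw error;
    return false;
  }
}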

View File

@@ -2,13 +2,13 @@
* POST /file-diff endpoint - Get diff for a specific file
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError } from "../common.js";
import { generateSyntheticDiffForNewFile } from "../../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError } from '../common.js';
import { generateSyntheticDiffForNewFile } from '../../common.js';
const execAsync = promisify(exec);
@@ -24,24 +24,23 @@ export function createFileDiffHandler() {
if (!projectPath || !featureId || !filePath) {
res.status(400).json({
success: false,
error: "projectPath, featureId, and filePath required",
error: 'projectPath, featureId, and filePath required',
});
return;
}
// Git worktrees are stored in project directory
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);
try {
await fs.access(worktreePath);
await secureFs.access(worktreePath);
// First check if the file is untracked
const { stdout: status } = await execAsync(
`git status --porcelain -- "${filePath}"`,
{ cwd: worktreePath }
);
const { stdout: status } = await execAsync(`git status --porcelain -- "${filePath}"`, {
cwd: worktreePath,
});
const isUntracked = status.trim().startsWith("??");
const isUntracked = status.trim().startsWith('??');
let diff: string;
if (isUntracked) {
@@ -49,23 +48,20 @@ export function createFileDiffHandler() {
diff = await generateSyntheticDiffForNewFile(worktreePath, filePath);
} else {
// Use regular git diff for tracked files
const result = await execAsync(
`git diff HEAD -- "${filePath}"`,
{
cwd: worktreePath,
maxBuffer: 10 * 1024 * 1024,
}
);
const result = await execAsync(`git diff HEAD -- "${filePath}"`, {
cwd: worktreePath,
maxBuffer: 10 * 1024 * 1024,
});
diff = result.stdout;
}
res.json({ success: true, diff, filePath });
} catch (innerError) {
logError(innerError, "Worktree file diff failed");
res.json({ success: true, diff: "", filePath });
logError(innerError, 'Worktree file diff failed');
res.json({ success: true, diff: '', filePath });
}
} catch (error) {
logError(error, "Get worktree file diff failed");
logError(error, 'Get worktree file diff failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,12 +2,12 @@
* POST /info endpoint - Get worktree info
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError, normalizePath } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError, normalizePath } from '../common.js';
const execAsync = promisify(exec);
@@ -20,20 +20,18 @@ export function createInfoHandler() {
};
if (!projectPath || !featureId) {
res
.status(400)
.json({
success: false,
error: "projectPath and featureId required",
});
res.status(400).json({
success: false,
error: 'projectPath and featureId required',
});
return;
}
// Check if worktree exists (git worktrees are stored in project directory)
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);
try {
await fs.access(worktreePath);
const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
await secureFs.access(worktreePath);
const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
cwd: worktreePath,
});
res.json({
@@ -45,7 +43,7 @@ export function createInfoHandler() {
res.json({ success: true, worktreePath: null, branchName: null });
}
} catch (error) {
logError(error, "Get worktree info failed");
logError(error, 'Get worktree info failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,12 +2,12 @@
* POST /init-git endpoint - Initialize a git repository in a directory
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { existsSync } from "fs";
import { join } from "path";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as secureFs from '../../../lib/secure-fs.js';
import { join } from 'path';
import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -21,39 +21,42 @@ export function createInitGitHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
error: "projectPath required",
error: 'projectPath required',
});
return;
}
// Check if .git already exists
const gitDirPath = join(projectPath, ".git");
if (existsSync(gitDirPath)) {
const gitDirPath = join(projectPath, '.git');
try {
await secureFs.access(gitDirPath);
// .git exists
res.json({
success: true,
result: {
initialized: false,
message: "Git repository already exists",
message: 'Git repository already exists',
},
});
return;
} catch {
// .git doesn't exist, continue with initialization
}
// Initialize git and create an initial empty commit
await execAsync(
`git init && git commit --allow-empty -m "Initial commit"`,
{ cwd: projectPath }
);
await execAsync(`git init && git commit --allow-empty -m "Initial commit"`, {
cwd: projectPath,
});
res.json({
success: true,
result: {
initialized: true,
message: "Git repository initialized with initial commit",
message: 'Git repository initialized with initial commit',
},
});
} catch (error) {
logError(error, "Init git failed");
logError(error, 'Init git failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -5,13 +5,13 @@
* Does NOT include tracked branches - only real worktrees with separate directories.
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { existsSync } from "fs";
import { isGitRepo } from "@automaker/git-utils";
import { getErrorMessage, logError, normalizePath } from "../common.js";
import { readAllWorktreeMetadata, type WorktreePRInfo } from "../../../lib/worktree-metadata.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as secureFs from '../../../lib/secure-fs.js';
import { isGitRepo } from '@automaker/git-utils';
import { getErrorMessage, logError, normalizePath } from '../common.js';
import { readAllWorktreeMetadata, type WorktreePRInfo } from '../../../lib/worktree-metadata.js';
const execAsync = promisify(exec);
@@ -28,10 +28,10 @@ interface WorktreeInfo {
async function getCurrentBranch(cwd: string): Promise<string> {
try {
const { stdout } = await execAsync("git branch --show-current", { cwd });
const { stdout } = await execAsync('git branch --show-current', { cwd });
return stdout.trim();
} catch {
return "";
return '';
}
}
@@ -44,7 +44,7 @@ export function createListHandler() {
};
if (!projectPath) {
res.status(400).json({ success: false, error: "projectPath required" });
res.status(400).json({ success: false, error: 'projectPath required' });
return;
}
@@ -57,28 +57,35 @@ export function createListHandler() {
const currentBranch = await getCurrentBranch(projectPath);
// Get actual worktrees from git
const { stdout } = await execAsync("git worktree list --porcelain", {
const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: projectPath,
});
const worktrees: WorktreeInfo[] = [];
const removedWorktrees: Array<{ path: string; branch: string }> = [];
const lines = stdout.split("\n");
const lines = stdout.split('\n');
let current: { path?: string; branch?: string } = {};
let isFirst = true;
// First pass: detect removed worktrees
for (const line of lines) {
if (line.startsWith("worktree ")) {
if (line.startsWith('worktree ')) {
current.path = normalizePath(line.slice(9));
} else if (line.startsWith("branch ")) {
current.branch = line.slice(7).replace("refs/heads/", "");
} else if (line === "") {
} else if (line.startsWith('branch ')) {
current.branch = line.slice(7).replace('refs/heads/', '');
} else if (line === '') {
if (current.path && current.branch) {
const isMainWorktree = isFirst;
// Check if the worktree directory actually exists
// Skip checking/pruning the main worktree (projectPath itself)
if (!isMainWorktree && !existsSync(current.path)) {
let worktreeExists = false;
try {
await secureFs.access(current.path);
worktreeExists = true;
} catch {
worktreeExists = false;
}
if (!isMainWorktree && !worktreeExists) {
// Worktree directory doesn't exist - it was manually deleted
removedWorktrees.push({
path: current.path,
@@ -103,7 +110,7 @@ export function createListHandler() {
// Prune removed worktrees from git (only if any were detected)
if (removedWorktrees.length > 0) {
try {
await execAsync("git worktree prune", { cwd: projectPath });
await execAsync('git worktree prune', { cwd: projectPath });
} catch {
// Prune failed, but we'll still report the removed worktrees
}
@@ -116,13 +123,12 @@ export function createListHandler() {
if (includeDetails) {
for (const worktree of worktrees) {
try {
const { stdout: statusOutput } = await execAsync(
"git status --porcelain",
{ cwd: worktree.path }
);
const { stdout: statusOutput } = await execAsync('git status --porcelain', {
cwd: worktree.path,
});
const changedFiles = statusOutput
.trim()
.split("\n")
.split('\n')
.filter((line) => line.trim());
worktree.hasChanges = changedFiles.length > 0;
worktree.changedFilesCount = changedFiles.length;
@@ -141,13 +147,13 @@ export function createListHandler() {
}
}
res.json({
success: true,
res.json({
success: true,
worktrees,
removedWorktrees: removedWorktrees.length > 0 ? removedWorktrees : undefined,
});
} catch (error) {
logError(error, "List worktrees failed");
logError(error, 'List worktrees failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -2,12 +2,12 @@
* POST /status endpoint - Get worktree status
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -20,53 +20,50 @@ export function createStatusHandler() {
};
if (!projectPath || !featureId) {
res
.status(400)
.json({
success: false,
error: "projectPath and featureId required",
});
res.status(400).json({
success: false,
error: 'projectPath and featureId required',
});
return;
}
// Git worktrees are stored in project directory
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);
try {
await fs.access(worktreePath);
const { stdout: status } = await execAsync("git status --porcelain", {
await secureFs.access(worktreePath);
const { stdout: status } = await execAsync('git status --porcelain', {
cwd: worktreePath,
});
const files = status
.split("\n")
.split('\n')
.filter(Boolean)
.map((line) => line.slice(3));
const { stdout: diffStat } = await execAsync("git diff --stat", {
const { stdout: diffStat } = await execAsync('git diff --stat', {
cwd: worktreePath,
});
const { stdout: logOutput } = await execAsync('git log --oneline -5 --format="%h %s"', {
cwd: worktreePath,
});
const { stdout: logOutput } = await execAsync(
'git log --oneline -5 --format="%h %s"',
{ cwd: worktreePath }
);
res.json({
success: true,
modifiedFiles: files.length,
files,
diffStat: diffStat.trim(),
recentCommits: logOutput.trim().split("\n").filter(Boolean),
recentCommits: logOutput.trim().split('\n').filter(Boolean),
});
} catch {
res.json({
success: true,
modifiedFiles: 0,
files: [],
diffStat: "",
diffStat: '',
recentCommits: [],
});
}
} catch (error) {
logError(error, "Get worktree status failed");
logError(error, 'Get worktree status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

View File

@@ -3,22 +3,18 @@
* Manages conversation sessions and streams responses via WebSocket
*/
import path from "path";
import * as secureFs from "../lib/secure-fs.js";
import type { EventEmitter } from "../lib/events.js";
import type { ExecuteOptions } from "@automaker/types";
import {
readImageAsBase64,
buildPromptWithImages,
isAbortError,
} from "@automaker/utils";
import { ProviderFactory } from "../providers/provider-factory.js";
import { createChatOptions } from "../lib/sdk-options.js";
import { isPathAllowed, PathNotAllowedError } from "@automaker/platform";
import path from 'path';
import * as secureFs from '../lib/secure-fs.js';
import type { EventEmitter } from '../lib/events.js';
import type { ExecuteOptions } from '@automaker/types';
import { readImageAsBase64, buildPromptWithImages, isAbortError } from '@automaker/utils';
import { ProviderFactory } from '../providers/provider-factory.js';
import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js';
import { PathNotAllowedError } from '@automaker/platform';
interface Message {
id: string;
role: "user" | "assistant";
role: 'user' | 'assistant';
content: string;
images?: Array<{
data: string;
@@ -58,8 +54,8 @@ export class AgentService {
private events: EventEmitter;
constructor(dataDir: string, events: EventEmitter) {
this.stateDir = path.join(dataDir, "agent-sessions");
this.metadataFile = path.join(dataDir, "sessions-metadata.json");
this.stateDir = path.join(dataDir, 'agent-sessions');
this.metadataFile = path.join(dataDir, 'sessions-metadata.json');
this.events = events;
}
@@ -86,12 +82,8 @@ export class AgentService {
const effectiveWorkingDirectory = workingDirectory || process.cwd();
const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory);
// Validate that the working directory is allowed
if (!isPathAllowed(resolvedWorkingDirectory)) {
throw new Error(
`Working directory ${effectiveWorkingDirectory} is not allowed`
);
}
// Validate that the working directory is allowed using centralized validation
validateWorkingDirectory(resolvedWorkingDirectory);
this.sessions.set(sessionId, {
messages,
@@ -132,7 +124,7 @@ export class AgentService {
}
if (session.isRunning) {
throw new Error("Agent is already processing a message");
throw new Error('Agent is already processing a message');
}
// Update session model if provided
@@ -142,7 +134,7 @@ export class AgentService {
}
// Read images and convert to base64
const images: Message["images"] = [];
const images: Message['images'] = [];
if (imagePaths && imagePaths.length > 0) {
for (const imagePath of imagePaths) {
try {
@@ -153,10 +145,7 @@ export class AgentService {
filename: imageData.filename,
});
} catch (error) {
console.error(
`[AgentService] Failed to load image ${imagePath}:`,
error
);
console.error(`[AgentService] Failed to load image ${imagePath}:`, error);
}
}
}
@@ -164,7 +153,7 @@ export class AgentService {
// Add user message
const userMessage: Message = {
id: this.generateId(),
role: "user",
role: 'user',
content: message,
images: images.length > 0 ? images : undefined,
timestamp: new Date().toISOString(),
@@ -182,7 +171,7 @@ export class AgentService {
// Emit user message event
this.emitAgentEvent(sessionId, {
type: "message",
type: 'message',
message: userMessage,
});
@@ -212,15 +201,14 @@ export class AgentService {
// Build options for provider
const options: ExecuteOptions = {
prompt: "", // Will be set below based on images
prompt: '', // Will be set below based on images
model: effectiveModel,
cwd: workingDirectory || session.workingDirectory,
systemPrompt: this.getSystemPrompt(),
maxTurns: maxTurns,
allowedTools: allowedTools,
abortController: session.abortController!,
conversationHistory:
conversationHistory.length > 0 ? conversationHistory : undefined,
conversationHistory: conversationHistory.length > 0 ? conversationHistory : undefined,
sdkSessionId: session.sdkSessionId, // Pass SDK session ID for resuming
};
@@ -239,30 +227,28 @@ export class AgentService {
const stream = provider.executeQuery(options);
let currentAssistantMessage: Message | null = null;
let responseText = "";
let responseText = '';
const toolUses: Array<{ name: string; input: unknown }> = [];
for await (const msg of stream) {
// Capture SDK session ID from any message and persist it
if (msg.session_id && !session.sdkSessionId) {
session.sdkSessionId = msg.session_id;
console.log(
`[AgentService] Captured SDK session ID: ${msg.session_id}`
);
console.log(`[AgentService] Captured SDK session ID: ${msg.session_id}`);
// Persist the SDK session ID to ensure conversation continuity across server restarts
await this.updateSession(sessionId, { sdkSessionId: msg.session_id });
}
if (msg.type === "assistant") {
if (msg.type === 'assistant') {
if (msg.message?.content) {
for (const block of msg.message.content) {
if (block.type === "text") {
if (block.type === 'text') {
responseText += block.text;
if (!currentAssistantMessage) {
currentAssistantMessage = {
id: this.generateId(),
role: "assistant",
role: 'assistant',
content: responseText,
timestamp: new Date().toISOString(),
};
@@ -272,27 +258,27 @@ export class AgentService {
}
this.emitAgentEvent(sessionId, {
type: "stream",
type: 'stream',
messageId: currentAssistantMessage.id,
content: responseText,
isComplete: false,
});
} else if (block.type === "tool_use") {
} else if (block.type === 'tool_use') {
const toolUse = {
name: block.name || "unknown",
name: block.name || 'unknown',
input: block.input,
};
toolUses.push(toolUse);
this.emitAgentEvent(sessionId, {
type: "tool_use",
type: 'tool_use',
tool: toolUse,
});
}
}
}
} else if (msg.type === "result") {
if (msg.subtype === "success" && msg.result) {
} else if (msg.type === 'result') {
if (msg.subtype === 'success' && msg.result) {
if (currentAssistantMessage) {
currentAssistantMessage.content = msg.result;
responseText = msg.result;
@@ -300,7 +286,7 @@ export class AgentService {
}
this.emitAgentEvent(sessionId, {
type: "complete",
type: 'complete',
messageId: currentAssistantMessage?.id,
content: responseText,
toolUses,
@@ -324,14 +310,14 @@ export class AgentService {
return { success: false, aborted: true };
}
console.error("[AgentService] Error:", error);
console.error('[AgentService] Error:', error);
session.isRunning = false;
session.abortController = null;
const errorMessage: Message = {
id: this.generateId(),
role: "assistant",
role: 'assistant',
content: `Error: ${(error as Error).message}`,
timestamp: new Date().toISOString(),
isError: true,
@@ -341,7 +327,7 @@ export class AgentService {
await this.saveSession(sessionId, session.messages);
this.emitAgentEvent(sessionId, {
type: "error",
type: 'error',
error: (error as Error).message,
message: errorMessage,
});
@@ -356,7 +342,7 @@ export class AgentService {
getHistory(sessionId: string) {
const session = this.sessions.get(sessionId);
if (!session) {
return { success: false, error: "Session not found" };
return { success: false, error: 'Session not found' };
}
return {
@@ -372,7 +358,7 @@ export class AgentService {
async stopExecution(sessionId: string) {
const session = this.sessions.get(sessionId);
if (!session) {
return { success: false, error: "Session not found" };
return { success: false, error: 'Session not found' };
}
if (session.abortController) {
@@ -404,7 +390,7 @@ export class AgentService {
const sessionFile = path.join(this.stateDir, `${sessionId}.json`);
try {
const data = (await secureFs.readFile(sessionFile, "utf-8")) as string;
const data = (await secureFs.readFile(sessionFile, 'utf-8')) as string;
return JSON.parse(data);
} catch {
return [];
@@ -415,23 +401,16 @@ export class AgentService {
const sessionFile = path.join(this.stateDir, `${sessionId}.json`);
try {
await secureFs.writeFile(
sessionFile,
JSON.stringify(messages, null, 2),
"utf-8"
);
await secureFs.writeFile(sessionFile, JSON.stringify(messages, null, 2), 'utf-8');
await this.updateSessionTimestamp(sessionId);
} catch (error) {
console.error("[AgentService] Failed to save session:", error);
console.error('[AgentService] Failed to save session:', error);
}
}
async loadMetadata(): Promise<Record<string, SessionMetadata>> {
try {
const data = (await secureFs.readFile(
this.metadataFile,
"utf-8"
)) as string;
const data = (await secureFs.readFile(this.metadataFile, 'utf-8')) as string;
return JSON.parse(data);
} catch {
return {};
@@ -439,11 +418,7 @@ export class AgentService {
}
async saveMetadata(metadata: Record<string, SessionMetadata>): Promise<void> {
await secureFs.writeFile(
this.metadataFile,
JSON.stringify(metadata, null, 2),
"utf-8"
);
await secureFs.writeFile(this.metadataFile, JSON.stringify(metadata, null, 2), 'utf-8');
}
async updateSessionTimestamp(sessionId: string): Promise<void> {
@@ -463,8 +438,7 @@ export class AgentService {
}
return sessions.sort(
(a, b) =>
new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime()
(a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime()
);
}
@@ -478,21 +452,15 @@ export class AgentService {
const metadata = await this.loadMetadata();
// Determine the effective working directory
const effectiveWorkingDirectory =
workingDirectory || projectPath || process.cwd();
const effectiveWorkingDirectory = workingDirectory || projectPath || process.cwd();
const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory);
// Validate that the working directory is allowed
if (!isPathAllowed(resolvedWorkingDirectory)) {
throw new PathNotAllowedError(effectiveWorkingDirectory);
}
// Validate that the working directory is allowed using centralized validation
validateWorkingDirectory(resolvedWorkingDirectory);
// Validate that projectPath is allowed if provided
if (projectPath) {
const resolvedProjectPath = path.resolve(projectPath);
if (!isPathAllowed(resolvedProjectPath)) {
throw new PathNotAllowedError(projectPath);
}
validateWorkingDirectory(projectPath);
}
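
Both checks now funnel through validateWorkingDirectory, so a caller can handle rejection uniformly. A minimal sketch, assuming PathNotAllowedError is importable from '@automaker/platform' and that the helper name and return shape are illustrative rather than part of this change:

import { PathNotAllowedError } from '@automaker/platform'; // import path assumed

// Hypothetical helper: turns the centralized security failure into a
// structured response instead of an unhandled throw.
function toCreateSessionError(requestedCwd: string): { success: false; error: string } | null {
  try {
    validateWorkingDirectory(requestedCwd);
    return null; // path is allowed
  } catch (error) {
    if (error instanceof PathNotAllowedError) {
      return { success: false, error: error.message };
    }
    throw error; // anything else is unexpected and should propagate
  }
}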
const session: SessionMetadata = {
@@ -569,11 +537,8 @@ export class AgentService {
return true;
}
private emitAgentEvent(
sessionId: string,
data: Record<string, unknown>
): void {
this.events.emit("agent:stream", { sessionId, ...data });
private emitAgentEvent(sessionId: string, data: Record<string, unknown>): void {
this.events.emit('agent:stream', { sessionId, ...data });
}
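
Every payload above travels over the single 'agent:stream' channel with the sessionId merged in. A minimal consumer sketch, assuming the service exposes its EventEmitter as a public events property (not shown in this diff):

// Assumes `agentService.events` is a Node EventEmitter made available to callers.
agentService.events.on('agent:stream', (evt: { sessionId: string; type: string } & Record<string, unknown>) => {
  if (evt.type === 'stream') {
    // evt.content holds the partial assistant text accumulated so far
  } else if (evt.type === 'tool_use') {
    // evt.tool is the { name, input } pair for the tool call
  } else if (evt.type === 'complete') {
    // evt.content is the final text, evt.toolUses the collected calls
  } else if (evt.type === 'error') {
    // evt.error carries the message string
  }
});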
private getSystemPrompt(): string {

File diff suppressed because it is too large

View File

@@ -7,10 +7,10 @@
* Developers should configure their projects to use the PORT environment variable.
*/
import { spawn, execSync, type ChildProcess } from "child_process";
import { existsSync } from "fs";
import path from "path";
import net from "net";
import { spawn, execSync, type ChildProcess } from 'child_process';
import * as secureFs from '../lib/secure-fs.js';
import path from 'path';
import net from 'net';
export interface DevServerInfo {
worktreePath: string;
@@ -40,12 +40,12 @@ class DevServerService {
// Then check if the system has it in use
return new Promise((resolve) => {
const server = net.createServer();
server.once("error", () => resolve(false));
server.once("listening", () => {
server.once('error', () => resolve(false));
server.once('listening', () => {
server.close();
resolve(true);
});
server.listen(port, "127.0.0.1");
server.listen(port, '127.0.0.1');
});
}
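
The probe resolves false when the listener errors (typically EADDRINUSE) and true once it binds cleanly. An illustrative call from inside the service, since the method is private:

// candidatePort is a hypothetical local variable in the port-scanning loop.
if (!(await this.isPortAvailable(candidatePort))) {
  continue; // port is taken, move on to the next candidate
}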
@@ -54,21 +54,21 @@ class DevServerService {
*/
private killProcessOnPort(port: number): void {
try {
if (process.platform === "win32") {
if (process.platform === 'win32') {
// Windows: find and kill process on port
const result = execSync(`netstat -ano | findstr :${port}`, { encoding: "utf-8" });
const lines = result.trim().split("\n");
const result = execSync(`netstat -ano | findstr :${port}`, { encoding: 'utf-8' });
const lines = result.trim().split('\n');
const pids = new Set<string>();
for (const line of lines) {
const parts = line.trim().split(/\s+/);
const pid = parts[parts.length - 1];
if (pid && pid !== "0") {
if (pid && pid !== '0') {
pids.add(pid);
}
}
for (const pid of pids) {
try {
execSync(`taskkill /F /PID ${pid}`, { stdio: "ignore" });
execSync(`taskkill /F /PID ${pid}`, { stdio: 'ignore' });
console.log(`[DevServerService] Killed process ${pid} on port ${port}`);
} catch {
// Process may have already exited
@@ -77,11 +77,11 @@ class DevServerService {
} else {
// macOS/Linux: use lsof to find and kill process
try {
const result = execSync(`lsof -ti:${port}`, { encoding: "utf-8" });
const pids = result.trim().split("\n").filter(Boolean);
const result = execSync(`lsof -ti:${port}`, { encoding: 'utf-8' });
const pids = result.trim().split('\n').filter(Boolean);
for (const pid of pids) {
try {
execSync(`kill -9 ${pid}`, { stdio: "ignore" });
execSync(`kill -9 ${pid}`, { stdio: 'ignore' });
console.log(`[DevServerService] Killed process ${pid} on port ${port}`);
} catch {
// Process may have already exited
@@ -127,37 +127,47 @@ class DevServerService {
throw new Error(`No available ports found between ${BASE_PORT} and ${MAX_PORT}`);
}
/**
* Helper to check if a file exists using secureFs
*/
private async fileExists(filePath: string): Promise<boolean> {
try {
await secureFs.access(filePath);
return true;
} catch {
return false;
}
}
/**
* Detect the package manager used in a directory
*/
private detectPackageManager(
dir: string
): "npm" | "yarn" | "pnpm" | "bun" | null {
if (existsSync(path.join(dir, "bun.lockb"))) return "bun";
if (existsSync(path.join(dir, "pnpm-lock.yaml"))) return "pnpm";
if (existsSync(path.join(dir, "yarn.lock"))) return "yarn";
if (existsSync(path.join(dir, "package-lock.json"))) return "npm";
if (existsSync(path.join(dir, "package.json"))) return "npm"; // Default
private async detectPackageManager(dir: string): Promise<'npm' | 'yarn' | 'pnpm' | 'bun' | null> {
if (await this.fileExists(path.join(dir, 'bun.lockb'))) return 'bun';
if (await this.fileExists(path.join(dir, 'pnpm-lock.yaml'))) return 'pnpm';
if (await this.fileExists(path.join(dir, 'yarn.lock'))) return 'yarn';
if (await this.fileExists(path.join(dir, 'package-lock.json'))) return 'npm';
if (await this.fileExists(path.join(dir, 'package.json'))) return 'npm'; // Default
return null;
}
/**
* Get the dev script command for a directory
*/
private getDevCommand(dir: string): { cmd: string; args: string[] } | null {
const pm = this.detectPackageManager(dir);
private async getDevCommand(dir: string): Promise<{ cmd: string; args: string[] } | null> {
const pm = await this.detectPackageManager(dir);
if (!pm) return null;
switch (pm) {
case "bun":
return { cmd: "bun", args: ["run", "dev"] };
case "pnpm":
return { cmd: "pnpm", args: ["run", "dev"] };
case "yarn":
return { cmd: "yarn", args: ["dev"] };
case "npm":
case 'bun':
return { cmd: 'bun', args: ['run', 'dev'] };
case 'pnpm':
return { cmd: 'pnpm', args: ['run', 'dev'] };
case 'yarn':
return { cmd: 'yarn', args: ['dev'] };
case 'npm':
default:
return { cmd: "npm", args: ["run", "dev"] };
return { cmd: 'npm', args: ['run', 'dev'] };
}
}
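
The return value is meant to feed straight into spawn; for a worktree whose lock file is pnpm-lock.yaml, the resolution implied by the switch above would be:

// Expected result for a pnpm worktree (illustrative values).
const devCommand = await this.getDevCommand(worktreePath);
// devCommand => { cmd: 'pnpm', args: ['run', 'dev'] }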
@@ -192,7 +202,7 @@ class DevServerService {
}
// Verify the worktree exists
if (!existsSync(worktreePath)) {
if (!(await this.fileExists(worktreePath))) {
return {
success: false,
error: `Worktree path does not exist: ${worktreePath}`,
@@ -200,8 +210,8 @@ class DevServerService {
}
// Check for package.json
const packageJsonPath = path.join(worktreePath, "package.json");
if (!existsSync(packageJsonPath)) {
const packageJsonPath = path.join(worktreePath, 'package.json');
if (!(await this.fileExists(packageJsonPath))) {
return {
success: false,
error: `No package.json found in: ${worktreePath}`,
@@ -209,7 +219,7 @@ class DevServerService {
}
// Get dev command
const devCommand = this.getDevCommand(worktreePath);
const devCommand = await this.getDevCommand(worktreePath);
if (!devCommand) {
return {
success: false,
@@ -224,7 +234,7 @@ class DevServerService {
} catch (error) {
return {
success: false,
error: error instanceof Error ? error.message : "Port allocation failed",
error: error instanceof Error ? error.message : 'Port allocation failed',
};
}
@@ -241,14 +251,10 @@ class DevServerService {
// Small delay to ensure related ports are freed
await new Promise((resolve) => setTimeout(resolve, 100));
console.log(`[DevServerService] Starting dev server on port ${port}`);
console.log(`[DevServerService] Working directory (cwd): ${worktreePath}`);
console.log(
`[DevServerService] Starting dev server on port ${port}`
);
console.log(
`[DevServerService] Working directory (cwd): ${worktreePath}`
);
console.log(
`[DevServerService] Command: ${devCommand.cmd} ${devCommand.args.join(" ")} with PORT=${port}`
`[DevServerService] Command: ${devCommand.cmd} ${devCommand.args.join(' ')} with PORT=${port}`
);
// Spawn the dev process with PORT environment variable
@@ -260,7 +266,7 @@ class DevServerService {
const devProcess = spawn(devCommand.cmd, devCommand.args, {
cwd: worktreePath,
env,
stdio: ["ignore", "pipe", "pipe"],
stdio: ['ignore', 'pipe', 'pipe'],
detached: false,
});
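
Only the PORT environment variable is injected here; whether the spawned dev script honors it is up to the target project. A minimal sketch of a consumer that would pick it up, using a plain Node HTTP server as a stand-in for the real dev tooling:

// Hypothetical dev entry point inside a managed worktree; reads the injected
// PORT and falls back to a default when run outside the dev-server service.
import http from 'http';

const port = Number(process.env.PORT) || 3000;
http
  .createServer((_req, res) => {
    res.end('ok');
  })
  .listen(port, () => console.log(`dev server listening on ${port}`));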
@@ -269,29 +275,27 @@ class DevServerService {
// Log output for debugging
if (devProcess.stdout) {
devProcess.stdout.on("data", (data: Buffer) => {
devProcess.stdout.on('data', (data: Buffer) => {
console.log(`[DevServer:${port}] ${data.toString().trim()}`);
});
}
if (devProcess.stderr) {
devProcess.stderr.on("data", (data: Buffer) => {
devProcess.stderr.on('data', (data: Buffer) => {
const msg = data.toString().trim();
console.error(`[DevServer:${port}] ${msg}`);
});
}
devProcess.on("error", (error) => {
devProcess.on('error', (error) => {
console.error(`[DevServerService] Process error:`, error);
status.error = error.message;
this.allocatedPorts.delete(port);
this.runningServers.delete(worktreePath);
});
devProcess.on("exit", (code) => {
console.log(
`[DevServerService] Process for ${worktreePath} exited with code ${code}`
);
devProcess.on('exit', (code) => {
console.log(`[DevServerService] Process for ${worktreePath} exited with code ${code}`);
status.exited = true;
this.allocatedPorts.delete(port);
this.runningServers.delete(worktreePath);
@@ -348,7 +352,9 @@ class DevServerService {
// If we don't have a record of this server, it may have crashed/exited on its own
// Return success so the frontend can clear its state
if (!server) {
console.log(`[DevServerService] No server record for ${worktreePath}, may have already stopped`);
console.log(
`[DevServerService] No server record for ${worktreePath}, may have already stopped`
);
return {
success: true,
result: {
@@ -362,7 +368,7 @@ class DevServerService {
// Kill the process
if (server.process && !server.process.killed) {
server.process.kill("SIGTERM");
server.process.kill('SIGTERM');
}
// Free the port
@@ -447,13 +453,13 @@ export function getDevServerService(): DevServerService {
}
// Cleanup on process exit
process.on("SIGTERM", async () => {
process.on('SIGTERM', async () => {
if (devServerServiceInstance) {
await devServerServiceInstance.stopAll();
}
});
process.on("SIGINT", async () => {
process.on('SIGINT', async () => {
if (devServerServiceInstance) {
await devServerServiceInstance.stopAll();
}