Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-02 08:33:36 +00:00)
Merge main into massive-terminal-upgrade
Resolves merge conflicts:
- apps/server/src/routes/terminal/common.ts: Keep randomBytes import, use @automaker/utils for createLogger
- apps/ui/eslint.config.mjs: Use main's explicit globals list with XMLHttpRequest and MediaQueryListEvent additions
- apps/ui/src/components/views/terminal-view.tsx: Keep our terminal improvements (killAllSessions, beforeunload, better error handling)
- apps/ui/src/config/terminal-themes.ts: Keep our search highlight colors for all themes
- apps/ui/src/store/app-store.ts: Keep our terminal settings persistence improvements (merge function)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
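The app-store.ts resolution above keeps a custom merge function for persisted terminal settings. That store is not part of this diff; the sketch below is only an illustration of the kind of merge that keeps newly added settings keys when an older persisted object is loaded, assuming a zustand persist store (the actual library, field names, and defaults may differ).

```typescript
// Hypothetical illustration - the real apps/ui/src/store/app-store.ts is not shown in this diff.
import { create } from 'zustand';
import { persist } from 'zustand/middleware';

interface TerminalSettings {
  fontSize: number;
  theme: string;
}

interface AppState {
  terminalSettings: TerminalSettings;
  setTerminalSettings: (settings: Partial<TerminalSettings>) => void;
}

const defaultTerminalSettings: TerminalSettings = { fontSize: 14, theme: 'dark' };

export const useAppStore = create<AppState>()(
  persist(
    (set) => ({
      terminalSettings: defaultTerminalSettings,
      setTerminalSettings: (settings) =>
        set((state) => ({ terminalSettings: { ...state.terminalSettings, ...settings } })),
    }),
    {
      name: 'app-store',
      // Custom merge: deep-merge terminal settings so keys added after the user
      // last persisted state keep their defaults instead of being dropped.
      merge: (persisted, current) => ({
        ...current,
        ...(persisted as Partial<AppState>),
        terminalSettings: {
          ...current.terminalSettings,
          ...((persisted as Partial<AppState>)?.terminalSettings ?? {}),
        },
      }),
    }
  )
);
```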
@@ -2,13 +2,10 @@
* Common utilities for agent routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Agent");
const logger = createLogger('Agent');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,28 +2,30 @@
* Agent routes - HTTP API for Claude agent interactions
*/

import { Router } from "express";
import { AgentService } from "../../services/agent-service.js";
import type { EventEmitter } from "../../lib/events.js";
import { createStartHandler } from "./routes/start.js";
import { createSendHandler } from "./routes/send.js";
import { createHistoryHandler } from "./routes/history.js";
import { createStopHandler } from "./routes/stop.js";
import { createClearHandler } from "./routes/clear.js";
import { createModelHandler } from "./routes/model.js";
import { Router } from 'express';
import { AgentService } from '../../services/agent-service.js';
import type { EventEmitter } from '../../lib/events.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createStartHandler } from './routes/start.js';
import { createSendHandler } from './routes/send.js';
import { createHistoryHandler } from './routes/history.js';
import { createStopHandler } from './routes/stop.js';
import { createClearHandler } from './routes/clear.js';
import { createModelHandler } from './routes/model.js';

export function createAgentRoutes(
agentService: AgentService,
_events: EventEmitter
): Router {
export function createAgentRoutes(agentService: AgentService, _events: EventEmitter): Router {
const router = Router();

router.post("/start", createStartHandler(agentService));
router.post("/send", createSendHandler(agentService));
router.post("/history", createHistoryHandler(agentService));
router.post("/stop", createStopHandler(agentService));
router.post("/clear", createClearHandler(agentService));
router.post("/model", createModelHandler(agentService));
router.post('/start', validatePathParams('workingDirectory?'), createStartHandler(agentService));
router.post(
'/send',
validatePathParams('workingDirectory?', 'imagePaths[]'),
createSendHandler(agentService)
);
router.post('/history', createHistoryHandler(agentService));
router.post('/stop', createStopHandler(agentService));
router.post('/clear', createClearHandler(agentService));
router.post('/model', createModelHandler(agentService));

return router;
}

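The hunk above wires validatePathParams in front of handlers that accept filesystem paths in the request body, with a `?` suffix marking optional fields and `[]` marking arrays. The middleware itself is not included in this diff; a minimal sketch of what such a factory might look like, assuming it only checks that the named body fields are absolute path strings (the real implementation may also confine paths to allowed roots):

```typescript
// Hypothetical sketch - middleware/validate-paths.ts is not shown in this diff.
import path from 'path';
import type { Request, Response, NextFunction } from 'express';

export function validatePathParams(...specs: string[]) {
  return (req: Request, res: Response, next: NextFunction): void => {
    for (const spec of specs) {
      const isArray = spec.endsWith('[]');
      const optional = spec.endsWith('?');
      const field = spec.replace(/\[\]$|\?$/, '');
      const value = (req.body as Record<string, unknown>)[field];

      if (value === undefined || value === null) {
        // Missing optional fields and omitted arrays are allowed.
        if (optional || isArray) continue;
        res.status(400).json({ success: false, error: `${field} is required` });
        return;
      }

      if (isArray && !Array.isArray(value)) {
        res.status(400).json({ success: false, error: `${field} must be an array of paths` });
        return;
      }

      const candidates = isArray ? (value as unknown[]) : [value];
      for (const candidate of candidates) {
        if (typeof candidate !== 'string' || !path.isAbsolute(candidate)) {
          res.status(400).json({ success: false, error: `${field} must be an absolute path` });
          return;
        }
      }
    }
    next();
  };
}
```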
@@ -2,29 +2,27 @@
* POST /send endpoint - Send a message
*/

import type { Request, Response } from "express";
import { AgentService } from "../../../services/agent-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";

const logger = createLogger("Agent");
import type { Request, Response } from 'express';
import { AgentService } from '../../../services/agent-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';
const logger = createLogger('Agent');

export function createSendHandler(agentService: AgentService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const { sessionId, message, workingDirectory, imagePaths, model } =
req.body as {
sessionId: string;
message: string;
workingDirectory?: string;
imagePaths?: string[];
model?: string;
};
const { sessionId, message, workingDirectory, imagePaths, model } = req.body as {
sessionId: string;
message: string;
workingDirectory?: string;
imagePaths?: string[];
model?: string;
};

if (!sessionId || !message) {
res.status(400).json({
success: false,
error: "sessionId and message are required",
error: 'sessionId and message are required',
});
return;
}
@@ -39,13 +37,13 @@ export function createSendHandler(agentService: AgentService) {
model,
})
.catch((error) => {
logError(error, "Send message failed (background)");
logError(error, 'Send message failed (background)');
});

// Return immediately - responses come via WebSocket
res.json({ success: true, message: "Message sent" });
res.json({ success: true, message: 'Message sent' });
} catch (error) {
logError(error, "Send message failed");
logError(error, 'Send message failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

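For reference, the handler above expects a JSON body with sessionId and message plus optional workingDirectory, imagePaths, and model, and returns immediately while the agent's responses stream over WebSocket. A minimal client-side call might look like this; the base URL and route prefix are assumptions, only the body fields come from the diff:

```typescript
// Example request shape for the /send handler above; host and route prefix are illustrative.
const response = await fetch('http://localhost:3000/api/agent/send', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    sessionId: 'session-123',
    message: 'Summarize the open TODOs in this repo',
    workingDirectory: '/absolute/path/to/project', // optional, validated as a path param
    imagePaths: [],                                // optional, validated as a path array
    model: 'example-model-id',                     // optional model override (placeholder value)
  }),
});

const result = (await response.json()) as { success: boolean; message?: string; error?: string };
// On success the server replies { success: true, message: 'Message sent' };
// the actual agent output arrives over the WebSocket channel, not in this response.
```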
@@ -2,12 +2,11 @@
* POST /start endpoint - Start a conversation
*/

import type { Request, Response } from "express";
import { AgentService } from "../../../services/agent-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";

const logger = createLogger("Agent");
import type { Request, Response } from 'express';
import { AgentService } from '../../../services/agent-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';
const logger = createLogger('Agent');

export function createStartHandler(agentService: AgentService) {
return async (req: Request, res: Response): Promise<void> => {
@@ -18,9 +17,7 @@ export function createStartHandler(agentService: AgentService) {
};

if (!sessionId) {
res
.status(400)
.json({ success: false, error: "sessionId is required" });
res.status(400).json({ success: false, error: 'sessionId is required' });
return;
}

@@ -31,7 +28,7 @@ export function createStartHandler(agentService: AgentService) {

res.json(result);
} catch (error) {
logError(error, "Start conversation failed");
logError(error, 'Start conversation failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,9 +2,9 @@
* Common utilities and state management for spec regeneration
*/

import { createLogger } from "../../lib/logger.js";
import { createLogger } from '@automaker/utils';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

// Shared state for tracking generation status - private
let isRunning = false;
@@ -23,10 +23,7 @@ export function getSpecRegenerationStatus(): {
/**
* Set the running state and abort controller
*/
export function setRunningState(
running: boolean,
controller: AbortController | null = null
): void {
export function setRunningState(running: boolean, controller: AbortController | null = null): void {
isRunning = running;
currentAbortController = controller;
}
@@ -40,14 +37,12 @@ export function logAuthStatus(context: string): void {
logger.info(`${context} - Auth Status:`);
logger.info(
` ANTHROPIC_API_KEY: ${
hasApiKey
? "SET (" + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + "...)"
: "NOT SET"
hasApiKey ? 'SET (' + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + '...)' : 'NOT SET'
}`
);

if (!hasApiKey) {
logger.warn("⚠️ WARNING: No authentication configured! SDK will fail.");
logger.warn('⚠️ WARNING: No authentication configured! SDK will fail.');
}
}

@@ -56,16 +51,13 @@ export function logAuthStatus(context: string): void {
*/
export function logError(error: unknown, context: string): void {
logger.error(`❌ ${context}:`);
logger.error("Error name:", (error as any)?.name);
logger.error("Error message:", (error as Error)?.message);
logger.error("Error stack:", (error as Error)?.stack);
logger.error(
"Full error object:",
JSON.stringify(error, Object.getOwnPropertyNames(error), 2)
);
logger.error('Error name:', (error as any)?.name);
logger.error('Error message:', (error as Error)?.message);
logger.error('Error stack:', (error as Error)?.stack);
logger.error('Full error object:', JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
}

import { getErrorMessage as getErrorMessageShared } from "../common.js";
import { getErrorMessage as getErrorMessageShared } from '../common.js';

// Re-export shared utility
export { getErrorMessageShared as getErrorMessage };

@@ -2,16 +2,16 @@
* Generate features from existing app_spec.txt
*/

import { query } from "@anthropic-ai/claude-agent-sdk";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { createLogger } from "../../lib/logger.js";
import { createFeatureGenerationOptions } from "../../lib/sdk-options.js";
import { logAuthStatus } from "./common.js";
import { parseAndCreateFeatures } from "./parse-and-create-features.js";
import { getAppSpecPath } from "../../lib/automaker-paths.js";
import { query } from '@anthropic-ai/claude-agent-sdk';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
import { createLogger } from '@automaker/utils';
import { createFeatureGenerationOptions } from '../../lib/sdk-options.js';
import { logAuthStatus } from './common.js';
import { parseAndCreateFeatures } from './parse-and-create-features.js';
import { getAppSpecPath } from '@automaker/platform';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

const DEFAULT_MAX_FEATURES = 50;

@@ -22,28 +22,26 @@ export async function generateFeaturesFromSpec(
maxFeatures?: number
): Promise<void> {
const featureCount = maxFeatures ?? DEFAULT_MAX_FEATURES;
logger.debug("========== generateFeaturesFromSpec() started ==========");
logger.debug("projectPath:", projectPath);
logger.debug("maxFeatures:", featureCount);
logger.debug('========== generateFeaturesFromSpec() started ==========');
logger.debug('projectPath:', projectPath);
logger.debug('maxFeatures:', featureCount);

// Read existing spec from .automaker directory
const specPath = getAppSpecPath(projectPath);
let spec: string;

logger.debug("Reading spec from:", specPath);
logger.debug('Reading spec from:', specPath);

try {
spec = await fs.readFile(specPath, "utf-8");
spec = (await secureFs.readFile(specPath, 'utf-8')) as string;
logger.info(`Spec loaded successfully (${spec.length} chars)`);
logger.info(`Spec preview (first 500 chars): ${spec.substring(0, 500)}`);
logger.info(
`Spec preview (last 500 chars): ${spec.substring(spec.length - 500)}`
);
logger.info(`Spec preview (last 500 chars): ${spec.substring(spec.length - 500)}`);
} catch (readError) {
logger.error("❌ Failed to read spec file:", readError);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
error: "No project spec found. Generate spec first.",
logger.error('❌ Failed to read spec file:', readError);
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: 'No project spec found. Generate spec first.',
projectPath: projectPath,
});
return;
@@ -82,16 +80,14 @@ Generate ${featureCount} features that build on each other logically.

IMPORTANT: Do not ask for clarification. The specification is provided above. Generate the JSON immediately.`;

logger.info("========== PROMPT BEING SENT ==========");
logger.info('========== PROMPT BEING SENT ==========');
logger.info(`Prompt length: ${prompt.length} chars`);
logger.info(
`Prompt preview (first 1000 chars):\n${prompt.substring(0, 1000)}`
);
logger.info("========== END PROMPT PREVIEW ==========");
logger.info(`Prompt preview (first 1000 chars):\n${prompt.substring(0, 1000)}`);
logger.info('========== END PROMPT PREVIEW ==========');

events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
content: "Analyzing spec and generating features...\n",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: 'Analyzing spec and generating features...\n',
projectPath: projectPath,
});

@@ -100,73 +96,67 @@ IMPORTANT: Do not ask for clarification. The specification is provided above. Ge
abortController,
});

logger.debug("SDK Options:", JSON.stringify(options, null, 2));
logger.info("Calling Claude Agent SDK query() for features...");
logger.debug('SDK Options:', JSON.stringify(options, null, 2));
logger.info('Calling Claude Agent SDK query() for features...');

logAuthStatus("Right before SDK query() for features");
logAuthStatus('Right before SDK query() for features');

let stream;
try {
stream = query({ prompt, options });
logger.debug("query() returned stream successfully");
logger.debug('query() returned stream successfully');
} catch (queryError) {
logger.error("❌ query() threw an exception:");
logger.error("Error:", queryError);
logger.error('❌ query() threw an exception:');
logger.error('Error:', queryError);
throw queryError;
}

let responseText = "";
let responseText = '';
let messageCount = 0;

logger.debug("Starting to iterate over feature stream...");
logger.debug('Starting to iterate over feature stream...');

try {
for await (const msg of stream) {
messageCount++;
logger.debug(
`Feature stream message #${messageCount}:`,
JSON.stringify(
{ type: msg.type, subtype: (msg as any).subtype },
null,
2
)
JSON.stringify({ type: msg.type, subtype: (msg as any).subtype }, null, 2)
);

if (msg.type === "assistant" && msg.message.content) {
if (msg.type === 'assistant' && msg.message.content) {
for (const block of msg.message.content) {
if (block.type === "text") {
if (block.type === 'text') {
responseText += block.text;
logger.debug(
`Feature text block received (${block.text.length} chars)`
);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
logger.debug(`Feature text block received (${block.text.length} chars)`);
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: block.text,
projectPath: projectPath,
});
}
}
} else if (msg.type === "result" && (msg as any).subtype === "success") {
logger.debug("Received success result for features");
} else if (msg.type === 'result' && (msg as any).subtype === 'success') {
logger.debug('Received success result for features');
responseText = (msg as any).result || responseText;
} else if ((msg as { type: string }).type === "error") {
logger.error("❌ Received error message from feature stream:");
logger.error("Error message:", JSON.stringify(msg, null, 2));
} else if ((msg as { type: string }).type === 'error') {
logger.error('❌ Received error message from feature stream:');
logger.error('Error message:', JSON.stringify(msg, null, 2));
}
}
} catch (streamError) {
logger.error("❌ Error while iterating feature stream:");
logger.error("Stream error:", streamError);
logger.error('❌ Error while iterating feature stream:');
logger.error('Stream error:', streamError);
throw streamError;
}

logger.info(`Feature stream complete. Total messages: ${messageCount}`);
logger.info(`Feature response length: ${responseText.length} chars`);
logger.info("========== FULL RESPONSE TEXT ==========");
logger.info('========== FULL RESPONSE TEXT ==========');
logger.info(responseText);
logger.info("========== END RESPONSE TEXT ==========");
logger.info('========== END RESPONSE TEXT ==========');

await parseAndCreateFeatures(projectPath, responseText, events);

logger.debug("========== generateFeaturesFromSpec() completed ==========");
logger.debug('========== generateFeaturesFromSpec() completed ==========');
}

@@ -2,23 +2,23 @@
* Generate app_spec.txt from project overview
*/

import { query } from "@anthropic-ai/claude-agent-sdk";
import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { query } from '@anthropic-ai/claude-agent-sdk';
import path from 'path';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
import {
specOutputSchema,
specToXml,
getStructuredSpecPromptInstruction,
type SpecOutput,
} from "../../lib/app-spec-format.js";
import { createLogger } from "../../lib/logger.js";
import { createSpecGenerationOptions } from "../../lib/sdk-options.js";
import { logAuthStatus } from "./common.js";
import { generateFeaturesFromSpec } from "./generate-features-from-spec.js";
import { ensureAutomakerDir, getAppSpecPath } from "../../lib/automaker-paths.js";
} from '../../lib/app-spec-format.js';
import { createLogger } from '@automaker/utils';
import { createSpecGenerationOptions } from '../../lib/sdk-options.js';
import { logAuthStatus } from './common.js';
import { generateFeaturesFromSpec } from './generate-features-from-spec.js';
import { ensureAutomakerDir, getAppSpecPath } from '@automaker/platform';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

export async function generateSpec(
projectPath: string,
@@ -29,17 +29,17 @@ export async function generateSpec(
analyzeProject?: boolean,
maxFeatures?: number
): Promise<void> {
logger.info("========== generateSpec() started ==========");
logger.info("projectPath:", projectPath);
logger.info("projectOverview length:", `${projectOverview.length} chars`);
logger.info("projectOverview preview:", projectOverview.substring(0, 300));
logger.info("generateFeatures:", generateFeatures);
logger.info("analyzeProject:", analyzeProject);
logger.info("maxFeatures:", maxFeatures);
logger.info('========== generateSpec() started ==========');
logger.info('projectPath:', projectPath);
logger.info('projectOverview length:', `${projectOverview.length} chars`);
logger.info('projectOverview preview:', projectOverview.substring(0, 300));
logger.info('generateFeatures:', generateFeatures);
logger.info('analyzeProject:', analyzeProject);
logger.info('maxFeatures:', maxFeatures);

// Build the prompt based on whether we should analyze the project
let analysisInstructions = "";
let techStackDefaults = "";
let analysisInstructions = '';
let techStackDefaults = '';

if (analyzeProject !== false) {
// Default to true - analyze the project
@@ -73,114 +73,110 @@ ${analysisInstructions}

${getStructuredSpecPromptInstruction()}`;

logger.info("========== PROMPT BEING SENT ==========");
logger.info('========== PROMPT BEING SENT ==========');
logger.info(`Prompt length: ${prompt.length} chars`);
logger.info(`Prompt preview (first 500 chars):\n${prompt.substring(0, 500)}`);
logger.info("========== END PROMPT PREVIEW ==========");
logger.info('========== END PROMPT PREVIEW ==========');

events.emit("spec-regeneration:event", {
type: "spec_progress",
content: "Starting spec generation...\n",
events.emit('spec-regeneration:event', {
type: 'spec_progress',
content: 'Starting spec generation...\n',
});

const options = createSpecGenerationOptions({
cwd: projectPath,
abortController,
outputFormat: {
type: "json_schema",
type: 'json_schema',
schema: specOutputSchema,
},
});

logger.debug("SDK Options:", JSON.stringify(options, null, 2));
logger.info("Calling Claude Agent SDK query()...");
logger.debug('SDK Options:', JSON.stringify(options, null, 2));
logger.info('Calling Claude Agent SDK query()...');

// Log auth status right before the SDK call
logAuthStatus("Right before SDK query()");
logAuthStatus('Right before SDK query()');

let stream;
try {
stream = query({ prompt, options });
logger.debug("query() returned stream successfully");
logger.debug('query() returned stream successfully');
} catch (queryError) {
logger.error("❌ query() threw an exception:");
logger.error("Error:", queryError);
logger.error('❌ query() threw an exception:');
logger.error('Error:', queryError);
throw queryError;
}

let responseText = "";
let responseText = '';
let messageCount = 0;
let structuredOutput: SpecOutput | null = null;

logger.info("Starting to iterate over stream...");
logger.info('Starting to iterate over stream...');

try {
for await (const msg of stream) {
messageCount++;
logger.info(
`Stream message #${messageCount}: type=${msg.type}, subtype=${
(msg as any).subtype
}`
`Stream message #${messageCount}: type=${msg.type}, subtype=${(msg as any).subtype}`
);

if (msg.type === "assistant") {
if (msg.type === 'assistant') {
const msgAny = msg as any;
if (msgAny.message?.content) {
for (const block of msgAny.message.content) {
if (block.type === "text") {
if (block.type === 'text') {
responseText += block.text;
logger.info(
`Text block received (${block.text.length} chars), total now: ${responseText.length} chars`
);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: block.text,
projectPath: projectPath,
});
} else if (block.type === "tool_use") {
logger.info("Tool use:", block.name);
events.emit("spec-regeneration:event", {
type: "spec_tool",
} else if (block.type === 'tool_use') {
logger.info('Tool use:', block.name);
events.emit('spec-regeneration:event', {
type: 'spec_tool',
tool: block.name,
input: block.input,
});
}
}
}
} else if (msg.type === "result" && (msg as any).subtype === "success") {
logger.info("Received success result");
} else if (msg.type === 'result' && (msg as any).subtype === 'success') {
logger.info('Received success result');
// Check for structured output - this is the reliable way to get spec data
const resultMsg = msg as any;
if (resultMsg.structured_output) {
structuredOutput = resultMsg.structured_output as SpecOutput;
logger.info("✅ Received structured output");
logger.debug("Structured output:", JSON.stringify(structuredOutput, null, 2));
logger.info('✅ Received structured output');
logger.debug('Structured output:', JSON.stringify(structuredOutput, null, 2));
} else {
logger.warn("⚠️ No structured output in result, will fall back to text parsing");
logger.warn('⚠️ No structured output in result, will fall back to text parsing');
}
} else if (msg.type === "result") {
} else if (msg.type === 'result') {
// Handle error result types
const subtype = (msg as any).subtype;
logger.info(`Result message: subtype=${subtype}`);
if (subtype === "error_max_turns") {
logger.error("❌ Hit max turns limit!");
} else if (subtype === "error_max_structured_output_retries") {
logger.error("❌ Failed to produce valid structured output after retries");
throw new Error("Could not produce valid spec output");
if (subtype === 'error_max_turns') {
logger.error('❌ Hit max turns limit!');
} else if (subtype === 'error_max_structured_output_retries') {
logger.error('❌ Failed to produce valid structured output after retries');
throw new Error('Could not produce valid spec output');
}
} else if ((msg as { type: string }).type === "error") {
logger.error("❌ Received error message from stream:");
logger.error("Error message:", JSON.stringify(msg, null, 2));
} else if (msg.type === "user") {
} else if ((msg as { type: string }).type === 'error') {
logger.error('❌ Received error message from stream:');
logger.error('Error message:', JSON.stringify(msg, null, 2));
} else if (msg.type === 'user') {
// Log user messages (tool results)
logger.info(
`User message (tool result): ${JSON.stringify(msg).substring(0, 500)}`
);
logger.info(`User message (tool result): ${JSON.stringify(msg).substring(0, 500)}`);
}
}
} catch (streamError) {
logger.error("❌ Error while iterating stream:");
logger.error("Stream error:", streamError);
logger.error('❌ Error while iterating stream:');
logger.error('Stream error:', streamError);
throw streamError;
}

@@ -192,40 +188,42 @@ ${getStructuredSpecPromptInstruction()}`;

if (structuredOutput) {
// Use structured output - convert JSON to XML
logger.info("✅ Using structured output for XML generation");
logger.info('✅ Using structured output for XML generation');
xmlContent = specToXml(structuredOutput);
logger.info(`Generated XML from structured output: ${xmlContent.length} chars`);
} else {
// Fallback: Extract XML content from response text
// Claude might include conversational text before/after
// See: https://github.com/AutoMaker-Org/automaker/issues/149
logger.warn("⚠️ No structured output, falling back to text parsing");
logger.info("========== FINAL RESPONSE TEXT ==========");
logger.info(responseText || "(empty)");
logger.info("========== END RESPONSE TEXT ==========");
logger.warn('⚠️ No structured output, falling back to text parsing');
logger.info('========== FINAL RESPONSE TEXT ==========');
logger.info(responseText || '(empty)');
logger.info('========== END RESPONSE TEXT ==========');

if (!responseText || responseText.trim().length === 0) {
throw new Error("No response text and no structured output - cannot generate spec");
throw new Error('No response text and no structured output - cannot generate spec');
}

const xmlStart = responseText.indexOf("<project_specification>");
const xmlEnd = responseText.lastIndexOf("</project_specification>");
const xmlStart = responseText.indexOf('<project_specification>');
const xmlEnd = responseText.lastIndexOf('</project_specification>');

if (xmlStart !== -1 && xmlEnd !== -1) {
// Extract just the XML content, discarding any conversational text before/after
xmlContent = responseText.substring(xmlStart, xmlEnd + "</project_specification>".length);
xmlContent = responseText.substring(xmlStart, xmlEnd + '</project_specification>'.length);
logger.info(`Extracted XML content: ${xmlContent.length} chars (from position ${xmlStart})`);
} else {
// No valid XML structure found in the response text
// This happens when structured output was expected but not received, and the agent
// output conversational text instead of XML (e.g., "The project directory appears to be empty...")
// We should NOT save this conversational text as it's not a valid spec
logger.error("❌ Response does not contain valid <project_specification> XML structure");
logger.error("This typically happens when structured output failed and the agent produced conversational text instead of XML");
logger.error('❌ Response does not contain valid <project_specification> XML structure');
logger.error(
'This typically happens when structured output failed and the agent produced conversational text instead of XML'
);
throw new Error(
"Failed to generate spec: No valid XML structure found in response. " +
"The response contained conversational text but no <project_specification> tags. " +
"Please try again."
'Failed to generate spec: No valid XML structure found in response. ' +
'The response contained conversational text but no <project_specification> tags. ' +
'Please try again.'
);
}
}
@@ -234,60 +232,55 @@ ${getStructuredSpecPromptInstruction()}`;
await ensureAutomakerDir(projectPath);
const specPath = getAppSpecPath(projectPath);

logger.info("Saving spec to:", specPath);
logger.info('Saving spec to:', specPath);
logger.info(`Content to save (${xmlContent.length} chars)`);

await fs.writeFile(specPath, xmlContent);
await secureFs.writeFile(specPath, xmlContent);

// Verify the file was written
const savedContent = await fs.readFile(specPath, "utf-8");
const savedContent = await secureFs.readFile(specPath, 'utf-8');
logger.info(`Verified saved file: ${savedContent.length} chars`);
if (savedContent.length === 0) {
logger.error("❌ File was saved but is empty!");
logger.error('❌ File was saved but is empty!');
}

logger.info("Spec saved successfully");
logger.info('Spec saved successfully');

// Emit spec completion event
if (generateFeatures) {
// If features will be generated, emit intermediate completion
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
content: "[Phase: spec_complete] Spec created! Generating features...\n",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_progress',
content: '[Phase: spec_complete] Spec created! Generating features...\n',
projectPath: projectPath,
});
} else {
// If no features, emit final completion
events.emit("spec-regeneration:event", {
type: "spec_regeneration_complete",
message: "Spec regeneration complete!",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_complete',
message: 'Spec regeneration complete!',
projectPath: projectPath,
});
}

// If generate features was requested, generate them from the spec
if (generateFeatures) {
logger.info("Starting feature generation from spec...");
logger.info('Starting feature generation from spec...');
// Create a new abort controller for feature generation
const featureAbortController = new AbortController();
try {
await generateFeaturesFromSpec(
projectPath,
events,
featureAbortController,
maxFeatures
);
await generateFeaturesFromSpec(projectPath, events, featureAbortController, maxFeatures);
// Final completion will be emitted by generateFeaturesFromSpec -> parseAndCreateFeatures
} catch (featureError) {
logger.error("Feature generation failed:", featureError);
logger.error('Feature generation failed:', featureError);
// Don't throw - spec generation succeeded, feature generation is optional
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
error: (featureError as Error).message || "Feature generation failed",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: (featureError as Error).message || 'Feature generation failed',
projectPath: projectPath,
});
}
}

logger.debug("========== generateSpec() completed ==========");
logger.debug('========== generateSpec() completed ==========');
}

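Several hunks in this commit swap direct fs/promises calls for a secureFs wrapper imported from ../../lib/secure-fs.js. That module is not part of this diff; the sketch below is only a rough idea of the kind of wrapper the call sites suggest, assuming it re-exports the same fs operations behind a path guard (the real module may enforce different rules):

```typescript
// Hypothetical sketch - lib/secure-fs.ts itself is not included in this diff.
import fs from 'fs/promises';
import path from 'path';

// Assumed guard: only allow paths under a set of permitted roots.
const allowedRoots: string[] = [process.cwd()];

function assertAllowed(target: string): string {
  const resolved = path.resolve(target);
  const ok = allowedRoots.some(
    (root) => resolved === root || resolved.startsWith(root + path.sep)
  );
  if (!ok) {
    throw new Error(`Path outside allowed roots: ${resolved}`);
  }
  return resolved;
}

export function readFile(filePath: string, encoding?: BufferEncoding) {
  return fs.readFile(assertAllowed(filePath), encoding);
}

export function writeFile(filePath: string, data: string) {
  return fs.writeFile(assertAllowed(filePath), data);
}

export function mkdir(dirPath: string, options?: { recursive?: boolean }) {
  return fs.mkdir(assertAllowed(dirPath), options);
}
```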
@@ -2,71 +2,71 @@
* Parse agent response and create feature files
*/

import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { createLogger } from "../../lib/logger.js";
import { getFeaturesDir } from "../../lib/automaker-paths.js";
import path from 'path';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
import { createLogger } from '@automaker/utils';
import { getFeaturesDir } from '@automaker/platform';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

export async function parseAndCreateFeatures(
projectPath: string,
content: string,
events: EventEmitter
): Promise<void> {
logger.info("========== parseAndCreateFeatures() started ==========");
logger.info('========== parseAndCreateFeatures() started ==========');
logger.info(`Content length: ${content.length} chars`);
logger.info("========== CONTENT RECEIVED FOR PARSING ==========");
logger.info('========== CONTENT RECEIVED FOR PARSING ==========');
logger.info(content);
logger.info("========== END CONTENT ==========");
logger.info('========== END CONTENT ==========');

try {
// Extract JSON from response
logger.info("Extracting JSON from response...");
logger.info('Extracting JSON from response...');
logger.info(`Looking for pattern: /{[\\s\\S]*"features"[\\s\\S]*}/`);
const jsonMatch = content.match(/\{[\s\S]*"features"[\s\S]*\}/);
if (!jsonMatch) {
logger.error("❌ No valid JSON found in response");
logger.error("Full content received:");
logger.error('❌ No valid JSON found in response');
logger.error('Full content received:');
logger.error(content);
throw new Error("No valid JSON found in response");
throw new Error('No valid JSON found in response');
}

logger.info(`JSON match found (${jsonMatch[0].length} chars)`);
logger.info("========== MATCHED JSON ==========");
logger.info('========== MATCHED JSON ==========');
logger.info(jsonMatch[0]);
logger.info("========== END MATCHED JSON ==========");
logger.info('========== END MATCHED JSON ==========');

const parsed = JSON.parse(jsonMatch[0]);
logger.info(`Parsed ${parsed.features?.length || 0} features`);
logger.info("Parsed features:", JSON.stringify(parsed.features, null, 2));
logger.info('Parsed features:', JSON.stringify(parsed.features, null, 2));

const featuresDir = getFeaturesDir(projectPath);
await fs.mkdir(featuresDir, { recursive: true });
await secureFs.mkdir(featuresDir, { recursive: true });

const createdFeatures: Array<{ id: string; title: string }> = [];

for (const feature of parsed.features) {
logger.debug("Creating feature:", feature.id);
logger.debug('Creating feature:', feature.id);
const featureDir = path.join(featuresDir, feature.id);
await fs.mkdir(featureDir, { recursive: true });
await secureFs.mkdir(featureDir, { recursive: true });

const featureData = {
id: feature.id,
category: feature.category || "Uncategorized",
category: feature.category || 'Uncategorized',
title: feature.title,
description: feature.description,
status: "backlog", // Features go to backlog - user must manually start them
status: 'backlog', // Features go to backlog - user must manually start them
priority: feature.priority || 2,
complexity: feature.complexity || "moderate",
complexity: feature.complexity || 'moderate',
dependencies: feature.dependencies || [],
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};

await fs.writeFile(
path.join(featureDir, "feature.json"),
await secureFs.writeFile(
path.join(featureDir, 'feature.json'),
JSON.stringify(featureData, null, 2)
);

@@ -75,20 +75,20 @@ export async function parseAndCreateFeatures(

logger.info(`✓ Created ${createdFeatures.length} features successfully`);

events.emit("spec-regeneration:event", {
type: "spec_regeneration_complete",
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_complete',
message: `Spec regeneration complete! Created ${createdFeatures.length} features.`,
projectPath: projectPath,
});
} catch (error) {
logger.error("❌ parseAndCreateFeatures() failed:");
logger.error("Error:", error);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
logger.error('❌ parseAndCreateFeatures() failed:');
logger.error('Error:', error);
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: (error as Error).message,
projectPath: projectPath,
});
}

logger.debug("========== parseAndCreateFeatures() completed ==========");
logger.debug('========== parseAndCreateFeatures() completed ==========');
}

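parseAndCreateFeatures above pulls the first { ... "features" ... } block out of the model's response and writes one feature.json per entry. Based on the fields consumed in the loop, the expected payload looks roughly like this (values are illustrative, not from the source):

```typescript
// Illustrative payload matching the fields read above; values are examples only.
const exampleResponse = {
  features: [
    {
      id: 'feature-001',
      category: 'Authentication',  // falls back to 'Uncategorized'
      title: 'Email/password login',
      description: 'Users can sign in with email and password.',
      priority: 1,                 // falls back to 2
      complexity: 'moderate',      // falls back to 'moderate'
      dependencies: [],            // falls back to []
    },
  ],
};

// Each entry becomes <featuresDir>/<id>/feature.json with status 'backlog'
// plus createdAt/updatedAt timestamps added by parseAndCreateFeatures().
```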
@@ -2,24 +2,24 @@
* POST /create endpoint - Create project spec from overview
*/

import type { Request, Response } from "express";
import type { EventEmitter } from "../../../lib/events.js";
import { createLogger } from "../../../lib/logger.js";
import type { Request, Response } from 'express';
import type { EventEmitter } from '../../../lib/events.js';
import { createLogger } from '@automaker/utils';
import {
getSpecRegenerationStatus,
setRunningState,
logAuthStatus,
logError,
getErrorMessage,
} from "../common.js";
import { generateSpec } from "../generate-spec.js";
} from '../common.js';
import { generateSpec } from '../generate-spec.js';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

export function createCreateHandler(events: EventEmitter) {
return async (req: Request, res: Response): Promise<void> => {
logger.info("========== /create endpoint called ==========");
logger.debug("Request body:", JSON.stringify(req.body, null, 2));
logger.info('========== /create endpoint called ==========');
logger.debug('Request body:', JSON.stringify(req.body, null, 2));

try {
const { projectPath, projectOverview, generateFeatures, analyzeProject, maxFeatures } =
@@ -31,37 +31,34 @@ export function createCreateHandler(events: EventEmitter) {
maxFeatures?: number;
};

logger.debug("Parsed params:");
logger.debug(" projectPath:", projectPath);
logger.debug(
" projectOverview length:",
`${projectOverview?.length || 0} chars`
);
logger.debug(" generateFeatures:", generateFeatures);
logger.debug(" analyzeProject:", analyzeProject);
logger.debug(" maxFeatures:", maxFeatures);
logger.debug('Parsed params:');
logger.debug(' projectPath:', projectPath);
logger.debug(' projectOverview length:', `${projectOverview?.length || 0} chars`);
logger.debug(' generateFeatures:', generateFeatures);
logger.debug(' analyzeProject:', analyzeProject);
logger.debug(' maxFeatures:', maxFeatures);

if (!projectPath || !projectOverview) {
logger.error("Missing required parameters");
logger.error('Missing required parameters');
res.status(400).json({
success: false,
error: "projectPath and projectOverview required",
error: 'projectPath and projectOverview required',
});
return;
}

const { isRunning } = getSpecRegenerationStatus();
if (isRunning) {
logger.warn("Generation already running, rejecting request");
res.json({ success: false, error: "Spec generation already running" });
logger.warn('Generation already running, rejecting request');
res.json({ success: false, error: 'Spec generation already running' });
return;
}

logAuthStatus("Before starting generation");
logAuthStatus('Before starting generation');

const abortController = new AbortController();
setRunningState(true, abortController);
logger.info("Starting background generation task...");
logger.info('Starting background generation task...');

// Start generation in background
generateSpec(
@@ -74,24 +71,22 @@ export function createCreateHandler(events: EventEmitter) {
maxFeatures
)
.catch((error) => {
logError(error, "Generation failed with error");
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
logError(error, 'Generation failed with error');
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: getErrorMessage(error),
projectPath: projectPath,
});
})
.finally(() => {
logger.info("Generation task finished (success or error)");
logger.info('Generation task finished (success or error)');
setRunningState(false, null);
});

logger.info(
"Returning success response (generation running in background)"
);
logger.info('Returning success response (generation running in background)');
res.json({ success: true });
} catch (error) {
logError(error, "Create spec route handler failed");
logError(error, 'Create spec route handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,24 +2,24 @@
* POST /generate-features endpoint - Generate features from existing spec
*/

import type { Request, Response } from "express";
import type { EventEmitter } from "../../../lib/events.js";
import { createLogger } from "../../../lib/logger.js";
import type { Request, Response } from 'express';
import type { EventEmitter } from '../../../lib/events.js';
import { createLogger } from '@automaker/utils';
import {
getSpecRegenerationStatus,
setRunningState,
logAuthStatus,
logError,
getErrorMessage,
} from "../common.js";
import { generateFeaturesFromSpec } from "../generate-features-from-spec.js";
} from '../common.js';
import { generateFeaturesFromSpec } from '../generate-features-from-spec.js';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

export function createGenerateFeaturesHandler(events: EventEmitter) {
return async (req: Request, res: Response): Promise<void> => {
logger.info("========== /generate-features endpoint called ==========");
logger.debug("Request body:", JSON.stringify(req.body, null, 2));
logger.info('========== /generate-features endpoint called ==========');
logger.debug('Request body:', JSON.stringify(req.body, null, 2));

try {
const { projectPath, maxFeatures } = req.body as {
@@ -27,52 +27,45 @@ export function createGenerateFeaturesHandler(events: EventEmitter) {
maxFeatures?: number;
};

logger.debug("projectPath:", projectPath);
logger.debug("maxFeatures:", maxFeatures);
logger.debug('projectPath:', projectPath);
logger.debug('maxFeatures:', maxFeatures);

if (!projectPath) {
logger.error("Missing projectPath parameter");
res.status(400).json({ success: false, error: "projectPath required" });
logger.error('Missing projectPath parameter');
res.status(400).json({ success: false, error: 'projectPath required' });
return;
}

const { isRunning } = getSpecRegenerationStatus();
if (isRunning) {
logger.warn("Generation already running, rejecting request");
res.json({ success: false, error: "Generation already running" });
logger.warn('Generation already running, rejecting request');
res.json({ success: false, error: 'Generation already running' });
return;
}

logAuthStatus("Before starting feature generation");
logAuthStatus('Before starting feature generation');

const abortController = new AbortController();
setRunningState(true, abortController);
logger.info("Starting background feature generation task...");
logger.info('Starting background feature generation task...');

generateFeaturesFromSpec(
projectPath,
events,
abortController,
maxFeatures
)
generateFeaturesFromSpec(projectPath, events, abortController, maxFeatures)
.catch((error) => {
logError(error, "Feature generation failed with error");
events.emit("spec-regeneration:event", {
type: "features_error",
logError(error, 'Feature generation failed with error');
events.emit('spec-regeneration:event', {
type: 'features_error',
error: getErrorMessage(error),
});
})
.finally(() => {
logger.info("Feature generation task finished (success or error)");
logger.info('Feature generation task finished (success or error)');
setRunningState(false, null);
});

logger.info(
"Returning success response (generation running in background)"
);
logger.info('Returning success response (generation running in background)');
res.json({ success: true });
} catch (error) {
logError(error, "Generate features route handler failed");
logError(error, 'Generate features route handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,71 +2,63 @@
* POST /generate endpoint - Generate spec from project definition
*/

import type { Request, Response } from "express";
import type { EventEmitter } from "../../../lib/events.js";
import { createLogger } from "../../../lib/logger.js";
import type { Request, Response } from 'express';
import type { EventEmitter } from '../../../lib/events.js';
import { createLogger } from '@automaker/utils';
import {
getSpecRegenerationStatus,
setRunningState,
logAuthStatus,
logError,
getErrorMessage,
} from "../common.js";
import { generateSpec } from "../generate-spec.js";
} from '../common.js';
import { generateSpec } from '../generate-spec.js';

const logger = createLogger("SpecRegeneration");
const logger = createLogger('SpecRegeneration');

export function createGenerateHandler(events: EventEmitter) {
return async (req: Request, res: Response): Promise<void> => {
logger.info("========== /generate endpoint called ==========");
logger.debug("Request body:", JSON.stringify(req.body, null, 2));
logger.info('========== /generate endpoint called ==========');
logger.debug('Request body:', JSON.stringify(req.body, null, 2));

try {
const {
projectPath,
projectDefinition,
generateFeatures,
analyzeProject,
maxFeatures,
} = req.body as {
projectPath: string;
projectDefinition: string;
generateFeatures?: boolean;
analyzeProject?: boolean;
maxFeatures?: number;
};
const { projectPath, projectDefinition, generateFeatures, analyzeProject, maxFeatures } =
req.body as {
projectPath: string;
projectDefinition: string;
generateFeatures?: boolean;
analyzeProject?: boolean;
maxFeatures?: number;
};

logger.debug("Parsed params:");
logger.debug(" projectPath:", projectPath);
logger.debug(
" projectDefinition length:",
`${projectDefinition?.length || 0} chars`
);
logger.debug(" generateFeatures:", generateFeatures);
logger.debug(" analyzeProject:", analyzeProject);
logger.debug(" maxFeatures:", maxFeatures);
logger.debug('Parsed params:');
logger.debug(' projectPath:', projectPath);
logger.debug(' projectDefinition length:', `${projectDefinition?.length || 0} chars`);
logger.debug(' generateFeatures:', generateFeatures);
logger.debug(' analyzeProject:', analyzeProject);
logger.debug(' maxFeatures:', maxFeatures);

if (!projectPath || !projectDefinition) {
logger.error("Missing required parameters");
logger.error('Missing required parameters');
res.status(400).json({
success: false,
error: "projectPath and projectDefinition required",
error: 'projectPath and projectDefinition required',
});
return;
}

const { isRunning } = getSpecRegenerationStatus();
if (isRunning) {
logger.warn("Generation already running, rejecting request");
res.json({ success: false, error: "Spec generation already running" });
logger.warn('Generation already running, rejecting request');
res.json({ success: false, error: 'Spec generation already running' });
return;
}

logAuthStatus("Before starting generation");
logAuthStatus('Before starting generation');

const abortController = new AbortController();
setRunningState(true, abortController);
logger.info("Starting background generation task...");
logger.info('Starting background generation task...');

generateSpec(
projectPath,
@@ -78,24 +70,22 @@ export function createGenerateHandler(events: EventEmitter) {
maxFeatures
)
.catch((error) => {
logError(error, "Generation failed with error");
events.emit("spec-regeneration:event", {
type: "spec_regeneration_error",
logError(error, 'Generation failed with error');
events.emit('spec-regeneration:event', {
type: 'spec_regeneration_error',
error: getErrorMessage(error),
projectPath: projectPath,
});
})
.finally(() => {
logger.info("Generation task finished (success or error)");
logger.info('Generation task finished (success or error)');
setRunningState(false, null);
});

logger.info(
"Returning success response (generation running in background)"
);
logger.info('Returning success response (generation running in background)');
res.json({ success: true });
} catch (error) {
logError(error, "Generate spec route handler failed");
logError(error, 'Generate spec route handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,13 +2,10 @@
* Common utilities for auto-mode routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("AutoMode");
const logger = createLogger('AutoMode');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -4,35 +4,65 @@
* Uses the AutoModeService for real feature execution with Claude Agent SDK
*/

import { Router } from "express";
import type { AutoModeService } from "../../services/auto-mode-service.js";
import { createStopFeatureHandler } from "./routes/stop-feature.js";
import { createStatusHandler } from "./routes/status.js";
import { createRunFeatureHandler } from "./routes/run-feature.js";
import { createVerifyFeatureHandler } from "./routes/verify-feature.js";
import { createResumeFeatureHandler } from "./routes/resume-feature.js";
import { createContextExistsHandler } from "./routes/context-exists.js";
import { createAnalyzeProjectHandler } from "./routes/analyze-project.js";
import { createFollowUpFeatureHandler } from "./routes/follow-up-feature.js";
import { createCommitFeatureHandler } from "./routes/commit-feature.js";
import { createApprovePlanHandler } from "./routes/approve-plan.js";
import { Router } from 'express';
import type { AutoModeService } from '../../services/auto-mode-service.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createStopFeatureHandler } from './routes/stop-feature.js';
import { createStatusHandler } from './routes/status.js';
import { createRunFeatureHandler } from './routes/run-feature.js';
import { createVerifyFeatureHandler } from './routes/verify-feature.js';
import { createResumeFeatureHandler } from './routes/resume-feature.js';
import { createContextExistsHandler } from './routes/context-exists.js';
import { createAnalyzeProjectHandler } from './routes/analyze-project.js';
import { createFollowUpFeatureHandler } from './routes/follow-up-feature.js';
import { createCommitFeatureHandler } from './routes/commit-feature.js';
import { createApprovePlanHandler } from './routes/approve-plan.js';

export function createAutoModeRoutes(autoModeService: AutoModeService): Router {
const router = Router();

router.post("/stop-feature", createStopFeatureHandler(autoModeService));
router.post("/status", createStatusHandler(autoModeService));
router.post("/run-feature", createRunFeatureHandler(autoModeService));
router.post("/verify-feature", createVerifyFeatureHandler(autoModeService));
router.post("/resume-feature", createResumeFeatureHandler(autoModeService));
router.post("/context-exists", createContextExistsHandler(autoModeService));
router.post("/analyze-project", createAnalyzeProjectHandler(autoModeService));
router.post('/stop-feature', createStopFeatureHandler(autoModeService));
router.post('/status', validatePathParams('projectPath?'), createStatusHandler(autoModeService));
router.post(
"/follow-up-feature",
'/run-feature',
validatePathParams('projectPath'),
createRunFeatureHandler(autoModeService)
);
router.post(
'/verify-feature',
validatePathParams('projectPath'),
createVerifyFeatureHandler(autoModeService)
);
router.post(
'/resume-feature',
validatePathParams('projectPath'),
createResumeFeatureHandler(autoModeService)
);
router.post(
'/context-exists',
validatePathParams('projectPath'),
createContextExistsHandler(autoModeService)
);
router.post(
'/analyze-project',
validatePathParams('projectPath'),
createAnalyzeProjectHandler(autoModeService)
);
router.post(
'/follow-up-feature',
validatePathParams('projectPath', 'imagePaths[]'),
createFollowUpFeatureHandler(autoModeService)
);
router.post("/commit-feature", createCommitFeatureHandler(autoModeService));
router.post("/approve-plan", createApprovePlanHandler(autoModeService));
router.post(
'/commit-feature',
validatePathParams('projectPath', 'worktreePath?'),
createCommitFeatureHandler(autoModeService)
);
router.post(
'/approve-plan',
validatePathParams('projectPath'),
createApprovePlanHandler(autoModeService)
);

return router;
}

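Note: every mutating auto-mode route now passes its body paths through validatePathParams before the handler runs. The middleware's implementation is not part of this diff; the sketch below only illustrates the shape implied by the call sites ('projectPath', 'projectPath?', 'imagePaths[]'), and the isPathAllowed helper is a placeholder, not the project's real policy.

// Illustrative only - the real middleware lives in middleware/validate-paths.js and may differ.
import type { Request, Response, NextFunction, RequestHandler } from 'express';
import path from 'path';

// Placeholder policy: require absolute paths. The real check is not shown in this commit.
function isPathAllowed(p: string): boolean {
  return path.isAbsolute(p);
}

export function validatePathParamsSketch(...params: string[]): RequestHandler {
  return (req: Request, res: Response, next: NextFunction) => {
    for (const spec of params) {
      const optional = spec.endsWith('?');
      const isArray = spec.endsWith('[]');
      const key = spec.replace(/\?$|\[\]$/, '');
      const value = (req.body ?? {})[key];

      if (value === undefined || value === null) {
        if (optional) continue;
        res.status(400).json({ success: false, error: `${key} is required` });
        return;
      }

      const values = isArray ? (Array.isArray(value) ? value : []) : [value];
      for (const v of values) {
        if (typeof v !== 'string' || !isPathAllowed(v)) {
          res.status(403).json({ success: false, error: `${key} is not an allowed path` });
          return;
        }
      }
    }
    next();
  };
}

With a shape like this, validatePathParams('projectPath', 'worktreePath?') would reject a request whose worktreePath is disallowed while still treating the field as optional.
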
@@ -2,12 +2,12 @@
* POST /analyze-project endpoint - Analyze project
*/

import type { Request, Response } from "express";
import type { AutoModeService } from "../../../services/auto-mode-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';

const logger = createLogger("AutoMode");
const logger = createLogger('AutoMode');

export function createAnalyzeProjectHandler(autoModeService: AutoModeService) {
return async (req: Request, res: Response): Promise<void> => {
@@ -15,9 +15,7 @@ export function createAnalyzeProjectHandler(autoModeService: AutoModeService) {
const { projectPath } = req.body as { projectPath: string };

if (!projectPath) {
res
.status(400)
.json({ success: false, error: "projectPath is required" });
res.status(400).json({ success: false, error: 'projectPath is required' });
return;
}

@@ -26,9 +24,9 @@ export function createAnalyzeProjectHandler(autoModeService: AutoModeService) {
logger.error(`[AutoMode] Project analysis error:`, error);
});

res.json({ success: true, message: "Project analysis started" });
res.json({ success: true, message: 'Project analysis started' });
} catch (error) {
logError(error, "Analyze project failed");
logError(error, 'Analyze project failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,12 @@
* POST /approve-plan endpoint - Approve or reject a generated plan/spec
*/

import type { Request, Response } from "express";
import type { AutoModeService } from "../../../services/auto-mode-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';

const logger = createLogger("AutoMode");
const logger = createLogger('AutoMode');

export function createApprovePlanHandler(autoModeService: AutoModeService) {
return async (req: Request, res: Response): Promise<void> => {
@@ -23,15 +23,15 @@ export function createApprovePlanHandler(autoModeService: AutoModeService) {
if (!featureId) {
res.status(400).json({
success: false,
error: "featureId is required",
error: 'featureId is required',
});
return;
}

if (typeof approved !== "boolean") {
if (typeof approved !== 'boolean') {
res.status(400).json({
success: false,
error: "approved must be a boolean",
error: 'approved must be a boolean',
});
return;
}
@@ -41,9 +41,9 @@ export function createApprovePlanHandler(autoModeService: AutoModeService) {
// This supports cases where the server restarted while waiting for approval

logger.info(
`[AutoMode] Plan ${approved ? "approved" : "rejected"} for feature ${featureId}${
editedPlan ? " (with edits)" : ""
}${feedback ? ` - Feedback: ${feedback}` : ""}`
`[AutoMode] Plan ${approved ? 'approved' : 'rejected'} for feature ${featureId}${
editedPlan ? ' (with edits)' : ''
}${feedback ? ` - Feedback: ${feedback}` : ''}`
);

// Resolve the pending approval (with recovery support)
@@ -67,11 +67,11 @@ export function createApprovePlanHandler(autoModeService: AutoModeService) {
success: true,
approved,
message: approved
? "Plan approved - implementation will continue"
: "Plan rejected - feature execution stopped",
? 'Plan approved - implementation will continue'
: 'Plan rejected - feature execution stopped',
});
} catch (error) {
logError(error, "Approve plan failed");
logError(error, 'Approve plan failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,29 +2,28 @@
* POST /follow-up-feature endpoint - Follow up on a feature
*/

import type { Request, Response } from "express";
import type { AutoModeService } from "../../../services/auto-mode-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';

const logger = createLogger("AutoMode");
const logger = createLogger('AutoMode');

export function createFollowUpFeatureHandler(autoModeService: AutoModeService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const { projectPath, featureId, prompt, imagePaths, useWorktrees } =
req.body as {
projectPath: string;
featureId: string;
prompt: string;
imagePaths?: string[];
useWorktrees?: boolean;
};
const { projectPath, featureId, prompt, imagePaths, useWorktrees } = req.body as {
projectPath: string;
featureId: string;
prompt: string;
imagePaths?: string[];
useWorktrees?: boolean;
};

if (!projectPath || !featureId || !prompt) {
res.status(400).json({
success: false,
error: "projectPath, featureId, and prompt are required",
error: 'projectPath, featureId, and prompt are required',
});
return;
}
@@ -32,18 +31,9 @@ export function createFollowUpFeatureHandler(autoModeService: AutoModeService) {
// Start follow-up in background
// followUpFeature derives workDir from feature.branchName
autoModeService
.followUpFeature(
projectPath,
featureId,
prompt,
imagePaths,
useWorktrees ?? true
)
.followUpFeature(projectPath, featureId, prompt, imagePaths, useWorktrees ?? true)
.catch((error) => {
logger.error(
`[AutoMode] Follow up feature ${featureId} error:`,
error
);
logger.error(`[AutoMode] Follow up feature ${featureId} error:`, error);
})
.finally(() => {
// Release the starting slot when follow-up completes (success or error)
@@ -52,7 +42,7 @@ export function createFollowUpFeatureHandler(autoModeService: AutoModeService) {

res.json({ success: true });
} catch (error) {
logError(error, "Follow up feature failed");
logError(error, 'Follow up feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,12 @@
* POST /resume-feature endpoint - Resume a feature
*/

import type { Request, Response } from "express";
import type { AutoModeService } from "../../../services/auto-mode-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';

const logger = createLogger("AutoMode");
const logger = createLogger('AutoMode');

export function createResumeFeatureHandler(autoModeService: AutoModeService) {
return async (req: Request, res: Response): Promise<void> => {
@@ -21,7 +21,7 @@ export function createResumeFeatureHandler(autoModeService: AutoModeService) {
if (!projectPath || !featureId) {
res.status(400).json({
success: false,
error: "projectPath and featureId are required",
error: 'projectPath and featureId are required',
});
return;
}
@@ -36,7 +36,7 @@ export function createResumeFeatureHandler(autoModeService: AutoModeService) {

res.json({ success: true });
} catch (error) {
logError(error, "Resume feature failed");
logError(error, 'Resume feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,12 @@
* POST /run-feature endpoint - Run a single feature
*/

import type { Request, Response } from "express";
import type { AutoModeService } from "../../../services/auto-mode-service.js";
import { createLogger } from "../../../lib/logger.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';

const logger = createLogger("AutoMode");
const logger = createLogger('AutoMode');

export function createRunFeatureHandler(autoModeService: AutoModeService) {
return async (req: Request, res: Response): Promise<void> => {
@@ -21,7 +21,7 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {
if (!projectPath || !featureId) {
res.status(400).json({
success: false,
error: "projectPath and featureId are required",
error: 'projectPath and featureId are required',
});
return;
}
@@ -40,7 +40,7 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {

res.json({ success: true });
} catch (error) {
logError(error, "Run feature failed");
logError(error, 'Run feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

43 apps/server/src/routes/claude/index.ts Normal file
@@ -0,0 +1,43 @@
import { Router, Request, Response } from 'express';
import { ClaudeUsageService } from '../../services/claude-usage-service.js';

export function createClaudeRoutes(service: ClaudeUsageService): Router {
const router = Router();

// Get current usage (fetches from Claude CLI)
router.get('/usage', async (req: Request, res: Response) => {
try {
// Check if Claude CLI is available first
const isAvailable = await service.isAvailable();
if (!isAvailable) {
res.status(503).json({
error: 'Claude CLI not found',
message: "Please install Claude Code CLI and run 'claude login' to authenticate",
});
return;
}

const usage = await service.fetchUsageData();
res.json(usage);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';

if (message.includes('Authentication required') || message.includes('token_expired')) {
res.status(401).json({
error: 'Authentication required',
message: "Please run 'claude login' to authenticate",
});
} else if (message.includes('timed out')) {
res.status(504).json({
error: 'Command timed out',
message: 'The Claude CLI took too long to respond',
});
} else {
console.error('Error fetching usage:', error);
res.status(500).json({ error: message });
}
}
});

return router;
}

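For reference, a client call against the new usage route could look like the sketch below; the '/api/claude' mount prefix is an assumption, since the router registration is not shown in this hunk.

// Illustrative fetch against the usage route; the '/api/claude' prefix is assumed.
async function fetchClaudeUsage(baseUrl: string): Promise<unknown> {
  const res = await fetch(`${baseUrl}/api/claude/usage`);
  if (res.status === 503) throw new Error('Claude CLI not installed');
  if (res.status === 401) throw new Error("Run 'claude login' first");
  if (!res.ok) throw new Error(`Unexpected status ${res.status}`);
  return res.json(); // shape matches the ClaudeUsage type below
}
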
35 apps/server/src/routes/claude/types.ts Normal file
@@ -0,0 +1,35 @@
/**
* Claude Usage types for CLI-based usage tracking
*/

export type ClaudeUsage = {
sessionTokensUsed: number;
sessionLimit: number;
sessionPercentage: number;
sessionResetTime: string; // ISO date string
sessionResetText: string; // Raw text like "Resets 10:59am (Asia/Dubai)"

weeklyTokensUsed: number;
weeklyLimit: number;
weeklyPercentage: number;
weeklyResetTime: string; // ISO date string
weeklyResetText: string; // Raw text like "Resets Dec 22 at 7:59pm (Asia/Dubai)"

sonnetWeeklyTokensUsed: number;
sonnetWeeklyPercentage: number;
sonnetResetText: string; // Raw text like "Resets Dec 27 at 9:59am (Asia/Dubai)"

costUsed: number | null;
costLimit: number | null;
costCurrency: string | null;

lastUpdated: string; // ISO date string
userTimezone: string;
};

export type ClaudeStatus = {
indicator: {
color: 'green' | 'yellow' | 'orange' | 'red' | 'gray';
};
description: string;
};

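A minimal sketch of how a consumer might map these usage numbers onto the ClaudeStatus indicator; the percentage thresholds are illustrative assumptions, not values taken from this commit.

import type { ClaudeUsage, ClaudeStatus } from './types.js';

// Illustrative thresholds only; the real UI may map usage differently.
export function toStatus(usage: ClaudeUsage): ClaudeStatus {
  const pct = Math.max(usage.sessionPercentage, usage.weeklyPercentage);
  const color: ClaudeStatus['indicator']['color'] =
    pct >= 95 ? 'red' : pct >= 80 ? 'orange' : pct >= 60 ? 'yellow' : 'green';
  return {
    indicator: { color },
    description: `Session ${usage.sessionPercentage}% / Weekly ${usage.weeklyPercentage}% used`,
  };
}
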
@@ -2,373 +2,29 @@
* Common utilities shared across all route modules
*/

import { createLogger } from "../lib/logger.js";
import fs from "fs/promises";
import path from "path";
import { exec } from "child_process";
import { promisify } from "util";
import { createLogger } from '@automaker/utils';

// Re-export git utilities from shared package
export {
BINARY_EXTENSIONS,
GIT_STATUS_MAP,
type FileStatus,
isGitRepo,
parseGitStatus,
generateSyntheticDiffForNewFile,
appendUntrackedFileDiffs,
listAllFilesInDirectory,
generateDiffsForNonGitDirectory,
getGitRepositoryDiffs,
} from '@automaker/git-utils';

type Logger = ReturnType<typeof createLogger>;

const execAsync = promisify(exec);
const logger = createLogger("Common");

// Max file size for generating synthetic diffs (1MB)
const MAX_SYNTHETIC_DIFF_SIZE = 1024 * 1024;

// Binary file extensions to skip
const BINARY_EXTENSIONS = new Set([
".png", ".jpg", ".jpeg", ".gif", ".bmp", ".ico", ".webp", ".svg",
".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx",
".zip", ".tar", ".gz", ".rar", ".7z",
".exe", ".dll", ".so", ".dylib",
".mp3", ".mp4", ".wav", ".avi", ".mov", ".mkv",
".ttf", ".otf", ".woff", ".woff2", ".eot",
".db", ".sqlite", ".sqlite3",
".pyc", ".pyo", ".class", ".o", ".obj",
]);

// Status map for git status codes
// Git porcelain format uses XY where X=staging area, Y=working tree
const GIT_STATUS_MAP: Record<string, string> = {
M: "Modified",
A: "Added",
D: "Deleted",
R: "Renamed",
C: "Copied",
U: "Updated",
"?": "Untracked",
"!": "Ignored",
" ": "Unmodified",
};

/**
* Get a readable status text from git status codes
* Handles both single character and XY format status codes
*/
function getStatusText(indexStatus: string, workTreeStatus: string): string {
// Untracked files
if (indexStatus === "?" && workTreeStatus === "?") {
return "Untracked";
}

// Ignored files
if (indexStatus === "!" && workTreeStatus === "!") {
return "Ignored";
}

// Prioritize staging area status, then working tree
const primaryStatus = indexStatus !== " " && indexStatus !== "?" ? indexStatus : workTreeStatus;

// Handle combined statuses
if (indexStatus !== " " && indexStatus !== "?" && workTreeStatus !== " " && workTreeStatus !== "?") {
// Both staging and working tree have changes
const indexText = GIT_STATUS_MAP[indexStatus] || "Changed";
const workText = GIT_STATUS_MAP[workTreeStatus] || "Changed";
if (indexText === workText) {
return indexText;
}
return `${indexText} (staged), ${workText} (unstaged)`;
}

return GIT_STATUS_MAP[primaryStatus] || "Changed";
}

/**
* File status interface for git status results
*/
export interface FileStatus {
status: string;
path: string;
statusText: string;
}

/**
* Check if a file is likely binary based on extension
*/
function isBinaryFile(filePath: string): boolean {
const ext = path.extname(filePath).toLowerCase();
return BINARY_EXTENSIONS.has(ext);
}

/**
* Check if a path is a git repository
*/
export async function isGitRepo(repoPath: string): Promise<boolean> {
try {
await execAsync("git rev-parse --is-inside-work-tree", { cwd: repoPath });
return true;
} catch {
return false;
}
}

/**
* Parse the output of `git status --porcelain` into FileStatus array
* Git porcelain format: XY PATH where X=staging area status, Y=working tree status
* For renamed files: XY ORIG_PATH -> NEW_PATH
*/
export function parseGitStatus(statusOutput: string): FileStatus[] {
return statusOutput
.split("\n")
.filter(Boolean)
.map((line) => {
// Git porcelain format uses two status characters: XY
// X = status in staging area (index)
// Y = status in working tree
const indexStatus = line[0] || " ";
const workTreeStatus = line[1] || " ";

// File path starts at position 3 (after "XY ")
let filePath = line.slice(3);

// Handle renamed files (format: "R old_path -> new_path")
if (indexStatus === "R" || workTreeStatus === "R") {
const arrowIndex = filePath.indexOf(" -> ");
if (arrowIndex !== -1) {
filePath = filePath.slice(arrowIndex + 4); // Use new path
}
}

// Determine the primary status character for backwards compatibility
// Prioritize staging area status, then working tree
let primaryStatus: string;
if (indexStatus === "?" && workTreeStatus === "?") {
primaryStatus = "?"; // Untracked
} else if (indexStatus !== " " && indexStatus !== "?") {
primaryStatus = indexStatus; // Staged change
} else {
primaryStatus = workTreeStatus; // Working tree change
}

return {
status: primaryStatus,
path: filePath,
statusText: getStatusText(indexStatus, workTreeStatus),
};
});
}

/**
* Generate a synthetic unified diff for an untracked (new) file
* This is needed because `git diff HEAD` doesn't include untracked files
*/
export async function generateSyntheticDiffForNewFile(
basePath: string,
relativePath: string
): Promise<string> {
const fullPath = path.join(basePath, relativePath);

try {
// Check if it's a binary file
if (isBinaryFile(relativePath)) {
return `diff --git a/${relativePath} b/${relativePath}
new file mode 100644
index 0000000..0000000
Binary file ${relativePath} added
`;
}

// Get file stats to check size
const stats = await fs.stat(fullPath);
if (stats.size > MAX_SYNTHETIC_DIFF_SIZE) {
const sizeKB = Math.round(stats.size / 1024);
return `diff --git a/${relativePath} b/${relativePath}
new file mode 100644
index 0000000..0000000
--- /dev/null
+++ b/${relativePath}
@@ -0,0 +1 @@
+[File too large to display: ${sizeKB}KB]
`;
}

// Read file content
const content = await fs.readFile(fullPath, "utf-8");
const hasTrailingNewline = content.endsWith("\n");
const lines = content.split("\n");

// Remove trailing empty line if the file ends with newline
if (lines.length > 0 && lines.at(-1) === "") {
lines.pop();
}

// Generate diff format
const lineCount = lines.length;
const addedLines = lines.map(line => `+${line}`).join("\n");

let diff = `diff --git a/${relativePath} b/${relativePath}
new file mode 100644
index 0000000..0000000
--- /dev/null
+++ b/${relativePath}
@@ -0,0 +1,${lineCount} @@
${addedLines}`;

// Add "No newline at end of file" indicator if needed
if (!hasTrailingNewline && content.length > 0) {
diff += "\n\\ No newline at end of file";
}

return diff + "\n";
} catch (error) {
// Log the error for debugging
logger.error(`Failed to generate synthetic diff for ${fullPath}:`, error);
// Return a placeholder diff
return `diff --git a/${relativePath} b/${relativePath}
new file mode 100644
index 0000000..0000000
--- /dev/null
+++ b/${relativePath}
@@ -0,0 +1 @@
+[Unable to read file content]
`;
}
}

/**
* Generate synthetic diffs for all untracked files and combine with existing diff
*/
export async function appendUntrackedFileDiffs(
basePath: string,
existingDiff: string,
files: Array<{ status: string; path: string }>
): Promise<string> {
// Find untracked files (status "?")
const untrackedFiles = files.filter(f => f.status === "?");

if (untrackedFiles.length === 0) {
return existingDiff;
}

// Generate synthetic diffs for each untracked file
const syntheticDiffs = await Promise.all(
untrackedFiles.map(f => generateSyntheticDiffForNewFile(basePath, f.path))
);

// Combine existing diff with synthetic diffs
const combinedDiff = existingDiff + syntheticDiffs.join("");

return combinedDiff;
}

/**
* List all files in a directory recursively (for non-git repositories)
* Excludes hidden files/folders and common build artifacts
*/
export async function listAllFilesInDirectory(
basePath: string,
relativePath: string = ""
): Promise<string[]> {
const files: string[] = [];
const fullPath = path.join(basePath, relativePath);

// Directories to skip
const skipDirs = new Set([
"node_modules", ".git", ".automaker", "dist", "build",
".next", ".nuxt", "__pycache__", ".cache", "coverage"
]);

try {
const entries = await fs.readdir(fullPath, { withFileTypes: true });

for (const entry of entries) {
// Skip hidden files/folders (except we want to allow some)
if (entry.name.startsWith(".") && entry.name !== ".env") {
continue;
}

const entryRelPath = relativePath ? `${relativePath}/${entry.name}` : entry.name;

if (entry.isDirectory()) {
if (!skipDirs.has(entry.name)) {
const subFiles = await listAllFilesInDirectory(basePath, entryRelPath);
files.push(...subFiles);
}
} else if (entry.isFile()) {
files.push(entryRelPath);
}
}
} catch (error) {
// Log the error to help diagnose file system issues
logger.error(`Error reading directory ${fullPath}:`, error);
}

return files;
}

/**
* Generate diffs for all files in a non-git directory
* Treats all files as "new" files
*/
export async function generateDiffsForNonGitDirectory(
basePath: string
): Promise<{ diff: string; files: FileStatus[] }> {
const allFiles = await listAllFilesInDirectory(basePath);

const files: FileStatus[] = allFiles.map(filePath => ({
status: "?",
path: filePath,
statusText: "New",
}));

// Generate synthetic diffs for all files
const syntheticDiffs = await Promise.all(
files.map(f => generateSyntheticDiffForNewFile(basePath, f.path))
);

return {
diff: syntheticDiffs.join(""),
files,
};
}

/**
* Get git repository diffs for a given path
* Handles both git repos and non-git directories
*/
export async function getGitRepositoryDiffs(
repoPath: string
): Promise<{ diff: string; files: FileStatus[]; hasChanges: boolean }> {
// Check if it's a git repository
const isRepo = await isGitRepo(repoPath);

if (!isRepo) {
// Not a git repo - list all files and treat them as new
const result = await generateDiffsForNonGitDirectory(repoPath);
return {
diff: result.diff,
files: result.files,
hasChanges: result.files.length > 0,
};
}

// Get git diff and status
const { stdout: diff } = await execAsync("git diff HEAD", {
cwd: repoPath,
maxBuffer: 10 * 1024 * 1024,
});
const { stdout: status } = await execAsync("git status --porcelain", {
cwd: repoPath,
});

const files = parseGitStatus(status);

// Generate synthetic diffs for untracked (new) files
const combinedDiff = await appendUntrackedFileDiffs(repoPath, diff, files);

return {
diff: combinedDiff,
files,
hasChanges: files.length > 0,
};
}

/**
* Get error message from error object
*/
export function getErrorMessage(error: unknown): string {
return error instanceof Error ? error.message : "Unknown error";
return error instanceof Error ? error.message : 'Unknown error';
}

/**

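As a quick worked example of the porcelain parsing now re-exported from @automaker/git-utils, the snippet below shows the expected output for a small sample of `git status --porcelain` text; the sample input and the commented result are illustrative.

import { parseGitStatus } from '@automaker/git-utils';

// Sample porcelain output: one modified file, one untracked file, one rename.
const sample = ' M src/app.ts\n?? notes.md\nR  old.ts -> new.ts\n';
const files = parseGitStatus(sample);
// [
//   { status: 'M', path: 'src/app.ts', statusText: 'Modified' },
//   { status: '?', path: 'notes.md', statusText: 'Untracked' },
//   { status: 'R', path: 'new.ts', statusText: 'Renamed' },
// ]
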
@@ -5,18 +5,19 @@
* Supports modes: improve, technical, simplify, acceptance
*/

import type { Request, Response } from "express";
import { query } from "@anthropic-ai/claude-agent-sdk";
import { createLogger } from "../../../lib/logger.js";
import type { Request, Response } from 'express';
import { query } from '@anthropic-ai/claude-agent-sdk';
import { createLogger } from '@automaker/utils';
import { resolveModelString } from '@automaker/model-resolver';
import { CLAUDE_MODEL_MAP } from '@automaker/types';
import {
getSystemPrompt,
buildUserPrompt,
isValidEnhancementMode,
type EnhancementMode,
} from "../../../lib/enhancement-prompts.js";
import { resolveModelString, CLAUDE_MODEL_MAP } from "../../../lib/model-resolver.js";
} from '../../../lib/enhancement-prompts.js';

const logger = createLogger("EnhancePrompt");
const logger = createLogger('EnhancePrompt');

/**
* Request body for the enhance endpoint
@@ -62,16 +63,16 @@ async function extractTextFromStream(
};
}>
): Promise<string> {
let responseText = "";
let responseText = '';

for await (const msg of stream) {
if (msg.type === "assistant" && msg.message?.content) {
if (msg.type === 'assistant' && msg.message?.content) {
for (const block of msg.message.content) {
if (block.type === "text" && block.text) {
if (block.type === 'text' && block.text) {
responseText += block.text;
}
}
} else if (msg.type === "result" && msg.subtype === "success") {
} else if (msg.type === 'result' && msg.subtype === 'success') {
responseText = msg.result || responseText;
}
}
@@ -84,29 +85,25 @@ async function extractTextFromStream(
*
* @returns Express request handler for text enhancement
*/
export function createEnhanceHandler(): (
req: Request,
res: Response
) => Promise<void> {
export function createEnhanceHandler(): (req: Request, res: Response) => Promise<void> {
return async (req: Request, res: Response): Promise<void> => {
try {
const { originalText, enhancementMode, model } =
req.body as EnhanceRequestBody;
const { originalText, enhancementMode, model } = req.body as EnhanceRequestBody;

// Validate required fields
if (!originalText || typeof originalText !== "string") {
if (!originalText || typeof originalText !== 'string') {
const response: EnhanceErrorResponse = {
success: false,
error: "originalText is required and must be a string",
error: 'originalText is required and must be a string',
};
res.status(400).json(response);
return;
}

if (!enhancementMode || typeof enhancementMode !== "string") {
if (!enhancementMode || typeof enhancementMode !== 'string') {
const response: EnhanceErrorResponse = {
success: false,
error: "enhancementMode is required and must be a string",
error: 'enhancementMode is required and must be a string',
};
res.status(400).json(response);
return;
@@ -117,7 +114,7 @@ export function createEnhanceHandler(): (
if (trimmedText.length === 0) {
const response: EnhanceErrorResponse = {
success: false,
error: "originalText cannot be empty",
error: 'originalText cannot be empty',
};
res.status(400).json(response);
return;
@@ -127,11 +124,9 @@ export function createEnhanceHandler(): (
const normalizedMode = enhancementMode.toLowerCase();
const validMode: EnhancementMode = isValidEnhancementMode(normalizedMode)
? normalizedMode
: "improve";
: 'improve';

logger.info(
`Enhancing text with mode: ${validMode}, length: ${trimmedText.length} chars`
);
logger.info(`Enhancing text with mode: ${validMode}, length: ${trimmedText.length} chars`);

// Get the system prompt for this mode
const systemPrompt = getSystemPrompt(validMode);
@@ -154,7 +149,7 @@ export function createEnhanceHandler(): (
systemPrompt,
maxTurns: 1,
allowedTools: [],
permissionMode: "acceptEdits",
permissionMode: 'acceptEdits',
},
});

@@ -162,18 +157,16 @@ export function createEnhanceHandler(): (
const enhancedText = await extractTextFromStream(stream);

if (!enhancedText || enhancedText.trim().length === 0) {
logger.warn("Received empty response from Claude");
logger.warn('Received empty response from Claude');
const response: EnhanceErrorResponse = {
success: false,
error: "Failed to generate enhanced text - empty response",
error: 'Failed to generate enhanced text - empty response',
};
res.status(500).json(response);
return;
}

logger.info(
`Enhancement complete, output length: ${enhancedText.length} chars`
);
logger.info(`Enhancement complete, output length: ${enhancedText.length} chars`);

const response: EnhanceSuccessResponse = {
success: true,
@@ -181,9 +174,8 @@ export function createEnhanceHandler(): (
};
res.json(response);
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error occurred";
logger.error("Enhancement failed:", errorMessage);
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
logger.error('Enhancement failed:', errorMessage);

const response: EnhanceErrorResponse = {
success: false,

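Both this handler and the generate-title handler below follow the same single-turn pattern against the Claude Agent SDK; a condensed sketch of that pattern, with the model selection and prompt construction left out:

import { query } from '@anthropic-ai/claude-agent-sdk';

// Condensed from the handlers in this diff: one turn, no tools, collect text blocks.
async function runSingleTurn(prompt: string, systemPrompt: string): Promise<string> {
  const stream = query({
    prompt,
    options: { systemPrompt, maxTurns: 1, allowedTools: [], permissionMode: 'acceptEdits' },
  });

  let text = '';
  // Message shapes mirror extractTextFromStream above; typed loosely here for brevity.
  for await (const msg of stream as AsyncIterable<any>) {
    if (msg.type === 'assistant' && msg.message?.content) {
      for (const block of msg.message.content) {
        if (block.type === 'text' && block.text) text += block.text;
      }
    } else if (msg.type === 'result' && msg.subtype === 'success') {
      text = msg.result || text;
    }
  }
  return text;
}
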
@@ -2,13 +2,10 @@
* Common utilities for features routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Features");
const logger = createLogger('Features');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,26 +2,27 @@
* Features routes - HTTP API for feature management
*/

import { Router } from "express";
import { FeatureLoader } from "../../services/feature-loader.js";
import { createListHandler } from "./routes/list.js";
import { createGetHandler } from "./routes/get.js";
import { createCreateHandler } from "./routes/create.js";
import { createUpdateHandler } from "./routes/update.js";
import { createDeleteHandler } from "./routes/delete.js";
import { createAgentOutputHandler } from "./routes/agent-output.js";
import { createGenerateTitleHandler } from "./routes/generate-title.js";
import { Router } from 'express';
import { FeatureLoader } from '../../services/feature-loader.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createListHandler } from './routes/list.js';
import { createGetHandler } from './routes/get.js';
import { createCreateHandler } from './routes/create.js';
import { createUpdateHandler } from './routes/update.js';
import { createDeleteHandler } from './routes/delete.js';
import { createAgentOutputHandler } from './routes/agent-output.js';
import { createGenerateTitleHandler } from './routes/generate-title.js';

export function createFeaturesRoutes(featureLoader: FeatureLoader): Router {
const router = Router();

router.post("/list", createListHandler(featureLoader));
router.post("/get", createGetHandler(featureLoader));
router.post("/create", createCreateHandler(featureLoader));
router.post("/update", createUpdateHandler(featureLoader));
router.post("/delete", createDeleteHandler(featureLoader));
router.post("/agent-output", createAgentOutputHandler(featureLoader));
router.post("/generate-title", createGenerateTitleHandler());
router.post('/list', validatePathParams('projectPath'), createListHandler(featureLoader));
router.post('/get', validatePathParams('projectPath'), createGetHandler(featureLoader));
router.post('/create', validatePathParams('projectPath'), createCreateHandler(featureLoader));
router.post('/update', validatePathParams('projectPath'), createUpdateHandler(featureLoader));
router.post('/delete', validatePathParams('projectPath'), createDeleteHandler(featureLoader));
router.post('/agent-output', createAgentOutputHandler(featureLoader));
router.post('/generate-title', createGenerateTitleHandler());

return router;
}

@@ -2,13 +2,10 @@
* POST /create endpoint - Create a new feature
*/

import type { Request, Response } from "express";
import {
FeatureLoader,
type Feature,
} from "../../../services/feature-loader.js";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import type { Feature } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';

export function createCreateHandler(featureLoader: FeatureLoader) {
return async (req: Request, res: Response): Promise<void> => {
@@ -19,22 +16,17 @@ export function createCreateHandler(featureLoader: FeatureLoader) {
};

if (!projectPath || !feature) {
res
.status(400)
.json({
success: false,
error: "projectPath and feature are required",
});
res.status(400).json({
success: false,
error: 'projectPath and feature are required',
});
return;
}

// Add project path to allowed paths
addAllowedPath(projectPath);

const created = await featureLoader.create(projectPath, feature);
res.json({ success: true, feature: created });
} catch (error) {
logError(error, "Create feature failed");
logError(error, 'Create feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -4,12 +4,12 @@
* Uses Claude Haiku to generate a short, descriptive title from feature description.
*/

import type { Request, Response } from "express";
import { query } from "@anthropic-ai/claude-agent-sdk";
import { createLogger } from "../../../lib/logger.js";
import { CLAUDE_MODEL_MAP } from "../../../lib/model-resolver.js";
import type { Request, Response } from 'express';
import { query } from '@anthropic-ai/claude-agent-sdk';
import { createLogger } from '@automaker/utils';
import { CLAUDE_MODEL_MAP } from '@automaker/model-resolver';

const logger = createLogger("GenerateTitle");
const logger = createLogger('GenerateTitle');

interface GenerateTitleRequestBody {
description: string;
@@ -44,16 +44,16 @@ async function extractTextFromStream(
};
}>
): Promise<string> {
let responseText = "";
let responseText = '';

for await (const msg of stream) {
if (msg.type === "assistant" && msg.message?.content) {
if (msg.type === 'assistant' && msg.message?.content) {
for (const block of msg.message.content) {
if (block.type === "text" && block.text) {
if (block.type === 'text' && block.text) {
responseText += block.text;
}
}
} else if (msg.type === "result" && msg.subtype === "success") {
} else if (msg.type === 'result' && msg.subtype === 'success') {
responseText = msg.result || responseText;
}
}
@@ -61,18 +61,15 @@ async function extractTextFromStream(
return responseText;
}

export function createGenerateTitleHandler(): (
req: Request,
res: Response
) => Promise<void> {
export function createGenerateTitleHandler(): (req: Request, res: Response) => Promise<void> {
return async (req: Request, res: Response): Promise<void> => {
try {
const { description } = req.body as GenerateTitleRequestBody;

if (!description || typeof description !== "string") {
if (!description || typeof description !== 'string') {
const response: GenerateTitleErrorResponse = {
success: false,
error: "description is required and must be a string",
error: 'description is required and must be a string',
};
res.status(400).json(response);
return;
@@ -82,7 +79,7 @@ export function createGenerateTitleHandler(): (
if (trimmedDescription.length === 0) {
const response: GenerateTitleErrorResponse = {
success: false,
error: "description cannot be empty",
error: 'description cannot be empty',
};
res.status(400).json(response);
return;
@@ -99,17 +96,17 @@ export function createGenerateTitleHandler(): (
systemPrompt: SYSTEM_PROMPT,
maxTurns: 1,
allowedTools: [],
permissionMode: "acceptEdits",
permissionMode: 'acceptEdits',
},
});

const title = await extractTextFromStream(stream);

if (!title || title.trim().length === 0) {
logger.warn("Received empty response from Claude");
logger.warn('Received empty response from Claude');
const response: GenerateTitleErrorResponse = {
success: false,
error: "Failed to generate title - empty response",
error: 'Failed to generate title - empty response',
};
res.status(500).json(response);
return;
@@ -123,9 +120,8 @@ export function createGenerateTitleHandler(): (
};
res.json(response);
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error occurred";
logger.error("Title generation failed:", errorMessage);
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
logger.error('Title generation failed:', errorMessage);

const response: GenerateTitleErrorResponse = {
success: false,

@@ -2,10 +2,9 @@
* POST /list endpoint - List all features for a project
*/

import type { Request, Response } from "express";
import { FeatureLoader } from "../../../services/feature-loader.js";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import { getErrorMessage, logError } from '../common.js';

export function createListHandler(featureLoader: FeatureLoader) {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,19 +12,14 @@ export function createListHandler(featureLoader: FeatureLoader) {
const { projectPath } = req.body as { projectPath: string };

if (!projectPath) {
res
.status(400)
.json({ success: false, error: "projectPath is required" });
res.status(400).json({ success: false, error: 'projectPath is required' });
return;
}

// Add project path to allowed paths
addAllowedPath(projectPath);

const features = await featureLoader.getAll(projectPath);
res.json({ success: true, features });
} catch (error) {
logError(error, "List features failed");
logError(error, 'List features failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,10 @@
* POST /update endpoint - Update a feature
*/

import type { Request, Response } from "express";
import {
FeatureLoader,
type Feature,
} from "../../../services/feature-loader.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import type { Feature } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';

export function createUpdateHandler(featureLoader: FeatureLoader) {
return async (req: Request, res: Response): Promise<void> => {
@@ -21,19 +19,15 @@ export function createUpdateHandler(featureLoader: FeatureLoader) {
if (!projectPath || !featureId || !updates) {
res.status(400).json({
success: false,
error: "projectPath, featureId, and updates are required",
error: 'projectPath, featureId, and updates are required',
});
return;
}

const updated = await featureLoader.update(
projectPath,
featureId,
updates
);
const updated = await featureLoader.update(projectPath, featureId, updates);
res.json({ success: true, feature: updated });
} catch (error) {
logError(error, "Update feature failed");
logError(error, 'Update feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,13 +2,10 @@
* Common utilities for fs routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("FS");
const logger = createLogger('FS');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,33 +2,35 @@
* POST /browse endpoint - Browse directories for file browser UI
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import os from "os";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import os from 'os';
import path from 'path';
import { getAllowedRootDirectory, PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createBrowseHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { dirPath } = req.body as { dirPath?: string };

// Default to home directory if no path provided
const targetPath = dirPath ? path.resolve(dirPath) : os.homedir();
// Default to ALLOWED_ROOT_DIRECTORY if set, otherwise home directory
const defaultPath = getAllowedRootDirectory() || os.homedir();
const targetPath = dirPath ? path.resolve(dirPath) : defaultPath;

// Detect available drives on Windows
const detectDrives = async (): Promise<string[]> => {
if (os.platform() !== "win32") {
if (os.platform() !== 'win32') {
return [];
}

const drives: string[] = [];
const letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
const letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';

for (const letter of letters) {
const drivePath = `${letter}:\\`;
try {
await fs.access(drivePath);
await secureFs.access(drivePath);
drives.push(drivePath);
} catch {
// Drive doesn't exist, skip it
@@ -46,21 +48,19 @@ export function createBrowseHandler() {
const drives = await detectDrives();

try {
const stats = await fs.stat(targetPath);
const stats = await secureFs.stat(targetPath);

if (!stats.isDirectory()) {
res
.status(400)
.json({ success: false, error: "Path is not a directory" });
res.status(400).json({ success: false, error: 'Path is not a directory' });
return;
}

// Read directory contents
const entries = await fs.readdir(targetPath, { withFileTypes: true });
const entries = await secureFs.readdir(targetPath, { withFileTypes: true });

// Filter for directories only and add parent directory option
const directories = entries
.filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
.filter((entry) => entry.isDirectory() && !entry.name.startsWith('.'))
.map((entry) => ({
name: entry.name,
path: path.join(targetPath, entry.name),
@@ -76,10 +76,8 @@ export function createBrowseHandler() {
});
} catch (error) {
// Handle permission errors gracefully - still return path info so user can navigate away
const errorMessage =
error instanceof Error ? error.message : "Failed to read directory";
const isPermissionError =
errorMessage.includes("EPERM") || errorMessage.includes("EACCES");
const errorMessage = error instanceof Error ? error.message : 'Failed to read directory';
const isPermissionError = errorMessage.includes('EPERM') || errorMessage.includes('EACCES');

if (isPermissionError) {
// Return success with empty directories so user can still navigate to parent
@@ -90,7 +88,7 @@ export function createBrowseHandler() {
directories: [],
drives,
warning:
"Permission denied - grant Full Disk Access to Terminal in System Preferences > Privacy & Security",
'Permission denied - grant Full Disk Access to Terminal in System Preferences > Privacy & Security',
});
} else {
res.status(400).json({
@@ -100,7 +98,13 @@ export function createBrowseHandler() {
}
}
} catch (error) {
logError(error, "Browse directories failed");
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

logError(error, 'Browse directories failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,11 +2,11 @@
* POST /delete-board-background endpoint - Delete board background image
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import { getBoardDir } from "../../../lib/automaker-paths.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
import { getBoardDir } from '@automaker/platform';

export function createDeleteBoardBackgroundHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -16,7 +16,7 @@ export function createDeleteBoardBackgroundHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
error: "projectPath is required",
error: 'projectPath is required',
});
return;
}
@@ -26,10 +26,10 @@ export function createDeleteBoardBackgroundHandler() {

try {
// Try to remove all background files in the board directory
const files = await fs.readdir(boardDir);
const files = await secureFs.readdir(boardDir);
for (const file of files) {
if (file.startsWith("background")) {
await fs.unlink(path.join(boardDir, file));
if (file.startsWith('background')) {
await secureFs.unlink(path.join(boardDir, file));
}
}
} catch {
@@ -38,7 +38,7 @@ export function createDeleteBoardBackgroundHandler() {

res.json({ success: true });
} catch (error) {
logError(error, "Delete board background failed");
logError(error, 'Delete board background failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,10 +2,10 @@
* POST /delete endpoint - Delete file
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createDeleteHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,16 +13,21 @@ export function createDeleteHandler() {
const { filePath } = req.body as { filePath: string };

if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}

const resolvedPath = validatePath(filePath);
await fs.rm(resolvedPath, { recursive: true });
await secureFs.rm(filePath, { recursive: true });

res.json({ success: true });
} catch (error) {
logError(error, "Delete file failed");
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

logError(error, 'Delete file failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

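The fs routes now delegate to lib/secure-fs.js instead of calling fs/promises on a manually validated path. That module's implementation is not shown in this commit; the sketch below only illustrates the general idea (resolve, check against an allowed root such as ALLOWED_ROOT_DIRECTORY, then delegate), and the plain Error here stands in for the real PathNotAllowedError.

// Illustrative only - not the project's actual secure-fs implementation.
import fs from 'fs/promises';
import path from 'path';

const ALLOWED_ROOT = process.env.ALLOWED_ROOT_DIRECTORY ?? process.cwd();

// Resolve the path and reject anything outside the allowed root.
function assertAllowed(p: string): string {
  const root = path.resolve(ALLOWED_ROOT);
  const resolved = path.resolve(p);
  if (resolved !== root && !resolved.startsWith(root + path.sep)) {
    throw new Error(`Path not allowed: ${resolved}`); // real code throws PathNotAllowedError
  }
  return resolved;
}

export async function rm(p: string, opts?: { recursive?: boolean }): Promise<void> {
  await fs.rm(assertAllowed(p), opts);
}

export async function readFile(p: string): Promise<Buffer> {
  return fs.readFile(assertAllowed(p));
}
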
@@ -2,10 +2,10 @@
* POST /exists endpoint - Check if file/directory exists
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createExistsHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,22 +13,28 @@ export function createExistsHandler() {
const { filePath } = req.body as { filePath: string };

if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}

// For exists, we check but don't require the path to be pre-allowed
// This allows the UI to validate user-entered paths
const resolvedPath = path.resolve(filePath);

try {
await fs.access(resolvedPath);
await secureFs.access(filePath);
res.json({ success: true, exists: true });
} catch {
} catch (accessError) {
// Check if it's a path not allowed error vs file not existing
if (accessError instanceof PathNotAllowedError) {
throw accessError;
}
res.json({ success: true, exists: false });
}
} catch (error) {
logError(error, "Check exists failed");
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

logError(error, 'Check exists failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,10 +2,11 @@
|
||||
* GET /image endpoint - Serve image files
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
import path from 'path';
|
||||
import { PathNotAllowedError } from '@automaker/platform';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createImageHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -16,7 +17,7 @@ export function createImageHandler() {
|
||||
};
|
||||
|
||||
if (!imagePath) {
|
||||
res.status(400).json({ success: false, error: "path is required" });
|
||||
res.status(400).json({ success: false, error: 'path is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -24,40 +25,41 @@ export function createImageHandler() {
|
||||
const fullPath = path.isAbsolute(imagePath)
|
||||
? imagePath
|
||||
: projectPath
|
||||
? path.join(projectPath, imagePath)
|
||||
: imagePath;
|
||||
? path.join(projectPath, imagePath)
|
||||
: imagePath;
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(fullPath);
|
||||
} catch {
|
||||
res.status(404).json({ success: false, error: "Image not found" });
|
||||
await secureFs.access(fullPath);
|
||||
} catch (accessError) {
|
||||
if (accessError instanceof PathNotAllowedError) {
|
||||
res.status(403).json({ success: false, error: 'Path not allowed' });
|
||||
return;
|
||||
}
|
||||
res.status(404).json({ success: false, error: 'Image not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Read the file
|
||||
const buffer = await fs.readFile(fullPath);
|
||||
const buffer = await secureFs.readFile(fullPath);
|
||||
|
||||
// Determine MIME type from extension
|
||||
const ext = path.extname(fullPath).toLowerCase();
|
||||
const mimeTypes: Record<string, string> = {
|
||||
".png": "image/png",
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
".gif": "image/gif",
|
||||
".webp": "image/webp",
|
||||
".svg": "image/svg+xml",
|
||||
".bmp": "image/bmp",
|
||||
'.png': 'image/png',
|
||||
'.jpg': 'image/jpeg',
|
||||
'.jpeg': 'image/jpeg',
|
||||
'.gif': 'image/gif',
|
||||
'.webp': 'image/webp',
|
||||
'.svg': 'image/svg+xml',
|
||||
'.bmp': 'image/bmp',
|
||||
};
|
||||
|
||||
res.setHeader(
|
||||
"Content-Type",
|
||||
mimeTypes[ext] || "application/octet-stream"
|
||||
);
|
||||
res.setHeader("Cache-Control", "public, max-age=3600");
|
||||
res.setHeader('Content-Type', mimeTypes[ext] || 'application/octet-stream');
|
||||
res.setHeader('Cache-Control', 'public, max-age=3600');
|
||||
res.send(buffer);
|
||||
} catch (error) {
|
||||
logError(error, "Serve image failed");
|
||||
logError(error, 'Serve image failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
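
Note: a client would fetch this endpoint with the image path and, for relative paths, the project path to resolve against. A minimal sketch, assuming the router is mounted under `/api/fs` and the query parameters are named `path` and `projectPath` — neither the prefix nor the parameter names are confirmed by this hunk:

```ts
// Hypothetical client helper; endpoint prefix and query names are assumptions.
async function fetchProjectImage(imagePath: string, projectPath?: string): Promise<Blob> {
  const params = new URLSearchParams({ path: imagePath });
  if (projectPath) params.set('projectPath', projectPath);

  const res = await fetch(`/api/fs/image?${params.toString()}`);
  if (!res.ok) {
    // 403 = path outside the allow-list, 404 = image missing (see handler above)
    throw new Error(`Image request failed with status ${res.status}`);
  }
  return res.blob(); // Content-Type is derived from the file extension on the server
}
```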

@@ -3,11 +3,11 @@
* Handles symlinks safely to avoid ELOOP errors
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createMkdirHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -15,7 +15,7 @@ export function createMkdirHandler() {
const { dirPath } = req.body as { dirPath: string };

if (!dirPath) {
res.status(400).json({ success: false, error: "dirPath is required" });
res.status(400).json({ success: false, error: 'dirPath is required' });
return;
}

@@ -23,45 +23,47 @@ export function createMkdirHandler() {

// Check if path already exists using lstat (doesn't follow symlinks)
try {
const stats = await fs.lstat(resolvedPath);
const stats = await secureFs.lstat(resolvedPath);
// Path exists - if it's a directory or symlink, consider it success
if (stats.isDirectory() || stats.isSymbolicLink()) {
addAllowedPath(resolvedPath);
res.json({ success: true });
return;
}
// It's a file - can't create directory
res.status(400).json({
success: false,
error: "Path exists and is not a directory",
error: 'Path exists and is not a directory',
});
return;
} catch (statError: any) {
// ENOENT means path doesn't exist - we should create it
if (statError.code !== "ENOENT") {
if (statError.code !== 'ENOENT') {
// Some other error (could be ELOOP in parent path)
throw statError;
}
}

// Path doesn't exist, create it
await fs.mkdir(resolvedPath, { recursive: true });

// Add the new directory to allowed paths for tracking
addAllowedPath(resolvedPath);
await secureFs.mkdir(resolvedPath, { recursive: true });

res.json({ success: true });
} catch (error: any) {
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

// Handle ELOOP specifically
if (error.code === "ELOOP") {
logError(error, "Create directory failed - symlink loop detected");
if (error.code === 'ELOOP') {
logError(error, 'Create directory failed - symlink loop detected');
res.status(400).json({
success: false,
error: "Cannot create directory: symlink loop detected in path",
error: 'Cannot create directory: symlink loop detected in path',
});
return;
}
logError(error, "Create directory failed");
logError(error, 'Create directory failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,26 +2,21 @@
* POST /read endpoint - Read file
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

// Optional files that are expected to not exist in new projects
// Don't log ENOENT errors for these to reduce noise
const OPTIONAL_FILES = ["categories.json", "app_spec.txt"];
const OPTIONAL_FILES = ['categories.json', 'app_spec.txt'];

function isOptionalFile(filePath: string): boolean {
return OPTIONAL_FILES.some((optionalFile) => filePath.endsWith(optionalFile));
}

function isENOENT(error: unknown): boolean {
return (
error !== null &&
typeof error === "object" &&
"code" in error &&
error.code === "ENOENT"
);
return error !== null && typeof error === 'object' && 'code' in error && error.code === 'ENOENT';
}

export function createReadHandler() {
@@ -30,19 +25,24 @@ export function createReadHandler() {
const { filePath } = req.body as { filePath: string };

if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}

const resolvedPath = validatePath(filePath);
const content = await fs.readFile(resolvedPath, "utf-8");
const content = await secureFs.readFile(filePath, 'utf-8');

res.json({ success: true, content });
} catch (error) {
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

// Don't log ENOENT errors for optional files (expected to be missing in new projects)
const shouldLog = !(isENOENT(error) && isOptionalFile(req.body?.filePath || ""));
const shouldLog = !(isENOENT(error) && isOptionalFile(req.body?.filePath || ''));
if (shouldLog) {
logError(error, "Read file failed");
logError(error, 'Read file failed');
}
res.status(500).json({ success: false, error: getErrorMessage(error) });
}

@@ -2,10 +2,10 @@
* POST /readdir endpoint - Read directory
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createReaddirHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,12 +13,11 @@ export function createReaddirHandler() {
const { dirPath } = req.body as { dirPath: string };

if (!dirPath) {
res.status(400).json({ success: false, error: "dirPath is required" });
res.status(400).json({ success: false, error: 'dirPath is required' });
return;
}

const resolvedPath = validatePath(dirPath);
const entries = await fs.readdir(resolvedPath, { withFileTypes: true });
const entries = await secureFs.readdir(dirPath, { withFileTypes: true });

const result = entries.map((entry) => ({
name: entry.name,
@@ -28,7 +27,13 @@ export function createReaddirHandler() {

res.json({ success: true, entries: result });
} catch (error) {
logError(error, "Read directory failed");
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

logError(error, 'Read directory failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,11 +2,10 @@
* POST /resolve-directory endpoint - Resolve directory path from directory name
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';

export function createResolveDirectoryHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -18,9 +17,7 @@ export function createResolveDirectoryHandler() {
};

if (!directoryName) {
res
.status(400)
.json({ success: false, error: "directoryName is required" });
res.status(400).json({ success: false, error: 'directoryName is required' });
return;
}

@@ -28,9 +25,8 @@ export function createResolveDirectoryHandler() {
if (path.isAbsolute(directoryName) || directoryName.includes(path.sep)) {
try {
const resolvedPath = path.resolve(directoryName);
const stats = await fs.stat(resolvedPath);
const stats = await secureFs.stat(resolvedPath);
if (stats.isDirectory()) {
addAllowedPath(resolvedPath);
res.json({
success: true,
path: resolvedPath,
@@ -45,17 +41,11 @@ export function createResolveDirectoryHandler() {
// Search for directory in common locations
const searchPaths: string[] = [
process.cwd(), // Current working directory
process.env.HOME || process.env.USERPROFILE || "", // User home
path.join(
process.env.HOME || process.env.USERPROFILE || "",
"Documents"
),
path.join(process.env.HOME || process.env.USERPROFILE || "", "Desktop"),
process.env.HOME || process.env.USERPROFILE || '', // User home
path.join(process.env.HOME || process.env.USERPROFILE || '', 'Documents'),
path.join(process.env.HOME || process.env.USERPROFILE || '', 'Desktop'),
// Common project locations
path.join(
process.env.HOME || process.env.USERPROFILE || "",
"Projects"
),
path.join(process.env.HOME || process.env.USERPROFILE || '', 'Projects'),
].filter(Boolean);

// Also check parent of current working directory
@@ -72,7 +62,7 @@ export function createResolveDirectoryHandler() {
for (const searchPath of searchPaths) {
try {
const candidatePath = path.join(searchPath, directoryName);
const stats = await fs.stat(candidatePath);
const stats = await secureFs.stat(candidatePath);

if (stats.isDirectory()) {
// Verify it matches by checking for sample files
@@ -80,15 +70,15 @@ export function createResolveDirectoryHandler() {
let matches = 0;
for (const sampleFile of sampleFiles.slice(0, 5)) {
// Remove directory name prefix from sample file path
const relativeFile = sampleFile.startsWith(directoryName + "/")
const relativeFile = sampleFile.startsWith(directoryName + '/')
? sampleFile.substring(directoryName.length + 1)
: sampleFile.split("/").slice(1).join("/") ||
sampleFile.split("/").pop() ||
: sampleFile.split('/').slice(1).join('/') ||
sampleFile.split('/').pop() ||
sampleFile;

try {
const filePath = path.join(candidatePath, relativeFile);
await fs.access(filePath);
await secureFs.access(filePath);
matches++;
} catch {
// File doesn't exist, continue checking
@@ -102,7 +92,6 @@ export function createResolveDirectoryHandler() {
}

// Found matching directory
addAllowedPath(candidatePath);
res.json({
success: true,
path: candidatePath,
@@ -121,7 +110,7 @@ export function createResolveDirectoryHandler() {
error: `Directory "${directoryName}" not found in common locations. Please ensure the directory exists.`,
});
} catch (error) {
logError(error, "Resolve directory failed");
logError(error, 'Resolve directory failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,11 @@
* POST /save-board-background endpoint - Save board background image
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import { getBoardDir } from "../../../lib/automaker-paths.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
import { getBoardDir } from '@automaker/platform';

export function createSaveBoardBackgroundHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -22,34 +21,31 @@ export function createSaveBoardBackgroundHandler() {
if (!data || !filename || !projectPath) {
res.status(400).json({
success: false,
error: "data, filename, and projectPath are required",
error: 'data, filename, and projectPath are required',
});
return;
}

// Get board directory
const boardDir = getBoardDir(projectPath);
await fs.mkdir(boardDir, { recursive: true });
await secureFs.mkdir(boardDir, { recursive: true });

// Decode base64 data (remove data URL prefix if present)
const base64Data = data.replace(/^data:image\/\w+;base64,/, "");
const buffer = Buffer.from(base64Data, "base64");
const base64Data = data.replace(/^data:image\/\w+;base64,/, '');
const buffer = Buffer.from(base64Data, 'base64');

// Use a fixed filename for the board background (overwrite previous)
const ext = path.extname(filename) || ".png";
const ext = path.extname(filename) || '.png';
const uniqueFilename = `background${ext}`;
const filePath = path.join(boardDir, uniqueFilename);

// Write file
await fs.writeFile(filePath, buffer);

// Add board directory to allowed paths
addAllowedPath(boardDir);
await secureFs.writeFile(filePath, buffer);

// Return the absolute path
res.json({ success: true, path: filePath });
} catch (error) {
logError(error, "Save board background failed");
logError(error, 'Save board background failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,11 @@
* POST /save-image endpoint - Save image to .automaker images directory
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import { getImagesDir } from "../../../lib/automaker-paths.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';
import { getImagesDir } from '@automaker/platform';

export function createSaveImageHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -22,36 +21,33 @@ export function createSaveImageHandler() {
if (!data || !filename || !projectPath) {
res.status(400).json({
success: false,
error: "data, filename, and projectPath are required",
error: 'data, filename, and projectPath are required',
});
return;
}

// Get images directory
const imagesDir = getImagesDir(projectPath);
await fs.mkdir(imagesDir, { recursive: true });
await secureFs.mkdir(imagesDir, { recursive: true });

// Decode base64 data (remove data URL prefix if present)
const base64Data = data.replace(/^data:image\/\w+;base64,/, "");
const buffer = Buffer.from(base64Data, "base64");
const base64Data = data.replace(/^data:image\/\w+;base64,/, '');
const buffer = Buffer.from(base64Data, 'base64');

// Generate unique filename with timestamp
const timestamp = Date.now();
const ext = path.extname(filename) || ".png";
const ext = path.extname(filename) || '.png';
const baseName = path.basename(filename, ext);
const uniqueFilename = `${baseName}-${timestamp}${ext}`;
const filePath = path.join(imagesDir, uniqueFilename);

// Write file
await fs.writeFile(filePath, buffer);

// Add automaker directory to allowed paths
addAllowedPath(imagesDir);
await secureFs.writeFile(filePath, buffer);

// Return the absolute path
res.json({ success: true, path: filePath });
} catch (error) {
logError(error, "Save image failed");
logError(error, 'Save image failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,10 +2,10 @@
* POST /stat endpoint - Get file stats
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createStatHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -13,12 +13,11 @@ export function createStatHandler() {
const { filePath } = req.body as { filePath: string };

if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}

const resolvedPath = validatePath(filePath);
const stats = await fs.stat(resolvedPath);
const stats = await secureFs.stat(filePath);

res.json({
success: true,
@@ -30,7 +29,13 @@ export function createStatHandler() {
},
});
} catch (error) {
logError(error, "Get file stats failed");
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

logError(error, 'Get file stats failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,11 +2,11 @@
* POST /validate-path endpoint - Validate and add path to allowed list
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath, isPathAllowed } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { isPathAllowed } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createValidatePathHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -14,7 +14,7 @@ export function createValidatePathHandler() {
const { filePath } = req.body as { filePath: string };

if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}

@@ -22,28 +22,23 @@ export function createValidatePathHandler() {

// Check if path exists
try {
const stats = await fs.stat(resolvedPath);
const stats = await secureFs.stat(resolvedPath);

if (!stats.isDirectory()) {
res
.status(400)
.json({ success: false, error: "Path is not a directory" });
res.status(400).json({ success: false, error: 'Path is not a directory' });
return;
}

// Add to allowed paths
addAllowedPath(resolvedPath);

res.json({
success: true,
path: resolvedPath,
isAllowed: isPathAllowed(resolvedPath),
});
} catch {
res.status(400).json({ success: false, error: "Path does not exist" });
res.status(400).json({ success: false, error: 'Path does not exist' });
}
} catch (error) {
logError(error, "Validate path failed");
logError(error, 'Validate path failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,12 @@
* POST /write endpoint - Write file
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { validatePath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import { mkdirSafe } from "../../../lib/fs-utils.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { PathNotAllowedError } from '@automaker/platform';
import { mkdirSafe } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';

export function createWriteHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -18,19 +18,23 @@ export function createWriteHandler() {
};

if (!filePath) {
res.status(400).json({ success: false, error: "filePath is required" });
res.status(400).json({ success: false, error: 'filePath is required' });
return;
}

const resolvedPath = validatePath(filePath);

// Ensure parent directory exists (symlink-safe)
await mkdirSafe(path.dirname(resolvedPath));
await fs.writeFile(resolvedPath, content, "utf-8");
await mkdirSafe(path.dirname(path.resolve(filePath)));
await secureFs.writeFile(filePath, content, 'utf-8');

res.json({ success: true });
} catch (error) {
logError(error, "Write file failed");
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
return;
}

logError(error, 'Write file failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,13 +2,10 @@
* Common utilities for git routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Git");
const logger = createLogger('Git');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,15 +2,16 @@
* Git routes - HTTP API for git operations (non-worktree)
*/

import { Router } from "express";
import { createDiffsHandler } from "./routes/diffs.js";
import { createFileDiffHandler } from "./routes/file-diff.js";
import { Router } from 'express';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createDiffsHandler } from './routes/diffs.js';
import { createFileDiffHandler } from './routes/file-diff.js';

export function createGitRoutes(): Router {
const router = Router();

router.post("/diffs", createDiffsHandler());
router.post("/file-diff", createFileDiffHandler());
router.post('/diffs', validatePathParams('projectPath'), createDiffsHandler());
router.post('/file-diff', validatePathParams('projectPath', 'filePath'), createFileDiffHandler());

return router;
}
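
Note: `validatePathParams` is given the names of body fields that must contain allowed paths (an optional field is marked with a trailing `?`, as in the agent routes earlier in this commit). A sketch of how a caller satisfies it, assuming the router is mounted at `/api/git` — the mount point is not shown in this diff:

```ts
// Hypothetical client call; the '/api/git' prefix is an assumption.
async function getDiffs(projectPath: string): Promise<unknown> {
  const res = await fetch('/api/git/diffs', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // This body field is checked by validatePathParams('projectPath') before the handler runs.
    body: JSON.stringify({ projectPath }),
  });
  if (res.status === 403) {
    throw new Error('projectPath is outside the allowed paths');
  }
  return res.json();
}
```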

@@ -2,13 +2,10 @@
* Common utilities for health routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Health");
const logger = createLogger('Health');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,13 +2,10 @@
* Common utilities for models routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Models");
const logger = createLogger('Models');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,9 +2,9 @@
* GET /providers endpoint - Check provider status
*/

import type { Request, Response } from "express";
import { ProviderFactory } from "../../../providers/provider-factory.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { ProviderFactory } from '../../../providers/provider-factory.js';
import { getErrorMessage, logError } from '../common.js';

export function createProvidersHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -17,15 +17,11 @@ export function createProvidersHandler() {
available: statuses.claude?.installed || false,
hasApiKey: !!process.env.ANTHROPIC_API_KEY,
},
google: {
available: !!process.env.GOOGLE_API_KEY,
hasApiKey: !!process.env.GOOGLE_API_KEY,
},
};

res.json({ success: true, providers });
} catch (error) {
logError(error, "Get providers failed");
logError(error, 'Get providers failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,13 +2,10 @@
* Common utilities for running-agents routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("RunningAgents");
const logger = createLogger('RunningAgents');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

@@ -2,13 +2,10 @@
* Common utilities for sessions routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Sessions");
const logger = createLogger('Sessions');

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };

26  apps/server/src/routes/settings/common.ts  Normal file
@@ -0,0 +1,26 @@
/**
* Common utilities for settings routes
*
* Provides logger and error handling utilities shared across all settings endpoints.
* Re-exports error handling helpers from the parent routes module.
*/

import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

/** Logger instance for settings-related operations */
export const logger = createLogger('Settings');

/**
* Extract user-friendly error message from error objects
*
* Re-exported from parent routes common module for consistency.
*/
export { getErrorMessageShared as getErrorMessage };

/**
* Log error with automatic logger binding
*
* Convenience function for logging errors with the Settings logger.
*/
export const logError = createLogError(logger);

76  apps/server/src/routes/settings/index.ts  Normal file
@@ -0,0 +1,76 @@
/**
* Settings routes - HTTP API for persistent file-based settings
*
* Provides endpoints for:
* - Status checking (migration readiness)
* - Global settings CRUD
* - Credentials management
* - Project-specific settings
* - localStorage to file migration
*
* All endpoints use handler factories that receive the SettingsService instance.
* Mounted at /api/settings in the main server.
*/

import { Router } from 'express';
import type { SettingsService } from '../../services/settings-service.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createGetGlobalHandler } from './routes/get-global.js';
import { createUpdateGlobalHandler } from './routes/update-global.js';
import { createGetCredentialsHandler } from './routes/get-credentials.js';
import { createUpdateCredentialsHandler } from './routes/update-credentials.js';
import { createGetProjectHandler } from './routes/get-project.js';
import { createUpdateProjectHandler } from './routes/update-project.js';
import { createMigrateHandler } from './routes/migrate.js';
import { createStatusHandler } from './routes/status.js';

/**
* Create settings router with all endpoints
*
* Registers handlers for all settings-related HTTP endpoints.
* Each handler is created with the provided SettingsService instance.
*
* Endpoints:
* - GET /status - Check migration status and data availability
* - GET /global - Get global settings
* - PUT /global - Update global settings
* - GET /credentials - Get masked credentials (safe for UI)
* - PUT /credentials - Update API keys
* - POST /project - Get project settings (requires projectPath in body)
* - PUT /project - Update project settings
* - POST /migrate - Migrate settings from localStorage
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express Router configured with all settings endpoints
*/
export function createSettingsRoutes(settingsService: SettingsService): Router {
const router = Router();

// Status endpoint (check if migration needed)
router.get('/status', createStatusHandler(settingsService));

// Global settings
router.get('/global', createGetGlobalHandler(settingsService));
router.put('/global', createUpdateGlobalHandler(settingsService));

// Credentials (separate for security)
router.get('/credentials', createGetCredentialsHandler(settingsService));
router.put('/credentials', createUpdateCredentialsHandler(settingsService));

// Project settings
router.post(
'/project',
validatePathParams('projectPath'),
createGetProjectHandler(settingsService)
);
router.put(
'/project',
validatePathParams('projectPath'),
createUpdateProjectHandler(settingsService)
);

// Migration from localStorage
router.post('/migrate', createMigrateHandler(settingsService));

return router;
}
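
Note: the header comment states the router is mounted at /api/settings. A minimal sketch of that wiring; the exact construction of `SettingsService` is an assumption, not confirmed by this diff:

```ts
import express from 'express';
import { SettingsService } from '../../services/settings-service.js';
import { createSettingsRoutes } from './index.js';

const app = express();
app.use(express.json());

// Mount point taken from the doc comment above; constructor arguments are assumed.
const settingsService = new SettingsService();
app.use('/api/settings', createSettingsRoutes(settingsService));
```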

35  apps/server/src/routes/settings/routes/get-credentials.ts  Normal file
@@ -0,0 +1,35 @@
/**
* GET /api/settings/credentials - Get API key status (masked for security)
*
* Returns masked credentials showing which providers have keys configured.
* Each provider shows: `{ configured: boolean, masked: string }`
* Masked shows first 4 and last 4 characters for verification.
*
* Response: `{ "success": true, "credentials": { anthropic } }`
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for GET /api/settings/credentials
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createGetCredentialsHandler(settingsService: SettingsService) {
return async (_req: Request, res: Response): Promise<void> => {
try {
const credentials = await settingsService.getMaskedCredentials();

res.json({
success: true,
credentials,
});
} catch (error) {
logError(error, 'Get credentials failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

34  apps/server/src/routes/settings/routes/get-global.ts  Normal file
@@ -0,0 +1,34 @@
/**
* GET /api/settings/global - Retrieve global user settings
*
* Returns the complete GlobalSettings object with all user preferences,
* keyboard shortcuts, AI profiles, and project history.
*
* Response: `{ "success": true, "settings": GlobalSettings }`
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for GET /api/settings/global
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createGetGlobalHandler(settingsService: SettingsService) {
return async (_req: Request, res: Response): Promise<void> => {
try {
const settings = await settingsService.getGlobalSettings();

res.json({
success: true,
settings,
});
} catch (error) {
logError(error, 'Get global settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

45  apps/server/src/routes/settings/routes/get-project.ts  Normal file
@@ -0,0 +1,45 @@
/**
* POST /api/settings/project - Get project-specific settings
*
* Retrieves settings overrides for a specific project. Uses POST because
* projectPath may contain special characters that don't work well in URLs.
*
* Request body: `{ projectPath: string }`
* Response: `{ "success": true, "settings": ProjectSettings }`
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for POST /api/settings/project
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createGetProjectHandler(settingsService: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const { projectPath } = req.body as { projectPath?: string };

if (!projectPath || typeof projectPath !== 'string') {
res.status(400).json({
success: false,
error: 'projectPath is required',
});
return;
}

const settings = await settingsService.getProjectSettings(projectPath);

res.json({
success: true,
settings,
});
} catch (error) {
logError(error, 'Get project settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

86  apps/server/src/routes/settings/routes/migrate.ts  Normal file
@@ -0,0 +1,86 @@
/**
* POST /api/settings/migrate - Migrate settings from localStorage to file storage
*
* Called during onboarding when UI detects localStorage data but no settings files.
* Extracts settings from various localStorage keys and writes to new file structure.
* Collects errors but continues on partial failures (graceful degradation).
*
* Request body:
* ```json
* {
*   "data": {
*     "automaker-storage"?: string,
*     "automaker-setup"?: string,
*     "worktree-panel-collapsed"?: string,
*     "file-browser-recent-folders"?: string,
*     "automaker:lastProjectDir"?: string
*   }
* }
* ```
*
* Response:
* ```json
* {
*   "success": boolean,
*   "migratedGlobalSettings": boolean,
*   "migratedCredentials": boolean,
*   "migratedProjectCount": number,
*   "errors": string[]
* }
* ```
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import { getErrorMessage, logError, logger } from '../common.js';

/**
* Create handler factory for POST /api/settings/migrate
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createMigrateHandler(settingsService: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const { data } = req.body as {
data?: {
'automaker-storage'?: string;
'automaker-setup'?: string;
'worktree-panel-collapsed'?: string;
'file-browser-recent-folders'?: string;
'automaker:lastProjectDir'?: string;
};
};

if (!data || typeof data !== 'object') {
res.status(400).json({
success: false,
error: 'data object is required containing localStorage data',
});
return;
}

logger.info('Starting settings migration from localStorage');

const result = await settingsService.migrateFromLocalStorage(data);

if (result.success) {
logger.info(`Migration successful: ${result.migratedProjectCount} projects migrated`);
} else {
logger.warn(`Migration completed with errors: ${result.errors.join(', ')}`);
}

res.json({
success: result.success,
migratedGlobalSettings: result.migratedGlobalSettings,
migratedCredentials: result.migratedCredentials,
migratedProjectCount: result.migratedProjectCount,
errors: result.errors,
});
} catch (error) {
logError(error, 'Migration failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}
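
Note: on the client side, migration amounts to collecting the localStorage keys listed in the header comment and posting them as `data`. A sketch, assuming the endpoint is reachable at `/api/settings/migrate` (consistent with the mount point documented in index.ts):

```ts
// Keys are taken verbatim from the request-body documentation above.
const LOCAL_STORAGE_KEYS = [
  'automaker-storage',
  'automaker-setup',
  'worktree-panel-collapsed',
  'file-browser-recent-folders',
  'automaker:lastProjectDir',
] as const;

async function migrateLocalStorageSettings(): Promise<void> {
  const data: Record<string, string> = {};
  for (const key of LOCAL_STORAGE_KEYS) {
    const value = localStorage.getItem(key);
    if (value !== null) data[key] = value;
  }

  const res = await fetch('/api/settings/migrate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ data }),
  });
  const result = await res.json();
  if (!result.success) {
    // Partial failures are reported but do not abort the migration (see handler above).
    console.warn('Settings migration reported errors:', result.errors);
  }
}
```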

47  apps/server/src/routes/settings/routes/status.ts  Normal file
@@ -0,0 +1,47 @@
/**
* GET /api/settings/status - Get settings migration and availability status
*
* Checks which settings files exist to determine if migration from localStorage
* is needed. Used by UI during onboarding to decide whether to show migration flow.
*
* Response:
* ```json
* {
*   "success": true,
*   "hasGlobalSettings": boolean,
*   "hasCredentials": boolean,
*   "dataDir": string,
*   "needsMigration": boolean
* }
* ```
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for GET /api/settings/status
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createStatusHandler(settingsService: SettingsService) {
return async (_req: Request, res: Response): Promise<void> => {
try {
const hasGlobalSettings = await settingsService.hasGlobalSettings();
const hasCredentials = await settingsService.hasCredentials();

res.json({
success: true,
hasGlobalSettings,
hasCredentials,
dataDir: settingsService.getDataDir(),
needsMigration: !hasGlobalSettings,
});
} catch (error) {
logError(error, 'Get settings status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

49  apps/server/src/routes/settings/routes/update-credentials.ts  Normal file
@@ -0,0 +1,49 @@
/**
* PUT /api/settings/credentials - Update API credentials
*
* Updates API keys for Anthropic. Partial updates supported.
* Returns masked credentials for verification without exposing full keys.
*
* Request body: `Partial<Credentials>` (usually just apiKeys)
* Response: `{ "success": true, "credentials": { anthropic } }`
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import type { Credentials } from '../../../types/settings.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for PUT /api/settings/credentials
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createUpdateCredentialsHandler(settingsService: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const updates = req.body as Partial<Credentials>;

if (!updates || typeof updates !== 'object') {
res.status(400).json({
success: false,
error: 'Invalid request body - expected credentials object',
});
return;
}

await settingsService.updateCredentials(updates);

// Return masked credentials for confirmation
const masked = await settingsService.getMaskedCredentials();

res.json({
success: true,
credentials: masked,
});
} catch (error) {
logError(error, 'Update credentials failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

46  apps/server/src/routes/settings/routes/update-global.ts  Normal file
@@ -0,0 +1,46 @@
/**
* PUT /api/settings/global - Update global user settings
*
* Accepts partial GlobalSettings update. Fields provided are merged into
* existing settings (not replaced). Returns updated settings.
*
* Request body: `Partial<GlobalSettings>`
* Response: `{ "success": true, "settings": GlobalSettings }`
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import type { GlobalSettings } from '../../../types/settings.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for PUT /api/settings/global
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createUpdateGlobalHandler(settingsService: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const updates = req.body as Partial<GlobalSettings>;

if (!updates || typeof updates !== 'object') {
res.status(400).json({
success: false,
error: 'Invalid request body - expected settings object',
});
return;
}

const settings = await settingsService.updateGlobalSettings(updates);

res.json({
success: true,
settings,
});
} catch (error) {
logError(error, 'Update global settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

57  apps/server/src/routes/settings/routes/update-project.ts  Normal file
@@ -0,0 +1,57 @@
/**
* PUT /api/settings/project - Update project-specific settings
*
* Updates settings for a specific project. Partial updates supported.
* Project settings override global settings when present.
*
* Request body: `{ projectPath: string, updates: Partial<ProjectSettings> }`
* Response: `{ "success": true, "settings": ProjectSettings }`
*/

import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import type { ProjectSettings } from '../../../types/settings.js';
import { getErrorMessage, logError } from '../common.js';

/**
* Create handler factory for PUT /api/settings/project
*
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
export function createUpdateProjectHandler(settingsService: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const { projectPath, updates } = req.body as {
projectPath?: string;
updates?: Partial<ProjectSettings>;
};

if (!projectPath || typeof projectPath !== 'string') {
res.status(400).json({
success: false,
error: 'projectPath is required',
});
return;
}

if (!updates || typeof updates !== 'object') {
res.status(400).json({
success: false,
error: 'updates object is required',
});
return;
}

const settings = await settingsService.updateProjectSettings(projectPath, updates);

res.json({
success: true,
settings,
});
} catch (error) {
logError(error, 'Update project settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}
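
Note: a request against this endpoint carries both the project path and the partial settings object, matching the body shape documented above. A sketch; the `theme` key in the example call is only an illustration of a ProjectSettings override and is not a field confirmed by this diff:

```ts
async function updateProjectSettings(
  projectPath: string,
  updates: Record<string, unknown>
): Promise<unknown> {
  const res = await fetch('/api/settings/project', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ projectPath, updates }),
  });
  return res.json(); // { success: true, settings: ProjectSettings } on success
}

// Example call; the 'theme' key is hypothetical.
// await updateProjectSettings('/home/user/my-project', { theme: 'dark' });
```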

@@ -2,15 +2,12 @@
* Common utilities and state for setup routes
*/

import { createLogger } from "../../lib/logger.js";
import path from "path";
import fs from "fs/promises";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import path from 'path';
import fs from 'fs/promises';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Setup");
const logger = createLogger('Setup');

// Storage for API keys (in-memory cache) - private
const apiKeys: Record<string, string> = {};
@@ -39,22 +36,19 @@ export function getAllApiKeys(): Record<string, string> {
/**
* Helper to persist API keys to .env file
*/
export async function persistApiKeyToEnv(
key: string,
value: string
): Promise<void> {
const envPath = path.join(process.cwd(), ".env");
export async function persistApiKeyToEnv(key: string, value: string): Promise<void> {
const envPath = path.join(process.cwd(), '.env');

try {
let envContent = "";
let envContent = '';
try {
envContent = await fs.readFile(envPath, "utf-8");
envContent = await fs.readFile(envPath, 'utf-8');
} catch {
// .env file doesn't exist, we'll create it
}

// Parse existing env content
const lines = envContent.split("\n");
const lines = envContent.split('\n');
const keyRegex = new RegExp(`^${key}=`);
let found = false;
const newLines = lines.map((line) => {
@@ -70,7 +64,7 @@ export async function persistApiKeyToEnv(
newLines.push(`${key}=${value}`);
}

await fs.writeFile(envPath, newLines.join("\n"));
await fs.writeFile(envPath, newLines.join('\n'));
logger.info(`[Setup] Persisted ${key} to .env file`);
} catch (error) {
logger.error(`[Setup] Failed to persist ${key} to .env:`, error);
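
Note: `persistApiKeyToEnv` upserts a single `KEY=value` line in the `.env` file next to the server's working directory, creating the file when it is missing. A usage sketch; the key name matches the env mapping used elsewhere in this commit and the value shown is a placeholder:

```ts
import { persistApiKeyToEnv } from './common.js';

// Example: store a key received from the setup UI; the value here is a placeholder.
await persistApiKeyToEnv('ANTHROPIC_API_KEY', 'sk-ant-placeholder');
```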
|
||||
|
||||
@@ -2,20 +2,18 @@
|
||||
* GET /api-keys endpoint - Get API keys status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getApiKey, getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getApiKey, getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createApiKeysHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
res.json({
|
||||
success: true,
|
||||
hasAnthropicKey:
|
||||
!!getApiKey("anthropic") || !!process.env.ANTHROPIC_API_KEY,
|
||||
hasGoogleKey: !!getApiKey("google") || !!process.env.GOOGLE_API_KEY,
|
||||
hasAnthropicKey: !!getApiKey('anthropic') || !!process.env.ANTHROPIC_API_KEY,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get API keys failed");
|
||||
logError(error, 'Get API keys failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,43 +2,43 @@
* POST /delete-api-key endpoint - Delete a stored API key
*/

import type { Request, Response } from "express";
import { createLogger } from "../../../lib/logger.js";
import path from "path";
import fs from "fs/promises";
import type { Request, Response } from 'express';
import { createLogger } from '@automaker/utils';
import path from 'path';
import fs from 'fs/promises';

const logger = createLogger("Setup");
const logger = createLogger('Setup');

// In-memory storage reference (imported from common.ts pattern)
// We need to modify common.ts to export a deleteApiKey function
import { setApiKey } from "../common.js";
import { setApiKey } from '../common.js';

/**
* Remove an API key from the .env file
*/
async function removeApiKeyFromEnv(key: string): Promise<void> {
const envPath = path.join(process.cwd(), ".env");
const envPath = path.join(process.cwd(), '.env');

try {
let envContent = "";
let envContent = '';
try {
envContent = await fs.readFile(envPath, "utf-8");
envContent = await fs.readFile(envPath, 'utf-8');
} catch {
// .env file doesn't exist, nothing to delete
return;
}

// Parse existing env content and remove the key
const lines = envContent.split("\n");
const lines = envContent.split('\n');
const keyRegex = new RegExp(`^${key}=`);
const newLines = lines.filter((line) => !keyRegex.test(line));

// Remove empty lines at the end
while (newLines.length > 0 && newLines[newLines.length - 1].trim() === "") {
while (newLines.length > 0 && newLines[newLines.length - 1].trim() === '') {
newLines.pop();
}

await fs.writeFile(envPath, newLines.join("\n") + (newLines.length > 0 ? "\n" : ""));
await fs.writeFile(envPath, newLines.join('\n') + (newLines.length > 0 ? '\n' : ''));
logger.info(`[Setup] Removed ${key} from .env file`);
} catch (error) {
logger.error(`[Setup] Failed to remove ${key} from .env:`, error);
@@ -54,7 +54,7 @@ export function createDeleteApiKeyHandler() {
if (!provider) {
res.status(400).json({
success: false,
error: "Provider is required",
error: 'Provider is required',
});
return;
}
@@ -63,22 +63,20 @@ export function createDeleteApiKeyHandler() {

// Map provider to env key name
const envKeyMap: Record<string, string> = {
anthropic: "ANTHROPIC_API_KEY",
google: "GOOGLE_GENERATIVE_AI_API_KEY",
openai: "OPENAI_API_KEY",
anthropic: 'ANTHROPIC_API_KEY',
};

const envKey = envKeyMap[provider];
if (!envKey) {
res.status(400).json({
success: false,
error: `Unknown provider: ${provider}`,
error: `Unknown provider: ${provider}. Only anthropic is supported.`,
});
return;
}

// Clear from in-memory storage
setApiKey(provider, "");
setApiKey(provider, '');

// Remove from environment
delete process.env[envKey];
@@ -93,14 +91,11 @@ export function createDeleteApiKeyHandler() {
message: `API key for ${provider} has been deleted`,
});
} catch (error) {
logger.error("[Setup] Delete API key error:", error);
logger.error('[Setup] Delete API key error:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : "Failed to delete API key",
error: error instanceof Error ? error.message : 'Failed to delete API key',
});
}
};
}
@@ -2,16 +2,11 @@
* POST /store-api-key endpoint - Store API key
*/

import type { Request, Response } from "express";
import {
setApiKey,
persistApiKeyToEnv,
getErrorMessage,
logError,
} from "../common.js";
import { createLogger } from "../../../lib/logger.js";
import type { Request, Response } from 'express';
import { setApiKey, persistApiKeyToEnv, getErrorMessage, logError } from '../common.js';
import { createLogger } from '@automaker/utils';

const logger = createLogger("Setup");
const logger = createLogger('Setup');

export function createStoreApiKeyHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -22,28 +17,29 @@ export function createStoreApiKeyHandler() {
};

if (!provider || !apiKey) {
res
.status(400)
.json({ success: false, error: "provider and apiKey required" });
res.status(400).json({ success: false, error: 'provider and apiKey required' });
return;
}

setApiKey(provider, apiKey);

// Also set as environment variable and persist to .env
if (provider === "anthropic" || provider === "anthropic_oauth_token") {
if (provider === 'anthropic' || provider === 'anthropic_oauth_token') {
// Both API key and OAuth token use ANTHROPIC_API_KEY
process.env.ANTHROPIC_API_KEY = apiKey;
await persistApiKeyToEnv("ANTHROPIC_API_KEY", apiKey);
logger.info("[Setup] Stored API key as ANTHROPIC_API_KEY");
} else if (provider === "google") {
process.env.GOOGLE_API_KEY = apiKey;
await persistApiKeyToEnv("GOOGLE_API_KEY", apiKey);
await persistApiKeyToEnv('ANTHROPIC_API_KEY', apiKey);
logger.info('[Setup] Stored API key as ANTHROPIC_API_KEY');
} else {
res.status(400).json({
success: false,
error: `Unsupported provider: ${provider}. Only anthropic is supported.`,
});
return;
}

res.json({ success: true });
} catch (error) {
logError(error, "Store API key failed");
logError(error, 'Store API key failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
@@ -3,50 +3,50 @@
|
||||
* Supports verifying either CLI auth or API key auth independently
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { query } from "@anthropic-ai/claude-agent-sdk";
|
||||
import { createLogger } from "../../../lib/logger.js";
|
||||
import { getApiKey } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { query } from '@anthropic-ai/claude-agent-sdk';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { getApiKey } from '../common.js';
|
||||
|
||||
const logger = createLogger("Setup");
|
||||
const logger = createLogger('Setup');
|
||||
|
||||
// Known error patterns that indicate auth failure
|
||||
const AUTH_ERROR_PATTERNS = [
|
||||
"OAuth token revoked",
|
||||
"Please run /login",
|
||||
"please run /login",
|
||||
"token revoked",
|
||||
"invalid_api_key",
|
||||
"authentication_error",
|
||||
"unauthorized",
|
||||
"not authenticated",
|
||||
"authentication failed",
|
||||
"invalid api key",
|
||||
"api key is invalid",
|
||||
'OAuth token revoked',
|
||||
'Please run /login',
|
||||
'please run /login',
|
||||
'token revoked',
|
||||
'invalid_api_key',
|
||||
'authentication_error',
|
||||
'unauthorized',
|
||||
'not authenticated',
|
||||
'authentication failed',
|
||||
'invalid api key',
|
||||
'api key is invalid',
|
||||
];
|
||||
|
||||
// Patterns that indicate billing/credit issues - should FAIL verification
|
||||
const BILLING_ERROR_PATTERNS = [
|
||||
"credit balance is too low",
|
||||
"credit balance too low",
|
||||
"insufficient credits",
|
||||
"insufficient balance",
|
||||
"no credits",
|
||||
"out of credits",
|
||||
"billing",
|
||||
"payment required",
|
||||
"add credits",
|
||||
'credit balance is too low',
|
||||
'credit balance too low',
|
||||
'insufficient credits',
|
||||
'insufficient balance',
|
||||
'no credits',
|
||||
'out of credits',
|
||||
'billing',
|
||||
'payment required',
|
||||
'add credits',
|
||||
];
|
||||
|
||||
// Patterns that indicate rate/usage limits - should FAIL verification
|
||||
// Users need to wait or upgrade their plan
|
||||
const RATE_LIMIT_PATTERNS = [
|
||||
"limit reached",
|
||||
"rate limit",
|
||||
"rate_limit",
|
||||
"resets", // Only valid if it's a temporary reset, not a billing issue
|
||||
"/upgrade",
|
||||
"extra-usage",
|
||||
'limit reached',
|
||||
'rate limit',
|
||||
'rate_limit',
|
||||
'resets', // Only valid if it's a temporary reset, not a billing issue
|
||||
'/upgrade',
|
||||
'extra-usage',
|
||||
];
|
||||
|
||||
function isRateLimitError(text: string): boolean {
|
||||
@@ -55,43 +55,33 @@ function isRateLimitError(text: string): boolean {
|
||||
if (isBillingError(text)) {
|
||||
return false;
|
||||
}
|
||||
return RATE_LIMIT_PATTERNS.some((pattern) =>
|
||||
lowerText.includes(pattern.toLowerCase())
|
||||
);
|
||||
return RATE_LIMIT_PATTERNS.some((pattern) => lowerText.includes(pattern.toLowerCase()));
|
||||
}
|
||||
|
||||
function isBillingError(text: string): boolean {
|
||||
const lowerText = text.toLowerCase();
|
||||
return BILLING_ERROR_PATTERNS.some((pattern) =>
|
||||
lowerText.includes(pattern.toLowerCase())
|
||||
);
|
||||
return BILLING_ERROR_PATTERNS.some((pattern) => lowerText.includes(pattern.toLowerCase()));
|
||||
}
|
||||
|
||||
function containsAuthError(text: string): boolean {
|
||||
const lowerText = text.toLowerCase();
|
||||
return AUTH_ERROR_PATTERNS.some((pattern) =>
|
||||
lowerText.includes(pattern.toLowerCase())
|
||||
);
|
||||
return AUTH_ERROR_PATTERNS.some((pattern) => lowerText.includes(pattern.toLowerCase()));
|
||||
}
|
||||
|
||||
export function createVerifyClaudeAuthHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
// Get the auth method from the request body
|
||||
const { authMethod } = req.body as { authMethod?: "cli" | "api_key" };
|
||||
const { authMethod } = req.body as { authMethod?: 'cli' | 'api_key' };
|
||||
|
||||
logger.info(
|
||||
`[Setup] Verifying Claude authentication using method: ${
|
||||
authMethod || "auto"
|
||||
}`
|
||||
);
|
||||
logger.info(`[Setup] Verifying Claude authentication using method: ${authMethod || 'auto'}`);
|
||||
|
||||
// Create an AbortController with a 30-second timeout
|
||||
const abortController = new AbortController();
|
||||
const timeoutId = setTimeout(() => abortController.abort(), 30000);
|
||||
|
||||
let authenticated = false;
|
||||
let errorMessage = "";
|
||||
let errorMessage = '';
|
||||
let receivedAnyContent = false;
|
||||
|
||||
// Save original env values
|
||||
@@ -99,25 +89,23 @@ export function createVerifyClaudeAuthHandler() {
|
||||
|
||||
try {
|
||||
// Configure environment based on auth method
|
||||
if (authMethod === "cli") {
|
||||
if (authMethod === 'cli') {
|
||||
// For CLI verification, remove any API key so it uses CLI credentials only
|
||||
delete process.env.ANTHROPIC_API_KEY;
|
||||
logger.info(
|
||||
"[Setup] Cleared API key environment for CLI verification"
|
||||
);
|
||||
} else if (authMethod === "api_key") {
|
||||
logger.info('[Setup] Cleared API key environment for CLI verification');
|
||||
} else if (authMethod === 'api_key') {
|
||||
// For API key verification, ensure we're using the stored API key
|
||||
const storedApiKey = getApiKey("anthropic");
|
||||
const storedApiKey = getApiKey('anthropic');
|
||||
if (storedApiKey) {
|
||||
process.env.ANTHROPIC_API_KEY = storedApiKey;
|
||||
logger.info("[Setup] Using stored API key for verification");
|
||||
logger.info('[Setup] Using stored API key for verification');
|
||||
} else {
|
||||
// Check env var
|
||||
if (!process.env.ANTHROPIC_API_KEY) {
|
||||
res.json({
|
||||
success: true,
|
||||
authenticated: false,
|
||||
error: "No API key configured. Please enter an API key first.",
|
||||
error: 'No API key configured. Please enter an API key first.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -128,7 +116,7 @@ export function createVerifyClaudeAuthHandler() {
|
||||
const stream = query({
|
||||
prompt: "Reply with only the word 'ok'",
|
||||
options: {
|
||||
model: "claude-sonnet-4-20250514",
|
||||
model: 'claude-sonnet-4-20250514',
|
||||
maxTurns: 1,
|
||||
allowedTools: [],
|
||||
abortController,
|
||||
@@ -141,50 +129,50 @@ export function createVerifyClaudeAuthHandler() {
|
||||
for await (const msg of stream) {
|
||||
const msgStr = JSON.stringify(msg);
|
||||
allMessages.push(msgStr);
|
||||
logger.info("[Setup] Stream message:", msgStr.substring(0, 500));
|
||||
logger.info('[Setup] Stream message:', msgStr.substring(0, 500));
|
||||
|
||||
// Check for billing errors FIRST - these should fail verification
|
||||
if (isBillingError(msgStr)) {
|
||||
logger.error("[Setup] Found billing error in message");
|
||||
logger.error('[Setup] Found billing error in message');
|
||||
errorMessage =
|
||||
"Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com";
|
||||
'Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com';
|
||||
authenticated = false;
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if any part of the message contains auth errors
|
||||
if (containsAuthError(msgStr)) {
|
||||
logger.error("[Setup] Found auth error in message");
|
||||
if (authMethod === "cli") {
|
||||
logger.error('[Setup] Found auth error in message');
|
||||
if (authMethod === 'cli') {
|
||||
errorMessage =
|
||||
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
|
||||
} else {
|
||||
errorMessage = "API key is invalid or has been revoked.";
|
||||
errorMessage = 'API key is invalid or has been revoked.';
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Check specifically for assistant messages with text content
|
||||
if (msg.type === "assistant" && (msg as any).message?.content) {
|
||||
if (msg.type === 'assistant' && (msg as any).message?.content) {
|
||||
const content = (msg as any).message.content;
|
||||
if (Array.isArray(content)) {
|
||||
for (const block of content) {
|
||||
if (block.type === "text" && block.text) {
|
||||
if (block.type === 'text' && block.text) {
|
||||
const text = block.text;
|
||||
logger.info("[Setup] Assistant text:", text);
|
||||
logger.info('[Setup] Assistant text:', text);
|
||||
|
||||
if (containsAuthError(text)) {
|
||||
if (authMethod === "cli") {
|
||||
if (authMethod === 'cli') {
|
||||
errorMessage =
|
||||
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
|
||||
} else {
|
||||
errorMessage = "API key is invalid or has been revoked.";
|
||||
errorMessage = 'API key is invalid or has been revoked.';
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Valid text response that's not an error
|
||||
if (text.toLowerCase().includes("ok") || text.length > 0) {
|
||||
if (text.toLowerCase().includes('ok') || text.length > 0) {
|
||||
receivedAnyContent = true;
|
||||
}
|
||||
}
|
||||
@@ -193,34 +181,30 @@ export function createVerifyClaudeAuthHandler() {
|
||||
}
|
||||
|
||||
// Check for result messages
|
||||
if (msg.type === "result") {
|
||||
if (msg.type === 'result') {
|
||||
const resultStr = JSON.stringify(msg);
|
||||
|
||||
// First check for billing errors - these should FAIL verification
|
||||
if (isBillingError(resultStr)) {
|
||||
logger.error(
|
||||
"[Setup] Billing error detected - insufficient credits"
|
||||
);
|
||||
logger.error('[Setup] Billing error detected - insufficient credits');
|
||||
errorMessage =
|
||||
"Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com";
|
||||
'Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com';
|
||||
authenticated = false;
|
||||
break;
|
||||
}
|
||||
// Check if it's a rate limit error - should FAIL verification
|
||||
else if (isRateLimitError(resultStr)) {
|
||||
logger.warn(
|
||||
"[Setup] Rate limit detected - treating as unverified"
|
||||
);
|
||||
logger.warn('[Setup] Rate limit detected - treating as unverified');
|
||||
errorMessage =
|
||||
"Rate limit reached. Please wait a while before trying again or upgrade your plan.";
|
||||
'Rate limit reached. Please wait a while before trying again or upgrade your plan.';
|
||||
authenticated = false;
|
||||
break;
|
||||
} else if (containsAuthError(resultStr)) {
|
||||
if (authMethod === "cli") {
|
||||
if (authMethod === 'cli') {
|
||||
errorMessage =
|
||||
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
|
||||
} else {
|
||||
errorMessage = "API key is invalid or has been revoked.";
|
||||
errorMessage = 'API key is invalid or has been revoked.';
|
||||
}
|
||||
} else {
|
||||
// Got a result without errors
|
||||
@@ -236,60 +220,48 @@ export function createVerifyClaudeAuthHandler() {
|
||||
authenticated = true;
|
||||
} else {
|
||||
// No content received - might be an issue
|
||||
logger.warn("[Setup] No content received from stream");
|
||||
logger.warn("[Setup] All messages:", allMessages.join("\n"));
|
||||
errorMessage =
|
||||
"No response received from Claude. Please check your authentication.";
|
||||
logger.warn('[Setup] No content received from stream');
|
||||
logger.warn('[Setup] All messages:', allMessages.join('\n'));
|
||||
errorMessage = 'No response received from Claude. Please check your authentication.';
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const errMessage =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
const errMessage = error instanceof Error ? error.message : String(error);
|
||||
|
||||
logger.error("[Setup] Claude auth verification exception:", errMessage);
|
||||
logger.error('[Setup] Claude auth verification exception:', errMessage);
|
||||
|
||||
// Check for billing errors FIRST - these always fail
|
||||
if (isBillingError(errMessage)) {
|
||||
authenticated = false;
|
||||
errorMessage =
|
||||
"Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com";
|
||||
'Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com';
|
||||
}
|
||||
// Check for rate limit in exception - should FAIL verification
|
||||
else if (isRateLimitError(errMessage)) {
|
||||
authenticated = false;
|
||||
errorMessage =
|
||||
"Rate limit reached. Please wait a while before trying again or upgrade your plan.";
|
||||
logger.warn(
|
||||
"[Setup] Rate limit in exception - treating as unverified"
|
||||
);
|
||||
'Rate limit reached. Please wait a while before trying again or upgrade your plan.';
|
||||
logger.warn('[Setup] Rate limit in exception - treating as unverified');
|
||||
}
|
||||
// If we already determined auth was successful, keep it
|
||||
else if (authenticated) {
|
||||
logger.info("[Setup] Auth already confirmed, ignoring exception");
|
||||
logger.info('[Setup] Auth already confirmed, ignoring exception');
|
||||
}
|
||||
// Check for auth-related errors in exception
|
||||
else if (containsAuthError(errMessage)) {
|
||||
if (authMethod === "cli") {
|
||||
if (authMethod === 'cli') {
|
||||
errorMessage =
|
||||
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
|
||||
} else {
|
||||
errorMessage = "API key is invalid or has been revoked.";
|
||||
errorMessage = 'API key is invalid or has been revoked.';
|
||||
}
|
||||
} else if (
|
||||
errMessage.includes("abort") ||
|
||||
errMessage.includes("timeout")
|
||||
) {
|
||||
errorMessage = "Verification timed out. Please try again.";
|
||||
} else if (
|
||||
errMessage.includes("exit") &&
|
||||
errMessage.includes("code 1")
|
||||
) {
|
||||
} else if (errMessage.includes('abort') || errMessage.includes('timeout')) {
|
||||
errorMessage = 'Verification timed out. Please try again.';
|
||||
} else if (errMessage.includes('exit') && errMessage.includes('code 1')) {
|
||||
// Process exited with code 1 but we might have gotten rate limit info in the stream
|
||||
// Check if we received any content that indicated auth worked
|
||||
if (receivedAnyContent && !errorMessage) {
|
||||
authenticated = true;
|
||||
logger.info(
|
||||
"[Setup] Process exit 1 but content received - auth valid"
|
||||
);
|
||||
logger.info('[Setup] Process exit 1 but content received - auth valid');
|
||||
} else if (!errorMessage) {
|
||||
errorMessage = errMessage;
|
||||
}
|
||||
@@ -301,13 +273,13 @@ export function createVerifyClaudeAuthHandler() {
|
||||
// Restore original environment
|
||||
if (originalAnthropicKey !== undefined) {
|
||||
process.env.ANTHROPIC_API_KEY = originalAnthropicKey;
|
||||
} else if (authMethod === "cli") {
|
||||
} else if (authMethod === 'cli') {
|
||||
// If we cleared it and there was no original, keep it cleared
|
||||
delete process.env.ANTHROPIC_API_KEY;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("[Setup] Verification result:", {
|
||||
logger.info('[Setup] Verification result:', {
|
||||
authenticated,
|
||||
errorMessage,
|
||||
authMethod,
|
||||
@@ -319,11 +291,11 @@ export function createVerifyClaudeAuthHandler() {
|
||||
error: errorMessage || undefined,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error("[Setup] Verify Claude auth endpoint error:", error);
|
||||
logger.error('[Setup] Verify Claude auth endpoint error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
authenticated: false,
|
||||
error: error instanceof Error ? error.message : "Verification failed",
|
||||
error: error instanceof Error ? error.message : 'Verification failed',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,13 +2,10 @@
* Common utilities and state for suggestions routes
*/

import { createLogger } from "../../lib/logger.js";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger("Suggestions");
const logger = createLogger('Suggestions');

// Shared state for tracking generation status - private
let isRunning = false;
@@ -27,10 +24,7 @@ export function getSuggestionsStatus(): {
/**
* Set the running state and abort controller
*/
export function setRunningState(
running: boolean,
controller: AbortController | null = null
): void {
export function setRunningState(running: boolean, controller: AbortController | null = null): void {
isRunning = running;
currentAbortController = controller;
}
@@ -2,43 +2,43 @@
|
||||
* Business logic for generating suggestions
|
||||
*/
|
||||
|
||||
import { query } from "@anthropic-ai/claude-agent-sdk";
|
||||
import type { EventEmitter } from "../../lib/events.js";
|
||||
import { createLogger } from "../../lib/logger.js";
|
||||
import { createSuggestionsOptions } from "../../lib/sdk-options.js";
|
||||
import { query } from '@anthropic-ai/claude-agent-sdk';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { createSuggestionsOptions } from '../../lib/sdk-options.js';
|
||||
|
||||
const logger = createLogger("Suggestions");
|
||||
const logger = createLogger('Suggestions');
|
||||
|
||||
/**
|
||||
* JSON Schema for suggestions output
|
||||
*/
|
||||
const suggestionsSchema = {
|
||||
type: "object",
|
||||
type: 'object',
|
||||
properties: {
|
||||
suggestions: {
|
||||
type: "array",
|
||||
type: 'array',
|
||||
items: {
|
||||
type: "object",
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: "string" },
|
||||
category: { type: "string" },
|
||||
description: { type: "string" },
|
||||
id: { type: 'string' },
|
||||
category: { type: 'string' },
|
||||
description: { type: 'string' },
|
||||
steps: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
},
|
||||
priority: {
|
||||
type: "number",
|
||||
priority: {
|
||||
type: 'number',
|
||||
minimum: 1,
|
||||
maximum: 3,
|
||||
},
|
||||
reasoning: { type: "string" },
|
||||
reasoning: { type: 'string' },
|
||||
},
|
||||
required: ["category", "description", "steps", "priority", "reasoning"],
|
||||
required: ['category', 'description', 'steps', 'priority', 'reasoning'],
|
||||
},
|
||||
},
|
||||
},
|
||||
required: ["suggestions"],
|
||||
required: ['suggestions'],
|
||||
additionalProperties: false,
|
||||
};
|
||||
|
||||
@@ -49,13 +49,10 @@ export async function generateSuggestions(
|
||||
abortController: AbortController
|
||||
): Promise<void> {
|
||||
const typePrompts: Record<string, string> = {
|
||||
features:
|
||||
"Analyze this project and suggest new features that would add value.",
|
||||
refactoring: "Analyze this project and identify refactoring opportunities.",
|
||||
security:
|
||||
"Analyze this project for security vulnerabilities and suggest fixes.",
|
||||
performance:
|
||||
"Analyze this project for performance issues and suggest optimizations.",
|
||||
features: 'Analyze this project and suggest new features that would add value.',
|
||||
refactoring: 'Analyze this project and identify refactoring opportunities.',
|
||||
security: 'Analyze this project for security vulnerabilities and suggest fixes.',
|
||||
performance: 'Analyze this project for performance issues and suggest optimizations.',
|
||||
};
|
||||
|
||||
const prompt = `${typePrompts[suggestionType] || typePrompts.features}
|
||||
@@ -71,8 +68,8 @@ For each suggestion, provide:
|
||||
|
||||
The response will be automatically formatted as structured JSON.`;
|
||||
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_progress",
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_progress',
|
||||
content: `Starting ${suggestionType} analysis...\n`,
|
||||
});
|
||||
|
||||
@@ -80,48 +77,48 @@ The response will be automatically formatted as structured JSON.`;
|
||||
cwd: projectPath,
|
||||
abortController,
|
||||
outputFormat: {
|
||||
type: "json_schema",
|
||||
type: 'json_schema',
|
||||
schema: suggestionsSchema,
|
||||
},
|
||||
});
|
||||
|
||||
const stream = query({ prompt, options });
|
||||
let responseText = "";
|
||||
let responseText = '';
|
||||
let structuredOutput: { suggestions: Array<Record<string, unknown>> } | null = null;
|
||||
|
||||
for await (const msg of stream) {
|
||||
if (msg.type === "assistant" && msg.message.content) {
|
||||
if (msg.type === 'assistant' && msg.message.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === "text") {
|
||||
if (block.type === 'text') {
|
||||
responseText += block.text;
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_progress",
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_progress',
|
||||
content: block.text,
|
||||
});
|
||||
} else if (block.type === "tool_use") {
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_tool",
|
||||
} else if (block.type === 'tool_use') {
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_tool',
|
||||
tool: block.name,
|
||||
input: block.input,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (msg.type === "result" && msg.subtype === "success") {
|
||||
} else if (msg.type === 'result' && msg.subtype === 'success') {
|
||||
// Check for structured output
|
||||
const resultMsg = msg as any;
|
||||
if (resultMsg.structured_output) {
|
||||
structuredOutput = resultMsg.structured_output as {
|
||||
suggestions: Array<Record<string, unknown>>;
|
||||
};
|
||||
logger.debug("Received structured output:", structuredOutput);
|
||||
logger.debug('Received structured output:', structuredOutput);
|
||||
}
|
||||
} else if (msg.type === "result") {
|
||||
} else if (msg.type === 'result') {
|
||||
const resultMsg = msg as any;
|
||||
if (resultMsg.subtype === "error_max_structured_output_retries") {
|
||||
logger.error("Failed to produce valid structured output after retries");
|
||||
throw new Error("Could not produce valid suggestions output");
|
||||
} else if (resultMsg.subtype === "error_max_turns") {
|
||||
logger.error("Hit max turns limit before completing suggestions generation");
|
||||
if (resultMsg.subtype === 'error_max_structured_output_retries') {
|
||||
logger.error('Failed to produce valid structured output after retries');
|
||||
throw new Error('Could not produce valid suggestions output');
|
||||
} else if (resultMsg.subtype === 'error_max_turns') {
|
||||
logger.error('Hit max turns limit before completing suggestions generation');
|
||||
logger.warn(`Response text length: ${responseText.length} chars`);
|
||||
// Still try to parse what we have
|
||||
}
|
||||
@@ -132,49 +129,44 @@ The response will be automatically formatted as structured JSON.`;
|
||||
try {
|
||||
if (structuredOutput && structuredOutput.suggestions) {
|
||||
// Use structured output directly
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_complete",
|
||||
suggestions: structuredOutput.suggestions.map(
|
||||
(s: Record<string, unknown>, i: number) => ({
|
||||
...s,
|
||||
id: s.id || `suggestion-${Date.now()}-${i}`,
|
||||
})
|
||||
),
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_complete',
|
||||
suggestions: structuredOutput.suggestions.map((s: Record<string, unknown>, i: number) => ({
|
||||
...s,
|
||||
id: s.id || `suggestion-${Date.now()}-${i}`,
|
||||
})),
|
||||
});
|
||||
} else {
|
||||
// Fallback: try to parse from text (for backwards compatibility)
|
||||
logger.warn("No structured output received, attempting to parse from text");
|
||||
logger.warn('No structured output received, attempting to parse from text');
|
||||
const jsonMatch = responseText.match(/\{[\s\S]*"suggestions"[\s\S]*\}/);
|
||||
if (jsonMatch) {
|
||||
const parsed = JSON.parse(jsonMatch[0]);
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_complete",
|
||||
suggestions: parsed.suggestions.map(
|
||||
(s: Record<string, unknown>, i: number) => ({
|
||||
...s,
|
||||
id: s.id || `suggestion-${Date.now()}-${i}`,
|
||||
})
|
||||
),
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_complete',
|
||||
suggestions: parsed.suggestions.map((s: Record<string, unknown>, i: number) => ({
|
||||
...s,
|
||||
id: s.id || `suggestion-${Date.now()}-${i}`,
|
||||
})),
|
||||
});
|
||||
} else {
|
||||
throw new Error("No valid JSON found in response");
|
||||
throw new Error('No valid JSON found in response');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Log the parsing error for debugging
|
||||
logger.error("Failed to parse suggestions JSON from AI response:", error);
|
||||
logger.error('Failed to parse suggestions JSON from AI response:', error);
|
||||
// Return generic suggestions if parsing fails
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_complete",
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_complete',
|
||||
suggestions: [
|
||||
{
|
||||
id: `suggestion-${Date.now()}-0`,
|
||||
category: "Analysis",
|
||||
description: "Review the AI analysis output for insights",
|
||||
steps: ["Review the generated analysis"],
|
||||
category: 'Analysis',
|
||||
description: 'Review the AI analysis output for insights',
|
||||
steps: ['Review the generated analysis'],
|
||||
priority: 1,
|
||||
reasoning:
|
||||
"The AI provided analysis but suggestions need manual review",
|
||||
reasoning: 'The AI provided analysis but suggestions need manual review',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
@@ -2,18 +2,19 @@
|
||||
* Suggestions routes - HTTP API for AI-powered feature suggestions
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import type { EventEmitter } from "../../lib/events.js";
|
||||
import { createGenerateHandler } from "./routes/generate.js";
|
||||
import { createStopHandler } from "./routes/stop.js";
|
||||
import { createStatusHandler } from "./routes/status.js";
|
||||
import { Router } from 'express';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { validatePathParams } from '../../middleware/validate-paths.js';
|
||||
import { createGenerateHandler } from './routes/generate.js';
|
||||
import { createStopHandler } from './routes/stop.js';
|
||||
import { createStatusHandler } from './routes/status.js';
|
||||
|
||||
export function createSuggestionsRoutes(events: EventEmitter): Router {
|
||||
const router = Router();
|
||||
|
||||
router.post("/generate", createGenerateHandler(events));
|
||||
router.post("/stop", createStopHandler());
|
||||
router.get("/status", createStatusHandler());
|
||||
router.post('/generate', validatePathParams('projectPath'), createGenerateHandler(events));
|
||||
router.post('/stop', createStopHandler());
|
||||
router.get('/status', createStatusHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,29 +2,24 @@
|
||||
* POST /generate endpoint - Generate suggestions
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { EventEmitter } from "../../../lib/events.js";
|
||||
import { createLogger } from "../../../lib/logger.js";
|
||||
import {
|
||||
getSuggestionsStatus,
|
||||
setRunningState,
|
||||
getErrorMessage,
|
||||
logError,
|
||||
} from "../common.js";
|
||||
import { generateSuggestions } from "../generate-suggestions.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
|
||||
import { generateSuggestions } from '../generate-suggestions.js';
|
||||
|
||||
const logger = createLogger("Suggestions");
|
||||
const logger = createLogger('Suggestions');
|
||||
|
||||
export function createGenerateHandler(events: EventEmitter) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, suggestionType = "features" } = req.body as {
|
||||
const { projectPath, suggestionType = 'features' } = req.body as {
|
||||
projectPath: string;
|
||||
suggestionType?: string;
|
||||
};
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath required" });
|
||||
res.status(400).json({ success: false, error: 'projectPath required' });
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -32,7 +27,7 @@ export function createGenerateHandler(events: EventEmitter) {
|
||||
if (isRunning) {
|
||||
res.json({
|
||||
success: false,
|
||||
error: "Suggestions generation is already running",
|
||||
error: 'Suggestions generation is already running',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -44,9 +39,9 @@ export function createGenerateHandler(events: EventEmitter) {
|
||||
// Start generation in background
|
||||
generateSuggestions(projectPath, suggestionType, events, abortController)
|
||||
.catch((error) => {
|
||||
logError(error, "Generate suggestions failed (background)");
|
||||
events.emit("suggestions:event", {
|
||||
type: "suggestions_error",
|
||||
logError(error, 'Generate suggestions failed (background)');
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_error',
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
})
|
||||
@@ -56,7 +51,7 @@ export function createGenerateHandler(events: EventEmitter) {
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, "Generate suggestions failed");
|
||||
logError(error, 'Generate suggestions failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,13 +2,10 @@
|
||||
* Common utilities for templates routes
|
||||
*/
|
||||
|
||||
import { createLogger } from "../../lib/logger.js";
|
||||
import {
|
||||
getErrorMessage as getErrorMessageShared,
|
||||
createLogError,
|
||||
} from "../common.js";
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
||||
|
||||
export const logger = createLogger("Templates");
|
||||
export const logger = createLogger('Templates');
|
||||
|
||||
// Re-export shared utilities
|
||||
export { getErrorMessageShared as getErrorMessage };
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
* POST /clone endpoint - Clone a GitHub template to a new project directory
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { spawn } from "child_process";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import { addAllowedPath } from "../../../lib/security.js";
|
||||
import { logger, getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { spawn } from 'child_process';
|
||||
import path from 'path';
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
import { PathNotAllowedError } from '@automaker/platform';
|
||||
import { logger, getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createCloneHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -22,7 +22,7 @@ export function createCloneHandler() {
|
||||
if (!repoUrl || !projectName || !parentDir) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "repoUrl, projectName, and parentDir are required",
|
||||
error: 'repoUrl, projectName, and parentDir are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -36,17 +36,15 @@ export function createCloneHandler() {
|
||||
if (!githubUrlPattern.test(repoUrl)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "Invalid GitHub repository URL",
|
||||
error: 'Invalid GitHub repository URL',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Sanitize project name (allow alphanumeric, dash, underscore)
|
||||
const sanitizedName = projectName.replace(/[^a-zA-Z0-9-_]/g, "-");
|
||||
const sanitizedName = projectName.replace(/[^a-zA-Z0-9-_]/g, '-');
|
||||
if (sanitizedName !== projectName) {
|
||||
logger.info(
|
||||
`[Templates] Sanitized project name: ${projectName} -> ${sanitizedName}`
|
||||
);
|
||||
logger.info(`[Templates] Sanitized project name: ${projectName} -> ${sanitizedName}`);
|
||||
}
|
||||
|
||||
// Build full project path
|
||||
@@ -55,23 +53,30 @@ export function createCloneHandler() {
|
||||
const resolvedParent = path.resolve(parentDir);
|
||||
const resolvedProject = path.resolve(projectPath);
|
||||
const relativePath = path.relative(resolvedParent, resolvedProject);
|
||||
if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
|
||||
if (relativePath.startsWith('..') || path.isAbsolute(relativePath)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "Invalid project name; potential path traversal attempt.",
|
||||
error: 'Invalid project name; potential path traversal attempt.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if directory already exists
|
||||
// Check if directory already exists (secureFs.access also validates path is allowed)
|
||||
try {
|
||||
await fs.access(projectPath);
|
||||
await secureFs.access(projectPath);
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Directory "${sanitizedName}" already exists in ${parentDir}`,
|
||||
});
|
||||
return;
|
||||
} catch {
|
||||
} catch (accessError) {
|
||||
if (accessError instanceof PathNotAllowedError) {
|
||||
res.status(403).json({
|
||||
success: false,
|
||||
error: `Project path not allowed: ${projectPath}. Must be within ALLOWED_ROOT_DIRECTORY.`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
// Directory doesn't exist, which is what we want
|
||||
}
|
||||
|
||||
@@ -79,35 +84,33 @@ export function createCloneHandler() {
|
||||
try {
|
||||
// Check if parentDir is a root path (Windows: C:\, D:\, etc. or Unix: /)
|
||||
const isWindowsRoot = /^[A-Za-z]:\\?$/.test(parentDir);
|
||||
const isUnixRoot = parentDir === "/" || parentDir === "";
|
||||
const isUnixRoot = parentDir === '/' || parentDir === '';
|
||||
const isRoot = isWindowsRoot || isUnixRoot;
|
||||
|
||||
if (isRoot) {
|
||||
// Root paths always exist, just verify access
|
||||
logger.info(`[Templates] Using root path: ${parentDir}`);
|
||||
await fs.access(parentDir);
|
||||
await secureFs.access(parentDir);
|
||||
} else {
|
||||
// Check if parent directory exists
|
||||
const parentExists = await fs
|
||||
.access(parentDir)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
let parentExists = false;
|
||||
try {
|
||||
await secureFs.access(parentDir);
|
||||
parentExists = true;
|
||||
} catch {
|
||||
parentExists = false;
|
||||
}
|
||||
|
||||
if (!parentExists) {
|
||||
logger.info(`[Templates] Creating parent directory: ${parentDir}`);
|
||||
await fs.mkdir(parentDir, { recursive: true });
|
||||
await secureFs.mkdir(parentDir, { recursive: true });
|
||||
} else {
|
||||
logger.info(`[Templates] Parent directory exists: ${parentDir}`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
logger.error(
|
||||
"[Templates] Failed to access parent directory:",
|
||||
parentDir,
|
||||
error
|
||||
);
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
logger.error('[Templates] Failed to access parent directory:', parentDir, error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: `Failed to access parent directory: ${errorMessage}`,
|
||||
@@ -122,17 +125,17 @@ export function createCloneHandler() {
|
||||
success: boolean;
|
||||
error?: string;
|
||||
}>((resolve) => {
|
||||
const gitProcess = spawn("git", ["clone", repoUrl, projectPath], {
|
||||
const gitProcess = spawn('git', ['clone', repoUrl, projectPath], {
|
||||
cwd: parentDir,
|
||||
});
|
||||
|
||||
let stderr = "";
|
||||
let stderr = '';
|
||||
|
||||
gitProcess.stderr.on("data", (data) => {
|
||||
gitProcess.stderr.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
gitProcess.on("close", (code) => {
|
||||
gitProcess.on('close', (code) => {
|
||||
if (code === 0) {
|
||||
resolve({ success: true });
|
||||
} else {
|
||||
@@ -143,7 +146,7 @@ export function createCloneHandler() {
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.on("error", (error) => {
|
||||
gitProcess.on('error', (error) => {
|
||||
resolve({
|
||||
success: false,
|
||||
error: `Failed to spawn git: ${error.message}`,
|
||||
@@ -154,41 +157,38 @@ export function createCloneHandler() {
|
||||
if (!cloneResult.success) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: cloneResult.error || "Failed to clone repository",
|
||||
error: cloneResult.error || 'Failed to clone repository',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Remove .git directory to start fresh
|
||||
try {
|
||||
const gitDir = path.join(projectPath, ".git");
|
||||
await fs.rm(gitDir, { recursive: true, force: true });
|
||||
logger.info("[Templates] Removed .git directory");
|
||||
const gitDir = path.join(projectPath, '.git');
|
||||
await secureFs.rm(gitDir, { recursive: true, force: true });
|
||||
logger.info('[Templates] Removed .git directory');
|
||||
} catch (error) {
|
||||
logger.warn("[Templates] Could not remove .git directory:", error);
|
||||
logger.warn('[Templates] Could not remove .git directory:', error);
|
||||
// Continue anyway - not critical
|
||||
}
|
||||
|
||||
// Initialize a fresh git repository
|
||||
await new Promise<void>((resolve) => {
|
||||
const gitInit = spawn("git", ["init"], {
|
||||
const gitInit = spawn('git', ['init'], {
|
||||
cwd: projectPath,
|
||||
});
|
||||
|
||||
gitInit.on("close", () => {
|
||||
logger.info("[Templates] Initialized fresh git repository");
|
||||
gitInit.on('close', () => {
|
||||
logger.info('[Templates] Initialized fresh git repository');
|
||||
resolve();
|
||||
});
|
||||
|
||||
gitInit.on("error", () => {
|
||||
logger.warn("[Templates] Could not initialize git");
|
||||
gitInit.on('error', () => {
|
||||
logger.warn('[Templates] Could not initialize git');
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
// Add to allowed paths
|
||||
addAllowedPath(projectPath);
|
||||
|
||||
logger.info(`[Templates] Successfully cloned template to ${projectPath}`);
|
||||
|
||||
res.json({
|
||||
@@ -197,7 +197,7 @@ export function createCloneHandler() {
|
||||
projectName: sanitizedName,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Clone template failed");
|
||||
logError(error, 'Clone template failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
* Common utilities and state for terminal routes
|
||||
*/
|
||||
|
||||
import { randomBytes } from "crypto";
|
||||
import { createLogger } from "../../lib/logger.js";
|
||||
import type { Request, Response, NextFunction } from "express";
|
||||
import { getTerminalService } from "../../services/terminal-service.js";
|
||||
import { randomBytes } from 'crypto';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { Request, Response, NextFunction } from 'express';
|
||||
import { getTerminalService } from '../../services/terminal-service.js';
|
||||
|
||||
const logger = createLogger("Terminal");
|
||||
const logger = createLogger('Terminal');
|
||||
|
||||
// Read env variables lazily to ensure dotenv has loaded them
|
||||
function getTerminalPassword(): string | undefined {
|
||||
@@ -15,21 +15,17 @@ function getTerminalPassword(): string | undefined {
|
||||
}
|
||||
|
||||
function getTerminalEnabledConfig(): boolean {
|
||||
return process.env.TERMINAL_ENABLED !== "false"; // Enabled by default
|
||||
return process.env.TERMINAL_ENABLED !== 'false'; // Enabled by default
|
||||
}
|
||||
|
||||
// In-memory session tokens (would use Redis in production) - private
|
||||
const validTokens: Map<string, { createdAt: Date; expiresAt: Date }> =
|
||||
new Map();
|
||||
const validTokens: Map<string, { createdAt: Date; expiresAt: Date }> = new Map();
|
||||
const TOKEN_EXPIRY_MS = 24 * 60 * 60 * 1000; // 24 hours
|
||||
|
||||
/**
|
||||
* Add a token to the valid tokens map
|
||||
*/
|
||||
export function addToken(
|
||||
token: string,
|
||||
data: { createdAt: Date; expiresAt: Date }
|
||||
): void {
|
||||
export function addToken(token: string, data: { createdAt: Date; expiresAt: Date }): void {
|
||||
validTokens.set(token, data);
|
||||
}
|
||||
|
||||
@@ -43,9 +39,7 @@ export function deleteToken(token: string): void {
|
||||
/**
|
||||
* Get token data for a given token
|
||||
*/
|
||||
export function getTokenData(
|
||||
token: string
|
||||
): { createdAt: Date; expiresAt: Date } | undefined {
|
||||
export function getTokenData(token: string): { createdAt: Date; expiresAt: Date } | undefined {
|
||||
return validTokens.get(token);
|
||||
}
|
||||
|
||||
@@ -53,7 +47,7 @@ export function getTokenData(
|
||||
* Generate a cryptographically secure random token
|
||||
*/
|
||||
export function generateToken(): string {
|
||||
return `term-${randomBytes(32).toString("base64url")}`;
|
||||
return `term-${randomBytes(32).toString('base64url')}`;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -106,16 +100,12 @@ export function isTerminalEnabled(): boolean {
|
||||
* Terminal authentication middleware
|
||||
* Checks for valid session token if password is configured
|
||||
*/
|
||||
export function terminalAuthMiddleware(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void {
|
||||
export function terminalAuthMiddleware(req: Request, res: Response, next: NextFunction): void {
|
||||
// Check if terminal is enabled
|
||||
if (!getTerminalEnabledConfig()) {
|
||||
res.status(403).json({
|
||||
success: false,
|
||||
error: "Terminal access is disabled",
|
||||
error: 'Terminal access is disabled',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -127,13 +117,12 @@ export function terminalAuthMiddleware(
|
||||
}
|
||||
|
||||
// Check for session token
|
||||
const token =
|
||||
(req.headers["x-terminal-token"] as string) || (req.query.token as string);
|
||||
const token = (req.headers['x-terminal-token'] as string) || (req.query.token as string);
|
||||
|
||||
if (!validateTerminalToken(token)) {
|
||||
res.status(401).json({
|
||||
success: false,
|
||||
error: "Terminal authentication required",
|
||||
error: 'Terminal authentication required',
|
||||
passwordRequired: true,
|
||||
});
|
||||
return;
|
||||
@@ -154,10 +143,7 @@ export function getTokenExpiryMs(): number {
|
||||
return TOKEN_EXPIRY_MS;
|
||||
}
|
||||
|
||||
import {
|
||||
getErrorMessage as getErrorMessageShared,
|
||||
createLogError,
|
||||
} from "../common.js";
|
||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
||||
|
||||
// Re-export shared utilities
|
||||
export { getErrorMessageShared as getErrorMessage };
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
* POST /sessions endpoint - Create a new terminal session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getTerminalService } from "../../../services/terminal-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import { createLogger } from "../../../lib/logger.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getTerminalService } from '../../../services/terminal-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger("Terminal");
|
||||
const logger = createLogger('Terminal');
|
||||
|
||||
export function createSessionsListHandler() {
|
||||
return (_req: Request, res: Response): void => {
|
||||
@@ -41,7 +41,7 @@ export function createSessionsCreateHandler() {
|
||||
logger.warn(`Session limit reached: ${currentSessions}/${maxSessions}`);
|
||||
res.status(429).json({
|
||||
success: false,
|
||||
error: "Maximum terminal sessions reached",
|
||||
error: 'Maximum terminal sessions reached',
|
||||
details: `Server limit is ${maxSessions} concurrent sessions. Please close unused terminals.`,
|
||||
currentSessions,
|
||||
maxSessions,
|
||||
@@ -59,10 +59,10 @@ export function createSessionsCreateHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Create terminal session failed");
|
||||
logError(error, 'Create terminal session failed');
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: "Failed to create terminal session",
|
||||
error: 'Failed to create terminal session',
|
||||
details: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,13 +2,10 @@
|
||||
* Common utilities for workspace routes
|
||||
*/
|
||||
|
||||
import { createLogger } from "../../lib/logger.js";
|
||||
import {
|
||||
getErrorMessage as getErrorMessageShared,
|
||||
createLogError,
|
||||
} from "../common.js";
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
||||
|
||||
const logger = createLogger("Workspace");
|
||||
const logger = createLogger('Workspace');
|
||||
|
||||
// Re-export shared utilities
|
||||
export { getErrorMessageShared as getErrorMessage };
|
||||
|
||||
@@ -2,53 +2,56 @@
|
||||
* GET /config endpoint - Get workspace configuration status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import fs from "fs/promises";
|
||||
import { addAllowedPath } from "../../../lib/security.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
import path from 'path';
|
||||
import { getAllowedRootDirectory, getDataDirectory } from '@automaker/platform';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createConfigHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const workspaceDir = process.env.WORKSPACE_DIR;
|
||||
const allowedRootDirectory = getAllowedRootDirectory();
|
||||
const dataDirectory = getDataDirectory();
|
||||
|
||||
if (!workspaceDir) {
|
||||
if (!allowedRootDirectory) {
|
||||
// When ALLOWED_ROOT_DIRECTORY is not set, return DATA_DIR as default directory
|
||||
res.json({
|
||||
success: true,
|
||||
configured: false,
|
||||
defaultDir: dataDirectory || null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if the directory exists
|
||||
try {
|
||||
const stats = await fs.stat(workspaceDir);
|
||||
const resolvedWorkspaceDir = path.resolve(allowedRootDirectory);
|
||||
const stats = await secureFs.stat(resolvedWorkspaceDir);
|
||||
if (!stats.isDirectory()) {
|
||||
res.json({
|
||||
success: true,
|
||||
configured: false,
|
||||
error: "WORKSPACE_DIR is not a valid directory",
|
||||
error: 'ALLOWED_ROOT_DIRECTORY is not a valid directory',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Add workspace dir to allowed paths
|
||||
addAllowedPath(workspaceDir);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
configured: true,
|
||||
workspaceDir,
|
||||
workspaceDir: resolvedWorkspaceDir,
|
||||
defaultDir: resolvedWorkspaceDir,
|
||||
});
|
||||
} catch {
|
||||
res.json({
|
||||
success: true,
|
||||
configured: false,
|
||||
error: "WORKSPACE_DIR path does not exist",
|
||||
error: 'ALLOWED_ROOT_DIRECTORY path does not exist',
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Get workspace config failed");
|
||||
logError(error, 'Get workspace config failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,60 +2,58 @@
* GET /directories endpoint - List directories in workspace
*/

import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "../../../lib/security.js";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getAllowedRootDirectory } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';

export function createDirectoriesHandler() {
return async (_req: Request, res: Response): Promise<void> => {
try {
const workspaceDir = process.env.WORKSPACE_DIR;
const allowedRootDirectory = getAllowedRootDirectory();

if (!workspaceDir) {
if (!allowedRootDirectory) {
res.status(400).json({
success: false,
error: "WORKSPACE_DIR is not configured",
error: 'ALLOWED_ROOT_DIRECTORY is not configured',
});
return;
}

const resolvedWorkspaceDir = path.resolve(allowedRootDirectory);

// Check if directory exists
try {
await fs.stat(workspaceDir);
await secureFs.stat(resolvedWorkspaceDir);
} catch {
res.status(400).json({
success: false,
error: "WORKSPACE_DIR path does not exist",
error: 'Workspace directory path does not exist',
});
return;
}

// Add workspace dir to allowed paths
addAllowedPath(workspaceDir);

// Read directory contents
const entries = await fs.readdir(workspaceDir, { withFileTypes: true });
const entries = await secureFs.readdir(resolvedWorkspaceDir, {
withFileTypes: true,
});

// Filter to directories only and map to result format
const directories = entries
.filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
.filter((entry) => entry.isDirectory() && !entry.name.startsWith('.'))
.map((entry) => ({
name: entry.name,
path: path.join(workspaceDir, entry.name),
path: path.join(resolvedWorkspaceDir, entry.name),
}))
.sort((a, b) => a.name.localeCompare(b.name));

// Add each directory to allowed paths
directories.forEach((dir) => addAllowedPath(dir.path));

res.json({
success: true,
directories,
});
} catch (error) {
logError(error, "List workspace directories failed");
logError(error, 'List workspace directories failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,18 +2,14 @@
* Common utilities for worktree routes
*/

import { createLogger } from "../../lib/logger.js";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { FeatureLoader } from "../../services/feature-loader.js";
import { createLogger } from '@automaker/utils';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
import { FeatureLoader } from '../../services/feature-loader.js';

const logger = createLogger("Worktree");
const logger = createLogger('Worktree');
export const execAsync = promisify(exec);
const featureLoader = new FeatureLoader();

@@ -28,10 +24,10 @@ export const MAX_BRANCH_NAME_LENGTH = 250;
// Extended PATH configuration for Electron apps
// ============================================================================

const pathSeparator = process.platform === "win32" ? ";" : ":";
const pathSeparator = process.platform === 'win32' ? ';' : ':';
const additionalPaths: string[] = [];

if (process.platform === "win32") {
if (process.platform === 'win32') {
// Windows paths
if (process.env.LOCALAPPDATA) {
additionalPaths.push(`${process.env.LOCALAPPDATA}\\Programs\\Git\\cmd`);
@@ -39,23 +35,22 @@ if (process.platform === "win32") {
if (process.env.PROGRAMFILES) {
additionalPaths.push(`${process.env.PROGRAMFILES}\\Git\\cmd`);
}
if (process.env["ProgramFiles(x86)"]) {
additionalPaths.push(`${process.env["ProgramFiles(x86)"]}\\Git\\cmd`);
if (process.env['ProgramFiles(x86)']) {
additionalPaths.push(`${process.env['ProgramFiles(x86)']}\\Git\\cmd`);
}
} else {
// Unix/Mac paths
additionalPaths.push(
"/opt/homebrew/bin", // Homebrew on Apple Silicon
"/usr/local/bin", // Homebrew on Intel Mac, common Linux location
"/home/linuxbrew/.linuxbrew/bin", // Linuxbrew
`${process.env.HOME}/.local/bin`, // pipx, other user installs
'/opt/homebrew/bin', // Homebrew on Apple Silicon
'/usr/local/bin', // Homebrew on Intel Mac, common Linux location
'/home/linuxbrew/.linuxbrew/bin', // Linuxbrew
`${process.env.HOME}/.local/bin` // pipx, other user installs
);
}

const extendedPath = [
process.env.PATH,
...additionalPaths.filter(Boolean),
].filter(Boolean).join(pathSeparator);
const extendedPath = [process.env.PATH, ...additionalPaths.filter(Boolean)]
.filter(Boolean)
.join(pathSeparator);

/**
* Environment variables with extended PATH for executing shell commands.
@@ -85,9 +80,7 @@ export function isValidBranchName(name: string): boolean {
*/
export async function isGhCliAvailable(): Promise<boolean> {
try {
const checkCommand = process.platform === "win32"
? "where gh"
: "command -v gh";
const checkCommand = process.platform === 'win32' ? 'where gh' : 'command -v gh';
await execAsync(checkCommand, { env: execEnv });
return true;
} catch {
@@ -95,8 +88,7 @@ export async function isGhCliAvailable(): Promise<boolean> {
}
}

export const AUTOMAKER_INITIAL_COMMIT_MESSAGE =
"chore: automaker initial commit";
export const AUTOMAKER_INITIAL_COMMIT_MESSAGE = 'chore: automaker initial commit';

/**
* Normalize path separators to forward slashes for cross-platform consistency.
@@ -104,7 +96,7 @@ export const AUTOMAKER_INITIAL_COMMIT_MESSAGE =
* from git commands (which may use forward slashes).
*/
export function normalizePath(p: string): string {
return p.replace(/\\/g, "/");
return p.replace(/\\/g, '/');
}

/**
@@ -112,7 +104,7 @@ export function normalizePath(p: string): string {
*/
export async function isGitRepo(repoPath: string): Promise<boolean> {
try {
await execAsync("git rev-parse --is-inside-work-tree", { cwd: repoPath });
await execAsync('git rev-parse --is-inside-work-tree', { cwd: repoPath });
return true;
} catch {
return false;
@@ -124,30 +116,21 @@ export async function isGitRepo(repoPath: string): Promise<boolean> {
* These are expected in test environments with mock paths
*/
export function isENOENT(error: unknown): boolean {
return (
error !== null &&
typeof error === "object" &&
"code" in error &&
error.code === "ENOENT"
);
return error !== null && typeof error === 'object' && 'code' in error && error.code === 'ENOENT';
}

/**
* Check if a path is a mock/test path that doesn't exist
*/
export function isMockPath(worktreePath: string): boolean {
return worktreePath.startsWith("/mock/") || worktreePath.includes("/mock/");
return worktreePath.startsWith('/mock/') || worktreePath.includes('/mock/');
}

/**
* Conditionally log worktree errors - suppress ENOENT for mock paths
* to reduce noise in test output
*/
export function logWorktreeError(
error: unknown,
message: string,
worktreePath?: string
): void {
export function logWorktreeError(error: unknown, message: string, worktreePath?: string): void {
// Don't log ENOENT errors for mock paths (expected in tests)
if (isENOENT(error) && worktreePath && isMockPath(worktreePath)) {
return;
@@ -165,17 +148,14 @@ export const logError = createLogError(logger);
*/
export async function ensureInitialCommit(repoPath: string): Promise<boolean> {
try {
await execAsync("git rev-parse --verify HEAD", { cwd: repoPath });
await execAsync('git rev-parse --verify HEAD', { cwd: repoPath });
return false;
} catch {
try {
await execAsync(
`git commit --allow-empty -m "${AUTOMAKER_INITIAL_COMMIT_MESSAGE}"`,
{ cwd: repoPath }
);
logger.info(
`[Worktree] Created initial empty commit to enable worktrees in ${repoPath}`
);
await execAsync(`git commit --allow-empty -m "${AUTOMAKER_INITIAL_COMMIT_MESSAGE}"`, {
cwd: repoPath,
});
logger.info(`[Worktree] Created initial empty commit to enable worktrees in ${repoPath}`);
return true;
} catch (error) {
const reason = getErrorMessageShared(error);

@@ -2,59 +2,64 @@
* Worktree routes - HTTP API for git worktree operations
*/

import { Router } from "express";
import { createInfoHandler } from "./routes/info.js";
import { createStatusHandler } from "./routes/status.js";
import { createListHandler } from "./routes/list.js";
import { createDiffsHandler } from "./routes/diffs.js";
import { createFileDiffHandler } from "./routes/file-diff.js";
import { createMergeHandler } from "./routes/merge.js";
import { createCreateHandler } from "./routes/create.js";
import { createDeleteHandler } from "./routes/delete.js";
import { createCreatePRHandler } from "./routes/create-pr.js";
import { createPRInfoHandler } from "./routes/pr-info.js";
import { createCommitHandler } from "./routes/commit.js";
import { createPushHandler } from "./routes/push.js";
import { createPullHandler } from "./routes/pull.js";
import { createCheckoutBranchHandler } from "./routes/checkout-branch.js";
import { createListBranchesHandler } from "./routes/list-branches.js";
import { createSwitchBranchHandler } from "./routes/switch-branch.js";
import { Router } from 'express';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createInfoHandler } from './routes/info.js';
import { createStatusHandler } from './routes/status.js';
import { createListHandler } from './routes/list.js';
import { createDiffsHandler } from './routes/diffs.js';
import { createFileDiffHandler } from './routes/file-diff.js';
import { createMergeHandler } from './routes/merge.js';
import { createCreateHandler } from './routes/create.js';
import { createDeleteHandler } from './routes/delete.js';
import { createCreatePRHandler } from './routes/create-pr.js';
import { createPRInfoHandler } from './routes/pr-info.js';
import { createCommitHandler } from './routes/commit.js';
import { createPushHandler } from './routes/push.js';
import { createPullHandler } from './routes/pull.js';
import { createCheckoutBranchHandler } from './routes/checkout-branch.js';
import { createListBranchesHandler } from './routes/list-branches.js';
import { createSwitchBranchHandler } from './routes/switch-branch.js';
import {
createOpenInEditorHandler,
createGetDefaultEditorHandler,
} from "./routes/open-in-editor.js";
import { createInitGitHandler } from "./routes/init-git.js";
import { createMigrateHandler } from "./routes/migrate.js";
import { createStartDevHandler } from "./routes/start-dev.js";
import { createStopDevHandler } from "./routes/stop-dev.js";
import { createListDevServersHandler } from "./routes/list-dev-servers.js";
} from './routes/open-in-editor.js';
import { createInitGitHandler } from './routes/init-git.js';
import { createMigrateHandler } from './routes/migrate.js';
import { createStartDevHandler } from './routes/start-dev.js';
import { createStopDevHandler } from './routes/stop-dev.js';
import { createListDevServersHandler } from './routes/list-dev-servers.js';

export function createWorktreeRoutes(): Router {
const router = Router();

router.post("/info", createInfoHandler());
router.post("/status", createStatusHandler());
router.post("/list", createListHandler());
router.post("/diffs", createDiffsHandler());
router.post("/file-diff", createFileDiffHandler());
router.post("/merge", createMergeHandler());
router.post("/create", createCreateHandler());
router.post("/delete", createDeleteHandler());
router.post("/create-pr", createCreatePRHandler());
router.post("/pr-info", createPRInfoHandler());
router.post("/commit", createCommitHandler());
router.post("/push", createPushHandler());
router.post("/pull", createPullHandler());
router.post("/checkout-branch", createCheckoutBranchHandler());
router.post("/list-branches", createListBranchesHandler());
router.post("/switch-branch", createSwitchBranchHandler());
router.post("/open-in-editor", createOpenInEditorHandler());
router.get("/default-editor", createGetDefaultEditorHandler());
router.post("/init-git", createInitGitHandler());
router.post("/migrate", createMigrateHandler());
router.post("/start-dev", createStartDevHandler());
router.post("/stop-dev", createStopDevHandler());
router.post("/list-dev-servers", createListDevServersHandler());
router.post('/info', validatePathParams('projectPath'), createInfoHandler());
router.post('/status', validatePathParams('projectPath'), createStatusHandler());
router.post('/list', createListHandler());
router.post('/diffs', validatePathParams('projectPath'), createDiffsHandler());
router.post('/file-diff', validatePathParams('projectPath', 'filePath'), createFileDiffHandler());
router.post('/merge', validatePathParams('projectPath'), createMergeHandler());
router.post('/create', validatePathParams('projectPath'), createCreateHandler());
router.post('/delete', validatePathParams('projectPath', 'worktreePath'), createDeleteHandler());
router.post('/create-pr', createCreatePRHandler());
router.post('/pr-info', createPRInfoHandler());
router.post('/commit', validatePathParams('worktreePath'), createCommitHandler());
router.post('/push', validatePathParams('worktreePath'), createPushHandler());
router.post('/pull', validatePathParams('worktreePath'), createPullHandler());
router.post('/checkout-branch', createCheckoutBranchHandler());
router.post('/list-branches', validatePathParams('worktreePath'), createListBranchesHandler());
router.post('/switch-branch', createSwitchBranchHandler());
router.post('/open-in-editor', validatePathParams('worktreePath'), createOpenInEditorHandler());
router.get('/default-editor', createGetDefaultEditorHandler());
router.post('/init-git', validatePathParams('projectPath'), createInitGitHandler());
router.post('/migrate', createMigrateHandler());
router.post(
'/start-dev',
validatePathParams('projectPath', 'worktreePath'),
createStartDevHandler()
);
router.post('/stop-dev', createStopDevHandler());
router.post('/list-dev-servers', createListDevServersHandler());

return router;
}

@@ -5,12 +5,9 @@
* can switch between branches even after worktrees are removed.
*/

import { readFile, writeFile } from "fs/promises";
import path from "path";
import {
getBranchTrackingPath,
ensureAutomakerDir,
} from "../../../lib/automaker-paths.js";
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { getBranchTrackingPath, ensureAutomakerDir } from '@automaker/platform';

export interface TrackedBranch {
name: string;
@@ -25,19 +22,17 @@ interface BranchTrackingData {
/**
* Read tracked branches from file
*/
export async function getTrackedBranches(
projectPath: string
): Promise<TrackedBranch[]> {
export async function getTrackedBranches(projectPath: string): Promise<TrackedBranch[]> {
try {
const filePath = getBranchTrackingPath(projectPath);
const content = await readFile(filePath, "utf-8");
const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
const data: BranchTrackingData = JSON.parse(content);
return data.branches || [];
} catch (error: any) {
if (error.code === "ENOENT") {
if (error.code === 'ENOENT') {
return [];
}
console.warn("[branch-tracking] Failed to read tracked branches:", error);
console.warn('[branch-tracking] Failed to read tracked branches:', error);
return [];
}
}
@@ -45,23 +40,17 @@ export async function getTrackedBranches(
/**
* Save tracked branches to file
*/
async function saveTrackedBranches(
projectPath: string,
branches: TrackedBranch[]
): Promise<void> {
async function saveTrackedBranches(projectPath: string, branches: TrackedBranch[]): Promise<void> {
const automakerDir = await ensureAutomakerDir(projectPath);
const filePath = path.join(automakerDir, "active-branches.json");
const filePath = path.join(automakerDir, 'active-branches.json');
const data: BranchTrackingData = { branches };
await writeFile(filePath, JSON.stringify(data, null, 2), "utf-8");
await secureFs.writeFile(filePath, JSON.stringify(data, null, 2), 'utf-8');
}

/**
* Add a branch to tracking
*/
export async function trackBranch(
projectPath: string,
branchName: string
): Promise<void> {
export async function trackBranch(projectPath: string, branchName: string): Promise<void> {
const branches = await getTrackedBranches(projectPath);

// Check if already tracked
@@ -82,10 +71,7 @@ export async function trackBranch(
/**
* Remove a branch from tracking
*/
export async function untrackBranch(
projectPath: string,
branchName: string
): Promise<void> {
export async function untrackBranch(projectPath: string, branchName: string): Promise<void> {
const branches = await getTrackedBranches(projectPath);
const filtered = branches.filter((b) => b.name !== branchName);

@@ -114,10 +100,7 @@ export async function updateBranchActivation(
/**
* Check if a branch is tracked
*/
export async function isBranchTracked(
projectPath: string,
branchName: string
): Promise<boolean> {
export async function isBranchTracked(projectPath: string, branchName: string): Promise<boolean> {
const branches = await getTrackedBranches(projectPath);
return branches.some((b) => b.name === branchName);
}

@@ -7,19 +7,19 @@
* 3. Only creates a new worktree if none exists for the branch
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import { mkdir } from "fs/promises";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import {
isGitRepo,
getErrorMessage,
logError,
normalizePath,
ensureInitialCommit,
} from "../common.js";
import { trackBranch } from "./branch-tracking.js";
} from '../common.js';
import { trackBranch } from './branch-tracking.js';

const execAsync = promisify(exec);

@@ -31,20 +31,20 @@ async function findExistingWorktreeForBranch(
branchName: string
): Promise<{ path: string; branch: string } | null> {
try {
const { stdout } = await execAsync("git worktree list --porcelain", {
const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: projectPath,
});

const lines = stdout.split("\n");
const lines = stdout.split('\n');
let currentPath: string | null = null;
let currentBranch: string | null = null;

for (const line of lines) {
if (line.startsWith("worktree ")) {
if (line.startsWith('worktree ')) {
currentPath = line.slice(9);
} else if (line.startsWith("branch ")) {
currentBranch = line.slice(7).replace("refs/heads/", "");
} else if (line === "" && currentPath && currentBranch) {
} else if (line.startsWith('branch ')) {
currentBranch = line.slice(7).replace('refs/heads/', '');
} else if (line === '' && currentPath && currentBranch) {
// End of a worktree entry
if (currentBranch === branchName) {
// Resolve to absolute path - git may return relative paths
@@ -86,7 +86,7 @@ export function createCreateHandler() {
if (!projectPath || !branchName) {
res.status(400).json({
success: false,
error: "projectPath and branchName required",
error: 'projectPath and branchName required',
});
return;
}
@@ -94,7 +94,7 @@ export function createCreateHandler() {
if (!(await isGitRepo(projectPath))) {
res.status(400).json({
success: false,
error: "Not a git repository",
error: 'Not a git repository',
});
return;
}
@@ -107,7 +107,9 @@ export function createCreateHandler() {
if (existingWorktree) {
// Worktree already exists, return it as success (not an error)
// This handles manually created worktrees or worktrees from previous runs
console.log(`[Worktree] Found existing worktree for branch "${branchName}" at: ${existingWorktree.path}`);
console.log(
`[Worktree] Found existing worktree for branch "${branchName}" at: ${existingWorktree.path}`
);

// Track the branch so it persists in the UI
await trackBranch(projectPath, branchName);
@@ -124,12 +126,12 @@ export function createCreateHandler() {
}

// Sanitize branch name for directory usage
const sanitizedName = branchName.replace(/[^a-zA-Z0-9_-]/g, "-");
const worktreesDir = path.join(projectPath, ".worktrees");
const sanitizedName = branchName.replace(/[^a-zA-Z0-9_-]/g, '-');
const worktreesDir = path.join(projectPath, '.worktrees');
const worktreePath = path.join(worktreesDir, sanitizedName);

// Create worktrees directory if it doesn't exist
await mkdir(worktreesDir, { recursive: true });
await secureFs.mkdir(worktreesDir, { recursive: true });

// Check if branch exists
let branchExists = false;
@@ -149,7 +151,7 @@ export function createCreateHandler() {
createCmd = `git worktree add "${worktreePath}" ${branchName}`;
} else {
// Create new branch from base or HEAD
const base = baseBranch || "HEAD";
const base = baseBranch || 'HEAD';
createCmd = `git worktree add -b ${branchName} "${worktreePath}" ${base}`;
}

@@ -174,7 +176,7 @@ export function createCreateHandler() {
},
});
} catch (error) {
logError(error, "Create worktree failed");
logError(error, 'Create worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,10 +2,11 @@
* POST /delete endpoint - Delete a git worktree
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { isGitRepo, getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import { isGitRepo } from '@automaker/git-utils';
import { getErrorMessage, logError } from '../common.js';

const execAsync = promisify(exec);

@@ -21,7 +22,7 @@ export function createDeleteHandler() {
if (!projectPath || !worktreePath) {
res.status(400).json({
success: false,
error: "projectPath and worktreePath required",
error: 'projectPath and worktreePath required',
});
return;
}
@@ -29,7 +30,7 @@ export function createDeleteHandler() {
if (!(await isGitRepo(projectPath))) {
res.status(400).json({
success: false,
error: "Not a git repository",
error: 'Not a git repository',
});
return;
}
@@ -37,7 +38,7 @@ export function createDeleteHandler() {
// Get branch name before removing worktree
let branchName: string | null = null;
try {
const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
cwd: worktreePath,
});
branchName = stdout.trim();
@@ -52,11 +53,11 @@ export function createDeleteHandler() {
});
} catch (error) {
// Try with prune if remove fails
await execAsync("git worktree prune", { cwd: projectPath });
await execAsync('git worktree prune', { cwd: projectPath });
}

// Optionally delete the branch
if (deleteBranch && branchName && branchName !== "main" && branchName !== "master") {
if (deleteBranch && branchName && branchName !== 'main' && branchName !== 'master') {
try {
await execAsync(`git branch -D ${branchName}`, { cwd: projectPath });
} catch {
@@ -72,7 +73,7 @@ export function createDeleteHandler() {
},
});
} catch (error) {
logError(error, "Delete worktree failed");
logError(error, 'Delete worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,11 +2,11 @@
* POST /diffs endpoint - Get diffs for a worktree
*/

import type { Request, Response } from "express";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError } from "../common.js";
import { getGitRepositoryDiffs } from "../../common.js";
import type { Request, Response } from 'express';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError } from '../common.js';
import { getGitRepositoryDiffs } from '../../common.js';

export function createDiffsHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -17,21 +17,19 @@ export function createDiffsHandler() {
};

if (!projectPath || !featureId) {
res
.status(400)
.json({
success: false,
error: "projectPath and featureId required",
});
res.status(400).json({
success: false,
error: 'projectPath and featureId required',
});
return;
}

// Git worktrees are stored in project directory
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);

try {
// Check if worktree exists
await fs.access(worktreePath);
await secureFs.access(worktreePath);

// Get diffs from worktree
const result = await getGitRepositoryDiffs(worktreePath);
@@ -43,7 +41,7 @@ export function createDiffsHandler() {
});
} catch (innerError) {
// Worktree doesn't exist - fallback to main project path
logError(innerError, "Worktree access failed, falling back to main project");
logError(innerError, 'Worktree access failed, falling back to main project');

try {
const result = await getGitRepositoryDiffs(projectPath);
@@ -54,12 +52,12 @@ export function createDiffsHandler() {
hasChanges: result.hasChanges,
});
} catch (fallbackError) {
logError(fallbackError, "Fallback to main project also failed");
res.json({ success: true, diff: "", files: [], hasChanges: false });
logError(fallbackError, 'Fallback to main project also failed');
res.json({ success: true, diff: '', files: [], hasChanges: false });
}
}
} catch (error) {
logError(error, "Get worktree diffs failed");
logError(error, 'Get worktree diffs failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,13 +2,13 @@
* POST /file-diff endpoint - Get diff for a specific file
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError } from "../common.js";
import { generateSyntheticDiffForNewFile } from "../../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError } from '../common.js';
import { generateSyntheticDiffForNewFile } from '../../common.js';

const execAsync = promisify(exec);

@@ -24,24 +24,23 @@ export function createFileDiffHandler() {
if (!projectPath || !featureId || !filePath) {
res.status(400).json({
success: false,
error: "projectPath, featureId, and filePath required",
error: 'projectPath, featureId, and filePath required',
});
return;
}

// Git worktrees are stored in project directory
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);

try {
await fs.access(worktreePath);
await secureFs.access(worktreePath);

// First check if the file is untracked
const { stdout: status } = await execAsync(
`git status --porcelain -- "${filePath}"`,
{ cwd: worktreePath }
);
const { stdout: status } = await execAsync(`git status --porcelain -- "${filePath}"`, {
cwd: worktreePath,
});

const isUntracked = status.trim().startsWith("??");
const isUntracked = status.trim().startsWith('??');

let diff: string;
if (isUntracked) {
@@ -49,23 +48,20 @@ export function createFileDiffHandler() {
diff = await generateSyntheticDiffForNewFile(worktreePath, filePath);
} else {
// Use regular git diff for tracked files
const result = await execAsync(
`git diff HEAD -- "${filePath}"`,
{
cwd: worktreePath,
maxBuffer: 10 * 1024 * 1024,
}
);
const result = await execAsync(`git diff HEAD -- "${filePath}"`, {
cwd: worktreePath,
maxBuffer: 10 * 1024 * 1024,
});
diff = result.stdout;
}

res.json({ success: true, diff, filePath });
} catch (innerError) {
logError(innerError, "Worktree file diff failed");
res.json({ success: true, diff: "", filePath });
logError(innerError, 'Worktree file diff failed');
res.json({ success: true, diff: '', filePath });
}
} catch (error) {
logError(error, "Get worktree file diff failed");
logError(error, 'Get worktree file diff failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,12 @@
* POST /info endpoint - Get worktree info
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError, normalizePath } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError, normalizePath } from '../common.js';

const execAsync = promisify(exec);

@@ -20,20 +20,18 @@ export function createInfoHandler() {
};

if (!projectPath || !featureId) {
res
.status(400)
.json({
success: false,
error: "projectPath and featureId required",
});
res.status(400).json({
success: false,
error: 'projectPath and featureId required',
});
return;
}

// Check if worktree exists (git worktrees are stored in project directory)
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);
try {
await fs.access(worktreePath);
const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
await secureFs.access(worktreePath);
const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
cwd: worktreePath,
});
res.json({
@@ -45,7 +43,7 @@ export function createInfoHandler() {
res.json({ success: true, worktreePath: null, branchName: null });
}
} catch (error) {
logError(error, "Get worktree info failed");
logError(error, 'Get worktree info failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -2,12 +2,12 @@
* POST /init-git endpoint - Initialize a git repository in a directory
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { existsSync } from "fs";
import { join } from "path";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as secureFs from '../../../lib/secure-fs.js';
import { join } from 'path';
import { getErrorMessage, logError } from '../common.js';

const execAsync = promisify(exec);

@@ -21,39 +21,42 @@ export function createInitGitHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
error: "projectPath required",
error: 'projectPath required',
});
return;
}

// Check if .git already exists
const gitDirPath = join(projectPath, ".git");
if (existsSync(gitDirPath)) {
const gitDirPath = join(projectPath, '.git');
try {
await secureFs.access(gitDirPath);
// .git exists
res.json({
success: true,
result: {
initialized: false,
message: "Git repository already exists",
message: 'Git repository already exists',
},
});
return;
} catch {
// .git doesn't exist, continue with initialization
}

// Initialize git and create an initial empty commit
await execAsync(
`git init && git commit --allow-empty -m "Initial commit"`,
{ cwd: projectPath }
);
await execAsync(`git init && git commit --allow-empty -m "Initial commit"`, {
cwd: projectPath,
});

res.json({
success: true,
result: {
initialized: true,
message: "Git repository initialized with initial commit",
message: 'Git repository initialized with initial commit',
},
});
} catch (error) {
logError(error, "Init git failed");
logError(error, 'Init git failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -5,12 +5,13 @@
* Does NOT include tracked branches - only real worktrees with separate directories.
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { existsSync } from "fs";
import { isGitRepo, getErrorMessage, logError, normalizePath } from "../common.js";
import { readAllWorktreeMetadata, type WorktreePRInfo } from "../../../lib/worktree-metadata.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as secureFs from '../../../lib/secure-fs.js';
import { isGitRepo } from '@automaker/git-utils';
import { getErrorMessage, logError, normalizePath } from '../common.js';
import { readAllWorktreeMetadata, type WorktreePRInfo } from '../../../lib/worktree-metadata.js';

const execAsync = promisify(exec);

@@ -27,10 +28,10 @@ interface WorktreeInfo {

async function getCurrentBranch(cwd: string): Promise<string> {
try {
const { stdout } = await execAsync("git branch --show-current", { cwd });
const { stdout } = await execAsync('git branch --show-current', { cwd });
return stdout.trim();
} catch {
return "";
return '';
}
}

@@ -43,7 +44,7 @@ export function createListHandler() {
};

if (!projectPath) {
res.status(400).json({ success: false, error: "projectPath required" });
res.status(400).json({ success: false, error: 'projectPath required' });
return;
}

@@ -56,28 +57,35 @@ export function createListHandler() {
const currentBranch = await getCurrentBranch(projectPath);

// Get actual worktrees from git
const { stdout } = await execAsync("git worktree list --porcelain", {
const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: projectPath,
});

const worktrees: WorktreeInfo[] = [];
const removedWorktrees: Array<{ path: string; branch: string }> = [];
const lines = stdout.split("\n");
const lines = stdout.split('\n');
let current: { path?: string; branch?: string } = {};
let isFirst = true;

// First pass: detect removed worktrees
for (const line of lines) {
if (line.startsWith("worktree ")) {
if (line.startsWith('worktree ')) {
current.path = normalizePath(line.slice(9));
} else if (line.startsWith("branch ")) {
current.branch = line.slice(7).replace("refs/heads/", "");
} else if (line === "") {
} else if (line.startsWith('branch ')) {
current.branch = line.slice(7).replace('refs/heads/', '');
} else if (line === '') {
if (current.path && current.branch) {
const isMainWorktree = isFirst;
// Check if the worktree directory actually exists
// Skip checking/pruning the main worktree (projectPath itself)
if (!isMainWorktree && !existsSync(current.path)) {
let worktreeExists = false;
try {
await secureFs.access(current.path);
worktreeExists = true;
} catch {
worktreeExists = false;
}
if (!isMainWorktree && !worktreeExists) {
// Worktree directory doesn't exist - it was manually deleted
removedWorktrees.push({
path: current.path,
@@ -102,7 +110,7 @@ export function createListHandler() {
// Prune removed worktrees from git (only if any were detected)
if (removedWorktrees.length > 0) {
try {
await execAsync("git worktree prune", { cwd: projectPath });
await execAsync('git worktree prune', { cwd: projectPath });
} catch {
// Prune failed, but we'll still report the removed worktrees
}
@@ -115,13 +123,12 @@ export function createListHandler() {
if (includeDetails) {
for (const worktree of worktrees) {
try {
const { stdout: statusOutput } = await execAsync(
"git status --porcelain",
{ cwd: worktree.path }
);
const { stdout: statusOutput } = await execAsync('git status --porcelain', {
cwd: worktree.path,
});
const changedFiles = statusOutput
.trim()
.split("\n")
.split('\n')
.filter((line) => line.trim());
worktree.hasChanges = changedFiles.length > 0;
worktree.changedFilesCount = changedFiles.length;
@@ -140,13 +147,13 @@ export function createListHandler() {
}
}

res.json({
success: true,
res.json({
success: true,
worktrees,
removedWorktrees: removedWorktrees.length > 0 ? removedWorktrees : undefined,
});
} catch (error) {
logError(error, "List worktrees failed");
logError(error, 'List worktrees failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -5,8 +5,8 @@
* any migration since .automaker is now stored in the project directory.
*/

import type { Request, Response } from "express";
import { getAutomakerDir } from "../../../lib/automaker-paths.js";
import type { Request, Response } from 'express';
import { getAutomakerDir } from '@automaker/platform';

export function createMigrateHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -15,7 +15,7 @@ export function createMigrateHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
error: "projectPath is required",
error: 'projectPath is required',
});
return;
}
@@ -25,7 +25,7 @@ export function createMigrateHandler() {
res.json({
success: true,
migrated: false,
message: "No migration needed - .automaker is stored in project directory",
message: 'No migration needed - .automaker is stored in project directory',
path: automakerDir,
});
};

@@ -2,12 +2,12 @@
* POST /status endpoint - Get worktree status
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import * as secureFs from '../../../lib/secure-fs.js';
import { getErrorMessage, logError } from '../common.js';

const execAsync = promisify(exec);

@@ -20,53 +20,50 @@ export function createStatusHandler() {
};

if (!projectPath || !featureId) {
res
.status(400)
.json({
success: false,
error: "projectPath and featureId required",
});
res.status(400).json({
success: false,
error: 'projectPath and featureId required',
});
return;
}

// Git worktrees are stored in project directory
const worktreePath = path.join(projectPath, ".worktrees", featureId);
const worktreePath = path.join(projectPath, '.worktrees', featureId);

try {
await fs.access(worktreePath);
const { stdout: status } = await execAsync("git status --porcelain", {
await secureFs.access(worktreePath);
const { stdout: status } = await execAsync('git status --porcelain', {
cwd: worktreePath,
});
const files = status
.split("\n")
.split('\n')
.filter(Boolean)
.map((line) => line.slice(3));
const { stdout: diffStat } = await execAsync("git diff --stat", {
const { stdout: diffStat } = await execAsync('git diff --stat', {
cwd: worktreePath,
});
const { stdout: logOutput } = await execAsync('git log --oneline -5 --format="%h %s"', {
cwd: worktreePath,
});
const { stdout: logOutput } = await execAsync(
'git log --oneline -5 --format="%h %s"',
{ cwd: worktreePath }
);

res.json({
success: true,
modifiedFiles: files.length,
files,
diffStat: diffStat.trim(),
recentCommits: logOutput.trim().split("\n").filter(Boolean),
recentCommits: logOutput.trim().split('\n').filter(Boolean),
});
} catch {
res.json({
success: true,
modifiedFiles: 0,
files: [],
diffStat: "",
diffStat: '',
recentCommits: [],
});
}
} catch (error) {
logError(error, "Get worktree status failed");
logError(error, 'Get worktree status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};