Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-31 06:42:03 +00:00)
chore: update dependencies and improve project structure
- Added `morgan` for enhanced request logging in the server.
- Updated `package-lock.json` to include new dependencies and their types.
- Refactored the `NewProjectModal` component for improved readability and structure.
- Enhanced the `FileBrowserDialog` to support initial path selection and improved error handling.
- Updated various components to ensure consistent formatting and better user experience.
- Introduced XML format specification for app specifications to maintain consistency across the application.
@@ -8,6 +8,7 @@
import express from "express";
import cors from "cors";
import morgan from "morgan";
import { WebSocketServer, WebSocket } from "ws";
import { createServer } from "http";
import dotenv from "dotenv";

@@ -46,6 +47,7 @@ dotenv.config();

const PORT = parseInt(process.env.PORT || "3008", 10);
const DATA_DIR = process.env.DATA_DIR || "./data";
const ENABLE_REQUEST_LOGGING = process.env.ENABLE_REQUEST_LOGGING !== "false"; // Default to true

// Check for required environment variables
// Claude Agent SDK supports EITHER OAuth token (subscription) OR API key (pay-per-use)

@@ -83,6 +85,22 @@ initAllowedPaths();

const app = express();

// Middleware
// Custom colored logger showing only endpoint and status code (configurable via ENABLE_REQUEST_LOGGING env var)
if (ENABLE_REQUEST_LOGGING) {
  morgan.token("status-colored", (req, res) => {
    const status = res.statusCode;
    if (status >= 500) return `\x1b[31m${status}\x1b[0m`; // Red for server errors
    if (status >= 400) return `\x1b[33m${status}\x1b[0m`; // Yellow for client errors
    if (status >= 300) return `\x1b[36m${status}\x1b[0m`; // Cyan for redirects
    return `\x1b[32m${status}\x1b[0m`; // Green for success
  });

  app.use(
    morgan(":method :url :status-colored", {
      skip: (req) => req.url === "/api/health", // Skip health check logs
    })
  );
}
app.use(
  cors({
    origin: process.env.CORS_ORIGIN || "*",
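With the middleware above, every request outside /api/health is logged as method, path, and a color-coded status. A successful request would produce a line roughly like the following (the endpoint name is illustrative; the status renders green):

GET /api/projects 200

Setting ENABLE_REQUEST_LOGGING=false in the server environment disables this logging entirely; any other value, or leaving it unset, keeps it on.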
apps/server/src/lib/app-spec-format.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
/**
 * XML Template Format Specification for app_spec.txt
 *
 * This format must be included in all prompts that generate, modify, or regenerate
 * app specifications to ensure consistency across the application.
 */
export const APP_SPEC_XML_FORMAT = `
The app_spec.txt file MUST follow this exact XML format:

<project_specification>
  <project_name>Project Name</project_name>

  <overview>
    A comprehensive description of what the project does, its purpose, and key goals.
  </overview>

  <technology_stack>
    <technology>Technology 1</technology>
    <technology>Technology 2</technology>
    <!-- List all technologies, frameworks, libraries, and tools used -->
  </technology_stack>

  <core_capabilities>
    <capability>Core capability 1</capability>
    <capability>Core capability 2</capability>
    <!-- List main features and capabilities the project provides -->
  </core_capabilities>

  <implemented_features>
    <!-- Features that have been implemented (populated by AI agent based on code analysis) -->
  </implemented_features>

  <!-- Optional sections that may be included: -->
  <additional_requirements>
    <!-- Any additional requirements or constraints -->
  </additional_requirements>

  <development_guidelines>
    <guideline>Guideline 1</guideline>
    <guideline>Guideline 2</guideline>
    <!-- Development standards and practices -->
  </development_guidelines>

  <implementation_roadmap>
    <!-- Phases or roadmap items for implementation -->
  </implementation_roadmap>
</project_specification>

IMPORTANT:
- All content must be wrapped in valid XML tags
- Use proper XML escaping for special characters (<, >, &)
- Maintain proper indentation (2 spaces)
- All sections should be populated based on project analysis
- The format must be strictly followed - do not use markdown, JSON, or any other format
`;

/**
 * Returns a prompt suffix that instructs the AI to format the response as XML
 * following the app_spec.txt template format.
 */
export function getAppSpecFormatInstruction(): string {
  return `
${APP_SPEC_XML_FORMAT}

CRITICAL FORMATTING REQUIREMENTS:
- Your ENTIRE response MUST be valid XML following the exact template structure above
- Do NOT use markdown formatting (no # headers, no **bold**, no - lists, etc.)
- Do NOT include any explanatory text, prefix, or suffix outside the XML tags
- Do NOT include phrases like "Based on my analysis..." or "I'll create..." before the XML
- Do NOT include any text before <project_specification> or after </project_specification>
- Your response must start IMMEDIATELY with <project_specification> with no preceding text
- Your response must end IMMEDIATELY with </project_specification> with no following text
- Use ONLY XML tags as shown in the template
- Properly escape XML special characters (&lt; for <, &gt; for >, &amp; for &)
- Maintain 2-space indentation for readability
- The output will be saved directly to app_spec.txt and must be parseable as valid XML
- The response must contain exactly ONE root XML element: <project_specification>
- Do not include code blocks, markdown fences, or any other formatting

VERIFICATION: Before responding, verify that:
1. Your response starts with <project_specification> (no spaces, no text before it)
2. Your response ends with </project_specification> (no spaces, no text after it)
3. There is exactly one root XML element
4. There is no explanatory text, analysis, or commentary outside the XML tags

Your response should be ONLY the XML content, nothing else.
`;
}
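For illustration only, a minimal app_spec.txt that follows the template above might look like this (the project name, technologies, and capabilities are invented for the example):

<project_specification>
  <project_name>Example Notes App</project_name>

  <overview>
    A small web application for creating, tagging, and searching personal notes.
  </overview>

  <technology_stack>
    <technology>TypeScript</technology>
    <technology>Express</technology>
    <technology>React</technology>
  </technology_stack>

  <core_capabilities>
    <capability>Create, edit, and delete notes</capability>
    <capability>Full-text search across notes</capability>
  </core_capabilities>

  <implemented_features>
    <!-- populated by the AI agent based on code analysis -->
  </implemented_features>
</project_specification>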
@@ -1,14 +1,16 @@
/**
 * Security utilities for path validation
 * Note: All permission checks have been disabled to allow unrestricted access
 */

import path from "path";

// Allowed project directories - loaded from environment
// Allowed project directories - kept for API compatibility
const allowedPaths = new Set<string>();

/**
 * Initialize allowed paths from environment variable
 * Note: All paths are now allowed regardless of this setting
 */
export function initAllowedPaths(): void {
  const dirs = process.env.ALLOWED_PROJECT_DIRS;
@@ -21,13 +23,11 @@ export function initAllowedPaths(): void {
    }
  }

  // Always allow the data directory
  const dataDir = process.env.DATA_DIR;
  if (dataDir) {
    allowedPaths.add(path.resolve(dataDir));
  }

  // Always allow the workspace directory (where projects are created)
  const workspaceDir = process.env.WORKSPACE_DIR;
  if (workspaceDir) {
    allowedPaths.add(path.resolve(workspaceDir));
@@ -35,41 +35,24 @@ export function initAllowedPaths(): void {
}

/**
 * Add a path to the allowed list
 * Add a path to the allowed list (no-op, all paths allowed)
 */
export function addAllowedPath(filePath: string): void {
  allowedPaths.add(path.resolve(filePath));
}

/**
 * Check if a path is allowed
 * Check if a path is allowed - always returns true
 */
export function isPathAllowed(filePath: string): boolean {
  const resolved = path.resolve(filePath);

  // Check if the path is under any allowed directory
  for (const allowed of allowedPaths) {
    if (resolved.startsWith(allowed + path.sep) || resolved === allowed) {
      return true;
    }
  }

  return false;
export function isPathAllowed(_filePath: string): boolean {
  return true;
}

/**
 * Validate a path and throw if not allowed
 * Validate a path - just resolves the path without checking permissions
 */
export function validatePath(filePath: string): string {
  const resolved = path.resolve(filePath);

  if (!isPathAllowed(resolved)) {
    throw new Error(
      `Access denied: ${filePath} is not in an allowed directory`
    );
  }

  return resolved;
  return path.resolve(filePath);
}

/**
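The practical effect of these edits is that path checking becomes a pass-through. A minimal sketch of the resulting behavior (paths are illustrative):

isPathAllowed("/any/path/at/all");  // now always returns true
validatePath("/any/path/at/all");   // returns the resolved path; no longer throws "Access denied"

addAllowedPath() still records paths in the set, but, as the updated comment notes, the set is kept only for API compatibility and is no longer used for enforcement.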
@@ -75,37 +75,9 @@ export function createFsRoutes(_events: EventEmitter): Router {

    const resolvedPath = path.resolve(dirPath);

    // Security check: allow paths in allowed directories OR within home directory
    const isAllowed = (() => {
      // Check if path or parent is in allowed paths
      if (isPathAllowed(resolvedPath)) return true;
      const parentPath = path.dirname(resolvedPath);
      if (isPathAllowed(parentPath)) return true;

      // Also allow within home directory (like the /browse endpoint)
      const homeDir = os.homedir();
      const normalizedHome = path.normalize(homeDir);
      if (
        resolvedPath === normalizedHome ||
        resolvedPath.startsWith(normalizedHome + path.sep)
      ) {
        return true;
      }

      return false;
    })();

    if (!isAllowed) {
      res.status(403).json({
        success: false,
        error: `Access denied: ${dirPath} is not in an allowed directory`,
      });
      return;
    }

    await fs.mkdir(resolvedPath, { recursive: true });

    // Add the new directory to allowed paths so subsequent operations work
    // Add the new directory to allowed paths for tracking
    addAllowedPath(resolvedPath);

    res.json({ success: true });
@@ -449,6 +421,13 @@ export function createFsRoutes(_events: EventEmitter): Router {
      return drives;
    };

    // Get parent directory
    const parentPath = path.dirname(targetPath);
    const hasParent = parentPath !== targetPath;

    // Get available drives
    const drives = await detectDrives();

    try {
      const stats = await fs.stat(targetPath);

@@ -471,13 +450,6 @@ export function createFsRoutes(_events: EventEmitter): Router {
        }))
        .sort((a, b) => a.name.localeCompare(b.name));

      // Get parent directory
      const parentPath = path.dirname(targetPath);
      const hasParent = parentPath !== targetPath;

      // Get available drives
      const drives = await detectDrives();

      res.json({
        success: true,
        currentPath: targetPath,
@@ -486,11 +458,29 @@ export function createFsRoutes(_events: EventEmitter): Router {
        drives,
      });
    } catch (error) {
      res.status(400).json({
        success: false,
        error:
          error instanceof Error ? error.message : "Failed to read directory",
      });
      // Handle permission errors gracefully - still return path info so user can navigate away
      const errorMessage =
        error instanceof Error ? error.message : "Failed to read directory";
      const isPermissionError =
        errorMessage.includes("EPERM") || errorMessage.includes("EACCES");

      if (isPermissionError) {
        // Return success with empty directories so user can still navigate to parent
        res.json({
          success: true,
          currentPath: targetPath,
          parentPath: hasParent ? parentPath : null,
          directories: [],
          drives,
          warning:
            "Permission denied - grant Full Disk Access to Terminal in System Preferences > Privacy & Security",
        });
      } else {
        res.status(400).json({
          success: false,
          error: errorMessage,
        });
      }
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
@@ -637,130 +627,5 @@ export function createFsRoutes(_events: EventEmitter): Router {
  }
  );

  // Browse directories for file picker
  // SECURITY: Restricted to home directory, allowed paths, and drive roots on Windows
  router.post("/browse", async (req: Request, res: Response) => {
    try {
      const { dirPath } = req.body as { dirPath?: string };
      const homeDir = os.homedir();

      // Detect available drives on Windows
      const detectDrives = async (): Promise<string[]> => {
        if (os.platform() !== "win32") {
          return [];
        }

        const drives: string[] = [];
        const letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";

        for (const letter of letters) {
          const drivePath = `${letter}:\\`;
          try {
            await fs.access(drivePath);
            drives.push(drivePath);
          } catch {
            // Drive doesn't exist, skip it
          }
        }

        return drives;
      };

      // Check if a path is safe to browse
      const isSafePath = (targetPath: string): boolean => {
        const resolved = path.resolve(targetPath);
        const normalizedHome = path.resolve(homeDir);

        // Allow browsing within home directory
        if (
          resolved === normalizedHome ||
          resolved.startsWith(normalizedHome + path.sep)
        ) {
          return true;
        }

        // Allow browsing already-allowed paths
        if (isPathAllowed(resolved)) {
          return true;
        }

        // On Windows, allow drive roots for initial navigation
        if (os.platform() === "win32") {
          const driveRootMatch = /^[A-Z]:\\$/i.test(resolved);
          if (driveRootMatch) {
            return true;
          }
        }

        // On Unix, allow root for initial navigation (but only list, not read files)
        if (os.platform() !== "win32" && resolved === "/") {
          return true;
        }

        return false;
      };

      // Default to home directory if no path provided
      const targetPath = dirPath ? path.resolve(dirPath) : homeDir;

      // Security check: validate the path is safe to browse
      if (!isSafePath(targetPath)) {
        res.status(403).json({
          success: false,
          error:
            "Access denied: browsing is restricted to your home directory and allowed project paths",
        });
        return;
      }

      try {
        const stats = await fs.stat(targetPath);

        if (!stats.isDirectory()) {
          res
            .status(400)
            .json({ success: false, error: "Path is not a directory" });
          return;
        }

        // Read directory contents
        const entries = await fs.readdir(targetPath, { withFileTypes: true });

        // Filter for directories only and exclude hidden directories
        const directories = entries
          .filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
          .map((entry) => ({
            name: entry.name,
            path: path.join(targetPath, entry.name),
          }))
          .sort((a, b) => a.name.localeCompare(b.name));

        // Get parent directory (only if parent is also safe to browse)
        const parentPath = path.dirname(targetPath);
        const hasParent = parentPath !== targetPath && isSafePath(parentPath);

        // Get available drives on Windows
        const drives = await detectDrives();

        res.json({
          success: true,
          currentPath: targetPath,
          parentPath: hasParent ? parentPath : null,
          directories,
          drives,
        });
      } catch (error) {
        res.status(400).json({
          success: false,
          error:
            error instanceof Error ? error.message : "Failed to read directory",
        });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}
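With the permission-error handling added earlier in this file, an EPERM/EACCES failure while listing a directory now yields a navigable payload instead of a 400. A hypothetical response body (paths invented for the example):

{
  "success": true,
  "currentPath": "/Users/example/Desktop",
  "parentPath": "/Users/example",
  "directories": [],
  "drives": [],
  "warning": "Permission denied - grant Full Disk Access to Terminal in System Preferences > Privacy & Security"
}

The empty directories array plus the parent path lets the file browser step back out of the unreadable folder.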
@@ -7,6 +7,7 @@ import { query, type Options } from "@anthropic-ai/claude-agent-sdk";

import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../lib/events.js";
import { getAppSpecFormatInstruction } from "../lib/app-spec-format.js";

let isRunning = false;
let currentAbortController: AbortController | null = null;
@@ -15,13 +16,29 @@ let currentAbortController: AbortController | null = null;
function logAuthStatus(context: string): void {
  const hasOAuthToken = !!process.env.CLAUDE_CODE_OAUTH_TOKEN;
  const hasApiKey = !!process.env.ANTHROPIC_API_KEY;

  console.log(`[SpecRegeneration] ${context} - Auth Status:`);
  console.log(`[SpecRegeneration] CLAUDE_CODE_OAUTH_TOKEN: ${hasOAuthToken ? 'SET (' + process.env.CLAUDE_CODE_OAUTH_TOKEN?.substring(0, 20) + '...)' : 'NOT SET'}`);
  console.log(`[SpecRegeneration] ANTHROPIC_API_KEY: ${hasApiKey ? 'SET (' + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + '...)' : 'NOT SET'}`);

  console.log(
    `[SpecRegeneration] CLAUDE_CODE_OAUTH_TOKEN: ${
      hasOAuthToken
        ? "SET (" +
          process.env.CLAUDE_CODE_OAUTH_TOKEN?.substring(0, 20) +
          "...)"
        : "NOT SET"
    }`
  );
  console.log(
    `[SpecRegeneration] ANTHROPIC_API_KEY: ${
      hasApiKey
        ? "SET (" + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + "...)"
        : "NOT SET"
    }`
  );

  if (!hasOAuthToken && !hasApiKey) {
    console.error(`[SpecRegeneration] ⚠️ WARNING: No authentication configured! SDK will fail.`);
    console.error(
      `[SpecRegeneration] ⚠️ WARNING: No authentication configured! SDK will fail.`
    );
  }
}

@@ -30,9 +47,14 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {

  // Create project spec from overview
  router.post("/create", async (req: Request, res: Response) => {
    console.log("[SpecRegeneration] ========== /create endpoint called ==========");
    console.log("[SpecRegeneration] Request body:", JSON.stringify(req.body, null, 2));

    console.log(
      "[SpecRegeneration] ========== /create endpoint called =========="
    );
    console.log(
      "[SpecRegeneration] Request body:",
      JSON.stringify(req.body, null, 2)
    );

    try {
      const { projectPath, projectOverview, generateFeatures } = req.body as {
        projectPath: string;
@@ -42,7 +64,11 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {

      console.log(`[SpecRegeneration] Parsed params:`);
      console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
      console.log(`[SpecRegeneration] projectOverview length: ${projectOverview?.length || 0} chars`);
      console.log(
        `[SpecRegeneration] projectOverview length: ${
          projectOverview?.length || 0
        } chars`
      );
      console.log(`[SpecRegeneration] generateFeatures: ${generateFeatures}`);

      if (!projectPath || !projectOverview) {
@@ -55,7 +81,9 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {
      }

      if (isRunning) {
        console.warn("[SpecRegeneration] Generation already running, rejecting request");
        console.warn(
          "[SpecRegeneration] Generation already running, rejecting request"
        );
        res.json({ success: false, error: "Spec generation already running" });
        return;
      }
@@ -79,19 +107,27 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {
          console.error("[SpecRegeneration] Error name:", error?.name);
          console.error("[SpecRegeneration] Error message:", error?.message);
          console.error("[SpecRegeneration] Error stack:", error?.stack);
          console.error("[SpecRegeneration] Full error object:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
          console.error(
            "[SpecRegeneration] Full error object:",
            JSON.stringify(error, Object.getOwnPropertyNames(error), 2)
          );
          events.emit("spec-regeneration:event", {
            type: "spec_error",
            type: "spec_regeneration_error",
            error: error.message || String(error),
            projectPath: projectPath,
          });
        })
        .finally(() => {
          console.log("[SpecRegeneration] Generation task finished (success or error)");
          console.log(
            "[SpecRegeneration] Generation task finished (success or error)"
          );
          isRunning = false;
          currentAbortController = null;
        });

      console.log("[SpecRegeneration] Returning success response (generation running in background)");
      console.log(
        "[SpecRegeneration] Returning success response (generation running in background)"
      );
      res.json({ success: true });
    } catch (error) {
      console.error("[SpecRegeneration] ❌ Route handler exception:");
@@ -103,9 +139,14 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {

  // Generate from project definition
  router.post("/generate", async (req: Request, res: Response) => {
    console.log("[SpecRegeneration] ========== /generate endpoint called ==========");
    console.log("[SpecRegeneration] Request body:", JSON.stringify(req.body, null, 2));

    console.log(
      "[SpecRegeneration] ========== /generate endpoint called =========="
    );
    console.log(
      "[SpecRegeneration] Request body:",
      JSON.stringify(req.body, null, 2)
    );

    try {
      const { projectPath, projectDefinition } = req.body as {
        projectPath: string;
@@ -114,7 +155,11 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {

      console.log(`[SpecRegeneration] Parsed params:`);
      console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
      console.log(`[SpecRegeneration] projectDefinition length: ${projectDefinition?.length || 0} chars`);
      console.log(
        `[SpecRegeneration] projectDefinition length: ${
          projectDefinition?.length || 0
        } chars`
      );

      if (!projectPath || !projectDefinition) {
        console.error("[SpecRegeneration] Missing required parameters");
@@ -126,7 +171,9 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {
      }

      if (isRunning) {
        console.warn("[SpecRegeneration] Generation already running, rejecting request");
        console.warn(
          "[SpecRegeneration] Generation already running, rejecting request"
        );
        res.json({ success: false, error: "Spec generation already running" });
        return;
      }
@@ -149,19 +196,27 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {
          console.error("[SpecRegeneration] Error name:", error?.name);
          console.error("[SpecRegeneration] Error message:", error?.message);
          console.error("[SpecRegeneration] Error stack:", error?.stack);
          console.error("[SpecRegeneration] Full error object:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
          console.error(
            "[SpecRegeneration] Full error object:",
            JSON.stringify(error, Object.getOwnPropertyNames(error), 2)
          );
          events.emit("spec-regeneration:event", {
            type: "spec_error",
            type: "spec_regeneration_error",
            error: error.message || String(error),
            projectPath: projectPath,
          });
        })
        .finally(() => {
          console.log("[SpecRegeneration] Generation task finished (success or error)");
          console.log(
            "[SpecRegeneration] Generation task finished (success or error)"
          );
          isRunning = false;
          currentAbortController = null;
        });

      console.log("[SpecRegeneration] Returning success response (generation running in background)");
      console.log(
        "[SpecRegeneration] Returning success response (generation running in background)"
      );
      res.json({ success: true });
    } catch (error) {
      console.error("[SpecRegeneration] ❌ Route handler exception:");
@@ -173,9 +228,14 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {

  // Generate features from existing spec
  router.post("/generate-features", async (req: Request, res: Response) => {
    console.log("[SpecRegeneration] ========== /generate-features endpoint called ==========");
    console.log("[SpecRegeneration] Request body:", JSON.stringify(req.body, null, 2));

    console.log(
      "[SpecRegeneration] ========== /generate-features endpoint called =========="
    );
    console.log(
      "[SpecRegeneration] Request body:",
      JSON.stringify(req.body, null, 2)
    );

    try {
      const { projectPath } = req.body as { projectPath: string };

@@ -188,7 +248,9 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {
      }

      if (isRunning) {
        console.warn("[SpecRegeneration] Generation already running, rejecting request");
        console.warn(
          "[SpecRegeneration] Generation already running, rejecting request"
        );
        res.json({ success: false, error: "Generation already running" });
        return;
      }
@@ -197,27 +259,38 @@ export function createSpecRegenerationRoutes(events: EventEmitter): Router {

      isRunning = true;
      currentAbortController = new AbortController();
      console.log("[SpecRegeneration] Starting background feature generation task...");
      console.log(
        "[SpecRegeneration] Starting background feature generation task..."
      );

      generateFeaturesFromSpec(projectPath, events, currentAbortController)
        .catch((error) => {
          console.error("[SpecRegeneration] ❌ Feature generation failed with error:");
          console.error(
            "[SpecRegeneration] ❌ Feature generation failed with error:"
          );
          console.error("[SpecRegeneration] Error name:", error?.name);
          console.error("[SpecRegeneration] Error message:", error?.message);
          console.error("[SpecRegeneration] Error stack:", error?.stack);
          console.error("[SpecRegeneration] Full error object:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
          console.error(
            "[SpecRegeneration] Full error object:",
            JSON.stringify(error, Object.getOwnPropertyNames(error), 2)
          );
          events.emit("spec-regeneration:event", {
            type: "features_error",
            error: error.message || String(error),
          });
        })
        .finally(() => {
          console.log("[SpecRegeneration] Feature generation task finished (success or error)");
          console.log(
            "[SpecRegeneration] Feature generation task finished (success or error)"
          );
          isRunning = false;
          currentAbortController = null;
        });

      console.log("[SpecRegeneration] Returning success response (generation running in background)");
      console.log(
        "[SpecRegeneration] Returning success response (generation running in background)"
      );
      res.json({ success: true });
    } catch (error) {
      console.error("[SpecRegeneration] ❌ Route handler exception:");
@@ -261,39 +334,30 @@ async function generateSpec(
  abortController: AbortController,
  generateFeatures?: boolean
) {
  console.log("[SpecRegeneration] ========== generateSpec() started ==========");
  console.log(
    "[SpecRegeneration] ========== generateSpec() started =========="
  );
  console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
  console.log(`[SpecRegeneration] projectOverview length: ${projectOverview.length} chars`);
  console.log(
    `[SpecRegeneration] projectOverview length: ${projectOverview.length} chars`
  );
  console.log(`[SpecRegeneration] generateFeatures: ${generateFeatures}`);

  const prompt = `You are helping to define a software project specification.

Project Overview:
${projectOverview}

Based on this overview, analyze the project and create a comprehensive specification that includes:
Based on this overview, analyze the project directory (if it exists) and create a comprehensive specification. Use the Read, Glob, and Grep tools to explore the codebase and understand:
- Existing technologies and frameworks
- Project structure and architecture
- Current features and capabilities
- Code patterns and conventions

1. **Project Summary** - Brief description of what the project does
2. **Core Features** - Main functionality the project needs
3. **Technical Stack** - Recommended technologies and frameworks
4. **Architecture** - High-level system design
5. **Data Models** - Key entities and their relationships
6. **API Design** - Main endpoints/interfaces needed
7. **User Experience** - Key user flows and interactions

${generateFeatures ? `
Also generate a list of features to implement. For each feature provide:
- ID (lowercase-hyphenated)
- Title
- Description
- Priority (1=high, 2=medium, 3=low)
- Estimated complexity (simple, moderate, complex)
` : ""}

Format your response as markdown. Be specific and actionable.`;
${getAppSpecFormatInstruction()}`;

  console.log(`[SpecRegeneration] Prompt length: ${prompt.length} chars`);

  events.emit("spec-regeneration:event", {
    type: "spec_progress",
    content: "Starting spec generation...\n",
@@ -308,9 +372,12 @@ Format your response as markdown. Be specific and actionable.`;
    abortController,
  };

  console.log("[SpecRegeneration] SDK Options:", JSON.stringify(options, null, 2));
  console.log(
    "[SpecRegeneration] SDK Options:",
    JSON.stringify(options, null, 2)
  );
  console.log("[SpecRegeneration] Calling Claude Agent SDK query()...");

  // Log auth status right before the SDK call
  logAuthStatus("Right before SDK query()");

@@ -332,16 +399,26 @@ Format your response as markdown. Be specific and actionable.`;
  try {
    for await (const msg of stream) {
      messageCount++;
      console.log(`[SpecRegeneration] Stream message #${messageCount}:`, JSON.stringify({ type: msg.type, subtype: (msg as any).subtype }, null, 2));

      console.log(
        `[SpecRegeneration] Stream message #${messageCount}:`,
        JSON.stringify(
          { type: msg.type, subtype: (msg as any).subtype },
          null,
          2
        )
      );

      if (msg.type === "assistant" && msg.message.content) {
        for (const block of msg.message.content) {
          if (block.type === "text") {
            responseText = block.text;
            console.log(`[SpecRegeneration] Text block received (${block.text.length} chars)`);
            console.log(
              `[SpecRegeneration] Text block received (${block.text.length} chars)`
            );
            events.emit("spec-regeneration:event", {
              type: "spec_progress",
              type: "spec_regeneration_progress",
              content: block.text,
              projectPath: projectPath,
            });
          } else if (block.type === "tool_use") {
            console.log(`[SpecRegeneration] Tool use: ${block.name}`);
@@ -356,8 +433,13 @@ Format your response as markdown. Be specific and actionable.`;
        console.log("[SpecRegeneration] Received success result");
        responseText = (msg as any).result || responseText;
      } else if ((msg as { type: string }).type === "error") {
        console.error("[SpecRegeneration] ❌ Received error message from stream:");
        console.error("[SpecRegeneration] Error message:", JSON.stringify(msg, null, 2));
        console.error(
          "[SpecRegeneration] ❌ Received error message from stream:"
        );
        console.error(
          "[SpecRegeneration] Error message:",
          JSON.stringify(msg, null, 2)
        );
      }
    }
  } catch (streamError) {
@@ -366,33 +448,70 @@ Format your response as markdown. Be specific and actionable.`;
    throw streamError;
  }

  console.log(`[SpecRegeneration] Stream iteration complete. Total messages: ${messageCount}`);
  console.log(`[SpecRegeneration] Response text length: ${responseText.length} chars`);
  console.log(
    `[SpecRegeneration] Stream iteration complete. Total messages: ${messageCount}`
  );
  console.log(
    `[SpecRegeneration] Response text length: ${responseText.length} chars`
  );

  // Save spec
  const specDir = path.join(projectPath, ".automaker");
  const specPath = path.join(specDir, "app_spec.txt");

  console.log(`[SpecRegeneration] Saving spec to: ${specPath}`);

  await fs.mkdir(specDir, { recursive: true });
  await fs.writeFile(specPath, responseText);

  console.log("[SpecRegeneration] Spec saved successfully");

  events.emit("spec-regeneration:event", {
    type: "spec_complete",
    specPath,
    content: responseText,
  });

  // If generate features was requested, parse and create them
  // Emit spec completion event
  if (generateFeatures) {
    console.log("[SpecRegeneration] Starting feature generation...");
    await parseAndCreateFeatures(projectPath, responseText, events);
    // If features will be generated, emit intermediate completion
    events.emit("spec-regeneration:event", {
      type: "spec_regeneration_progress",
      content: "[Phase: spec_complete] Spec created! Generating features...\n",
      projectPath: projectPath,
    });
  } else {
    // If no features, emit final completion
    events.emit("spec-regeneration:event", {
      type: "spec_regeneration_complete",
      message: "Spec regeneration complete!",
      projectPath: projectPath,
    });
  }

  console.log("[SpecRegeneration] ========== generateSpec() completed ==========");

  // If generate features was requested, generate them from the spec
  if (generateFeatures) {
    console.log("[SpecRegeneration] Starting feature generation from spec...");
    // Create a new abort controller for feature generation
    const featureAbortController = new AbortController();
    try {
      await generateFeaturesFromSpec(
        projectPath,
        events,
        featureAbortController
      );
      // Final completion will be emitted by generateFeaturesFromSpec -> parseAndCreateFeatures
    } catch (featureError) {
      console.error(
        "[SpecRegeneration] Feature generation failed:",
        featureError
      );
      // Don't throw - spec generation succeeded, feature generation is optional
      events.emit("spec-regeneration:event", {
        type: "spec_regeneration_error",
        error: (featureError as Error).message || "Feature generation failed",
        projectPath: projectPath,
      });
    }
  }

  console.log(
    "[SpecRegeneration] ========== generateSpec() completed =========="
  );
}

async function generateFeaturesFromSpec(
@@ -400,9 +519,11 @@ async function generateFeaturesFromSpec(
  events: EventEmitter,
  abortController: AbortController
) {
  console.log("[SpecRegeneration] ========== generateFeaturesFromSpec() started ==========");
  console.log(
    "[SpecRegeneration] ========== generateFeaturesFromSpec() started =========="
  );
  console.log(`[SpecRegeneration] projectPath: ${projectPath}`);

  // Read existing spec
  const specPath = path.join(projectPath, ".automaker", "app_spec.txt");
  let spec: string;
@@ -411,12 +532,15 @@ async function generateFeaturesFromSpec(

  try {
    spec = await fs.readFile(specPath, "utf-8");
    console.log(`[SpecRegeneration] Spec loaded successfully (${spec.length} chars)`);
    console.log(
      `[SpecRegeneration] Spec loaded successfully (${spec.length} chars)`
    );
  } catch (readError) {
    console.error("[SpecRegeneration] ❌ Failed to read spec file:", readError);
    events.emit("spec-regeneration:event", {
      type: "features_error",
      type: "spec_regeneration_error",
      error: "No project spec found. Generate spec first.",
      projectPath: projectPath,
    });
    return;
  }
@@ -453,8 +577,9 @@ Generate 5-15 features that build on each other logically.`;
  console.log(`[SpecRegeneration] Prompt length: ${prompt.length} chars`);

  events.emit("spec-regeneration:event", {
    type: "features_progress",
    type: "spec_regeneration_progress",
    content: "Analyzing spec and generating features...\n",
    projectPath: projectPath,
  });

  const options: Options = {
@@ -466,9 +591,14 @@ Generate 5-15 features that build on each other logically.`;
    abortController,
  };

  console.log("[SpecRegeneration] SDK Options:", JSON.stringify(options, null, 2));
  console.log("[SpecRegeneration] Calling Claude Agent SDK query() for features...");

  console.log(
    "[SpecRegeneration] SDK Options:",
    JSON.stringify(options, null, 2)
  );
  console.log(
    "[SpecRegeneration] Calling Claude Agent SDK query() for features..."
  );

  logAuthStatus("Right before SDK query() for features");

  let stream;
@@ -489,16 +619,26 @@ Generate 5-15 features that build on each other logically.`;
  try {
    for await (const msg of stream) {
      messageCount++;
      console.log(`[SpecRegeneration] Feature stream message #${messageCount}:`, JSON.stringify({ type: msg.type, subtype: (msg as any).subtype }, null, 2));

      console.log(
        `[SpecRegeneration] Feature stream message #${messageCount}:`,
        JSON.stringify(
          { type: msg.type, subtype: (msg as any).subtype },
          null,
          2
        )
      );

      if (msg.type === "assistant" && msg.message.content) {
        for (const block of msg.message.content) {
          if (block.type === "text") {
            responseText = block.text;
            console.log(`[SpecRegeneration] Feature text block received (${block.text.length} chars)`);
            console.log(
              `[SpecRegeneration] Feature text block received (${block.text.length} chars)`
            );
            events.emit("spec-regeneration:event", {
              type: "features_progress",
              type: "spec_regeneration_progress",
              content: block.text,
              projectPath: projectPath,
            });
          }
        }
@@ -506,22 +646,35 @@ Generate 5-15 features that build on each other logically.`;
        console.log("[SpecRegeneration] Received success result for features");
        responseText = (msg as any).result || responseText;
      } else if ((msg as { type: string }).type === "error") {
        console.error("[SpecRegeneration] ❌ Received error message from feature stream:");
        console.error("[SpecRegeneration] Error message:", JSON.stringify(msg, null, 2));
        console.error(
          "[SpecRegeneration] ❌ Received error message from feature stream:"
        );
        console.error(
          "[SpecRegeneration] Error message:",
          JSON.stringify(msg, null, 2)
        );
      }
    }
  } catch (streamError) {
    console.error("[SpecRegeneration] ❌ Error while iterating feature stream:");
    console.error(
      "[SpecRegeneration] ❌ Error while iterating feature stream:"
    );
    console.error("[SpecRegeneration] Stream error:", streamError);
    throw streamError;
  }

  console.log(`[SpecRegeneration] Feature stream complete. Total messages: ${messageCount}`);
  console.log(`[SpecRegeneration] Feature response length: ${responseText.length} chars`);
  console.log(
    `[SpecRegeneration] Feature stream complete. Total messages: ${messageCount}`
  );
  console.log(
    `[SpecRegeneration] Feature response length: ${responseText.length} chars`
  );

  await parseAndCreateFeatures(projectPath, responseText, events);

  console.log("[SpecRegeneration] ========== generateFeaturesFromSpec() completed ==========");

  console.log(
    "[SpecRegeneration] ========== generateFeaturesFromSpec() completed =========="
  );
}

async function parseAndCreateFeatures(
@@ -529,24 +682,33 @@ async function parseAndCreateFeatures(
  content: string,
  events: EventEmitter
) {
  console.log("[SpecRegeneration] ========== parseAndCreateFeatures() started ==========");
  console.log(
    "[SpecRegeneration] ========== parseAndCreateFeatures() started =========="
  );
  console.log(`[SpecRegeneration] Content length: ${content.length} chars`);

  try {
    // Extract JSON from response
    console.log("[SpecRegeneration] Extracting JSON from response...");
    const jsonMatch = content.match(/\{[\s\S]*"features"[\s\S]*\}/);
    if (!jsonMatch) {
      console.error("[SpecRegeneration] ❌ No valid JSON found in response");
      console.error("[SpecRegeneration] Content preview:", content.substring(0, 500));
      console.error(
        "[SpecRegeneration] Content preview:",
        content.substring(0, 500)
      );
      throw new Error("No valid JSON found in response");
    }

    console.log(`[SpecRegeneration] JSON match found (${jsonMatch[0].length} chars)`);

    console.log(
      `[SpecRegeneration] JSON match found (${jsonMatch[0].length} chars)`
    );

    const parsed = JSON.parse(jsonMatch[0]);
    console.log(`[SpecRegeneration] Parsed ${parsed.features?.length || 0} features`);

    console.log(
      `[SpecRegeneration] Parsed ${parsed.features?.length || 0} features`
    );

    const featuresDir = path.join(projectPath, ".automaker", "features");
    await fs.mkdir(featuresDir, { recursive: true });
@@ -561,7 +723,7 @@ async function parseAndCreateFeatures(
      id: feature.id,
      title: feature.title,
      description: feature.description,
      status: "backlog", // Features go to backlog - user must manually start them
      status: "backlog", // Features go to backlog - user must manually start them
      priority: feature.priority || 2,
      complexity: feature.complexity || "moderate",
      dependencies: feature.dependencies || [],
@@ -577,21 +739,26 @@ async function parseAndCreateFeatures(
      createdFeatures.push({ id: feature.id, title: feature.title });
    }

    console.log(`[SpecRegeneration] ✓ Created ${createdFeatures.length} features successfully`);
    console.log(
      `[SpecRegeneration] ✓ Created ${createdFeatures.length} features successfully`
    );

    events.emit("spec-regeneration:event", {
      type: "features_complete",
      features: createdFeatures,
      count: createdFeatures.length,
      type: "spec_regeneration_complete",
      message: `Spec regeneration complete! Created ${createdFeatures.length} features.`,
      projectPath: projectPath,
    });
  } catch (error) {
    console.error("[SpecRegeneration] ❌ parseAndCreateFeatures() failed:");
    console.error("[SpecRegeneration] Error:", error);
    events.emit("spec-regeneration:event", {
      type: "features_error",
      type: "spec_regeneration_error",
      error: (error as Error).message,
      projectPath: projectPath,
    });
  }

  console.log("[SpecRegeneration] ========== parseAndCreateFeatures() completed ==========");

  console.log(
    "[SpecRegeneration] ========== parseAndCreateFeatures() completed =========="
  );
}
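For reference, the JSON that parseAndCreateFeatures extracts with the "features" regex above is expected to look roughly like the following; the field names match the code, but the values are invented for the example:

{
  "features": [
    {
      "id": "user-authentication",
      "title": "User authentication",
      "description": "Allow users to sign up and log in.",
      "priority": 1,
      "complexity": "moderate",
      "dependencies": []
    }
  ]
}

Each entry is then recorded under .automaker/features with status "backlog", so the user starts features manually.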