mirror of https://github.com/AutoMaker-Org/automaker.git
synced 2026-01-30 06:12:03 +00:00

Merge branch 'main' into feature/shared-packages
@@ -16,9 +16,11 @@ ANTHROPIC_API_KEY=sk-ant-...
# If set, all API requests must include X-API-Key header
AUTOMAKER_API_KEY=

# Restrict file operations to these directories (comma-separated)
# Important for security in multi-tenant environments
ALLOWED_PROJECT_DIRS=/home/user/projects,/var/www
# Root directory for projects and file operations
# If set, users can only create/open projects and files within this directory
# Recommended for sandboxed deployments (Docker, restricted environments)
# Example: ALLOWED_ROOT_DIRECTORY=/projects
ALLOWED_ROOT_DIRECTORY=

# CORS origin - which domains can access the API
# Use "*" for development, set specific origin for production
@@ -34,13 +36,6 @@ PORT=3008
# Data directory for sessions and metadata
DATA_DIR=./data

# ============================================
# OPTIONAL - Additional AI Providers
# ============================================

# Google API key (for future Gemini support)
GOOGLE_API_KEY=

# ============================================
# OPTIONAL - Terminal Access
# ============================================
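For reference, the sandboxing that ALLOWED_ROOT_DIRECTORY enables boils down to a path-containment check. A minimal TypeScript sketch of that rule (an illustration only; the real checks live in @automaker/platform's isPathAllowed/validatePath, whose implementation is not part of this diff):

import path from "path";

// Hypothetical helper: true when `candidate` resolves inside `root`.
// Illustrates the containment rule only; not the @automaker/platform code.
export function isInsideRoot(root: string, candidate: string): boolean {
  const resolvedRoot = path.resolve(root);
  const resolvedCandidate = path.resolve(candidate);
  const relative = path.relative(resolvedRoot, resolvedCandidate);
  // Outside the root if the relative path escapes ("..") or is absolute.
  return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
}

// With ALLOWED_ROOT_DIRECTORY=/projects:
//   isInsideRoot("/projects", "/projects/my-app") -> true
//   isInsideRoot("/projects", "/etc/passwd")      -> false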
@@ -26,6 +26,14 @@ RUN npm run build --workspace=apps/server
# Production stage
FROM node:20-alpine

# Install git, curl, and GitHub CLI (pinned version for reproducible builds)
RUN apk add --no-cache git curl && \
    GH_VERSION="2.63.2" && \
    curl -L "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_amd64.tar.gz" -o gh.tar.gz && \
    tar -xzf gh.tar.gz && \
    mv "gh_${GH_VERSION}_linux_amd64/bin/gh" /usr/local/bin/gh && \
    rm -rf gh.tar.gz "gh_${GH_VERSION}_linux_amd64"

WORKDIR /app

# Create non-root user
apps/server/src/lib/secure-fs.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
/**
 * Re-export secure file system utilities from @automaker/platform
 * This file exists for backward compatibility with existing imports
 */

import { secureFs } from "@automaker/platform";

export const {
  access,
  readFile,
  writeFile,
  mkdir,
  readdir,
  stat,
  rm,
  unlink,
  copyFile,
  appendFile,
  rename,
  lstat,
  joinPath,
  resolvePath,
} = secureFs;
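The services changed later in this diff keep their call sites essentially unchanged and only swap the import, e.g. (sketch; readSessionFile is an illustrative name, not a function from the codebase):

import * as secureFs from "../lib/secure-fs.js";

// Before the change this would have been fs.readFile from "fs/promises";
// through the wrapper, the path is validated before the read happens.
async function readSessionFile(sessionFile: string): Promise<unknown> {
  const data = (await secureFs.readFile(sessionFile, "utf-8")) as string;
  return JSON.parse(data);
}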
apps/server/src/middleware/validate-paths.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
/**
 * Middleware for validating path parameters against ALLOWED_ROOT_DIRECTORY
 * Provides a clean, reusable way to validate paths without repeating the same
 * try-catch block in every route handler
 */

import type { Request, Response, NextFunction } from "express";
import { validatePath, PathNotAllowedError } from "@automaker/platform";

/**
 * Creates a middleware that validates specified path parameters in req.body
 * @param paramNames - Names of parameters to validate (e.g., 'projectPath', 'worktreePath')
 * @example
 * router.post('/create', validatePathParams('projectPath'), handler);
 * router.post('/delete', validatePathParams('projectPath', 'worktreePath'), handler);
 * router.post('/send', validatePathParams('workingDirectory?', 'imagePaths[]'), handler);
 *
 * Special syntax:
 * - 'paramName?' - Optional parameter (only validated if present)
 * - 'paramName[]' - Array parameter (validates each element)
 */
export function validatePathParams(...paramNames: string[]) {
  return (req: Request, res: Response, next: NextFunction): void => {
    try {
      for (const paramName of paramNames) {
        // Handle optional parameters (paramName?)
        if (paramName.endsWith("?")) {
          const actualName = paramName.slice(0, -1);
          const value = req.body[actualName];
          if (value) {
            validatePath(value);
          }
          continue;
        }

        // Handle array parameters (paramName[])
        if (paramName.endsWith("[]")) {
          const actualName = paramName.slice(0, -2);
          const values = req.body[actualName];
          if (Array.isArray(values) && values.length > 0) {
            for (const value of values) {
              validatePath(value);
            }
          }
          continue;
        }

        // Handle regular parameters
        const value = req.body[paramName];
        if (value) {
          validatePath(value);
        }
      }

      next();
    } catch (error) {
      if (error instanceof PathNotAllowedError) {
        res.status(403).json({
          success: false,
          error: error.message,
        });
        return;
      }

      // Re-throw unexpected errors
      throw error;
    }
  };
}
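As a usage illustration (the route below is hypothetical; only the middleware and its 403 behavior come from this diff), a request whose body carries a path outside the allowed root is rejected before the handler runs:

import express from "express";
import { validatePathParams } from "./middleware/validate-paths.js";

const app = express();
app.use(express.json());

// Hypothetical route, for illustration only.
app.post(
  "/api/example/open",
  validatePathParams("projectPath", "imagePaths[]"),
  (req, res) => {
    // Reached only when every listed path passed validatePath().
    res.json({ success: true, projectPath: req.body.projectPath });
  }
);

// A body like { "projectPath": "/etc/passwd" } outside ALLOWED_ROOT_DIRECTORY
// would receive: HTTP 403 { "success": false, "error": "<PathNotAllowedError message>" }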
@@ -5,6 +5,7 @@
import { Router } from "express";
import { AgentService } from "../../services/agent-service.js";
import type { EventEmitter } from "../../lib/events.js";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createStartHandler } from "./routes/start.js";
import { createSendHandler } from "./routes/send.js";
import { createHistoryHandler } from "./routes/history.js";
@@ -18,8 +19,8 @@ export function createAgentRoutes(
): Router {
  const router = Router();

  router.post("/start", createStartHandler(agentService));
  router.post("/send", createSendHandler(agentService));
  router.post("/start", validatePathParams("workingDirectory?"), createStartHandler(agentService));
  router.post("/send", validatePathParams("workingDirectory?", "imagePaths[]"), createSendHandler(agentService));
  router.post("/history", createHistoryHandler(agentService));
  router.post("/stop", createStopHandler(agentService));
  router.post("/clear", createClearHandler(agentService));
@@ -6,7 +6,6 @@ import type { Request, Response } from "express";
import { AgentService } from "../../../services/agent-service.js";
import { createLogger } from "@automaker/utils";
import { getErrorMessage, logError } from "../common.js";

const logger = createLogger("Agent");

export function createSendHandler(agentService: AgentService) {
@@ -6,7 +6,6 @@ import type { Request, Response } from "express";
import { AgentService } from "../../../services/agent-service.js";
import { createLogger } from "@automaker/utils";
import { getErrorMessage, logError } from "../common.js";

const logger = createLogger("Agent");

export function createStartHandler(agentService: AgentService) {
@@ -6,6 +6,7 @@

import { Router } from "express";
import type { AutoModeService } from "../../services/auto-mode-service.js";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createStopFeatureHandler } from "./routes/stop-feature.js";
import { createStatusHandler } from "./routes/status.js";
import { createRunFeatureHandler } from "./routes/run-feature.js";
@@ -21,18 +22,19 @@ export function createAutoModeRoutes(autoModeService: AutoModeService): Router {
  const router = Router();

  router.post("/stop-feature", createStopFeatureHandler(autoModeService));
  router.post("/status", createStatusHandler(autoModeService));
  router.post("/run-feature", createRunFeatureHandler(autoModeService));
  router.post("/verify-feature", createVerifyFeatureHandler(autoModeService));
  router.post("/resume-feature", createResumeFeatureHandler(autoModeService));
  router.post("/context-exists", createContextExistsHandler(autoModeService));
  router.post("/analyze-project", createAnalyzeProjectHandler(autoModeService));
  router.post("/status", validatePathParams("projectPath?"), createStatusHandler(autoModeService));
  router.post("/run-feature", validatePathParams("projectPath"), createRunFeatureHandler(autoModeService));
  router.post("/verify-feature", validatePathParams("projectPath"), createVerifyFeatureHandler(autoModeService));
  router.post("/resume-feature", validatePathParams("projectPath"), createResumeFeatureHandler(autoModeService));
  router.post("/context-exists", validatePathParams("projectPath"), createContextExistsHandler(autoModeService));
  router.post("/analyze-project", validatePathParams("projectPath"), createAnalyzeProjectHandler(autoModeService));
  router.post(
    "/follow-up-feature",
    validatePathParams("projectPath", "imagePaths[]"),
    createFollowUpFeatureHandler(autoModeService)
  );
  router.post("/commit-feature", createCommitFeatureHandler(autoModeService));
  router.post("/approve-plan", createApprovePlanHandler(autoModeService));
  router.post("/commit-feature", validatePathParams("projectPath", "worktreePath?"), createCommitFeatureHandler(autoModeService));
  router.post("/approve-plan", validatePathParams("projectPath"), createApprovePlanHandler(autoModeService));

  return router;
}
@@ -4,6 +4,7 @@

import { Router } from "express";
import { FeatureLoader } from "../../services/feature-loader.js";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createListHandler } from "./routes/list.js";
import { createGetHandler } from "./routes/get.js";
import { createCreateHandler } from "./routes/create.js";
@@ -15,11 +16,11 @@ import { createGenerateTitleHandler } from "./routes/generate-title.js";
export function createFeaturesRoutes(featureLoader: FeatureLoader): Router {
  const router = Router();

  router.post("/list", createListHandler(featureLoader));
  router.post("/get", createGetHandler(featureLoader));
  router.post("/create", createCreateHandler(featureLoader));
  router.post("/update", createUpdateHandler(featureLoader));
  router.post("/delete", createDeleteHandler(featureLoader));
  router.post("/list", validatePathParams("projectPath"), createListHandler(featureLoader));
  router.post("/get", validatePathParams("projectPath"), createGetHandler(featureLoader));
  router.post("/create", validatePathParams("projectPath"), createCreateHandler(featureLoader));
  router.post("/update", validatePathParams("projectPath"), createUpdateHandler(featureLoader));
  router.post("/delete", validatePathParams("projectPath"), createDeleteHandler(featureLoader));
  router.post("/agent-output", createAgentOutputHandler(featureLoader));
  router.post("/generate-title", createGenerateTitleHandler());
@@ -5,7 +5,6 @@
import type { Request, Response } from "express";
import { FeatureLoader } from "../../../services/feature-loader.js";
import type { Feature } from "@automaker/types";
import { addAllowedPath } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createCreateHandler(featureLoader: FeatureLoader) {
@@ -17,18 +16,13 @@ export function createCreateHandler(featureLoader: FeatureLoader) {
    };

    if (!projectPath || !feature) {
      res
        .status(400)
        .json({
          success: false,
          error: "projectPath and feature are required",
        });
      res.status(400).json({
        success: false,
        error: "projectPath and feature are required",
      });
      return;
    }

    // Add project path to allowed paths
    addAllowedPath(projectPath);

    const created = await featureLoader.create(projectPath, feature);
    res.json({ success: true, feature: created });
  } catch (error) {
@@ -4,7 +4,6 @@

import type { Request, Response } from "express";
import { FeatureLoader } from "../../../services/feature-loader.js";
import { addAllowedPath } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createListHandler(featureLoader: FeatureLoader) {
@@ -19,9 +18,6 @@ export function createListHandler(featureLoader: FeatureLoader) {
      return;
    }

    // Add project path to allowed paths
    addAllowedPath(projectPath);

    const features = await featureLoader.getAll(projectPath);
    res.json({ success: true, features });
  } catch (error) {
@@ -6,6 +6,11 @@ import type { Request, Response } from "express";
import fs from "fs/promises";
import os from "os";
import path from "path";
import {
  getAllowedRootDirectory,
  isPathAllowed,
  PathNotAllowedError,
} from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createBrowseHandler() {
@@ -13,8 +18,14 @@
    try {
      const { dirPath } = req.body as { dirPath?: string };

      // Default to home directory if no path provided
      const targetPath = dirPath ? path.resolve(dirPath) : os.homedir();
      // Default to ALLOWED_ROOT_DIRECTORY if set, otherwise home directory
      const defaultPath = getAllowedRootDirectory() || os.homedir();
      const targetPath = dirPath ? path.resolve(dirPath) : defaultPath;

      // Validate that the path is allowed
      if (!isPathAllowed(targetPath)) {
        throw new PathNotAllowedError(dirPath || targetPath);
      }

      // Detect available drives on Windows
      const detectDrives = async (): Promise<string[]> => {
@@ -100,6 +111,12 @@ export function createBrowseHandler() {
        }
      }
    } catch (error) {
      // Path not allowed - return 403 Forbidden
      if (error instanceof PathNotAllowedError) {
        res.status(403).json({ success: false, error: getErrorMessage(error) });
        return;
      }

      logError(error, "Browse directories failed");
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
@@ -4,7 +4,7 @@

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "@automaker/platform";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createDeleteHandler() {
@@ -22,6 +22,12 @@ export function createDeleteHandler() {

    res.json({ success: true });
  } catch (error) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    logError(error, "Delete file failed");
    res.status(500).json({ success: false, error: getErrorMessage(error) });
  }
@@ -5,6 +5,7 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { isPathAllowed, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createExistsHandler() {
@@ -17,10 +18,13 @@ export function createExistsHandler() {
      return;
    }

    // For exists, we check but don't require the path to be pre-allowed
    // This allows the UI to validate user-entered paths
    const resolvedPath = path.resolve(filePath);

    // Validate that the path is allowed
    if (!isPathAllowed(resolvedPath)) {
      throw new PathNotAllowedError(filePath);
    }

    try {
      await fs.access(resolvedPath);
      res.json({ success: true, exists: true });
@@ -28,6 +32,12 @@ export function createExistsHandler() {
      res.json({ success: true, exists: false });
    }
  } catch (error) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    logError(error, "Check exists failed");
    res.status(500).json({ success: false, error: getErrorMessage(error) });
  }
@@ -6,7 +6,7 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "@automaker/platform";
import { isPathAllowed, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createMkdirHandler() {
@@ -21,12 +21,16 @@ export function createMkdirHandler() {

    const resolvedPath = path.resolve(dirPath);

    // Validate that the path is allowed
    if (!isPathAllowed(resolvedPath)) {
      throw new PathNotAllowedError(dirPath);
    }

    // Check if path already exists using lstat (doesn't follow symlinks)
    try {
      const stats = await fs.lstat(resolvedPath);
      // Path exists - if it's a directory or symlink, consider it success
      if (stats.isDirectory() || stats.isSymbolicLink()) {
        addAllowedPath(resolvedPath);
        res.json({ success: true });
        return;
      }
@@ -47,11 +51,14 @@ export function createMkdirHandler() {
    // Path doesn't exist, create it
    await fs.mkdir(resolvedPath, { recursive: true });

    // Add the new directory to allowed paths for tracking
    addAllowedPath(resolvedPath);

    res.json({ success: true });
  } catch (error: any) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    // Handle ELOOP specifically
    if (error.code === "ELOOP") {
      logError(error, "Create directory failed - symlink loop detected");
@@ -4,7 +4,7 @@

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "@automaker/platform";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

// Optional files that are expected to not exist in new projects
@@ -39,6 +39,12 @@ export function createReadHandler() {

    res.json({ success: true, content });
  } catch (error) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    // Don't log ENOENT errors for optional files (expected to be missing in new projects)
    const shouldLog = !(isENOENT(error) && isOptionalFile(req.body?.filePath || ""));
    if (shouldLog) {
@@ -4,7 +4,7 @@

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "@automaker/platform";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createReaddirHandler() {
@@ -28,6 +28,12 @@ export function createReaddirHandler() {

    res.json({ success: true, entries: result });
  } catch (error) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    logError(error, "Read directory failed");
    res.status(500).json({ success: false, error: getErrorMessage(error) });
  }
@@ -5,7 +5,6 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createResolveDirectoryHandler() {
@@ -30,7 +29,6 @@ export function createResolveDirectoryHandler() {
      const resolvedPath = path.resolve(directoryName);
      const stats = await fs.stat(resolvedPath);
      if (stats.isDirectory()) {
        addAllowedPath(resolvedPath);
        res.json({
          success: true,
          path: resolvedPath,
@@ -102,7 +100,6 @@ export function createResolveDirectoryHandler() {
      }

      // Found matching directory
      addAllowedPath(candidatePath);
      res.json({
        success: true,
        path: candidatePath,
@@ -5,7 +5,6 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import { getBoardDir } from "@automaker/platform";

@@ -43,9 +42,6 @@ export function createSaveBoardBackgroundHandler() {
      // Write file
      await fs.writeFile(filePath, buffer);

      // Add board directory to allowed paths
      addAllowedPath(boardDir);

      // Return the absolute path
      res.json({ success: true, path: filePath });
    } catch (error) {
@@ -5,7 +5,6 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";
import { getImagesDir } from "@automaker/platform";

@@ -45,9 +44,6 @@ export function createSaveImageHandler() {
      // Write file
      await fs.writeFile(filePath, buffer);

      // Add automaker directory to allowed paths
      addAllowedPath(imagesDir);

      // Return the absolute path
      res.json({ success: true, path: filePath });
    } catch (error) {
@@ -4,7 +4,7 @@

import type { Request, Response } from "express";
import fs from "fs/promises";
import { validatePath } from "@automaker/platform";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createStatHandler() {
@@ -30,6 +30,12 @@ export function createStatHandler() {
      },
    });
  } catch (error) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    logError(error, "Get file stats failed");
    res.status(500).json({ success: false, error: getErrorMessage(error) });
  }
@@ -5,7 +5,7 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath, isPathAllowed } from "@automaker/platform";
import { isPathAllowed } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createValidatePathHandler() {
@@ -31,9 +31,6 @@ export function createValidatePathHandler() {
      return;
    }

    // Add to allowed paths
    addAllowedPath(resolvedPath);

    res.json({
      success: true,
      path: resolvedPath,
@@ -5,7 +5,7 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { validatePath } from "@automaker/platform";
import { validatePath, PathNotAllowedError } from "@automaker/platform";
import { mkdirSafe } from "@automaker/utils";
import { getErrorMessage, logError } from "../common.js";

@@ -30,6 +30,12 @@ export function createWriteHandler() {

    res.json({ success: true });
  } catch (error) {
    // Path not allowed - return 403 Forbidden
    if (error instanceof PathNotAllowedError) {
      res.status(403).json({ success: false, error: getErrorMessage(error) });
      return;
    }

    logError(error, "Write file failed");
    res.status(500).json({ success: false, error: getErrorMessage(error) });
  }
@@ -3,14 +3,15 @@
 */

import { Router } from "express";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createDiffsHandler } from "./routes/diffs.js";
import { createFileDiffHandler } from "./routes/file-diff.js";

export function createGitRoutes(): Router {
  const router = Router();

  router.post("/diffs", createDiffsHandler());
  router.post("/file-diff", createFileDiffHandler());
  router.post("/diffs", validatePathParams("projectPath"), createDiffsHandler());
  router.post("/file-diff", validatePathParams("projectPath", "filePath"), createFileDiffHandler());

  return router;
}
@@ -17,10 +17,6 @@ export function createProvidersHandler() {
        available: statuses.claude?.installed || false,
        hasApiKey: !!process.env.ANTHROPIC_API_KEY,
      },
      google: {
        available: !!process.env.GOOGLE_API_KEY,
        hasApiKey: !!process.env.GOOGLE_API_KEY,
      },
    };

    res.json({ success: true, providers });
@@ -14,6 +14,7 @@

import { Router } from "express";
import type { SettingsService } from "../../services/settings-service.js";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createGetGlobalHandler } from "./routes/get-global.js";
import { createUpdateGlobalHandler } from "./routes/update-global.js";
import { createGetCredentialsHandler } from "./routes/get-credentials.js";
@@ -57,8 +58,8 @@ export function createSettingsRoutes(settingsService: SettingsService): Router {
  router.put("/credentials", createUpdateCredentialsHandler(settingsService));

  // Project settings
  router.post("/project", createGetProjectHandler(settingsService));
  router.put("/project", createUpdateProjectHandler(settingsService));
  router.post("/project", validatePathParams("projectPath"), createGetProjectHandler(settingsService));
  router.put("/project", validatePathParams("projectPath"), createUpdateProjectHandler(settingsService));

  // Migration from localStorage
  router.post("/migrate", createMigrateHandler(settingsService));
@@ -5,7 +5,7 @@
 * Each provider shows: `{ configured: boolean, masked: string }`
 * Masked shows first 4 and last 4 characters for verification.
 *
 * Response: `{ "success": true, "credentials": { anthropic, google, openai } }`
 * Response: `{ "success": true, "credentials": { anthropic } }`
 */

import type { Request, Response } from "express";
@@ -1,11 +1,11 @@
/**
 * PUT /api/settings/credentials - Update API credentials
 *
 * Updates API keys for Anthropic, Google, or OpenAI. Partial updates supported.
 * Updates API keys for Anthropic. Partial updates supported.
 * Returns masked credentials for verification without exposing full keys.
 *
 * Request body: `Partial<Credentials>` (usually just apiKeys)
 * Response: `{ "success": true, "credentials": { anthropic, google, openai } }`
 * Response: `{ "success": true, "credentials": { anthropic } }`
 */

import type { Request, Response } from "express";
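The masking described in these comments (first four and last four characters) can be sketched as follows; this is an illustrative helper, not the maskKey implementation used by SettingsService (which is not shown in this diff), and the "..." separator is an assumption:

// Illustrative sketch only; the real maskKey is not part of this diff.
function maskKey(key?: string): string {
  if (!key) return "";
  if (key.length <= 8) return "****"; // too short to safely reveal either end
  return `${key.slice(0, 4)}...${key.slice(-4)}`;
}

// maskKey("sk-ant-abc123xyz789") -> "sk-a...z789"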
@@ -12,7 +12,6 @@ export function createApiKeysHandler() {
      success: true,
      hasAnthropicKey:
        !!getApiKey("anthropic") || !!process.env.ANTHROPIC_API_KEY,
      hasGoogleKey: !!getApiKey("google") || !!process.env.GOOGLE_API_KEY,
    });
  } catch (error) {
    logError(error, "Get API keys failed");
@@ -64,15 +64,13 @@ export function createDeleteApiKeyHandler() {
    // Map provider to env key name
    const envKeyMap: Record<string, string> = {
      anthropic: "ANTHROPIC_API_KEY",
      google: "GOOGLE_GENERATIVE_AI_API_KEY",
      openai: "OPENAI_API_KEY",
    };

    const envKey = envKeyMap[provider];
    if (!envKey) {
      res.status(400).json({
        success: false,
        error: `Unknown provider: ${provider}`,
        error: `Unknown provider: ${provider}. Only anthropic is supported.`,
      });
      return;
    }
@@ -36,9 +36,12 @@ export function createStoreApiKeyHandler() {
      process.env.ANTHROPIC_API_KEY = apiKey;
      await persistApiKeyToEnv("ANTHROPIC_API_KEY", apiKey);
      logger.info("[Setup] Stored API key as ANTHROPIC_API_KEY");
    } else if (provider === "google") {
      process.env.GOOGLE_API_KEY = apiKey;
      await persistApiKeyToEnv("GOOGLE_API_KEY", apiKey);
    } else {
      res.status(400).json({
        success: false,
        error: `Unsupported provider: ${provider}. Only anthropic is supported.`,
      });
      return;
    }

    res.json({ success: true });
@@ -4,6 +4,7 @@

import { Router } from "express";
import type { EventEmitter } from "../../lib/events.js";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createGenerateHandler } from "./routes/generate.js";
import { createStopHandler } from "./routes/stop.js";
import { createStatusHandler } from "./routes/status.js";
@@ -11,7 +12,7 @@ import { createStatusHandler } from "./routes/status.js";
export function createSuggestionsRoutes(events: EventEmitter): Router {
  const router = Router();

  router.post("/generate", createGenerateHandler(events));
  router.post("/generate", validatePathParams("projectPath"), createGenerateHandler(events));
  router.post("/stop", createStopHandler());
  router.get("/status", createStatusHandler());
@@ -6,7 +6,7 @@ import type { Request, Response } from "express";
import { spawn } from "child_process";
import path from "path";
import fs from "fs/promises";
import { addAllowedPath } from "@automaker/platform";
import { isPathAllowed } from "@automaker/platform";
import { logger, getErrorMessage, logError } from "../common.js";

export function createCloneHandler() {
@@ -63,6 +63,24 @@ export function createCloneHandler() {
      return;
    }

    // Validate that parent directory is within allowed root directory
    if (!isPathAllowed(resolvedParent)) {
      res.status(403).json({
        success: false,
        error: `Parent directory not allowed: ${parentDir}. Must be within ALLOWED_ROOT_DIRECTORY.`,
      });
      return;
    }

    // Validate that project path will be within allowed root directory
    if (!isPathAllowed(resolvedProject)) {
      res.status(403).json({
        success: false,
        error: `Project path not allowed: ${projectPath}. Must be within ALLOWED_ROOT_DIRECTORY.`,
      });
      return;
    }

    // Check if directory already exists
    try {
      await fs.access(projectPath);
@@ -186,9 +204,6 @@ export function createCloneHandler() {
        });
      });

      // Add to allowed paths
      addAllowedPath(projectPath);

      logger.info(`[Templates] Successfully cloned template to ${projectPath}`);

      res.json({
@@ -4,47 +4,53 @@

import type { Request, Response } from "express";
import fs from "fs/promises";
import { addAllowedPath } from "@automaker/platform";
import path from "path";
import {
  getAllowedRootDirectory,
  getDataDirectory,
} from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createConfigHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
    try {
      const workspaceDir = process.env.WORKSPACE_DIR;
      const allowedRootDirectory = getAllowedRootDirectory();
      const dataDirectory = getDataDirectory();

      if (!workspaceDir) {
      if (!allowedRootDirectory) {
        // When ALLOWED_ROOT_DIRECTORY is not set, return DATA_DIR as default directory
        res.json({
          success: true,
          configured: false,
          defaultDir: dataDirectory || null,
        });
        return;
      }

      // Check if the directory exists
      try {
        const stats = await fs.stat(workspaceDir);
        const resolvedWorkspaceDir = path.resolve(allowedRootDirectory);
        const stats = await fs.stat(resolvedWorkspaceDir);
        if (!stats.isDirectory()) {
          res.json({
            success: true,
            configured: false,
            error: "WORKSPACE_DIR is not a valid directory",
            error: "ALLOWED_ROOT_DIRECTORY is not a valid directory",
          });
          return;
        }

        // Add workspace dir to allowed paths
        addAllowedPath(workspaceDir);

        res.json({
          success: true,
          configured: true,
          workspaceDir,
          workspaceDir: resolvedWorkspaceDir,
          defaultDir: resolvedWorkspaceDir,
        });
      } catch {
        res.json({
          success: true,
          configured: false,
          error: "WORKSPACE_DIR path does not exist",
          error: "ALLOWED_ROOT_DIRECTORY path does not exist",
        });
      }
    } catch (error) {
@@ -5,51 +5,49 @@
import type { Request, Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "@automaker/platform";
import { getAllowedRootDirectory } from "@automaker/platform";
import { getErrorMessage, logError } from "../common.js";

export function createDirectoriesHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
    try {
      const workspaceDir = process.env.WORKSPACE_DIR;
      const allowedRootDirectory = getAllowedRootDirectory();

      if (!workspaceDir) {
      if (!allowedRootDirectory) {
        res.status(400).json({
          success: false,
          error: "WORKSPACE_DIR is not configured",
          error: "ALLOWED_ROOT_DIRECTORY is not configured",
        });
        return;
      }

      const resolvedWorkspaceDir = path.resolve(allowedRootDirectory);

      // Check if directory exists
      try {
        await fs.stat(workspaceDir);
        await fs.stat(resolvedWorkspaceDir);
      } catch {
        res.status(400).json({
          success: false,
          error: "WORKSPACE_DIR path does not exist",
          error: "Workspace directory path does not exist",
        });
        return;
      }

      // Add workspace dir to allowed paths
      addAllowedPath(workspaceDir);

      // Read directory contents
      const entries = await fs.readdir(workspaceDir, { withFileTypes: true });
      const entries = await fs.readdir(resolvedWorkspaceDir, {
        withFileTypes: true,
      });

      // Filter to directories only and map to result format
      const directories = entries
        .filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
        .map((entry) => ({
          name: entry.name,
          path: path.join(workspaceDir, entry.name),
          path: path.join(resolvedWorkspaceDir, entry.name),
        }))
        .sort((a, b) => a.name.localeCompare(b.name));

      // Add each directory to allowed paths
      directories.forEach((dir) => addAllowedPath(dir.path));

      res.json({
        success: true,
        directories,
@@ -3,6 +3,7 @@
 */

import { Router } from "express";
import { validatePathParams } from "../../middleware/validate-paths.js";
import { createInfoHandler } from "./routes/info.js";
import { createStatusHandler } from "./routes/status.js";
import { createListHandler } from "./routes/list.js";
@@ -32,27 +33,27 @@ import { createListDevServersHandler } from "./routes/list-dev-servers.js";
export function createWorktreeRoutes(): Router {
  const router = Router();

  router.post("/info", createInfoHandler());
  router.post("/status", createStatusHandler());
  router.post("/info", validatePathParams("projectPath"), createInfoHandler());
  router.post("/status", validatePathParams("projectPath"), createStatusHandler());
  router.post("/list", createListHandler());
  router.post("/diffs", createDiffsHandler());
  router.post("/file-diff", createFileDiffHandler());
  router.post("/merge", createMergeHandler());
  router.post("/create", createCreateHandler());
  router.post("/delete", createDeleteHandler());
  router.post("/diffs", validatePathParams("projectPath"), createDiffsHandler());
  router.post("/file-diff", validatePathParams("projectPath", "filePath"), createFileDiffHandler());
  router.post("/merge", validatePathParams("projectPath"), createMergeHandler());
  router.post("/create", validatePathParams("projectPath"), createCreateHandler());
  router.post("/delete", validatePathParams("projectPath", "worktreePath"), createDeleteHandler());
  router.post("/create-pr", createCreatePRHandler());
  router.post("/pr-info", createPRInfoHandler());
  router.post("/commit", createCommitHandler());
  router.post("/push", createPushHandler());
  router.post("/pull", createPullHandler());
  router.post("/commit", validatePathParams("worktreePath"), createCommitHandler());
  router.post("/push", validatePathParams("worktreePath"), createPushHandler());
  router.post("/pull", validatePathParams("worktreePath"), createPullHandler());
  router.post("/checkout-branch", createCheckoutBranchHandler());
  router.post("/list-branches", createListBranchesHandler());
  router.post("/list-branches", validatePathParams("worktreePath"), createListBranchesHandler());
  router.post("/switch-branch", createSwitchBranchHandler());
  router.post("/open-in-editor", createOpenInEditorHandler());
  router.post("/open-in-editor", validatePathParams("worktreePath"), createOpenInEditorHandler());
  router.get("/default-editor", createGetDefaultEditorHandler());
  router.post("/init-git", createInitGitHandler());
  router.post("/init-git", validatePathParams("projectPath"), createInitGitHandler());
  router.post("/migrate", createMigrateHandler());
  router.post("/start-dev", createStartDevHandler());
  router.post("/start-dev", validatePathParams("projectPath", "worktreePath"), createStartDevHandler());
  router.post("/stop-dev", createStopDevHandler());
  router.post("/list-dev-servers", createListDevServersHandler());
@@ -4,12 +4,17 @@
 */

import path from "path";
import fs from "fs/promises";
import * as secureFs from "../lib/secure-fs.js";
import type { EventEmitter } from "../lib/events.js";
import type { ExecuteOptions } from "@automaker/types";
import { readImageAsBase64, buildPromptWithImages, isAbortError } from "@automaker/utils";
import {
  readImageAsBase64,
  buildPromptWithImages,
  isAbortError,
} from "@automaker/utils";
import { ProviderFactory } from "../providers/provider-factory.js";
import { createChatOptions } from "../lib/sdk-options.js";
import { isPathAllowed, PathNotAllowedError } from "@automaker/platform";

interface Message {
  id: string;
@@ -59,7 +64,7 @@ export class AgentService {
  }

  async initialize(): Promise<void> {
    await fs.mkdir(this.stateDir, { recursive: true });
    await secureFs.mkdir(this.stateDir, { recursive: true });
  }

  /**
@@ -77,11 +82,22 @@ export class AgentService {
    const metadata = await this.loadMetadata();
    const sessionMetadata = metadata[sessionId];

    // Determine the effective working directory
    const effectiveWorkingDirectory = workingDirectory || process.cwd();
    const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory);

    // Validate that the working directory is allowed
    if (!isPathAllowed(resolvedWorkingDirectory)) {
      throw new Error(
        `Working directory ${effectiveWorkingDirectory} is not allowed`
      );
    }

    this.sessions.set(sessionId, {
      messages,
      isRunning: false,
      abortController: null,
      workingDirectory: workingDirectory || process.cwd(),
      workingDirectory: resolvedWorkingDirectory,
      sdkSessionId: sessionMetadata?.sdkSessionId, // Load persisted SDK session ID
    });
  }
@@ -388,7 +404,7 @@ export class AgentService {
    const sessionFile = path.join(this.stateDir, `${sessionId}.json`);

    try {
      const data = await fs.readFile(sessionFile, "utf-8");
      const data = (await secureFs.readFile(sessionFile, "utf-8")) as string;
      return JSON.parse(data);
    } catch {
      return [];
@@ -399,7 +415,7 @@ export class AgentService {
    const sessionFile = path.join(this.stateDir, `${sessionId}.json`);

    try {
      await fs.writeFile(
      await secureFs.writeFile(
        sessionFile,
        JSON.stringify(messages, null, 2),
        "utf-8"
@@ -412,7 +428,10 @@ export class AgentService {

  async loadMetadata(): Promise<Record<string, SessionMetadata>> {
    try {
      const data = await fs.readFile(this.metadataFile, "utf-8");
      const data = (await secureFs.readFile(
        this.metadataFile,
        "utf-8"
      )) as string;
      return JSON.parse(data);
    } catch {
      return {};
@@ -420,7 +439,7 @@ export class AgentService {
  }

  async saveMetadata(metadata: Record<string, SessionMetadata>): Promise<void> {
    await fs.writeFile(
    await secureFs.writeFile(
      this.metadataFile,
      JSON.stringify(metadata, null, 2),
      "utf-8"
@@ -458,11 +477,29 @@ export class AgentService {
    const sessionId = this.generateId();
    const metadata = await this.loadMetadata();

    // Determine the effective working directory
    const effectiveWorkingDirectory =
      workingDirectory || projectPath || process.cwd();
    const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory);

    // Validate that the working directory is allowed
    if (!isPathAllowed(resolvedWorkingDirectory)) {
      throw new PathNotAllowedError(effectiveWorkingDirectory);
    }

    // Validate that projectPath is allowed if provided
    if (projectPath) {
      const resolvedProjectPath = path.resolve(projectPath);
      if (!isPathAllowed(resolvedProjectPath)) {
        throw new PathNotAllowedError(projectPath);
      }
    }

    const session: SessionMetadata = {
      id: sessionId,
      name,
      projectPath,
      workingDirectory: workingDirectory || projectPath || process.cwd(),
      workingDirectory: resolvedWorkingDirectory,
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
      model,
@@ -521,7 +558,7 @@ export class AgentService {
    // Delete session file
    try {
      const sessionFile = path.join(this.stateDir, `${sessionId}.json`);
      await fs.unlink(sessionFile);
      await secureFs.unlink(sessionFile);
    } catch {
      // File may not exist
    }
File diff suppressed because it is too large.
@@ -4,9 +4,9 @@
 */

import path from "path";
import fs from "fs/promises";
import type { Feature } from "@automaker/types";
import { createLogger } from "@automaker/utils";
import * as secureFs from "../lib/secure-fs.js";
import {
  getFeaturesDir,
  getFeatureDir,
@@ -39,8 +39,12 @@ export class FeatureLoader {
   */
  private async deleteOrphanedImages(
    projectPath: string,
    oldPaths: Array<string | { path: string; [key: string]: unknown }> | undefined,
    newPaths: Array<string | { path: string; [key: string]: unknown }> | undefined
    oldPaths:
      | Array<string | { path: string; [key: string]: unknown }>
      | undefined,
    newPaths:
      | Array<string | { path: string; [key: string]: unknown }>
      | undefined
  ): Promise<void> {
    if (!oldPaths || oldPaths.length === 0) {
      return;
@@ -59,8 +63,8 @@ export class FeatureLoader {
      if (!newPathSet.has(oldPath)) {
        try {
          // Paths are now absolute
          await fs.unlink(oldPath);
          logger.info(`Deleted orphaned image: ${oldPath}`);
          await secureFs.unlink(oldPath);
          console.log(`[FeatureLoader] Deleted orphaned image: ${oldPath}`);
        } catch (error) {
          // Ignore errors when deleting (file may already be gone)
          logger.warn(
@@ -87,10 +91,11 @@ export class FeatureLoader {
    }

    const featureImagesDir = this.getFeatureImagesDir(projectPath, featureId);
    await fs.mkdir(featureImagesDir, { recursive: true });
    await secureFs.mkdir(featureImagesDir, { recursive: true });

    const updatedPaths: Array<string | { path: string; [key: string]: unknown }> =
      [];
    const updatedPaths: Array<
      string | { path: string; [key: string]: unknown }
    > = [];

    for (const imagePath of imagePaths) {
      try {
@@ -110,7 +115,7 @@ export class FeatureLoader {

        // Check if file exists
        try {
          await fs.access(fullOriginalPath);
          await secureFs.access(fullOriginalPath);
        } catch {
          logger.warn(
            `[FeatureLoader] Image not found, skipping: ${fullOriginalPath}`
@@ -123,14 +128,14 @@ export class FeatureLoader {
        const newPath = path.join(featureImagesDir, filename);

        // Copy the file
        await fs.copyFile(fullOriginalPath, newPath);
        logger.info(
        await secureFs.copyFile(fullOriginalPath, newPath);
        console.log(
          `[FeatureLoader] Copied image: ${originalPath} -> ${newPath}`
        );

        // Try to delete the original temp file
        try {
          await fs.unlink(fullOriginalPath);
          await secureFs.unlink(fullOriginalPath);
        } catch {
          // Ignore errors when deleting temp file
        }
@@ -163,14 +168,20 @@ export class FeatureLoader {
   * Get the path to a feature's feature.json file
   */
  getFeatureJsonPath(projectPath: string, featureId: string): string {
    return path.join(this.getFeatureDir(projectPath, featureId), "feature.json");
    return path.join(
      this.getFeatureDir(projectPath, featureId),
      "feature.json"
    );
  }

  /**
   * Get the path to a feature's agent-output.md file
   */
  getAgentOutputPath(projectPath: string, featureId: string): string {
    return path.join(this.getFeatureDir(projectPath, featureId), "agent-output.md");
    return path.join(
      this.getFeatureDir(projectPath, featureId),
      "agent-output.md"
    );
  }

  /**
@@ -189,13 +200,15 @@ export class FeatureLoader {

    // Check if features directory exists
    try {
      await fs.access(featuresDir);
      await secureFs.access(featuresDir);
    } catch {
      return [];
    }

    // Read all feature directories
    const entries = await fs.readdir(featuresDir, { withFileTypes: true });
    const entries = (await secureFs.readdir(featuresDir, {
      withFileTypes: true,
    })) as any[];
    const featureDirs = entries.filter((entry) => entry.isDirectory());

    // Load each feature
@@ -205,7 +218,10 @@ export class FeatureLoader {
      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

      try {
        const content = await fs.readFile(featureJsonPath, "utf-8");
        const content = (await secureFs.readFile(
          featureJsonPath,
          "utf-8"
        )) as string;
        const feature = JSON.parse(content);

        if (!feature.id) {
@@ -252,7 +268,10 @@ export class FeatureLoader {
  async get(projectPath: string, featureId: string): Promise<Feature | null> {
    try {
      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
      const content = await fs.readFile(featureJsonPath, "utf-8");
      const content = (await secureFs.readFile(
        featureJsonPath,
        "utf-8"
      )) as string;
      return JSON.parse(content);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
@@ -281,7 +300,7 @@ export class FeatureLoader {
    await ensureAutomakerDir(projectPath);

    // Create feature directory
    await fs.mkdir(featureDir, { recursive: true });
    await secureFs.mkdir(featureDir, { recursive: true });

    // Migrate images from temp directory to feature directory
    const migratedImagePaths = await this.migrateImages(
@@ -300,7 +319,7 @@ export class FeatureLoader {
    };

    // Write feature.json
    await fs.writeFile(
    await secureFs.writeFile(
      featureJsonPath,
      JSON.stringify(feature, null, 2),
      "utf-8"
@@ -352,7 +371,7 @@ export class FeatureLoader {

    // Write back to file
    const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
    await fs.writeFile(
    await secureFs.writeFile(
      featureJsonPath,
      JSON.stringify(updatedFeature, null, 2),
      "utf-8"
@@ -368,8 +387,8 @@ export class FeatureLoader {
  async delete(projectPath: string, featureId: string): Promise<boolean> {
    try {
      const featureDir = this.getFeatureDir(projectPath, featureId);
      await fs.rm(featureDir, { recursive: true, force: true });
      logger.info(`Deleted feature ${featureId}`);
      await secureFs.rm(featureDir, { recursive: true, force: true });
      console.log(`[FeatureLoader] Deleted feature ${featureId}`);
      return true;
    } catch (error) {
      logger.error(
@@ -389,7 +408,10 @@ export class FeatureLoader {
  ): Promise<string | null> {
    try {
      const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
      const content = await fs.readFile(agentOutputPath, "utf-8");
      const content = (await secureFs.readFile(
        agentOutputPath,
        "utf-8"
      )) as string;
      return content;
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
@@ -412,10 +434,10 @@ export class FeatureLoader {
    content: string
  ): Promise<void> {
    const featureDir = this.getFeatureDir(projectPath, featureId);
    await fs.mkdir(featureDir, { recursive: true });
    await secureFs.mkdir(featureDir, { recursive: true });

    const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
    await fs.writeFile(agentOutputPath, content, "utf-8");
    await secureFs.writeFile(agentOutputPath, content, "utf-8");
  }

  /**
@@ -427,7 +449,7 @@ export class FeatureLoader {
  ): Promise<void> {
    try {
      const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
      await fs.unlink(agentOutputPath);
      await secureFs.unlink(agentOutputPath);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
        throw error;
@@ -7,9 +7,9 @@
 * - Per-project settings ({projectPath}/.automaker/settings.json)
 */

import fs from "fs/promises";
import path from "path";
import { createLogger } from "@automaker/utils";
import * as secureFs from "../lib/secure-fs.js";

import {
  getGlobalSettingsPath,
  getCredentialsPath,
@@ -47,12 +47,12 @@ async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
  const content = JSON.stringify(data, null, 2);

  try {
    await fs.writeFile(tempPath, content, "utf-8");
    await fs.rename(tempPath, filePath);
    await secureFs.writeFile(tempPath, content, "utf-8");
    await secureFs.rename(tempPath, filePath);
  } catch (error) {
    // Clean up temp file if it exists
    try {
      await fs.unlink(tempPath);
      await secureFs.unlink(tempPath);
    } catch {
      // Ignore cleanup errors
    }
@@ -65,7 +65,7 @@ async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
 */
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
  try {
    const content = await fs.readFile(filePath, "utf-8");
    const content = (await secureFs.readFile(filePath, "utf-8")) as string;
    return JSON.parse(content) as T;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
@@ -81,7 +81,7 @@ async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
 */
async function fileExists(filePath: string): Promise<boolean> {
  try {
    await fs.access(filePath);
    await secureFs.access(filePath);
    return true;
  } catch {
    return false;
@@ -232,9 +232,7 @@ export class SettingsService {
   * @param updates - Partial Credentials (usually just apiKeys)
   * @returns Promise resolving to complete updated Credentials object
   */
  async updateCredentials(
    updates: Partial<Credentials>
  ): Promise<Credentials> {
  async updateCredentials(updates: Partial<Credentials>): Promise<Credentials> {
    await ensureDataDir(this.dataDir);
    const credentialsPath = getCredentialsPath(this.dataDir);

@@ -270,8 +268,6 @@ export class SettingsService {
   */
  async getMaskedCredentials(): Promise<{
    anthropic: { configured: boolean; masked: string };
    google: { configured: boolean; masked: string };
    openai: { configured: boolean; masked: string };
  }> {
    const credentials = await this.getCredentials();

@@ -285,14 +281,6 @@ export class SettingsService {
        configured: !!credentials.apiKeys.anthropic,
        masked: maskKey(credentials.apiKeys.anthropic),
      },
      google: {
        configured: !!credentials.apiKeys.google,
        masked: maskKey(credentials.apiKeys.google),
      },
      openai: {
        configured: !!credentials.apiKeys.openai,
        masked: maskKey(credentials.apiKeys.openai),
      },
    };
  }

@@ -563,8 +551,7 @@ export class SettingsService {
        // Get theme from project object
        const project = projects.find((p) => p.path === projectPath);
        if (project?.theme) {
          projectSettings.theme =
            project.theme as ProjectSettings["theme"];
          projectSettings.theme = project.theme as ProjectSettings["theme"];
        }

        if (boardBackgroundByProject?.[projectPath]) {
@@ -586,7 +573,9 @@ export class SettingsService {
          migratedProjectCount++;
        }
      } catch (e) {
        errors.push(`Failed to migrate project settings for ${projectPath}: ${e}`);
        errors.push(
          `Failed to migrate project settings for ${projectPath}: ${e}`
        );
      }
    }
@@ -22,7 +22,7 @@ export type {
|
||||
BoardBackgroundSettings,
|
||||
WorktreeInfo,
|
||||
ProjectSettings,
|
||||
} from '@automaker/types';
|
||||
} from "@automaker/types";
|
||||
|
||||
export {
|
||||
DEFAULT_KEYBOARD_SHORTCUTS,
|
||||
@@ -32,4 +32,4 @@ export {
|
||||
SETTINGS_VERSION,
|
||||
CREDENTIALS_VERSION,
|
||||
PROJECT_SETTINGS_VERSION,
|
||||
} from '@automaker/types';
|
||||
} from "@automaker/types";
|
||||
|
||||
@@ -8,7 +8,6 @@ import { vi, beforeEach } from "vitest";
|
||||
// Set test environment variables
|
||||
process.env.NODE_ENV = "test";
|
||||
process.env.DATA_DIR = "/tmp/test-data";
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/tmp/test-projects";
|
||||
|
||||
// Reset all mocks before each test
|
||||
beforeEach(() => {
|
||||
|
||||
@@ -11,134 +11,103 @@ describe("security.ts", () => {
|
||||
});
|
||||
|
||||
describe("initAllowedPaths", () => {
|
||||
it("should parse comma-separated directories from environment", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/path1,/path2,/path3";
|
||||
process.env.DATA_DIR = "";
|
||||
it("should load ALLOWED_ROOT_DIRECTORY if set", async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/path1"));
|
||||
expect(allowed).toContain(path.resolve("/path2"));
|
||||
expect(allowed).toContain(path.resolve("/path3"));
|
||||
expect(allowed).toContain(path.resolve("/projects"));
|
||||
});
|
||||
|
||||
it("should trim whitespace from paths", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = " /path1 , /path2 , /path3 ";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/path1"));
|
||||
expect(allowed).toContain(path.resolve("/path2"));
|
||||
});
|
||||
|
||||
it("should always include DATA_DIR if set", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
it("should include DATA_DIR if set", async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
process.env.DATA_DIR = "/data/dir";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/data/dir"));
|
||||
});
|
||||
|
||||
it("should include WORKSPACE_DIR if set", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "";
|
||||
process.env.WORKSPACE_DIR = "/workspace/dir";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/workspace/dir"));
|
||||
});
|
||||
|
||||
it("should handle empty ALLOWED_PROJECT_DIRS", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
it("should include both ALLOWED_ROOT_DIRECTORY and DATA_DIR if both set", async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
|
||||
process.env.DATA_DIR = "/data";
|
||||
delete process.env.WORKSPACE_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toHaveLength(1);
|
||||
expect(allowed[0]).toBe(path.resolve("/data"));
|
||||
});
|
||||
|
||||
it("should skip empty entries in comma list", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/path1,,/path2, ,/path3";
|
||||
process.env.DATA_DIR = "";
|
||||
delete process.env.WORKSPACE_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("addAllowedPath", () => {
|
||||
it("should add path to allowed list", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, addAllowedPath, getAllowedPaths } =
|
||||
const { initAllowedPaths, getAllowedPaths } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
addAllowedPath("/new/path");
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/new/path"));
|
||||
expect(allowed).toContain(path.resolve("/projects"));
|
||||
expect(allowed).toContain(path.resolve("/data"));
|
||||
expect(allowed).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should resolve relative paths before adding", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "";
|
||||
it("should return empty array when no paths configured", async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, addAllowedPath, getAllowedPaths } =
|
||||
const { initAllowedPaths, getAllowedPaths } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
addAllowedPath("./relative/path");
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
const cwd = process.cwd();
|
||||
expect(allowed).toContain(path.resolve(cwd, "./relative/path"));
|
||||
expect(allowed).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isPathAllowed", () => {
|
||||
it("should allow all paths (permissions disabled)", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/allowed/project";
|
||||
it("should allow paths within ALLOWED_ROOT_DIRECTORY", async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed/project";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, isPathAllowed } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths are now allowed regardless of configuration
|
||||
// Paths within allowed directory should be allowed
|
||||
expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/allowed/project/subdir/file.txt")).toBe(true);
|
||||
|
||||
// Paths outside allowed directory should be denied
|
||||
expect(isPathAllowed("/not/allowed/file.txt")).toBe(false);
|
||||
expect(isPathAllowed("/tmp/file.txt")).toBe(false);
|
||||
expect(isPathAllowed("/etc/passwd")).toBe(false);
|
||||
});
|
||||
|
||||
it("should allow all paths when no restrictions are configured", async () => {
|
||||
delete process.env.DATA_DIR;
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths should be allowed when no restrictions are configured
|
||||
expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/not/allowed/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/tmp/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/etc/passwd")).toBe(true);
|
||||
expect(isPathAllowed("/any/path")).toBe(true);
|
||||
});
|
||||
|
||||
it("should allow all paths when DATA_DIR is set but ALLOWED_ROOT_DIRECTORY is not", async () => {
|
||||
process.env.DATA_DIR = "/data";
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
// DATA_DIR should be allowed
|
||||
expect(isPathAllowed("/data/settings.json")).toBe(true);
|
||||
// But all other paths should also be allowed when ALLOWED_ROOT_DIRECTORY is not set
|
||||
expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/not/allowed/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/tmp/file.txt")).toBe(true);
|
||||
@@ -148,43 +117,52 @@ describe("security.ts", () => {
|
||||
});
|
||||
|
||||
describe("validatePath", () => {
|
||||
it("should return resolved path for any path (permissions disabled)", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/allowed";
|
||||
it("should return resolved path for allowed paths", async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, validatePath } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath("/allowed/file.txt");
|
||||
expect(result).toBe(path.resolve("/allowed/file.txt"));
|
||||
});
|
||||
|
||||
it("should not throw error for any path (permissions disabled)", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/allowed";
|
||||
it("should throw error for paths outside allowed directories", async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, validatePath } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths are now allowed, no errors thrown
|
||||
// Disallowed paths should throw PathNotAllowedError
|
||||
expect(() => validatePath("/disallowed/file.txt")).toThrow();
|
||||
});
|
||||
|
||||
it("should not throw error for any path when no restrictions are configured", async () => {
|
||||
delete process.env.DATA_DIR;
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, validatePath } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths are allowed when no restrictions configured
|
||||
expect(() => validatePath("/disallowed/file.txt")).not.toThrow();
|
||||
expect(validatePath("/disallowed/file.txt")).toBe(
|
||||
path.resolve("/disallowed/file.txt")
|
||||
);
|
||||
});
|
||||
|
||||
it("should resolve relative paths", async () => {
|
||||
it("should resolve relative paths within allowed directory", async () => {
|
||||
const cwd = process.cwd();
|
||||
process.env.ALLOWED_PROJECT_DIRS = cwd;
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = cwd;
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, validatePath } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath("./file.txt");
|
||||
@@ -194,26 +172,26 @@ describe("security.ts", () => {
|
||||
|
||||
describe("getAllowedPaths", () => {
|
||||
it("should return array of allowed paths", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/path1,/path2";
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
|
||||
process.env.DATA_DIR = "/data";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
const result = getAllowedPaths();
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toBeGreaterThan(0);
|
||||
expect(result.length).toBe(2);
|
||||
expect(result).toContain(path.resolve("/projects"));
|
||||
expect(result).toContain(path.resolve("/data"));
|
||||
});
|
||||
|
||||
it("should return resolved paths", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/test";
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = "/test";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@automaker/platform"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } =
|
||||
await import("@automaker/platform");
|
||||
initAllowedPaths();
|
||||
|
||||
const result = getAllowedPaths();
|
||||
|
||||
@@ -183,8 +183,6 @@ describe("settings-service.ts", () => {
|
||||
...DEFAULT_CREDENTIALS,
|
||||
apiKeys: {
|
||||
anthropic: "sk-test-key",
|
||||
google: "",
|
||||
openai: "",
|
||||
},
|
||||
};
|
||||
const credentialsPath = path.join(testDataDir, "credentials.json");
|
||||
@@ -206,8 +204,6 @@ describe("settings-service.ts", () => {
|
||||
|
||||
const credentials = await settingsService.getCredentials();
|
||||
expect(credentials.apiKeys.anthropic).toBe("sk-test");
|
||||
expect(credentials.apiKeys.google).toBe("");
|
||||
expect(credentials.apiKeys.openai).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -216,8 +212,6 @@ describe("settings-service.ts", () => {
|
||||
const updates: Partial<Credentials> = {
|
||||
apiKeys: {
|
||||
anthropic: "sk-test-key",
|
||||
google: "",
|
||||
openai: "",
|
||||
},
|
||||
};
|
||||
|
||||
@@ -237,8 +231,6 @@ describe("settings-service.ts", () => {
|
||||
...DEFAULT_CREDENTIALS,
|
||||
apiKeys: {
|
||||
anthropic: "sk-initial",
|
||||
google: "google-key",
|
||||
openai: "",
|
||||
},
|
||||
};
|
||||
const credentialsPath = path.join(testDataDir, "credentials.json");
|
||||
@@ -253,7 +245,6 @@ describe("settings-service.ts", () => {
|
||||
const updated = await settingsService.updateCredentials(updates);
|
||||
|
||||
expect(updated.apiKeys.anthropic).toBe("sk-updated");
|
||||
expect(updated.apiKeys.google).toBe("google-key"); // Preserved
|
||||
});
|
||||
|
||||
it("should deep merge api keys", async () => {
|
||||
@@ -261,8 +252,6 @@ describe("settings-service.ts", () => {
|
||||
...DEFAULT_CREDENTIALS,
|
||||
apiKeys: {
|
||||
anthropic: "sk-anthropic",
|
||||
google: "google-key",
|
||||
openai: "openai-key",
|
||||
},
|
||||
};
|
||||
const credentialsPath = path.join(testDataDir, "credentials.json");
|
||||
@@ -270,15 +259,13 @@ describe("settings-service.ts", () => {
|
||||
|
||||
const updates: Partial<Credentials> = {
|
||||
apiKeys: {
|
||||
openai: "new-openai-key",
|
||||
anthropic: "sk-updated-anthropic",
|
||||
},
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateCredentials(updates);
|
||||
|
||||
expect(updated.apiKeys.anthropic).toBe("sk-anthropic");
|
||||
expect(updated.apiKeys.google).toBe("google-key");
|
||||
expect(updated.apiKeys.openai).toBe("new-openai-key");
|
||||
expect(updated.apiKeys.anthropic).toBe("sk-updated-anthropic");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -287,34 +274,24 @@ describe("settings-service.ts", () => {
|
||||
const masked = await settingsService.getMaskedCredentials();
|
||||
expect(masked.anthropic.configured).toBe(false);
|
||||
expect(masked.anthropic.masked).toBe("");
|
||||
expect(masked.google.configured).toBe(false);
|
||||
expect(masked.openai.configured).toBe(false);
|
||||
});
|
||||
|
||||
it("should mask keys correctly", async () => {
|
||||
await settingsService.updateCredentials({
|
||||
apiKeys: {
|
||||
anthropic: "sk-ant-api03-1234567890abcdef",
|
||||
google: "AIzaSy1234567890abcdef",
|
||||
openai: "sk-1234567890abcdef",
|
||||
},
|
||||
});
|
||||
|
||||
const masked = await settingsService.getMaskedCredentials();
|
||||
expect(masked.anthropic.configured).toBe(true);
|
||||
expect(masked.anthropic.masked).toBe("sk-a...cdef");
|
||||
expect(masked.google.configured).toBe(true);
|
||||
expect(masked.google.masked).toBe("AIza...cdef");
|
||||
expect(masked.openai.configured).toBe(true);
|
||||
expect(masked.openai.masked).toBe("sk-1...cdef");
|
||||
});
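The masked values asserted above (first four characters, an ellipsis, then the last four) imply a masking helper roughly like the sketch below. This is inferred from the test expectations, not copied from the settings service; in particular, the short-key behavior exercised by the next test is an assumption here:

```typescript
// Hypothetical helper inferred from the expectations above.
function maskKey(key: string | undefined): string {
  if (!key) return "";
  if (key.length <= 8) return "****"; // assumption: short keys are not shown unmasked
  return `${key.slice(0, 4)}...${key.slice(-4)}`;
}

maskKey("sk-ant-api03-1234567890abcdef"); // => "sk-a...cdef"
```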
|
||||
|
||||
it("should handle short keys", async () => {
|
||||
await settingsService.updateCredentials({
|
||||
apiKeys: {
|
||||
anthropic: "short",
|
||||
google: "",
|
||||
openai: "",
|
||||
},
|
||||
});
|
||||
|
||||
@@ -332,7 +309,7 @@ describe("settings-service.ts", () => {
|
||||
|
||||
it("should return true when credentials file exists", async () => {
|
||||
await settingsService.updateCredentials({
|
||||
apiKeys: { anthropic: "test", google: "", openai: "" },
|
||||
apiKeys: { anthropic: "test" },
|
||||
});
|
||||
const exists = await settingsService.hasCredentials();
|
||||
expect(exists).toBe(true);
|
||||
@@ -508,8 +485,6 @@ describe("settings-service.ts", () => {
|
||||
state: {
|
||||
apiKeys: {
|
||||
anthropic: "sk-test-key",
|
||||
google: "google-key",
|
||||
openai: "openai-key",
|
||||
},
|
||||
},
|
||||
}),
|
||||
@@ -522,8 +497,6 @@ describe("settings-service.ts", () => {
|
||||
|
||||
const credentials = await settingsService.getCredentials();
|
||||
expect(credentials.apiKeys.anthropic).toBe("sk-test-key");
|
||||
expect(credentials.apiKeys.google).toBe("google-key");
|
||||
expect(credentials.apiKeys.openai).toBe("openai-key");
|
||||
});
|
||||
|
||||
it("should migrate project settings from localStorage data", async () => {
|
||||
|
||||
@@ -40,8 +40,7 @@ export default defineConfig({
|
||||
PORT: String(serverPort),
|
||||
// Enable mock agent in CI to avoid real API calls
|
||||
AUTOMAKER_MOCK_AGENT: mockAgent ? "true" : "false",
|
||||
// Allow access to test directories and common project paths
|
||||
ALLOWED_PROJECT_DIRS: "/Users,/home,/tmp,/var/folders",
|
||||
// No ALLOWED_ROOT_DIRECTORY restriction - allow all paths for testing
|
||||
},
|
||||
},
|
||||
// Frontend Vite dev server
|
||||
@@ -54,7 +53,8 @@ export default defineConfig({
|
||||
...process.env,
|
||||
VITE_SKIP_SETUP: "true",
|
||||
// Skip electron plugin in CI - no display available for Electron
|
||||
VITE_SKIP_ELECTRON: process.env.CI === "true" ? "true" : undefined,
|
||||
VITE_SKIP_ELECTRON:
|
||||
process.env.CI === "true" ? "true" : undefined,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
import { useState, useEffect, useRef, useCallback } from "react";
|
||||
import {
|
||||
FolderOpen,
|
||||
@@ -21,6 +20,11 @@ import {
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { getJSON, setJSON } from "@/lib/storage";
|
||||
import {
|
||||
getDefaultWorkspaceDirectory,
|
||||
saveLastProjectDirectory,
|
||||
} from "@/lib/workspace-config";
|
||||
|
||||
interface DirectoryEntry {
|
||||
name: string;
|
||||
@@ -50,38 +54,22 @@ const RECENT_FOLDERS_KEY = "file-browser-recent-folders";
|
||||
const MAX_RECENT_FOLDERS = 5;
|
||||
|
||||
function getRecentFolders(): string[] {
|
||||
if (typeof window === "undefined") return [];
|
||||
try {
|
||||
const stored = localStorage.getItem(RECENT_FOLDERS_KEY);
|
||||
return stored ? JSON.parse(stored) : [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
return getJSON<string[]>(RECENT_FOLDERS_KEY) ?? [];
|
||||
}
|
||||
|
||||
function addRecentFolder(path: string): void {
|
||||
if (typeof window === "undefined") return;
|
||||
try {
|
||||
const recent = getRecentFolders();
|
||||
// Remove if already exists, then add to front
|
||||
const filtered = recent.filter((p) => p !== path);
|
||||
const updated = [path, ...filtered].slice(0, MAX_RECENT_FOLDERS);
|
||||
localStorage.setItem(RECENT_FOLDERS_KEY, JSON.stringify(updated));
|
||||
} catch {
|
||||
// Ignore localStorage errors
|
||||
}
|
||||
const recent = getRecentFolders();
|
||||
// Remove if already exists, then add to front
|
||||
const filtered = recent.filter((p) => p !== path);
|
||||
const updated = [path, ...filtered].slice(0, MAX_RECENT_FOLDERS);
|
||||
setJSON(RECENT_FOLDERS_KEY, updated);
|
||||
}
|
||||
|
||||
function removeRecentFolder(path: string): string[] {
|
||||
if (typeof window === "undefined") return [];
|
||||
try {
|
||||
const recent = getRecentFolders();
|
||||
const updated = recent.filter((p) => p !== path);
|
||||
localStorage.setItem(RECENT_FOLDERS_KEY, JSON.stringify(updated));
|
||||
return updated;
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
const recent = getRecentFolders();
|
||||
const updated = recent.filter((p) => p !== path);
|
||||
setJSON(RECENT_FOLDERS_KEY, updated);
|
||||
return updated;
|
||||
}
|
||||
|
||||
export function FileBrowserDialog({
|
||||
@@ -110,17 +98,16 @@ export function FileBrowserDialog({
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
const handleRemoveRecent = useCallback((e: React.MouseEvent, path: string) => {
|
||||
e.stopPropagation();
|
||||
const updated = removeRecentFolder(path);
|
||||
setRecentFolders(updated);
|
||||
}, []);
|
||||
const handleRemoveRecent = useCallback(
|
||||
(e: React.MouseEvent, path: string) => {
|
||||
e.stopPropagation();
|
||||
const updated = removeRecentFolder(path);
|
||||
setRecentFolders(updated);
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const handleSelectRecent = useCallback((path: string) => {
|
||||
browseDirectory(path);
|
||||
}, []);
|
||||
|
||||
const browseDirectory = async (dirPath?: string) => {
|
||||
const browseDirectory = useCallback(async (dirPath?: string) => {
|
||||
setLoading(true);
|
||||
setError("");
|
||||
setWarning("");
|
||||
@@ -155,7 +142,14 @@ export function FileBrowserDialog({
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
const handleSelectRecent = useCallback(
|
||||
(path: string) => {
|
||||
browseDirectory(path);
|
||||
},
|
||||
[browseDirectory]
|
||||
);
|
||||
|
||||
// Reset current path when dialog closes
|
||||
useEffect(() => {
|
||||
@@ -169,12 +163,46 @@ export function FileBrowserDialog({
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
// Load initial path or home directory when dialog opens
|
||||
// Load initial path or workspace directory when dialog opens
|
||||
useEffect(() => {
|
||||
if (open && !currentPath) {
|
||||
browseDirectory(initialPath);
|
||||
// Priority order:
|
||||
// 1. Last selected directory from this file browser (from localStorage)
|
||||
// 2. initialPath prop (from parent component)
|
||||
// 3. Default workspace directory
|
||||
// 4. Home directory
|
||||
const loadInitialPath = async () => {
|
||||
try {
|
||||
// First, check for last selected directory from getDefaultWorkspaceDirectory
|
||||
// which already implements the priority: last used > Documents/Automaker > DATA_DIR
|
||||
const defaultDir = await getDefaultWorkspaceDirectory();
|
||||
|
||||
// If we have a default directory, use it (unless initialPath is explicitly provided and different)
|
||||
const pathToUse = initialPath || defaultDir;
|
||||
|
||||
if (pathToUse) {
|
||||
// Pre-fill the path input immediately
|
||||
setPathInput(pathToUse);
|
||||
// Then browse to that directory
|
||||
browseDirectory(pathToUse);
|
||||
} else {
|
||||
// No default directory, browse home directory
|
||||
browseDirectory();
|
||||
}
|
||||
} catch (err) {
|
||||
// If config fetch fails, try initialPath or fall back to home directory
|
||||
if (initialPath) {
|
||||
setPathInput(initialPath);
|
||||
browseDirectory(initialPath);
|
||||
} else {
|
||||
browseDirectory();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
loadInitialPath();
|
||||
}
|
||||
}, [open, initialPath]);
|
||||
}, [open, initialPath, currentPath, browseDirectory]);
|
||||
|
||||
const handleSelectDirectory = (dir: DirectoryEntry) => {
|
||||
browseDirectory(dir.path);
|
||||
@@ -211,6 +239,8 @@ export function FileBrowserDialog({
|
||||
const handleSelect = useCallback(() => {
|
||||
if (currentPath) {
|
||||
addRecentFolder(currentPath);
|
||||
// Save to last project directory so it's used as default next time
|
||||
saveLastProjectDirectory(currentPath);
|
||||
onSelect(currentPath);
|
||||
onOpenChange(false);
|
||||
}
|
||||
@@ -296,7 +326,9 @@ export function FileBrowserDialog({
|
||||
title={folder}
|
||||
>
|
||||
<Folder className="w-3 h-3 text-brand-500 shrink-0" />
|
||||
<span className="truncate max-w-[120px]">{getFolderName(folder)}</span>
|
||||
<span className="truncate max-w-[120px]">
|
||||
{getFolderName(folder)}
|
||||
</span>
|
||||
<button
|
||||
onClick={(e) => handleRemoveRecent(e, folder)}
|
||||
className="ml-0.5 opacity-0 group-hover:opacity-100 hover:text-destructive transition-opacity"
|
||||
@@ -417,11 +449,20 @@ export function FileBrowserDialog({
|
||||
<Button variant="ghost" size="sm" onClick={() => onOpenChange(false)}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button size="sm" onClick={handleSelect} disabled={!currentPath || loading} title="Select current folder (Cmd+Enter / Ctrl+Enter)">
|
||||
<Button
|
||||
size="sm"
|
||||
onClick={handleSelect}
|
||||
disabled={!currentPath || loading}
|
||||
title="Select current folder (Cmd+Enter / Ctrl+Enter)"
|
||||
>
|
||||
<FolderOpen className="w-3.5 h-3.5 mr-1.5" />
|
||||
Select Current Folder
|
||||
<kbd className="ml-2 px-1.5 py-0.5 text-[10px] bg-background/50 rounded border border-border">
|
||||
{typeof navigator !== "undefined" && navigator.platform?.includes("Mac") ? "⌘" : "Ctrl"}+↵
|
||||
{typeof navigator !== "undefined" &&
|
||||
navigator.platform?.includes("Mac")
|
||||
? "⌘"
|
||||
: "Ctrl"}
|
||||
+↵
|
||||
</kbd>
|
||||
</Button>
|
||||
</DialogFooter>
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
import {
|
||||
Dialog,
|
||||
@@ -26,11 +25,12 @@ import {
|
||||
} from "lucide-react";
|
||||
import { starterTemplates, type StarterTemplate } from "@/lib/templates";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
import { getHttpApiClient } from "@/lib/http-api-client";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useFileBrowser } from "@/contexts/file-browser-context";
|
||||
|
||||
const LAST_PROJECT_DIR_KEY = "automaker:lastProjectDir";
|
||||
import {
|
||||
getDefaultWorkspaceDirectory,
|
||||
saveLastProjectDirectory,
|
||||
} from "@/lib/workspace-config";
|
||||
|
||||
interface ValidationErrors {
|
||||
projectName?: boolean;
|
||||
@@ -81,25 +81,15 @@ export function NewProjectModal({
|
||||
// Fetch workspace directory when modal opens
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
// First, check localStorage for last used directory
|
||||
const lastUsedDir = localStorage.getItem(LAST_PROJECT_DIR_KEY);
|
||||
if (lastUsedDir) {
|
||||
setWorkspaceDir(lastUsedDir);
|
||||
return;
|
||||
}
|
||||
|
||||
// Fall back to server config if no saved directory
|
||||
setIsLoadingWorkspace(true);
|
||||
const httpClient = getHttpApiClient();
|
||||
httpClient.workspace
|
||||
.getConfig()
|
||||
.then((result) => {
|
||||
if (result.success && result.workspaceDir) {
|
||||
setWorkspaceDir(result.workspaceDir);
|
||||
getDefaultWorkspaceDirectory()
|
||||
.then((defaultDir) => {
|
||||
if (defaultDir) {
|
||||
setWorkspaceDir(defaultDir);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Failed to get workspace config:", error);
|
||||
console.error("Failed to get default workspace directory:", error);
|
||||
})
|
||||
.finally(() => {
|
||||
setIsLoadingWorkspace(false);
|
||||
@@ -211,7 +201,7 @@ export function NewProjectModal({
|
||||
if (selectedPath) {
|
||||
setWorkspaceDir(selectedPath);
|
||||
// Save to localStorage for next time
|
||||
localStorage.setItem(LAST_PROJECT_DIR_KEY, selectedPath);
|
||||
saveLastProjectDirectory(selectedPath);
|
||||
// Clear any workspace error when a valid directory is selected
|
||||
if (errors.workspaceDir) {
|
||||
setErrors((prev) => ({ ...prev, workspaceDir: false }));
|
||||
@@ -296,9 +286,7 @@ export function NewProjectModal({
|
||||
{projectPath || workspaceDir}
|
||||
</code>
|
||||
</>
|
||||
) : (
|
||||
<span className="text-red-500">No workspace configured</span>
|
||||
)}
|
||||
) : null}
|
||||
</span>
|
||||
<Button
|
||||
type="button"
|
||||
|
||||
@@ -191,7 +191,9 @@ export function WorktreeTab({
|
||||
)}
|
||||
onClick={() => onSelectWorktree(worktree)}
|
||||
disabled={isActivating}
|
||||
title="Click to preview main"
|
||||
title={`Click to preview ${worktree.branch}`}
|
||||
aria-label={worktree.branch}
|
||||
data-testid={`worktree-branch-${worktree.branch}`}
|
||||
>
|
||||
{isRunning && <Loader2 className="w-3 h-3 animate-spin" />}
|
||||
{isActivating && !isRunning && (
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
PanelLeftClose,
|
||||
} from "lucide-react";
|
||||
import { cn, pathsEqual } from "@/lib/utils";
|
||||
import { getItem, setItem } from "@/lib/storage";
|
||||
import type { WorktreePanelProps, WorktreeInfo } from "./types";
|
||||
import {
|
||||
useWorktrees,
|
||||
@@ -91,13 +92,12 @@ export function WorktreePanel({
|
||||
|
||||
// Collapse state with localStorage persistence
|
||||
const [isCollapsed, setIsCollapsed] = useState(() => {
|
||||
if (typeof window === "undefined") return false;
|
||||
const saved = localStorage.getItem(WORKTREE_PANEL_COLLAPSED_KEY);
|
||||
const saved = getItem(WORKTREE_PANEL_COLLAPSED_KEY);
|
||||
return saved === "true";
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
localStorage.setItem(WORKTREE_PANEL_COLLAPSED_KEY, String(isCollapsed));
|
||||
setItem(WORKTREE_PANEL_COLLAPSED_KEY, String(isCollapsed));
|
||||
}, [isCollapsed]);
|
||||
|
||||
const toggleCollapsed = () => setIsCollapsed((prev) => !prev);
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
import { useState, useCallback, useRef, useEffect } from "react";
|
||||
import { useAppStore, Feature } from "@/store/app-store";
|
||||
import { Card, CardContent } from "@/components/ui/card";
|
||||
@@ -20,6 +19,10 @@ import { Markdown } from "@/components/ui/markdown";
|
||||
import { useFileBrowser } from "@/contexts/file-browser-context";
|
||||
import { toast } from "sonner";
|
||||
import { useNavigate } from "@tanstack/react-router";
|
||||
import {
|
||||
getDefaultWorkspaceDirectory,
|
||||
saveLastProjectDirectory,
|
||||
} from "@/lib/workspace-config";
|
||||
|
||||
interface InterviewMessage {
|
||||
id: string;
|
||||
@@ -65,8 +68,7 @@ const INTERVIEW_QUESTIONS = [
|
||||
];
|
||||
|
||||
export function InterviewView() {
|
||||
const { addProject, setCurrentProject, setAppSpec } =
|
||||
useAppStore();
|
||||
const { addProject, setCurrentProject, setAppSpec } = useAppStore();
|
||||
const { openFileBrowser } = useFileBrowser();
|
||||
const navigate = useNavigate();
|
||||
const [input, setInput] = useState("");
|
||||
@@ -89,6 +91,35 @@ export function InterviewView() {
|
||||
const messagesContainerRef = useRef<HTMLDivElement>(null);
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
// Default parent directory using workspace config utility
|
||||
useEffect(() => {
|
||||
if (projectPath) return;
|
||||
|
||||
let isMounted = true;
|
||||
|
||||
const loadWorkspaceDir = async () => {
|
||||
try {
|
||||
const defaultDir = await getDefaultWorkspaceDirectory();
|
||||
|
||||
if (!isMounted || projectPath) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (defaultDir) {
|
||||
setProjectPath(defaultDir);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to load default workspace directory:", error);
|
||||
}
|
||||
};
|
||||
|
||||
loadWorkspaceDir();
|
||||
|
||||
return () => {
|
||||
isMounted = false;
|
||||
};
|
||||
}, [projectPath]);
|
||||
|
||||
// Initialize with first question
|
||||
useEffect(() => {
|
||||
if (messages.length === 0) {
|
||||
@@ -295,10 +326,12 @@ export function InterviewView() {
|
||||
title: "Select Base Directory",
|
||||
description:
|
||||
"Choose the parent directory where your new project will be created",
|
||||
initialPath: projectPath || undefined,
|
||||
});
|
||||
|
||||
if (selectedPath) {
|
||||
setProjectPath(selectedPath);
|
||||
saveLastProjectDirectory(selectedPath);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -308,6 +341,7 @@ export function InterviewView() {
|
||||
setIsGenerating(true);
|
||||
|
||||
try {
|
||||
saveLastProjectDirectory(projectPath);
|
||||
const api = getElectronAPI();
|
||||
// Use platform-specific path separator
|
||||
const pathSep =
|
||||
@@ -423,8 +457,8 @@ export function InterviewView() {
|
||||
index < currentQuestionIndex
|
||||
? "bg-green-500"
|
||||
: index === currentQuestionIndex
|
||||
? "bg-primary"
|
||||
: "bg-zinc-700"
|
||||
? "bg-primary"
|
||||
: "bg-zinc-700"
|
||||
)}
|
||||
/>
|
||||
))}
|
||||
|
||||
@@ -239,6 +239,24 @@ export function WelcomeView() {
|
||||
const api = getElectronAPI();
|
||||
const projectPath = `${parentDir}/${projectName}`;
|
||||
|
||||
// Validate that parent directory exists
|
||||
const parentExists = await api.exists(parentDir);
|
||||
if (!parentExists) {
|
||||
toast.error("Parent directory does not exist", {
|
||||
description: `Cannot create project in non-existent directory: ${parentDir}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Verify parent is actually a directory
|
||||
const parentStat = await api.stat(parentDir);
|
||||
if (parentStat && !parentStat.isDirectory) {
|
||||
toast.error("Parent path is not a directory", {
|
||||
description: `${parentDir} is not a directory`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Create project directory
|
||||
const mkdirResult = await api.mkdir(projectPath);
|
||||
if (!mkdirResult.success) {
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
import { useEffect, useState, useRef } from "react";
|
||||
import { getHttpApiClient } from "@/lib/http-api-client";
|
||||
import { isElectron } from "@/lib/electron";
|
||||
import { getItem, removeItem } from "@/lib/storage";
|
||||
|
||||
/**
|
||||
* State returned by useSettingsMigration hook
|
||||
@@ -122,7 +123,7 @@ export function useSettingsMigration(): MigrationState {
|
||||
}
|
||||
|
||||
// Check if we have localStorage data to migrate
|
||||
const automakerStorage = localStorage.getItem("automaker-storage");
|
||||
const automakerStorage = getItem("automaker-storage");
|
||||
if (!automakerStorage) {
|
||||
console.log(
|
||||
"[Settings Migration] No localStorage data to migrate"
|
||||
@@ -136,7 +137,7 @@ export function useSettingsMigration(): MigrationState {
|
||||
// Collect all localStorage data
|
||||
const localStorageData: Record<string, string> = {};
|
||||
for (const key of LOCALSTORAGE_KEYS) {
|
||||
const value = localStorage.getItem(key);
|
||||
const value = getItem(key);
|
||||
if (value) {
|
||||
localStorageData[key] = value;
|
||||
}
|
||||
@@ -154,7 +155,7 @@ export function useSettingsMigration(): MigrationState {
|
||||
|
||||
// Clear old localStorage keys (but keep automaker-storage for Zustand)
|
||||
for (const key of KEYS_TO_CLEAR_AFTER_MIGRATION) {
|
||||
localStorage.removeItem(key);
|
||||
removeItem(key);
|
||||
}
|
||||
|
||||
setState({ checked: true, migrated: true, error: null });
|
||||
@@ -203,7 +204,7 @@ export async function syncSettingsToServer(): Promise<boolean> {
|
||||
|
||||
try {
|
||||
const api = getHttpApiClient();
|
||||
const automakerStorage = localStorage.getItem("automaker-storage");
|
||||
const automakerStorage = getItem("automaker-storage");
|
||||
|
||||
if (!automakerStorage) {
|
||||
return false;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
// Type definitions for Electron IPC API
|
||||
import type { SessionListItem, Message } from "@/types/electron";
|
||||
import { getJSON, setJSON, removeItem } from "./storage";
|
||||
|
||||
export interface FileEntry {
|
||||
name: string;
|
||||
@@ -2667,28 +2668,22 @@ export interface TrashedProject extends Project {
|
||||
}
|
||||
|
||||
export const getStoredProjects = (): Project[] => {
|
||||
if (typeof window === "undefined") return [];
|
||||
const stored = localStorage.getItem(STORAGE_KEYS.PROJECTS);
|
||||
return stored ? JSON.parse(stored) : [];
|
||||
return getJSON<Project[]>(STORAGE_KEYS.PROJECTS) ?? [];
|
||||
};
|
||||
|
||||
export const saveProjects = (projects: Project[]): void => {
|
||||
if (typeof window === "undefined") return;
|
||||
localStorage.setItem(STORAGE_KEYS.PROJECTS, JSON.stringify(projects));
|
||||
setJSON(STORAGE_KEYS.PROJECTS, projects);
|
||||
};
|
||||
|
||||
export const getCurrentProject = (): Project | null => {
|
||||
if (typeof window === "undefined") return null;
|
||||
const stored = localStorage.getItem(STORAGE_KEYS.CURRENT_PROJECT);
|
||||
return stored ? JSON.parse(stored) : null;
|
||||
return getJSON<Project>(STORAGE_KEYS.CURRENT_PROJECT);
|
||||
};
|
||||
|
||||
export const setCurrentProject = (project: Project | null): void => {
|
||||
if (typeof window === "undefined") return;
|
||||
if (project) {
|
||||
localStorage.setItem(STORAGE_KEYS.CURRENT_PROJECT, JSON.stringify(project));
|
||||
setJSON(STORAGE_KEYS.CURRENT_PROJECT, project);
|
||||
} else {
|
||||
localStorage.removeItem(STORAGE_KEYS.CURRENT_PROJECT);
|
||||
removeItem(STORAGE_KEYS.CURRENT_PROJECT);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2709,12 +2704,9 @@ export const removeProject = (projectId: string): void => {
|
||||
};
|
||||
|
||||
export const getStoredTrashedProjects = (): TrashedProject[] => {
|
||||
if (typeof window === "undefined") return [];
|
||||
const stored = localStorage.getItem(STORAGE_KEYS.TRASHED_PROJECTS);
|
||||
return stored ? JSON.parse(stored) : [];
|
||||
return getJSON<TrashedProject[]>(STORAGE_KEYS.TRASHED_PROJECTS) ?? [];
|
||||
};
|
||||
|
||||
export const saveTrashedProjects = (projects: TrashedProject[]): void => {
|
||||
if (typeof window === "undefined") return;
|
||||
localStorage.setItem(STORAGE_KEYS.TRASHED_PROJECTS, JSON.stringify(projects));
|
||||
setJSON(STORAGE_KEYS.TRASHED_PROJECTS, projects);
|
||||
};
|
||||
|
||||
@@ -766,6 +766,7 @@ export class HttpApiClient implements ElectronAPI {
|
||||
success: boolean;
|
||||
configured: boolean;
|
||||
workspaceDir?: string;
|
||||
defaultDir?: string | null;
|
||||
error?: string;
|
||||
}> => this.get("/api/workspace/config"),
|
||||
|
||||
|
||||
@@ -48,6 +48,34 @@ export async function initializeProject(
|
||||
const existingFiles: string[] = [];
|
||||
|
||||
try {
|
||||
// Validate that the project directory exists and is a directory
|
||||
const projectExists = await api.exists(projectPath);
|
||||
if (!projectExists) {
|
||||
return {
|
||||
success: false,
|
||||
isNewProject: false,
|
||||
error: `Project directory does not exist: ${projectPath}. Create it first before initializing.`,
|
||||
};
|
||||
}
|
||||
|
||||
// Verify it's actually a directory (not a file)
|
||||
const projectStat = await api.stat(projectPath);
|
||||
if (!projectStat.success) {
|
||||
return {
|
||||
success: false,
|
||||
isNewProject: false,
|
||||
error: projectStat.error || `Failed to stat project directory: ${projectPath}`,
|
||||
};
|
||||
}
|
||||
|
||||
if (projectStat.stats && !projectStat.stats.isDirectory) {
|
||||
return {
|
||||
success: false,
|
||||
isNewProject: false,
|
||||
error: `Project path is not a directory: ${projectPath}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Initialize git repository if it doesn't exist
|
||||
const gitDirExists = await api.exists(`${projectPath}/.git`);
|
||||
if (!gitDirExists) {
|
||||
|
||||
100
apps/ui/src/lib/storage.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* Centralized localStorage abstraction module
|
||||
*
|
||||
* Provides type-safe wrappers for all localStorage operations.
|
||||
* All localStorage access should go through this module to ensure
|
||||
* consistent error handling and environment checks.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Check if localStorage is available in the current environment
|
||||
*/
|
||||
function isStorageAvailable(): boolean {
|
||||
return typeof window !== "undefined" && window.localStorage !== undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an item from localStorage
|
||||
* @param key - The storage key
|
||||
* @returns The stored value or null if not found/unavailable
|
||||
*/
|
||||
export function getItem(key: string): string | null {
|
||||
if (!isStorageAvailable()) return null;
|
||||
try {
|
||||
return window.localStorage.getItem(key);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set an item in localStorage
|
||||
* @param key - The storage key
|
||||
* @param value - The value to store
|
||||
* @returns true if successful, false otherwise
|
||||
*/
|
||||
export function setItem(key: string, value: string): boolean {
|
||||
if (!isStorageAvailable()) return false;
|
||||
try {
|
||||
window.localStorage.setItem(key, value);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an item from localStorage
|
||||
* @param key - The storage key to remove
|
||||
* @returns true if successful, false otherwise
|
||||
*/
|
||||
export function removeItem(key: string): boolean {
|
||||
if (!isStorageAvailable()) return false;
|
||||
try {
|
||||
window.localStorage.removeItem(key);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a JSON-parsed item from localStorage
|
||||
* @param key - The storage key
|
||||
* @returns The parsed value or null if not found/invalid
|
||||
*/
|
||||
export function getJSON<T>(key: string): T | null {
|
||||
const value = getItem(key);
|
||||
if (!value) return null;
|
||||
try {
|
||||
return JSON.parse(value) as T;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a JSON-stringified item in localStorage
|
||||
* @param key - The storage key
|
||||
* @param value - The value to stringify and store
|
||||
* @returns true if successful, false otherwise
|
||||
*/
|
||||
export function setJSON<T>(key: string, value: T): boolean {
|
||||
try {
|
||||
return setItem(key, JSON.stringify(value));
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Storage module for named exports
|
||||
*/
|
||||
export const storage = {
|
||||
getItem,
|
||||
setItem,
|
||||
removeItem,
|
||||
getJSON,
|
||||
setJSON,
|
||||
isAvailable: isStorageAvailable,
|
||||
};
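A short usage sketch of the new helpers, mirroring how the recent-folders code earlier in this diff consumes them (the key below is illustrative, not one the app uses):

```typescript
import { getJSON, setJSON, removeItem } from "@/lib/storage";

// Hypothetical key used only for this example.
const EXAMPLE_KEY = "automaker:example-drafts";

// Reads return null instead of throwing when storage is unavailable
// or the stored JSON is invalid, so callers can supply a fallback.
const drafts = getJSON<string[]>(EXAMPLE_KEY) ?? [];

// Writes report success as a boolean rather than throwing.
if (!setJSON(EXAMPLE_KEY, [...drafts, "new draft"])) {
  console.warn("localStorage unavailable; draft not persisted");
}

removeItem(EXAMPLE_KEY);
```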
|
||||
107
apps/ui/src/lib/workspace-config.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
/**
|
||||
* Utility functions for determining default workspace directories
|
||||
* Centralizes the logic for determining where projects should be created/opened
|
||||
*/
|
||||
|
||||
/* eslint-disable no-undef */
|
||||
import { getHttpApiClient } from "./http-api-client";
|
||||
import { getElectronAPI } from "./electron";
|
||||
import { getItem, setItem } from "./storage";
|
||||
import path from "path";
|
||||
|
||||
const LAST_PROJECT_DIR_KEY = "automaker:lastProjectDir";
|
||||
|
||||
/**
|
||||
* Gets the default Documents/Automaker directory path
|
||||
* @returns Promise resolving to Documents/Automaker path, or null if unavailable
|
||||
*/
|
||||
async function getDefaultDocumentsPath(): Promise<string | null> {
|
||||
try {
|
||||
const api = getElectronAPI();
|
||||
const documentsPath = await api.getPath("documents");
|
||||
return path.join(documentsPath, "Automaker");
|
||||
} catch (error) {
|
||||
if (typeof window !== "undefined" && window.console) {
|
||||
window.console.error("Failed to get documents path:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the default directory for project creation/opening
|
||||
* Priority order:
|
||||
* 1. ALLOWED_ROOT_DIRECTORY (if configured)
|
||||
* 2. Last used directory from localStorage (if ALLOWED_ROOT_DIRECTORY is not set)
|
||||
* 3. Documents/Automaker (if ALLOWED_ROOT_DIRECTORY is not set)
|
||||
* 4. DATA_DIR (if ALLOWED_ROOT_DIRECTORY is not set and Documents unavailable)
|
||||
* 5. null (no default)
|
||||
*
|
||||
* @returns Promise resolving to the default directory path, or null if none available
|
||||
*/
|
||||
export async function getDefaultWorkspaceDirectory(): Promise<string | null> {
|
||||
try {
|
||||
const httpClient = getHttpApiClient();
|
||||
const result = await httpClient.workspace.getConfig();
|
||||
|
||||
if (result.success) {
|
||||
// If ALLOWED_ROOT_DIRECTORY is configured, use it
|
||||
if (result.configured && result.workspaceDir) {
|
||||
return result.workspaceDir;
|
||||
}
|
||||
|
||||
// If ALLOWED_ROOT_DIRECTORY is not set, use priority:
|
||||
// 1. Last used directory
|
||||
// 2. Documents/Automaker
|
||||
// 3. DATA_DIR as fallback
|
||||
const lastUsedDir = getItem(LAST_PROJECT_DIR_KEY);
|
||||
|
||||
if (lastUsedDir) {
|
||||
return lastUsedDir;
|
||||
}
|
||||
|
||||
// Try to get Documents/Automaker
|
||||
const documentsPath = await getDefaultDocumentsPath();
|
||||
if (documentsPath) {
|
||||
return documentsPath;
|
||||
}
|
||||
|
||||
// Fallback to DATA_DIR if available
|
||||
if (result.defaultDir) {
|
||||
return result.defaultDir;
|
||||
}
|
||||
}
|
||||
|
||||
// If API call failed, still try last used dir and Documents
|
||||
const lastUsedDir = getItem(LAST_PROJECT_DIR_KEY);
|
||||
|
||||
if (lastUsedDir) {
|
||||
return lastUsedDir;
|
||||
}
|
||||
|
||||
const documentsPath = await getDefaultDocumentsPath();
|
||||
return documentsPath;
|
||||
} catch (error) {
|
||||
if (typeof window !== "undefined" && window.console) {
|
||||
window.console.error("Failed to get default workspace directory:", error);
|
||||
}
|
||||
|
||||
// On error, try last used dir and Documents
|
||||
const lastUsedDir = getItem(LAST_PROJECT_DIR_KEY);
|
||||
|
||||
if (lastUsedDir) {
|
||||
return lastUsedDir;
|
||||
}
|
||||
|
||||
const documentsPath = await getDefaultDocumentsPath();
|
||||
return documentsPath;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves the last used project directory to localStorage
|
||||
* @param path - The directory path to save
|
||||
*/
|
||||
export function saveLastProjectDirectory(path: string): void {
|
||||
setItem(LAST_PROJECT_DIR_KEY, path);
|
||||
}
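A sketch of how a picker flow is expected to combine these two helpers, based on the call sites shown earlier in this diff (the `openFileBrowser` parameter is illustrative wiring, not part of the diff):

```typescript
import {
  getDefaultWorkspaceDirectory,
  saveLastProjectDirectory,
} from "@/lib/workspace-config";

// Resolve a starting directory, let the user pick, then remember the choice.
async function pickProjectDirectory(
  openFileBrowser: (opts: { initialPath?: string }) => Promise<string | null>
): Promise<string | null> {
  // Priority handled inside the helper:
  // ALLOWED_ROOT_DIRECTORY (if configured) > last used dir > Documents/Automaker > DATA_DIR
  const defaultDir = await getDefaultWorkspaceDirectory();

  const selected = await openFileBrowser({
    initialPath: defaultDir ?? undefined,
  });
  if (selected) {
    // Persist so the next dialog opens where the user left off.
    saveLastProjectDirectory(selected);
  }
  return selected;
}
```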
|
||||
@@ -116,7 +116,9 @@ async function startStaticServer(): Promise<void> {
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
staticServer!.listen(STATIC_PORT, () => {
|
||||
console.log(`[Electron] Static server running at http://localhost:${STATIC_PORT}`);
|
||||
console.log(
|
||||
`[Electron] Static server running at http://localhost:${STATIC_PORT}`
|
||||
);
|
||||
resolve();
|
||||
});
|
||||
staticServer!.on("error", reject);
|
||||
@@ -135,7 +137,10 @@ async function startServer(): Promise<void> {
|
||||
command = "node";
|
||||
serverPath = path.join(__dirname, "../../server/src/index.ts");
|
||||
|
||||
const serverNodeModules = path.join(__dirname, "../../server/node_modules/tsx");
|
||||
const serverNodeModules = path.join(
|
||||
__dirname,
|
||||
"../../server/node_modules/tsx"
|
||||
);
|
||||
const rootNodeModules = path.join(__dirname, "../../../node_modules/tsx");
|
||||
|
||||
let tsxCliPath: string;
|
||||
@@ -170,23 +175,16 @@ async function startServer(): Promise<void> {
|
||||
? path.join(process.resourcesPath, "server", "node_modules")
|
||||
: path.join(__dirname, "../../server/node_modules");
|
||||
|
||||
const defaultWorkspaceDir = path.join(app.getPath("documents"), "Automaker");
|
||||
|
||||
if (!fs.existsSync(defaultWorkspaceDir)) {
|
||||
try {
|
||||
fs.mkdirSync(defaultWorkspaceDir, { recursive: true });
|
||||
console.log("[Electron] Created workspace directory:", defaultWorkspaceDir);
|
||||
} catch (error) {
|
||||
console.error("[Electron] Failed to create workspace directory:", error);
|
||||
}
|
||||
}
|
||||
|
||||
const env = {
|
||||
...process.env,
|
||||
PORT: SERVER_PORT.toString(),
|
||||
DATA_DIR: app.getPath("userData"),
|
||||
NODE_PATH: serverNodeModules,
|
||||
WORKSPACE_DIR: process.env.WORKSPACE_DIR || defaultWorkspaceDir,
|
||||
// Only set ALLOWED_ROOT_DIRECTORY if explicitly provided in environment
|
||||
// If not set, server will allow access to all paths
|
||||
...(process.env.ALLOWED_ROOT_DIRECTORY && {
|
||||
ALLOWED_ROOT_DIRECTORY: process.env.ALLOWED_ROOT_DIRECTORY,
|
||||
}),
|
||||
};
|
||||
|
||||
console.log("[Electron] Starting backend server...");
|
||||
@@ -324,7 +322,10 @@ app.whenReady().then(async () => {
|
||||
try {
|
||||
app.dock.setIcon(iconPath);
|
||||
} catch (error) {
|
||||
console.warn("[Electron] Failed to set dock icon:", (error as Error).message);
|
||||
console.warn(
|
||||
"[Electron] Failed to set dock icon:",
|
||||
(error as Error).message
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -426,9 +427,12 @@ ipcMain.handle("shell:openPath", async (_, filePath: string) => {
|
||||
});
|
||||
|
||||
// App info
|
||||
ipcMain.handle("app:getPath", async (_, name: Parameters<typeof app.getPath>[0]) => {
|
||||
return app.getPath(name);
|
||||
});
|
||||
ipcMain.handle(
|
||||
"app:getPath",
|
||||
async (_, name: Parameters<typeof app.getPath>[0]) => {
|
||||
return app.getPath(name);
|
||||
}
|
||||
);
|
||||
|
||||
ipcMain.handle("app:getVersion", async () => {
|
||||
return app.getVersion();
|
||||
|
||||
@@ -46,28 +46,31 @@ test.describe("Spec Editor Persistence", () => {
|
||||
// Step 4: Click on the Spec Editor in the sidebar
|
||||
await navigateToSpecEditor(page);
|
||||
|
||||
// Step 5: Wait for the spec editor to load
|
||||
// Step 5: Wait for the spec view to load (not empty state)
|
||||
await waitForElement(page, "spec-view", { timeout: 10000 });
|
||||
|
||||
// Step 6: Wait for the spec editor to load
|
||||
const specEditor = await getByTestId(page, "spec-editor");
|
||||
await specEditor.waitFor({ state: "visible", timeout: 10000 });
|
||||
|
||||
// Step 6: Wait for CodeMirror to initialize (it has a .cm-content element)
|
||||
// Step 7: Wait for CodeMirror to initialize (it has a .cm-content element)
|
||||
await specEditor.locator(".cm-content").waitFor({ state: "visible", timeout: 10000 });
|
||||
|
||||
// Step 7: Modify the editor content to "hello world"
|
||||
// Step 8: Modify the editor content to "hello world"
|
||||
await setEditorContent(page, "hello world");
|
||||
|
||||
// Verify content was set before saving
|
||||
const contentBeforeSave = await getEditorContent(page);
|
||||
expect(contentBeforeSave.trim()).toBe("hello world");
|
||||
|
||||
// Step 8: Click the save button and wait for save to complete
|
||||
// Step 9: Click the save button and wait for save to complete
|
||||
await clickSaveButton(page);
|
||||
|
||||
// Step 9: Refresh the page
|
||||
// Step 10: Refresh the page
|
||||
await page.reload();
|
||||
await waitForNetworkIdle(page);
|
||||
|
||||
// Step 10: Navigate back to the spec editor
|
||||
// Step 11: Navigate back to the spec editor
|
||||
// After reload, we need to wait for the app to initialize
|
||||
await waitForElement(page, "sidebar", { timeout: 10000 });
|
||||
|
||||
@@ -116,7 +119,7 @@ test.describe("Spec Editor Persistence", () => {
|
||||
);
|
||||
}
|
||||
|
||||
// Step 11: Verify the content was persisted
|
||||
// Step 12: Verify the content was persisted
|
||||
const persistedContent = await getEditorContent(page);
|
||||
expect(persistedContent.trim()).toBe("hello world");
|
||||
});
|
||||
|
||||
@@ -37,8 +37,25 @@ export async function navigateToSpec(page: Page): Promise<void> {
|
||||
await page.goto("/spec");
|
||||
await page.waitForLoadState("networkidle");
|
||||
|
||||
// Wait for the spec view to be visible
|
||||
await waitForElement(page, "spec-view", { timeout: 10000 });
|
||||
// Wait for loading state to complete first (if present)
|
||||
const loadingElement = page.locator('[data-testid="spec-view-loading"]');
|
||||
try {
|
||||
const loadingVisible = await loadingElement.isVisible({ timeout: 2000 });
|
||||
if (loadingVisible) {
|
||||
// Wait for loading to disappear (spec view or empty state will appear)
|
||||
await loadingElement.waitFor({ state: "hidden", timeout: 10000 });
|
||||
}
|
||||
} catch {
|
||||
// Loading element not found or already hidden, continue
|
||||
}
|
||||
|
||||
// Wait for either the main spec view or empty state to be visible
|
||||
// The spec-view element appears when loading is complete and spec exists
|
||||
// The spec-view-empty element appears when loading is complete and spec doesn't exist
|
||||
await Promise.race([
|
||||
waitForElement(page, "spec-view", { timeout: 10000 }).catch(() => null),
|
||||
waitForElement(page, "spec-view-empty", { timeout: 10000 }).catch(() => null),
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -128,7 +128,7 @@ export async function waitForContextFile(
|
||||
filename: string,
|
||||
timeout: number = 10000
|
||||
): Promise<void> {
|
||||
const locator = await getByTestId(page, `context-file-${filename}`);
|
||||
const locator = page.locator(`[data-testid="context-file-${filename}"]`);
|
||||
await locator.waitFor({ state: "visible", timeout });
|
||||
}
|
||||
|
||||
|
||||
@@ -103,9 +103,10 @@ test.describe("Worktree Integration Tests", () => {
|
||||
const branchLabel = page.getByText("Branch:");
|
||||
await expect(branchLabel).toBeVisible({ timeout: 10000 });
|
||||
|
||||
// Verify main branch button is displayed
|
||||
const mainBranchButton = page.getByRole("button", { name: "main" });
|
||||
await expect(mainBranchButton).toBeVisible({ timeout: 10000 });
|
||||
// Wait for worktrees to load and main branch button to appear
|
||||
// Use data-testid for more reliable selection
|
||||
const mainBranchButton = page.locator('[data-testid="worktree-branch-main"]');
|
||||
await expect(mainBranchButton).toBeVisible({ timeout: 15000 });
|
||||
});
|
||||
|
||||
test("should select main branch by default when app loads with stale worktree data", async ({
|
||||
|
||||
@@ -2,9 +2,9 @@ services:
|
||||
server:
|
||||
volumes:
|
||||
# Mount your workspace directory to /projects inside the container
|
||||
# Example: mount your local /workspace to /projects inside the container
|
||||
- /Users/webdevcody/Workspace/automaker-workspace:/projects:rw
|
||||
environment:
|
||||
# Set workspace directory so the UI can discover projects
|
||||
- WORKSPACE_DIR=/projects
|
||||
# Ensure /projects is in allowed directories
|
||||
- ALLOWED_PROJECT_DIRS=/projects
|
||||
# Set root directory for all projects and file operations
|
||||
# Users can only create/open projects within this directory
|
||||
- ALLOWED_ROOT_DIRECTORY=/projects
|
||||
|
||||
@@ -37,19 +37,17 @@ services:
|
||||
# Optional - authentication (leave empty to disable)
|
||||
- AUTOMAKER_API_KEY=${AUTOMAKER_API_KEY:-}
|
||||
|
||||
# Optional - restrict to specific directories within container only
|
||||
# These paths are INSIDE the container, not on your host
|
||||
- ALLOWED_PROJECT_DIRS=${ALLOWED_PROJECT_DIRS:-/projects}
|
||||
# Optional - restrict to specific directory within container only
|
||||
# Projects and files can only be created/accessed within this directory
|
||||
# Paths are INSIDE the container, not on your host
|
||||
# Default: /projects
|
||||
- ALLOWED_ROOT_DIRECTORY=${ALLOWED_ROOT_DIRECTORY:-/projects}
|
||||
|
||||
# Optional - data directory for sessions, etc. (container-only)
|
||||
# Optional - data directory for sessions, settings, etc. (container-only)
|
||||
- DATA_DIR=/data
|
||||
|
||||
# Optional - CORS origin (default allows all)
|
||||
- CORS_ORIGIN=${CORS_ORIGIN:-*}
|
||||
|
||||
# Optional - additional API keys
|
||||
- OPENAI_API_KEY=${OPENAI_API_KEY:-}
|
||||
- GOOGLE_API_KEY=${GOOGLE_API_KEY:-}
|
||||
volumes:
|
||||
# ONLY named volumes - these are isolated from your host filesystem
|
||||
# This volume persists data between restarts but is container-managed
|
||||
|
||||
@@ -77,28 +77,36 @@ Path validation and security checks.
```typescript
import {
initAllowedPaths,
addAllowedPath,
isPathAllowed,
validatePath,
getAllowedPaths
getAllowedPaths,
getAllowedRootDirectory,
getDataDirectory,
PathNotAllowedError
} from '@automaker/platform';

// Initialize allowed paths from environment
// Reads ALLOWED_ROOT_DIRECTORY and DATA_DIR environment variables
initAllowedPaths();

// Add custom allowed path
addAllowedPath('/custom/path');

// Check if path is allowed
if (isPathAllowed('/project/path')) {
console.log('Path is allowed');
}

// Validate and normalize path
const safePath = validatePath('/requested/path');
// Validate and normalize path (throws PathNotAllowedError if not allowed)
try {
const safePath = validatePath('/requested/path');
} catch (error) {
if (error instanceof PathNotAllowedError) {
console.error('Access denied:', error.message);
}
}

// Get all allowed paths
const allowed = getAllowedPaths();
// Get configured directories
const rootDir = getAllowedRootDirectory(); // or null if not configured
const dataDir = getDataDirectory(); // or null if not configured
const allowed = getAllowedPaths(); // array of all allowed paths
```

## Usage Example
@@ -139,33 +147,44 @@ async function executeFeature(projectPath: string, featureId: string) {

## Security Model

**IMPORTANT: Path validation is currently disabled.**
Path security is enforced through two environment variables:

All path access checks (`isPathAllowed()`) always return `true`, allowing unrestricted file system access. This is a deliberate design decision for the following reasons:
### Environment Variables

### Rationale
- **ALLOWED_ROOT_DIRECTORY**: Primary security boundary. When set, all file operations must be within this directory.
- **DATA_DIR**: Application data directory (settings, credentials). Always allowed regardless of ALLOWED_ROOT_DIRECTORY.

1. **Development Flexibility**: AutoMaker is a development tool that needs to access various project directories chosen by the user. Strict path restrictions would limit its usefulness.
### Behavior

2. **User Control**: The application runs with the user's permissions. Users should have full control over which directories they work with.
1. **When ALLOWED_ROOT_DIRECTORY is set**: Only paths within this directory (or DATA_DIR) are allowed. Attempts to access other paths will throw `PathNotAllowedError`.

3. **Trust Model**: AutoMaker operates under a trust model where the user is assumed to be working on their own projects.
2. **When ALLOWED_ROOT_DIRECTORY is not set**: All paths are allowed (backward compatibility mode).

### Implications
3. **DATA_DIR exception**: Paths within DATA_DIR are always allowed, even if outside ALLOWED_ROOT_DIRECTORY. This ensures settings and credentials are always accessible.

- The allowed paths list is maintained for API compatibility but not enforced
- All file system operations are performed with the user's full permissions
- The tool does not impose artificial directory restrictions
### Example Configuration

### Re-enabling Security (Future)
```bash
# Docker/containerized environment
ALLOWED_ROOT_DIRECTORY=/workspace
DATA_DIR=/app/data

If strict path validation is needed (e.g., for production deployments or untrusted environments):
# Development (no restrictions)
# Leave ALLOWED_ROOT_DIRECTORY unset for full access
```

1. Modify `isPathAllowed()` in `src/security.ts` to check against the allowed paths list
2. Consider adding an environment variable `ENABLE_PATH_SECURITY=true`
3. Implement additional security layers as needed
### Secure File System

The infrastructure is already in place; only the enforcement logic needs to be activated.
The `secureFs` module wraps Node.js `fs` operations with path validation:

```typescript
import { secureFs } from '@automaker/platform';

// All operations validate paths before execution
await secureFs.readFile('/workspace/project/file.txt');
await secureFs.writeFile('/workspace/project/output.txt', data);
await secureFs.mkdir('/workspace/project/new-dir', { recursive: true });
```
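To make the behavior rules above concrete, here is a minimal sketch (not part of the README diff) of how the three rules interact at runtime, assuming ALLOWED_ROOT_DIRECTORY=/workspace and DATA_DIR=/app/data as in the example configuration:

```typescript
// Hedged illustration of the documented behavior; paths are the example values above.
import { initAllowedPaths, isPathAllowed, validatePath, PathNotAllowedError } from "@automaker/platform";

process.env.ALLOWED_ROOT_DIRECTORY = "/workspace";
process.env.DATA_DIR = "/app/data";
initAllowedPaths();

isPathAllowed("/workspace/my-app/src/index.ts"); // true  - inside the root
isPathAllowed("/app/data/settings.json");        // true  - DATA_DIR exception
isPathAllowed("/etc/passwd");                    // false - outside both

try {
  validatePath("/etc/passwd"); // throws instead of returning a resolved path
} catch (err) {
  if (err instanceof PathNotAllowedError) {
    // "Path not allowed: /etc/passwd. Must be within ALLOWED_ROOT_DIRECTORY or DATA_DIR."
  }
}
```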
## Directory Structure

@@ -32,9 +32,15 @@ export {

// Security
export {
PathNotAllowedError,
initAllowedPaths,
addAllowedPath,
isPathAllowed,
validatePath,
isPathWithinDirectory,
getAllowedRootDirectory,
getDataDirectory,
getAllowedPaths,
} from './security.js';

// Secure file system (validates paths before I/O operations)
export * as secureFs from './secure-fs.js';
@@ -9,7 +9,7 @@
* Directory creation is handled separately by ensure* functions.
*/

import fs from "fs/promises";
import * as secureFs from "./secure-fs.js";
import path from "path";

/**
@@ -149,7 +149,7 @@ export function getBranchTrackingPath(projectPath: string): string {
*/
export async function ensureAutomakerDir(projectPath: string): Promise<string> {
const automakerDir = getAutomakerDir(projectPath);
await fs.mkdir(automakerDir, { recursive: true });
await secureFs.mkdir(automakerDir, { recursive: true });
return automakerDir;
}

@@ -211,6 +211,6 @@ export function getProjectSettingsPath(projectPath: string): string {
*/
export async function ensureDataDir(dataDir: string): Promise<string> {
await fs.mkdir(dataDir, { recursive: true });
await secureFs.mkdir(dataDir, { recursive: true });
return dataDir;
}
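With this change the ensure* helpers inherit the path policy from the adapter instead of writing anywhere on disk. A hedged sketch of the resulting behavior (the import path and project locations are illustrative; the actual directory name comes from getAutomakerDir, which is not shown in this hunk):

```typescript
import { ensureAutomakerDir } from "./paths.js"; // module path assumed
import { PathNotAllowedError } from "@automaker/platform";

// Assuming initAllowedPaths() ran with ALLOWED_ROOT_DIRECTORY=/projects:
await ensureAutomakerDir("/projects/my-app");
// -> secureFs.mkdir creates the automaker directory under /projects/my-app

try {
  await ensureAutomakerDir("/tmp/elsewhere"); // outside the allowed root
} catch (err) {
  console.error(err instanceof PathNotAllowedError); // true - rejected before any I/O
}
```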
168
libs/platform/src/secure-fs.ts
Normal file
@@ -0,0 +1,168 @@
/**
* Secure File System Adapter
*
* All file I/O operations must go through this adapter to enforce
* ALLOWED_ROOT_DIRECTORY restrictions at the actual access point,
* not just at the API layer. This provides defense-in-depth security.
*/

import fs from "fs/promises";
import type { Dirent } from "fs";
import path from "path";
import { validatePath } from "./security.js";

/**
* Wrapper around fs.access that validates path first
*/
export async function access(filePath: string, mode?: number): Promise<void> {
const validatedPath = validatePath(filePath);
return fs.access(validatedPath, mode);
}

/**
* Wrapper around fs.readFile that validates path first
*/
export async function readFile(
filePath: string,
encoding?: BufferEncoding
): Promise<string | Buffer> {
const validatedPath = validatePath(filePath);
if (encoding) {
return fs.readFile(validatedPath, encoding);
}
return fs.readFile(validatedPath);
}

/**
* Wrapper around fs.writeFile that validates path first
*/
export async function writeFile(
filePath: string,
data: string | Buffer,
encoding?: BufferEncoding
): Promise<void> {
const validatedPath = validatePath(filePath);
return fs.writeFile(validatedPath, data, encoding);
}

/**
* Wrapper around fs.mkdir that validates path first
*/
export async function mkdir(
dirPath: string,
options?: { recursive?: boolean; mode?: number }
): Promise<string | undefined> {
const validatedPath = validatePath(dirPath);
return fs.mkdir(validatedPath, options);
}

/**
* Wrapper around fs.readdir that validates path first
*/
export async function readdir(
dirPath: string,
options?: { withFileTypes?: false; encoding?: BufferEncoding }
): Promise<string[]>;
export async function readdir(
dirPath: string,
options: { withFileTypes: true; encoding?: BufferEncoding }
): Promise<Dirent[]>;
export async function readdir(
dirPath: string,
options?: { withFileTypes?: boolean; encoding?: BufferEncoding }
): Promise<string[] | Dirent[]> {
const validatedPath = validatePath(dirPath);
if (options?.withFileTypes === true) {
return fs.readdir(validatedPath, { withFileTypes: true });
}
return fs.readdir(validatedPath);
}

/**
* Wrapper around fs.stat that validates path first
*/
export async function stat(filePath: string): Promise<any> {
const validatedPath = validatePath(filePath);
return fs.stat(validatedPath);
}

/**
* Wrapper around fs.rm that validates path first
*/
export async function rm(
filePath: string,
options?: { recursive?: boolean; force?: boolean }
): Promise<void> {
const validatedPath = validatePath(filePath);
return fs.rm(validatedPath, options);
}

/**
* Wrapper around fs.unlink that validates path first
*/
export async function unlink(filePath: string): Promise<void> {
const validatedPath = validatePath(filePath);
return fs.unlink(validatedPath);
}

/**
* Wrapper around fs.copyFile that validates both paths first
*/
export async function copyFile(
src: string,
dest: string,
mode?: number
): Promise<void> {
const validatedSrc = validatePath(src);
const validatedDest = validatePath(dest);
return fs.copyFile(validatedSrc, validatedDest, mode);
}

/**
* Wrapper around fs.appendFile that validates path first
*/
export async function appendFile(
filePath: string,
data: string | Buffer,
encoding?: BufferEncoding
): Promise<void> {
const validatedPath = validatePath(filePath);
return fs.appendFile(validatedPath, data, encoding);
}

/**
* Wrapper around fs.rename that validates both paths first
*/
export async function rename(
oldPath: string,
newPath: string
): Promise<void> {
const validatedOldPath = validatePath(oldPath);
const validatedNewPath = validatePath(newPath);
return fs.rename(validatedOldPath, validatedNewPath);
}

/**
* Wrapper around fs.lstat that validates path first
* Returns file stats without following symbolic links
*/
export async function lstat(filePath: string): Promise<any> {
const validatedPath = validatePath(filePath);
return fs.lstat(validatedPath);
}

/**
* Wrapper around path.join that returns resolved path
* Does NOT validate - use this for path construction, then pass to other operations
*/
export function joinPath(...pathSegments: string[]): string {
return path.join(...pathSegments);
}

/**
* Wrapper around path.resolve that returns resolved path
* Does NOT validate - use this for path construction, then pass to other operations
*/
export function resolvePath(...pathSegments: string[]): string {
return path.resolve(...pathSegments);
}
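A brief usage sketch of the adapter (paths are illustrative; assumes initAllowedPaths() has already been called with a root that contains them):

```typescript
import { secureFs } from "@automaker/platform";

// Every call validates its path(s) before touching the filesystem.
const text = await secureFs.readFile("/projects/app/README.md", "utf-8");
const entries = await secureFs.readdir("/projects/app", { withFileTypes: true });
await secureFs.mkdir("/projects/app/.cache", { recursive: true });

// joinPath/resolvePath only build strings; validation happens in the I/O wrappers.
const target = secureFs.joinPath("/projects/app", ".cache", "state.json");
await secureFs.writeFile(target, JSON.stringify({ ok: true }), "utf-8");
```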
@@ -1,90 +1,143 @@
/**
* Security utilities for path validation
*
* SECURITY NOTICE: Path validation is currently DISABLED
*
* All path access checks always return true, allowing unrestricted file system access.
* This was a deliberate design decision for the following reasons:
*
* 1. Development Flexibility: AutoMaker is a development tool that needs to access
* various project directories chosen by the user. Strict path restrictions would
* limit its usefulness.
*
* 2. User Control: The application runs with the user's permissions. Users should
* have full control over which directories they work with, without artificial
* restrictions imposed by the tool.
*
* 3. Trust Model: AutoMaker operates under a trust model where the user is assumed
* to be working on their own projects. The tool itself doesn't perform operations
* without user initiation.
*
* SECURITY CONSIDERATIONS:
* - This module maintains the allowed paths list for API compatibility and potential
* future use, but does not enforce any restrictions.
* - If security restrictions are needed in the future, the infrastructure is in place
* to enable them by modifying isPathAllowed() to actually check the allowed list.
* - For production deployments or untrusted environments, consider re-enabling path
* validation or implementing additional security layers.
*
* FUTURE ENHANCEMENT: Consider adding an environment variable (e.g., ENABLE_PATH_SECURITY)
* to allow enabling strict path validation when needed for specific deployment scenarios.
* Enforces ALLOWED_ROOT_DIRECTORY constraint with appData exception
*/

import path from "path";

// Allowed project directories - kept for API compatibility
const allowedPaths = new Set<string>();
/**
* Error thrown when a path is not allowed by security policy
*/
export class PathNotAllowedError extends Error {
constructor(filePath: string) {
super(
`Path not allowed: ${filePath}. Must be within ALLOWED_ROOT_DIRECTORY or DATA_DIR.`
);
this.name = "PathNotAllowedError";
}
}

// Allowed root directory - main security boundary
let allowedRootDirectory: string | null = null;

// Data directory - always allowed for settings/credentials
let dataDirectory: string | null = null;

/**
* Initialize allowed paths from environment variable
* Note: All paths are now allowed regardless of this setting
* Initialize security settings from environment variables
* - ALLOWED_ROOT_DIRECTORY: main security boundary
* - DATA_DIR: appData exception, always allowed
*/
export function initAllowedPaths(): void {
const dirs = process.env.ALLOWED_PROJECT_DIRS;
if (dirs) {
for (const dir of dirs.split(",")) {
const trimmed = dir.trim();
if (trimmed) {
allowedPaths.add(path.resolve(trimmed));
}
}
// Load ALLOWED_ROOT_DIRECTORY
const rootDir = process.env.ALLOWED_ROOT_DIRECTORY;
if (rootDir) {
allowedRootDirectory = path.resolve(rootDir);
console.log(
`[Security] ✓ ALLOWED_ROOT_DIRECTORY configured: ${allowedRootDirectory}`
);
} else {
console.log(
"[Security] ⚠️ ALLOWED_ROOT_DIRECTORY not set - allowing access to all paths"
);
}

// Load DATA_DIR (appData exception - always allowed)
const dataDir = process.env.DATA_DIR;
if (dataDir) {
allowedPaths.add(path.resolve(dataDir));
}

const workspaceDir = process.env.WORKSPACE_DIR;
if (workspaceDir) {
allowedPaths.add(path.resolve(workspaceDir));
dataDirectory = path.resolve(dataDir);
console.log(`[Security] ✓ DATA_DIR configured: ${dataDirectory}`);
}
}

/**
* Add a path to the allowed list (no-op, all paths allowed)
* Check if a path is allowed based on ALLOWED_ROOT_DIRECTORY
* Returns true if:
* - Path is within ALLOWED_ROOT_DIRECTORY, OR
* - Path is within DATA_DIR (appData exception), OR
* - No restrictions are configured (backward compatibility)
*/
export function addAllowedPath(filePath: string): void {
allowedPaths.add(path.resolve(filePath));
export function isPathAllowed(filePath: string): boolean {
const resolvedPath = path.resolve(filePath);

// Always allow appData directory (settings, credentials)
if (dataDirectory && isPathWithinDirectory(resolvedPath, dataDirectory)) {
return true;
}

// If no ALLOWED_ROOT_DIRECTORY restriction is configured, allow all paths
// Note: DATA_DIR is checked above as an exception, but doesn't restrict other paths
if (!allowedRootDirectory) {
return true;
}

// Allow if within ALLOWED_ROOT_DIRECTORY
if (
allowedRootDirectory &&
isPathWithinDirectory(resolvedPath, allowedRootDirectory)
) {
return true;
}

// If restrictions are configured but path doesn't match, deny
return false;
}

/**
* Check if a path is allowed - always returns true
*/
export function isPathAllowed(_filePath: string): boolean {
return true;
}

/**
* Validate a path - just resolves the path without checking permissions
* Validate a path - resolves it and checks permissions
* Throws PathNotAllowedError if path is not allowed
*/
export function validatePath(filePath: string): string {
return path.resolve(filePath);
const resolvedPath = path.resolve(filePath);

if (!isPathAllowed(resolvedPath)) {
throw new PathNotAllowedError(filePath);
}

return resolvedPath;
}

/**
* Check if a path is within a directory, with protection against path traversal
* Returns true only if resolvedPath is within directoryPath
*/
export function isPathWithinDirectory(
resolvedPath: string,
directoryPath: string
): boolean {
// Get the relative path from directory to the target
const relativePath = path.relative(directoryPath, resolvedPath);

// If relative path starts with "..", it's outside the directory
// If relative path is absolute, it's outside the directory
// If relative path is empty or ".", it's the directory itself
return !relativePath.startsWith("..") && !path.isAbsolute(relativePath);
}

/**
* Get the configured allowed root directory
*/
export function getAllowedRootDirectory(): string | null {
return allowedRootDirectory;
}

/**
* Get the configured data directory
*/
export function getDataDirectory(): string | null {
return dataDirectory;
}

/**
* Get list of allowed paths (for debugging)
*/
export function getAllowedPaths(): string[] {
return Array.from(allowedPaths);
const paths: string[] = [];
if (allowedRootDirectory) {
paths.push(allowedRootDirectory);
}
if (dataDirectory) {
paths.push(dataDirectory);
}
return paths;
}
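The traversal protection above hinges on path.relative: anything that escapes the directory comes back as a relative path starting with "..". A small illustration with made-up paths:

```typescript
import path from "path";

// Mirrors the isPathWithinDirectory logic from the hunk above.
function within(target: string, dir: string): boolean {
  const rel = path.relative(dir, path.resolve(target));
  return !rel.startsWith("..") && !path.isAbsolute(rel);
}

within("/projects/app/src/index.ts", "/projects"); // true
within("/projects/../etc/passwd", "/projects");    // false - resolves to /etc/passwd
within("/projects", "/projects");                  // true  - the directory itself
```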
@@ -1,12 +1,5 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import path from "path";
import {
initAllowedPaths,
addAllowedPath,
isPathAllowed,
validatePath,
getAllowedPaths,
} from "../src/security";

describe("security.ts", () => {
let originalEnv: NodeJS.ProcessEnv;
@@ -14,6 +7,8 @@ describe("security.ts", () => {
beforeEach(() => {
// Save original environment
originalEnv = { ...process.env };
// Reset modules to get fresh state
vi.resetModules();
});

afterEach(() => {
@@ -22,217 +17,237 @@ describe("security.ts", () => {
});

describe("initAllowedPaths", () => {
it("should initialize from ALLOWED_PROJECT_DIRS environment variable", () => {
process.env.ALLOWED_PROJECT_DIRS = "/path/one,/path/two,/path/three";
it("should load ALLOWED_ROOT_DIRECTORY if set", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
delete process.env.DATA_DIR;

const { initAllowedPaths, getAllowedPaths } =
await import("../src/security");
initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/path/one"));
expect(allowed).toContain(path.resolve("/path/two"));
expect(allowed).toContain(path.resolve("/path/three"));
expect(allowed).toContain(path.resolve("/projects"));
});

it("should trim whitespace from paths", () => {
process.env.ALLOWED_PROJECT_DIRS = " /path/one , /path/two , /path/three ";

initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/path/one"));
expect(allowed).toContain(path.resolve("/path/two"));
expect(allowed).toContain(path.resolve("/path/three"));
});

it("should skip empty paths", () => {
process.env.ALLOWED_PROJECT_DIRS = "/path/one,,/path/two, ,/path/three";

initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed.length).toBeLessThanOrEqual(3);
expect(allowed).toContain(path.resolve("/path/one"));
});

it("should initialize from DATA_DIR environment variable", () => {
it("should load DATA_DIR if set", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
process.env.DATA_DIR = "/data/directory";

const { initAllowedPaths, getAllowedPaths } =
await import("../src/security");
initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/data/directory"));
});

it("should initialize from WORKSPACE_DIR environment variable", () => {
process.env.WORKSPACE_DIR = "/workspace/directory";

initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/workspace/directory"));
});

it("should handle all environment variables together", () => {
process.env.ALLOWED_PROJECT_DIRS = "/projects/one,/projects/two";
it("should load both ALLOWED_ROOT_DIRECTORY and DATA_DIR if both set", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
process.env.DATA_DIR = "/app/data";
process.env.WORKSPACE_DIR = "/app/workspace";

const { initAllowedPaths, getAllowedPaths } =
await import("../src/security");
initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/projects/one"));
expect(allowed).toContain(path.resolve("/projects/two"));
expect(allowed).toContain(path.resolve("/projects"));
expect(allowed).toContain(path.resolve("/app/data"));
expect(allowed).toContain(path.resolve("/app/workspace"));
});

it("should handle missing environment variables gracefully", () => {
delete process.env.ALLOWED_PROJECT_DIRS;
it("should handle missing environment variables gracefully", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
delete process.env.DATA_DIR;
delete process.env.WORKSPACE_DIR;

const { initAllowedPaths } = await import("../src/security");
expect(() => initAllowedPaths()).not.toThrow();
});
});

describe("addAllowedPath", () => {
it("should add a path to allowed list", () => {
const testPath = "/new/allowed/path";

addAllowedPath(testPath);

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve(testPath));
});

it("should resolve relative paths to absolute", () => {
const relativePath = "relative/path";

addAllowedPath(relativePath);

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve(relativePath));
});

it("should handle duplicate paths", () => {
const testPath = "/duplicate/path";

addAllowedPath(testPath);
addAllowedPath(testPath);

const allowed = getAllowedPaths();
const count = allowed.filter((p) => p === path.resolve(testPath)).length;
expect(count).toBe(1);
});
});

describe("isPathAllowed", () => {
it("should always return true (all paths allowed)", () => {
it("should allow paths within ALLOWED_ROOT_DIRECTORY", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
delete process.env.DATA_DIR;

const { initAllowedPaths, isPathAllowed } =
await import("../src/security");
initAllowedPaths();

expect(isPathAllowed("/allowed/file.txt")).toBe(true);
expect(isPathAllowed("/allowed/subdir/file.txt")).toBe(true);
});

it("should deny paths outside ALLOWED_ROOT_DIRECTORY", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
delete process.env.DATA_DIR;

const { initAllowedPaths, isPathAllowed } =
await import("../src/security");
initAllowedPaths();

expect(isPathAllowed("/not-allowed/file.txt")).toBe(false);
expect(isPathAllowed("/etc/passwd")).toBe(false);
});

it("should always allow DATA_DIR paths", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
process.env.DATA_DIR = "/app/data";

const { initAllowedPaths, isPathAllowed } =
await import("../src/security");
initAllowedPaths();

// DATA_DIR paths are always allowed
expect(isPathAllowed("/app/data/settings.json")).toBe(true);
expect(isPathAllowed("/app/data/credentials.json")).toBe(true);
});

it("should allow all paths when no restrictions configured", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
delete process.env.DATA_DIR;

const { initAllowedPaths, isPathAllowed } =
await import("../src/security");
initAllowedPaths();

expect(isPathAllowed("/any/path")).toBe(true);
expect(isPathAllowed("/another/path")).toBe(true);
expect(isPathAllowed("relative/path")).toBe(true);
expect(isPathAllowed("/etc/passwd")).toBe(true);
expect(isPathAllowed("../../../dangerous/path")).toBe(true);
});

it("should return true even for non-existent paths", () => {
expect(isPathAllowed("/nonexistent/path/12345")).toBe(true);
});
it("should allow all paths when only DATA_DIR is configured", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
process.env.DATA_DIR = "/data";

it("should return true for empty string", () => {
expect(isPathAllowed("")).toBe(true);
const { initAllowedPaths, isPathAllowed } =
await import("../src/security");
initAllowedPaths();

// DATA_DIR should be allowed
expect(isPathAllowed("/data/file.txt")).toBe(true);
// And all other paths should be allowed since no ALLOWED_ROOT_DIRECTORY restriction
expect(isPathAllowed("/any/path")).toBe(true);
});
});

describe("validatePath", () => {
it("should resolve absolute paths", () => {
const absPath = "/absolute/path/to/file.txt";
const result = validatePath(absPath);
expect(result).toBe(path.resolve(absPath));
it("should return resolved path for allowed paths", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
delete process.env.DATA_DIR;

const { initAllowedPaths, validatePath } =
await import("../src/security");
initAllowedPaths();

const result = validatePath("/allowed/file.txt");
expect(result).toBe(path.resolve("/allowed/file.txt"));
});

it("should resolve relative paths", () => {
const relPath = "relative/path/file.txt";
const result = validatePath(relPath);
expect(result).toBe(path.resolve(relPath));
it("should throw error for paths outside allowed directories", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
delete process.env.DATA_DIR;

const { initAllowedPaths, validatePath, PathNotAllowedError } =
await import("../src/security");
initAllowedPaths();

expect(() => validatePath("/not-allowed/file.txt")).toThrow(
PathNotAllowedError
);
});

it("should handle current directory", () => {
const result = validatePath(".");
expect(result).toBe(path.resolve("."));
it("should resolve relative paths", async () => {
const cwd = process.cwd();
process.env.ALLOWED_ROOT_DIRECTORY = cwd;
delete process.env.DATA_DIR;

const { initAllowedPaths, validatePath } =
await import("../src/security");
initAllowedPaths();

const result = validatePath("./file.txt");
expect(result).toBe(path.resolve(cwd, "./file.txt"));
});

it("should handle parent directory", () => {
const result = validatePath("..");
expect(result).toBe(path.resolve(".."));
});
it("should not throw when no restrictions configured", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
delete process.env.DATA_DIR;

it("should handle complex relative paths", () => {
const complexPath = "../../some/nested/../path/./file.txt";
const result = validatePath(complexPath);
expect(result).toBe(path.resolve(complexPath));
});
const { initAllowedPaths, validatePath } =
await import("../src/security");
initAllowedPaths();

it("should handle paths with spaces", () => {
const pathWithSpaces = "/path with spaces/file.txt";
const result = validatePath(pathWithSpaces);
expect(result).toBe(path.resolve(pathWithSpaces));
});

it("should handle home directory expansion on Unix", () => {
if (process.platform !== "win32") {
const homePath = "~/documents/file.txt";
const result = validatePath(homePath);
expect(result).toBe(path.resolve(homePath));
}
expect(() => validatePath("/any/path")).not.toThrow();
});
});

describe("getAllowedPaths", () => {
it("should return empty array initially", () => {
it("should return empty array when no paths configured", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
delete process.env.DATA_DIR;

const { initAllowedPaths, getAllowedPaths } =
await import("../src/security");
initAllowedPaths();

const allowed = getAllowedPaths();
expect(Array.isArray(allowed)).toBe(true);
expect(allowed).toHaveLength(0);
});

it("should return array of added paths", () => {
addAllowedPath("/path/one");
addAllowedPath("/path/two");
it("should return configured paths", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
process.env.DATA_DIR = "/data";

const { initAllowedPaths, getAllowedPaths } =
await import("../src/security");
initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/path/one"));
expect(allowed).toContain(path.resolve("/path/two"));
});

it("should return copy of internal set", () => {
addAllowedPath("/test/path");

const allowed1 = getAllowedPaths();
const allowed2 = getAllowedPaths();

expect(allowed1).not.toBe(allowed2);
expect(allowed1).toEqual(allowed2);
expect(allowed).toContain(path.resolve("/projects"));
expect(allowed).toContain(path.resolve("/data"));
});
});

describe("Path security disabled behavior", () => {
it("should allow unrestricted access despite allowed paths list", () => {
process.env.ALLOWED_PROJECT_DIRS = "/only/this/path";
describe("getAllowedRootDirectory", () => {
it("should return the configured root directory", async () => {
process.env.ALLOWED_ROOT_DIRECTORY = "/projects";

const { initAllowedPaths, getAllowedRootDirectory } =
await import("../src/security");
initAllowedPaths();

// Should return true even for paths not in allowed list
expect(isPathAllowed("/some/other/path")).toBe(true);
expect(isPathAllowed("/completely/different/path")).toBe(true);
expect(getAllowedRootDirectory()).toBe(path.resolve("/projects"));
});

it("should validate paths without permission checks", () => {
process.env.ALLOWED_PROJECT_DIRS = "/only/this/path";
it("should return null when not configured", async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;

const { initAllowedPaths, getAllowedRootDirectory } =
await import("../src/security");
initAllowedPaths();

// Should validate any path without throwing
expect(() => validatePath("/some/other/path")).not.toThrow();
expect(validatePath("/some/other/path")).toBe(
path.resolve("/some/other/path")
);
expect(getAllowedRootDirectory()).toBeNull();
});
});

describe("getDataDirectory", () => {
it("should return the configured data directory", async () => {
process.env.DATA_DIR = "/data";

const { initAllowedPaths, getDataDirectory } =
await import("../src/security");
initAllowedPaths();

expect(getDataDirectory()).toBe(path.resolve("/data"));
});

it("should return null when not configured", async () => {
delete process.env.DATA_DIR;

const { initAllowedPaths, getDataDirectory } =
await import("../src/security");
initAllowedPaths();

expect(getDataDirectory()).toBeNull();
});
});
});
@@ -15,6 +15,7 @@
"author": "AutoMaker Team",
"license": "SEE LICENSE IN LICENSE",
"dependencies": {
"@automaker/platform": "^1.0.0",
"@automaker/types": "^1.0.0"
},
"devDependencies": {
@@ -2,7 +2,7 @@
* File system utilities that handle symlinks safely
*/

import fs from "fs/promises";
import { secureFs } from "@automaker/platform";
import path from "path";

/**
@@ -14,7 +14,7 @@ export async function mkdirSafe(dirPath: string): Promise<void> {

// Check if path already exists using lstat (doesn't follow symlinks)
try {
const stats = await fs.lstat(resolvedPath);
const stats = await secureFs.lstat(resolvedPath);
// Path exists - if it's a directory or symlink, consider it success
if (stats.isDirectory() || stats.isSymbolicLink()) {
return;
@@ -36,7 +36,7 @@ export async function mkdirSafe(dirPath: string): Promise<void> {

// Path doesn't exist, create it
try {
await fs.mkdir(resolvedPath, { recursive: true });
await secureFs.mkdir(resolvedPath, { recursive: true });
} catch (error: any) {
// Handle race conditions and symlink issues
if (error.code === "EEXIST" || error.code === "ELOOP") {
@@ -52,7 +52,7 @@ export async function mkdirSafe(dirPath: string): Promise<void> {
*/
export async function existsSafe(filePath: string): Promise<boolean> {
try {
await fs.lstat(filePath);
await secureFs.lstat(filePath);
return true;
} catch (error: any) {
if (error.code === "ENOENT") {

@@ -8,7 +8,7 @@
* - Path resolution (relative/absolute)
*/

import fs from "fs/promises";
import { secureFs } from "@automaker/platform";
import path from "path";
import type { ImageData, ImageContentBlock } from '@automaker/types';

@@ -42,7 +42,7 @@ export function getMimeTypeForImage(imagePath: string): string {
* @throws Error if file cannot be read
*/
export async function readImageAsBase64(imagePath: string): Promise<ImageData> {
const imageBuffer = await fs.readFile(imagePath);
const imageBuffer = await secureFs.readFile(imagePath) as Buffer;
const base64Data = imageBuffer.toString("base64");
const mimeType = getMimeTypeForImage(imagePath);
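Both helpers keep their public signatures; only the underlying fs calls change, so the same ALLOWED_ROOT_DIRECTORY rules now apply before any read or decode. A short, hedged usage sketch (import paths and file locations are assumed, since the file headers are not shown in this diff):

```typescript
import { existsSafe, mkdirSafe } from "./fs-utils.js";   // module path assumed
import { readImageAsBase64 } from "./image-utils.js";    // module path assumed

const imagePath = "/projects/my-app/assets/logo.png";    // illustrative

if (!(await existsSafe(imagePath))) {
  await mkdirSafe("/projects/my-app/assets");
}

// Reads through secureFs, so paths outside the allowed root are rejected
// with PathNotAllowedError before the file is decoded to base64.
const image = await readImageAsBase64(imagePath);
```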
1
package-lock.json
generated
@@ -301,6 +301,7 @@
"version": "1.0.0",
"license": "SEE LICENSE IN LICENSE",
"dependencies": {
"@automaker/platform": "^1.0.0",
"@automaker/types": "^1.0.0"
},
"devDependencies": {
@@ -18,7 +18,7 @@
"dev:server": "npm run dev --workspace=apps/server",
"dev:full": "concurrently \"npm run dev:server\" \"npm run dev:web\"",
"build": "npm run build --workspace=apps/ui",
"build:packages": "npm run build -w @automaker/types && npm run build -w @automaker/utils && npm run build -w @automaker/prompts && npm run build -w @automaker/platform -w @automaker/model-resolver -w @automaker/dependency-resolver && npm run build -w @automaker/git-utils",
"build:packages": "npm run build -w @automaker/types && npm run build -w @automaker/platform && npm run build -w @automaker/utils && npm run build -w @automaker/prompts -w @automaker/model-resolver -w @automaker/dependency-resolver && npm run build -w @automaker/git-utils",
"build:server": "npm run build --workspace=apps/server",
"build:electron": "npm run build:electron --workspace=apps/ui",
"build:electron:dir": "npm run build:electron:dir --workspace=apps/ui",
70
pnpm-lock.yaml
generated
Normal file
@@ -0,0 +1,70 @@
lockfileVersion: '9.0'

settings:
autoInstallPeers: true
excludeLinksFromLockfile: false

importers:

.:
dependencies:
cross-spawn:
specifier: ^7.0.6
version: 7.0.6
tree-kill:
specifier: ^1.2.2
version: 1.2.2

packages:

cross-spawn@7.0.6:
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
engines: {node: '>= 8'}

isexe@2.0.0:
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}

path-key@3.1.1:
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
engines: {node: '>=8'}

shebang-command@2.0.0:
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
engines: {node: '>=8'}

shebang-regex@3.0.0:
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
engines: {node: '>=8'}

tree-kill@1.2.2:
resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
hasBin: true

which@2.0.2:
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
engines: {node: '>= 8'}
hasBin: true

snapshots:

cross-spawn@7.0.6:
dependencies:
path-key: 3.1.1
shebang-command: 2.0.0
which: 2.0.2

isexe@2.0.0: {}

path-key@3.1.1: {}

shebang-command@2.0.0:
dependencies:
shebang-regex: 3.0.0

shebang-regex@3.0.0: {}

tree-kill@1.2.2: {}

which@2.0.2:
dependencies:
isexe: 2.0.0