mirror of
https://github.com/AutoMaker-Org/automaker.git
synced 2026-02-02 20:43:36 +00:00
Merge main into massive-terminal-upgrade
Resolves merge conflicts: - apps/server/src/routes/terminal/common.ts: Keep randomBytes import, use @automaker/utils for createLogger - apps/ui/eslint.config.mjs: Use main's explicit globals list with XMLHttpRequest and MediaQueryListEvent additions - apps/ui/src/components/views/terminal-view.tsx: Keep our terminal improvements (killAllSessions, beforeunload, better error handling) - apps/ui/src/config/terminal-themes.ts: Keep our search highlight colors for all themes - apps/ui/src/store/app-store.ts: Keep our terminal settings persistence improvements (merge function) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
217
libs/platform/README.md
Normal file
217
libs/platform/README.md
Normal file
@@ -0,0 +1,217 @@
|
||||
# @automaker/platform
|
||||
|
||||
Platform-specific utilities for AutoMaker.
|
||||
|
||||
## Overview
|
||||
|
||||
This package provides platform-specific utilities including path management, subprocess handling, and security validation. It handles AutoMaker's directory structure and system operations.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @automaker/platform
|
||||
```
|
||||
|
||||
## Exports
|
||||
|
||||
### Path Management
|
||||
|
||||
AutoMaker directory structure utilities.
|
||||
|
||||
```typescript
|
||||
import {
|
||||
getAutomakerDir,
|
||||
getFeaturesDir,
|
||||
getFeatureDir,
|
||||
getFeatureImagesDir,
|
||||
getBoardDir,
|
||||
getImagesDir,
|
||||
getContextDir,
|
||||
getWorktreesDir,
|
||||
getAppSpecPath,
|
||||
getBranchTrackingPath,
|
||||
ensureAutomakerDir,
|
||||
} from '@automaker/platform';
|
||||
|
||||
// Get AutoMaker directory: /project/.automaker
|
||||
const automakerDir = getAutomakerDir('/project/path');
|
||||
|
||||
// Get features directory: /project/.automaker/features
|
||||
const featuresDir = getFeaturesDir('/project/path');
|
||||
|
||||
// Get specific feature directory: /project/.automaker/features/feature-id
|
||||
const featureDir = getFeatureDir('/project/path', 'feature-id');
|
||||
|
||||
// Get feature images: /project/.automaker/features/feature-id/images
|
||||
const imagesDir = getFeatureImagesDir('/project/path', 'feature-id');
|
||||
|
||||
// Ensure .automaker directory exists
|
||||
await ensureAutomakerDir('/project/path');
|
||||
```
|
||||
|
||||
### Subprocess Management
|
||||
|
||||
Spawn and manage subprocesses with JSON-lines output.
|
||||
|
||||
```typescript
|
||||
import { spawnJSONLProcess, spawnProcess } from '@automaker/platform';
|
||||
|
||||
// Spawn process with JSONL output parsing
|
||||
// spawnJSONLProcess is an async generator: iterate it to receive each
// parsed JSONL object (including { type: 'error', ... } events)
for await (const event of spawnJSONLProcess({
  command: 'claude-agent',
  args: ['--output', 'jsonl'],
  cwd: '/project/path',
})) {
  console.log('Received:', event);
}
|
||||
|
||||
// Spawn regular process
|
||||
const output = await spawnProcess({
|
||||
command: 'git',
|
||||
args: ['status'],
|
||||
cwd: '/project/path',
|
||||
});
|
||||
```
|
||||
|
||||
### Security Validation
|
||||
|
||||
Path validation and security checks.
|
||||
|
||||
```typescript
|
||||
import {
|
||||
initAllowedPaths,
|
||||
isPathAllowed,
|
||||
validatePath,
|
||||
getAllowedPaths,
|
||||
getAllowedRootDirectory,
|
||||
getDataDirectory,
|
||||
PathNotAllowedError,
|
||||
} from '@automaker/platform';
|
||||
|
||||
// Initialize allowed paths from environment
|
||||
// Reads ALLOWED_ROOT_DIRECTORY and DATA_DIR environment variables
|
||||
initAllowedPaths();
|
||||
|
||||
// Check if path is allowed
|
||||
if (isPathAllowed('/project/path')) {
|
||||
console.log('Path is allowed');
|
||||
}
|
||||
|
||||
// Validate and normalize path (throws PathNotAllowedError if not allowed)
|
||||
try {
|
||||
const safePath = validatePath('/requested/path');
|
||||
} catch (error) {
|
||||
if (error instanceof PathNotAllowedError) {
|
||||
console.error('Access denied:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Get configured directories
|
||||
const rootDir = getAllowedRootDirectory(); // or null if not configured
|
||||
const dataDir = getDataDirectory(); // or null if not configured
|
||||
const allowed = getAllowedPaths(); // array of all allowed paths
|
||||
```
|
||||
|
||||
## Usage Example
|
||||
|
||||
```typescript
|
||||
import {
|
||||
getFeatureDir,
|
||||
ensureAutomakerDir,
|
||||
spawnJSONLProcess,
|
||||
validatePath,
|
||||
} from '@automaker/platform';
|
||||
|
||||
async function executeFeature(projectPath: string, featureId: string) {
|
||||
// Validate project path
|
||||
const safePath = validatePath(projectPath);
|
||||
|
||||
// Ensure AutoMaker directory exists
|
||||
await ensureAutomakerDir(safePath);
|
||||
|
||||
// Get feature directory
|
||||
const featureDir = getFeatureDir(safePath, featureId);
|
||||
|
||||
// Execute agent in feature directory
|
||||
const events: unknown[] = [];
for await (const event of spawnJSONLProcess({
  command: 'claude-agent',
  args: ['execute'],
  cwd: featureDir,
})) {
  const data = event as { type?: string; progress?: unknown };
  if (data.type === 'progress') {
    console.log('Progress:', data.progress);
  }
  events.push(event);
}

return events;
|
||||
}
|
||||
```
|
||||
|
||||
## Security Model
|
||||
|
||||
Path security is enforced through two environment variables:
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- **ALLOWED_ROOT_DIRECTORY**: Primary security boundary. When set, all file operations must be within this directory.
|
||||
- **DATA_DIR**: Application data directory (settings, credentials). Always allowed regardless of ALLOWED_ROOT_DIRECTORY.
|
||||
|
||||
### Behavior
|
||||
|
||||
1. **When ALLOWED_ROOT_DIRECTORY is set**: Only paths within this directory (or DATA_DIR) are allowed. Attempts to access other paths will throw `PathNotAllowedError`.
|
||||
|
||||
2. **When ALLOWED_ROOT_DIRECTORY is not set**: All paths are allowed (backward compatibility mode).
|
||||
|
||||
3. **DATA_DIR exception**: Paths within DATA_DIR are always allowed, even if outside ALLOWED_ROOT_DIRECTORY. This ensures settings and credentials are always accessible.
|
||||
|
||||
### Example Configuration
|
||||
|
||||
```bash
|
||||
# Docker/containerized environment
|
||||
ALLOWED_ROOT_DIRECTORY=/workspace
|
||||
DATA_DIR=/app/data
|
||||
|
||||
# Development (no restrictions)
|
||||
# Leave ALLOWED_ROOT_DIRECTORY unset for full access
|
||||
```
|
||||
|
||||
### Secure File System
|
||||
|
||||
The `secureFs` module wraps Node.js `fs` operations with path validation:
|
||||
|
||||
```typescript
|
||||
import { secureFs } from '@automaker/platform';
|
||||
|
||||
// All operations validate paths before execution
|
||||
await secureFs.readFile('/workspace/project/file.txt');
|
||||
await secureFs.writeFile('/workspace/project/output.txt', data);
|
||||
await secureFs.mkdir('/workspace/project/new-dir', { recursive: true });
|
||||
```
|
||||
|
||||
## Directory Structure
|
||||
|
||||
AutoMaker uses the following directory structure:
|
||||
|
||||
```
|
||||
/project/
|
||||
├── .automaker/
|
||||
│ ├── features/ # Feature storage
|
||||
│ │ └── {featureId}/
|
||||
│ │ ├── feature.json
|
||||
│ │ └── images/
|
||||
│ ├── board/ # Board configuration
|
||||
│ ├── context/ # Context files
|
||||
│ ├── images/ # Global images
|
||||
│ ├── worktrees/ # Git worktrees
|
||||
│   ├── app_spec.txt # App specification
|
||||
│   └── active-branches.json
|
||||
```
|
||||
|
||||
## Dependencies
|
||||
|
||||
- `@automaker/types` - Type definitions
|
||||
|
||||
## Used By
|
||||
|
||||
- `@automaker/server`
|
||||
28
libs/platform/package.json
Normal file
28
libs/platform/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "@automaker/platform",
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"description": "Platform-specific utilities for AutoMaker",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"watch": "tsc --watch",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"keywords": [
|
||||
"automaker",
|
||||
"platform"
|
||||
],
|
||||
"author": "AutoMaker Team",
|
||||
"license": "SEE LICENSE IN LICENSE",
|
||||
"dependencies": {
|
||||
"@automaker/types": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.10.5",
|
||||
"typescript": "^5.7.3",
|
||||
"vitest": "^4.0.16"
|
||||
}
|
||||
}
|
||||
46
libs/platform/src/index.ts
Normal file
46
libs/platform/src/index.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* @automaker/platform
|
||||
* Platform-specific utilities for AutoMaker
|
||||
*/
|
||||
|
||||
// Path utilities
|
||||
export {
|
||||
getAutomakerDir,
|
||||
getFeaturesDir,
|
||||
getFeatureDir,
|
||||
getFeatureImagesDir,
|
||||
getBoardDir,
|
||||
getImagesDir,
|
||||
getContextDir,
|
||||
getWorktreesDir,
|
||||
getAppSpecPath,
|
||||
getBranchTrackingPath,
|
||||
ensureAutomakerDir,
|
||||
getGlobalSettingsPath,
|
||||
getCredentialsPath,
|
||||
getProjectSettingsPath,
|
||||
ensureDataDir,
|
||||
} from './paths.js';
|
||||
|
||||
// Subprocess management
|
||||
export {
|
||||
spawnJSONLProcess,
|
||||
spawnProcess,
|
||||
type SubprocessOptions,
|
||||
type SubprocessResult,
|
||||
} from './subprocess.js';
|
||||
|
||||
// Security
|
||||
export {
|
||||
PathNotAllowedError,
|
||||
initAllowedPaths,
|
||||
isPathAllowed,
|
||||
validatePath,
|
||||
isPathWithinDirectory,
|
||||
getAllowedRootDirectory,
|
||||
getDataDirectory,
|
||||
getAllowedPaths,
|
||||
} from './security.js';
|
||||
|
||||
// Secure file system (validates paths before I/O operations)
|
||||
export * as secureFs from './secure-fs.js';
|
||||
213
libs/platform/src/paths.ts
Normal file
213
libs/platform/src/paths.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
/**
|
||||
* Automaker Paths - Utilities for managing automaker data storage
|
||||
*
|
||||
* Provides functions to construct paths for:
|
||||
* - Project-level data stored in {projectPath}/.automaker/
|
||||
* - Global user data stored in app userData directory
|
||||
*
|
||||
* All returned paths are absolute and ready to use with fs module.
|
||||
* Directory creation is handled separately by ensure* functions.
|
||||
*/
|
||||
|
||||
import * as secureFs from './secure-fs.js';
|
||||
import path from 'path';
|
||||
|
||||
/**
|
||||
* Get the automaker data directory root for a project
|
||||
*
|
||||
* All project-specific automaker data is stored under {projectPath}/.automaker/
|
||||
* This directory is created when needed via ensureAutomakerDir().
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker
|
||||
*/
|
||||
export function getAutomakerDir(projectPath: string): string {
|
||||
return path.join(projectPath, '.automaker');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the features directory for a project
|
||||
*
|
||||
* Contains subdirectories for each feature, keyed by featureId.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/features
|
||||
*/
|
||||
export function getFeaturesDir(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'features');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the directory for a specific feature
|
||||
*
|
||||
* Contains feature-specific data like generated code, tests, and logs.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param featureId - Feature identifier
|
||||
* @returns Absolute path to {projectPath}/.automaker/features/{featureId}
|
||||
*/
|
||||
export function getFeatureDir(projectPath: string, featureId: string): string {
|
||||
return path.join(getFeaturesDir(projectPath), featureId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the images directory for a feature
|
||||
*
|
||||
* Stores screenshots, diagrams, or other images related to the feature.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param featureId - Feature identifier
|
||||
* @returns Absolute path to {projectPath}/.automaker/features/{featureId}/images
|
||||
*/
|
||||
export function getFeatureImagesDir(projectPath: string, featureId: string): string {
|
||||
return path.join(getFeatureDir(projectPath, featureId), 'images');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the board directory for a project
|
||||
*
|
||||
* Contains board-related data like background images and customization files.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/board
|
||||
*/
|
||||
export function getBoardDir(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'board');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the general images directory for a project
|
||||
*
|
||||
* Stores project-level images like background images or shared assets.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/images
|
||||
*/
|
||||
export function getImagesDir(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'images');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the context files directory for a project
|
||||
*
|
||||
* Stores user-uploaded context files for reference during generation.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/context
|
||||
*/
|
||||
export function getContextDir(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'context');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the worktrees metadata directory for a project
|
||||
*
|
||||
* Stores information about git worktrees associated with the project.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/worktrees
|
||||
*/
|
||||
export function getWorktreesDir(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'worktrees');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the app spec file path for a project
|
||||
*
|
||||
* Stores the application specification document used for generation.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/app_spec.txt
|
||||
*/
|
||||
export function getAppSpecPath(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'app_spec.txt');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the branch tracking file path for a project
|
||||
*
|
||||
* Stores JSON metadata about active git branches and worktrees.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/active-branches.json
|
||||
*/
|
||||
export function getBranchTrackingPath(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'active-branches.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the automaker directory structure for a project if it doesn't exist
|
||||
*
|
||||
* Creates {projectPath}/.automaker with all subdirectories recursively.
|
||||
* Safe to call multiple times - uses recursive: true.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Promise resolving to the created automaker directory path
|
||||
*/
|
||||
export async function ensureAutomakerDir(projectPath: string): Promise<string> {
|
||||
const automakerDir = getAutomakerDir(projectPath);
|
||||
await secureFs.mkdir(automakerDir, { recursive: true });
|
||||
return automakerDir;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Global Settings Paths (stored in DATA_DIR from app.getPath('userData'))
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Get the global settings file path
|
||||
*
|
||||
* Stores user preferences, keyboard shortcuts, AI profiles, and project history.
|
||||
* Located in the platform-specific userData directory.
|
||||
*
|
||||
* Default locations:
|
||||
* - macOS: ~/Library/Application Support/automaker
|
||||
* - Windows: %APPDATA%\automaker
|
||||
* - Linux: ~/.config/automaker
|
||||
*
|
||||
* @param dataDir - User data directory (from app.getPath('userData'))
|
||||
* @returns Absolute path to {dataDir}/settings.json
|
||||
*/
|
||||
export function getGlobalSettingsPath(dataDir: string): string {
|
||||
return path.join(dataDir, 'settings.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the credentials file path
|
||||
*
|
||||
* Stores sensitive API keys separately from other settings for security.
|
||||
* Located in the platform-specific userData directory.
|
||||
*
|
||||
* @param dataDir - User data directory (from app.getPath('userData'))
|
||||
* @returns Absolute path to {dataDir}/credentials.json
|
||||
*/
|
||||
export function getCredentialsPath(dataDir: string): string {
|
||||
return path.join(dataDir, 'credentials.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the project settings file path
|
||||
*
|
||||
* Stores project-specific settings that override global settings.
|
||||
* Located within the project's .automaker directory.
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Absolute path to {projectPath}/.automaker/settings.json
|
||||
*/
|
||||
export function getProjectSettingsPath(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), 'settings.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the global data directory if it doesn't exist
|
||||
*
|
||||
* Creates the userData directory for storing global settings and credentials.
|
||||
* Safe to call multiple times - uses recursive: true.
|
||||
*
|
||||
* @param dataDir - User data directory path to create
|
||||
* @returns Promise resolving to the created data directory path
|
||||
*/
|
||||
export async function ensureDataDir(dataDir: string): Promise<string> {
|
||||
await secureFs.mkdir(dataDir, { recursive: true });
|
||||
return dataDir;
|
||||
}
|
||||
161
libs/platform/src/secure-fs.ts
Normal file
161
libs/platform/src/secure-fs.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
/**
|
||||
* Secure File System Adapter
|
||||
*
|
||||
* All file I/O operations must go through this adapter to enforce
|
||||
* ALLOWED_ROOT_DIRECTORY restrictions at the actual access point,
|
||||
* not just at the API layer. This provides defense-in-depth security.
|
||||
*/
|
||||
|
||||
import fs from 'fs/promises';
import path from 'path';
import type { Dirent, Stats } from 'fs';
import { validatePath } from './security.js';
|
||||
|
||||
/**
|
||||
* Wrapper around fs.access that validates path first
|
||||
*/
|
||||
export async function access(filePath: string, mode?: number): Promise<void> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.access(validatedPath, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.readFile that validates path first
|
||||
*/
|
||||
export async function readFile(
|
||||
filePath: string,
|
||||
encoding?: BufferEncoding
|
||||
): Promise<string | Buffer> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
if (encoding) {
|
||||
return fs.readFile(validatedPath, encoding);
|
||||
}
|
||||
return fs.readFile(validatedPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.writeFile that validates path first
|
||||
*/
|
||||
export async function writeFile(
|
||||
filePath: string,
|
||||
data: string | Buffer,
|
||||
encoding?: BufferEncoding
|
||||
): Promise<void> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.writeFile(validatedPath, data, encoding);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.mkdir that validates path first
|
||||
*/
|
||||
export async function mkdir(
|
||||
dirPath: string,
|
||||
options?: { recursive?: boolean; mode?: number }
|
||||
): Promise<string | undefined> {
|
||||
const validatedPath = validatePath(dirPath);
|
||||
return fs.mkdir(validatedPath, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.readdir that validates path first
|
||||
*/
|
||||
export async function readdir(
|
||||
dirPath: string,
|
||||
options?: { withFileTypes?: false; encoding?: BufferEncoding }
|
||||
): Promise<string[]>;
|
||||
export async function readdir(
|
||||
dirPath: string,
|
||||
options: { withFileTypes: true; encoding?: BufferEncoding }
|
||||
): Promise<Dirent[]>;
|
||||
export async function readdir(
|
||||
dirPath: string,
|
||||
options?: { withFileTypes?: boolean; encoding?: BufferEncoding }
|
||||
): Promise<string[] | Dirent[]> {
|
||||
const validatedPath = validatePath(dirPath);
|
||||
if (options?.withFileTypes === true) {
|
||||
return fs.readdir(validatedPath, { withFileTypes: true });
|
||||
}
|
||||
return fs.readdir(validatedPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.stat that validates path first
|
||||
*/
|
||||
export async function stat(filePath: string): Promise<any> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.stat(validatedPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.rm that validates path first
|
||||
*/
|
||||
export async function rm(
|
||||
filePath: string,
|
||||
options?: { recursive?: boolean; force?: boolean }
|
||||
): Promise<void> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.rm(validatedPath, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.unlink that validates path first
|
||||
*/
|
||||
export async function unlink(filePath: string): Promise<void> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.unlink(validatedPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.copyFile that validates both paths first
|
||||
*/
|
||||
export async function copyFile(src: string, dest: string, mode?: number): Promise<void> {
|
||||
const validatedSrc = validatePath(src);
|
||||
const validatedDest = validatePath(dest);
|
||||
return fs.copyFile(validatedSrc, validatedDest, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.appendFile that validates path first
|
||||
*/
|
||||
export async function appendFile(
|
||||
filePath: string,
|
||||
data: string | Buffer,
|
||||
encoding?: BufferEncoding
|
||||
): Promise<void> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.appendFile(validatedPath, data, encoding);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.rename that validates both paths first
|
||||
*/
|
||||
export async function rename(oldPath: string, newPath: string): Promise<void> {
|
||||
const validatedOldPath = validatePath(oldPath);
|
||||
const validatedNewPath = validatePath(newPath);
|
||||
return fs.rename(validatedOldPath, validatedNewPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around fs.lstat that validates path first
|
||||
* Returns file stats without following symbolic links
|
||||
*/
|
||||
export async function lstat(filePath: string): Promise<any> {
|
||||
const validatedPath = validatePath(filePath);
|
||||
return fs.lstat(validatedPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around path.join that returns resolved path
|
||||
* Does NOT validate - use this for path construction, then pass to other operations
|
||||
*/
|
||||
export function joinPath(...pathSegments: string[]): string {
|
||||
return path.join(...pathSegments);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around path.resolve that returns resolved path
|
||||
* Does NOT validate - use this for path construction, then pass to other operations
|
||||
*/
|
||||
export function resolvePath(...pathSegments: string[]): string {
|
||||
return path.resolve(...pathSegments);
|
||||
}
|
||||
131
libs/platform/src/security.ts
Normal file
131
libs/platform/src/security.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
/**
|
||||
* Security utilities for path validation
|
||||
* Enforces ALLOWED_ROOT_DIRECTORY constraint with appData exception
|
||||
*/
|
||||
|
||||
import path from 'path';
|
||||
|
||||
/**
|
||||
* Error thrown when a path is not allowed by security policy
|
||||
*/
|
||||
export class PathNotAllowedError extends Error {
|
||||
constructor(filePath: string) {
|
||||
super(`Path not allowed: ${filePath}. Must be within ALLOWED_ROOT_DIRECTORY or DATA_DIR.`);
|
||||
this.name = 'PathNotAllowedError';
|
||||
}
|
||||
}
|
||||
|
||||
// Allowed root directory - main security boundary
|
||||
let allowedRootDirectory: string | null = null;
|
||||
|
||||
// Data directory - always allowed for settings/credentials
|
||||
let dataDirectory: string | null = null;
|
||||
|
||||
/**
|
||||
* Initialize security settings from environment variables
|
||||
* - ALLOWED_ROOT_DIRECTORY: main security boundary
|
||||
* - DATA_DIR: appData exception, always allowed
|
||||
*/
|
||||
export function initAllowedPaths(): void {
|
||||
// Load ALLOWED_ROOT_DIRECTORY
|
||||
const rootDir = process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
if (rootDir) {
|
||||
allowedRootDirectory = path.resolve(rootDir);
|
||||
console.log(`[Security] ✓ ALLOWED_ROOT_DIRECTORY configured: ${allowedRootDirectory}`);
|
||||
} else {
|
||||
console.log('[Security] ⚠️ ALLOWED_ROOT_DIRECTORY not set - allowing access to all paths');
|
||||
}
|
||||
|
||||
// Load DATA_DIR (appData exception - always allowed)
|
||||
const dataDir = process.env.DATA_DIR;
|
||||
if (dataDir) {
|
||||
dataDirectory = path.resolve(dataDir);
|
||||
console.log(`[Security] ✓ DATA_DIR configured: ${dataDirectory}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a path is allowed based on ALLOWED_ROOT_DIRECTORY
|
||||
* Returns true if:
|
||||
* - Path is within ALLOWED_ROOT_DIRECTORY, OR
|
||||
* - Path is within DATA_DIR (appData exception), OR
|
||||
* - No restrictions are configured (backward compatibility)
|
||||
*/
|
||||
export function isPathAllowed(filePath: string): boolean {
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
|
||||
// Always allow appData directory (settings, credentials)
|
||||
if (dataDirectory && isPathWithinDirectory(resolvedPath, dataDirectory)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// If no ALLOWED_ROOT_DIRECTORY restriction is configured, allow all paths
|
||||
// Note: DATA_DIR is checked above as an exception, but doesn't restrict other paths
|
||||
if (!allowedRootDirectory) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Allow if within ALLOWED_ROOT_DIRECTORY
|
||||
if (allowedRootDirectory && isPathWithinDirectory(resolvedPath, allowedRootDirectory)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// If restrictions are configured but path doesn't match, deny
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a path - resolves it and checks permissions
|
||||
* Throws PathNotAllowedError if path is not allowed
|
||||
*/
|
||||
export function validatePath(filePath: string): string {
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
|
||||
if (!isPathAllowed(resolvedPath)) {
|
||||
throw new PathNotAllowedError(filePath);
|
||||
}
|
||||
|
||||
return resolvedPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a path is within a directory, with protection against path traversal
|
||||
* Returns true only if resolvedPath is within directoryPath
|
||||
*/
|
||||
export function isPathWithinDirectory(resolvedPath: string, directoryPath: string): boolean {
|
||||
// Get the relative path from directory to the target
|
||||
const relativePath = path.relative(directoryPath, resolvedPath);
|
||||
|
||||
// If relative path starts with "..", it's outside the directory
|
||||
// If relative path is absolute, it's outside the directory
|
||||
// If relative path is empty or ".", it's the directory itself
|
||||
return !relativePath.startsWith('..') && !path.isAbsolute(relativePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the configured allowed root directory
|
||||
*/
|
||||
export function getAllowedRootDirectory(): string | null {
|
||||
return allowedRootDirectory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the configured data directory
|
||||
*/
|
||||
export function getDataDirectory(): string | null {
|
||||
return dataDirectory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of allowed paths (for debugging)
|
||||
*/
|
||||
export function getAllowedPaths(): string[] {
|
||||
const paths: string[] = [];
|
||||
if (allowedRootDirectory) {
|
||||
paths.push(allowedRootDirectory);
|
||||
}
|
||||
if (dataDirectory) {
|
||||
paths.push(dataDirectory);
|
||||
}
|
||||
return paths;
|
||||
}
|
||||
197
libs/platform/src/subprocess.ts
Normal file
197
libs/platform/src/subprocess.ts
Normal file
@@ -0,0 +1,197 @@
|
||||
/**
|
||||
* Subprocess management utilities for CLI providers
|
||||
*/
|
||||
|
||||
import { spawn, type ChildProcess } from 'child_process';
|
||||
import readline from 'readline';
|
||||
|
||||
export interface SubprocessOptions {
|
||||
command: string;
|
||||
args: string[];
|
||||
cwd: string;
|
||||
env?: Record<string, string>;
|
||||
abortController?: AbortController;
|
||||
timeout?: number; // Milliseconds of no output before timeout
|
||||
}
|
||||
|
||||
export interface SubprocessResult {
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
exitCode: number | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Spawns a subprocess and streams JSONL output line-by-line
|
||||
*/
|
||||
export async function* spawnJSONLProcess(options: SubprocessOptions): AsyncGenerator<unknown> {
|
||||
const { command, args, cwd, env, abortController, timeout = 30000 } = options;
|
||||
|
||||
const processEnv = {
|
||||
...process.env,
|
||||
...env,
|
||||
};
|
||||
|
||||
console.log(`[SubprocessManager] Spawning: ${command} ${args.slice(0, -1).join(' ')}`);
|
||||
console.log(`[SubprocessManager] Working directory: ${cwd}`);
|
||||
|
||||
const childProcess: ChildProcess = spawn(command, args, {
|
||||
cwd,
|
||||
env: processEnv,
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
let stderrOutput = '';
|
||||
let lastOutputTime = Date.now();
|
||||
let timeoutHandle: NodeJS.Timeout | null = null;
|
||||
|
||||
// Collect stderr for error reporting
|
||||
if (childProcess.stderr) {
|
||||
childProcess.stderr.on('data', (data: Buffer) => {
|
||||
const text = data.toString();
|
||||
stderrOutput += text;
|
||||
console.error(`[SubprocessManager] stderr: ${text}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Setup timeout detection
|
||||
const resetTimeout = () => {
|
||||
lastOutputTime = Date.now();
|
||||
if (timeoutHandle) {
|
||||
clearTimeout(timeoutHandle);
|
||||
}
|
||||
timeoutHandle = setTimeout(() => {
|
||||
const elapsed = Date.now() - lastOutputTime;
|
||||
if (elapsed >= timeout) {
|
||||
console.error(`[SubprocessManager] Process timeout: no output for ${timeout}ms`);
|
||||
childProcess.kill('SIGTERM');
|
||||
}
|
||||
}, timeout);
|
||||
};
|
||||
|
||||
resetTimeout();
|
||||
|
||||
// Setup abort handling
|
||||
if (abortController) {
|
||||
abortController.signal.addEventListener('abort', () => {
|
||||
console.log('[SubprocessManager] Abort signal received, killing process');
|
||||
if (timeoutHandle) {
|
||||
clearTimeout(timeoutHandle);
|
||||
}
|
||||
childProcess.kill('SIGTERM');
|
||||
});
|
||||
}
|
||||
|
||||
// Parse stdout as JSONL (one JSON object per line)
|
||||
if (childProcess.stdout) {
|
||||
const rl = readline.createInterface({
|
||||
input: childProcess.stdout,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
try {
|
||||
for await (const line of rl) {
|
||||
resetTimeout();
|
||||
|
||||
if (!line.trim()) continue;
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(line);
|
||||
yield parsed;
|
||||
} catch (parseError) {
|
||||
console.error(`[SubprocessManager] Failed to parse JSONL line: ${line}`, parseError);
|
||||
// Yield error but continue processing
|
||||
yield {
|
||||
type: 'error',
|
||||
error: `Failed to parse output: ${line}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[SubprocessManager] Error reading stdout:', error);
|
||||
throw error;
|
||||
} finally {
|
||||
if (timeoutHandle) {
|
||||
clearTimeout(timeoutHandle);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for process to exit
|
||||
const exitCode = await new Promise<number | null>((resolve) => {
|
||||
childProcess.on('exit', (code) => {
|
||||
console.log(`[SubprocessManager] Process exited with code: ${code}`);
|
||||
resolve(code);
|
||||
});
|
||||
|
||||
childProcess.on('error', (error) => {
|
||||
console.error('[SubprocessManager] Process error:', error);
|
||||
resolve(null);
|
||||
});
|
||||
});
|
||||
|
||||
// Handle non-zero exit codes
|
||||
if (exitCode !== 0 && exitCode !== null) {
|
||||
const errorMessage = stderrOutput || `Process exited with code ${exitCode}`;
|
||||
console.error(`[SubprocessManager] Process failed: ${errorMessage}`);
|
||||
yield {
|
||||
type: 'error',
|
||||
error: errorMessage,
|
||||
};
|
||||
}
|
||||
|
||||
// Process completed successfully
|
||||
if (exitCode === 0 && !stderrOutput) {
|
||||
console.log('[SubprocessManager] Process completed successfully');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Spawns a subprocess and collects all output
|
||||
*/
|
||||
export async function spawnProcess(options: SubprocessOptions): Promise<SubprocessResult> {
|
||||
const { command, args, cwd, env, abortController } = options;
|
||||
|
||||
const processEnv = {
|
||||
...process.env,
|
||||
...env,
|
||||
};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const childProcess = spawn(command, args, {
|
||||
cwd,
|
||||
env: processEnv,
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
if (childProcess.stdout) {
|
||||
childProcess.stdout.on('data', (data: Buffer) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
}
|
||||
|
||||
if (childProcess.stderr) {
|
||||
childProcess.stderr.on('data', (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
}
|
||||
|
||||
// Setup abort handling
|
||||
if (abortController) {
|
||||
abortController.signal.addEventListener('abort', () => {
|
||||
childProcess.kill('SIGTERM');
|
||||
reject(new Error('Process aborted'));
|
||||
});
|
||||
}
|
||||
|
||||
childProcess.on('exit', (code) => {
|
||||
resolve({ stdout, stderr, exitCode: code });
|
||||
});
|
||||
|
||||
childProcess.on('error', (error) => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
212
libs/platform/tests/paths.test.ts
Normal file
212
libs/platform/tests/paths.test.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import {
|
||||
getAutomakerDir,
|
||||
getFeaturesDir,
|
||||
getFeatureDir,
|
||||
getFeatureImagesDir,
|
||||
getBoardDir,
|
||||
getImagesDir,
|
||||
getContextDir,
|
||||
getWorktreesDir,
|
||||
getAppSpecPath,
|
||||
getBranchTrackingPath,
|
||||
ensureAutomakerDir,
|
||||
getGlobalSettingsPath,
|
||||
getCredentialsPath,
|
||||
getProjectSettingsPath,
|
||||
ensureDataDir,
|
||||
} from '../src/paths';
|
||||
|
||||
describe('paths.ts', () => {
|
||||
let tempDir: string;
|
||||
let projectPath: string;
|
||||
let dataDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Create a temporary directory for testing
|
||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'platform-paths-test-'));
|
||||
projectPath = path.join(tempDir, 'test-project');
|
||||
dataDir = path.join(tempDir, 'user-data');
|
||||
await fs.mkdir(projectPath, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up temporary directory
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
} catch (error) {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
});
|
||||
|
||||
describe('Project-level path construction', () => {
|
||||
it('should return automaker directory path', () => {
|
||||
const result = getAutomakerDir(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker'));
|
||||
});
|
||||
|
||||
it('should return features directory path', () => {
|
||||
const result = getFeaturesDir(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'features'));
|
||||
});
|
||||
|
||||
it('should return feature directory path', () => {
|
||||
const featureId = 'auth-feature';
|
||||
const result = getFeatureDir(projectPath, featureId);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'features', featureId));
|
||||
});
|
||||
|
||||
it('should return feature images directory path', () => {
|
||||
const featureId = 'auth-feature';
|
||||
const result = getFeatureImagesDir(projectPath, featureId);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'features', featureId, 'images'));
|
||||
});
|
||||
|
||||
it('should return board directory path', () => {
|
||||
const result = getBoardDir(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'board'));
|
||||
});
|
||||
|
||||
it('should return images directory path', () => {
|
||||
const result = getImagesDir(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'images'));
|
||||
});
|
||||
|
||||
it('should return context directory path', () => {
|
||||
const result = getContextDir(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'context'));
|
||||
});
|
||||
|
||||
it('should return worktrees directory path', () => {
|
||||
const result = getWorktreesDir(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'worktrees'));
|
||||
});
|
||||
|
||||
it('should return app spec file path', () => {
|
||||
const result = getAppSpecPath(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'app_spec.txt'));
|
||||
});
|
||||
|
||||
it('should return branch tracking file path', () => {
|
||||
const result = getBranchTrackingPath(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'active-branches.json'));
|
||||
});
|
||||
|
||||
it('should return project settings file path', () => {
|
||||
const result = getProjectSettingsPath(projectPath);
|
||||
expect(result).toBe(path.join(projectPath, '.automaker', 'settings.json'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Global settings path construction', () => {
|
||||
it('should return global settings path', () => {
|
||||
const result = getGlobalSettingsPath(dataDir);
|
||||
expect(result).toBe(path.join(dataDir, 'settings.json'));
|
||||
});
|
||||
|
||||
it('should return credentials path', () => {
|
||||
const result = getCredentialsPath(dataDir);
|
||||
expect(result).toBe(path.join(dataDir, 'credentials.json'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Directory creation', () => {
|
||||
it('should create automaker directory', async () => {
|
||||
const automakerDir = await ensureAutomakerDir(projectPath);
|
||||
|
||||
expect(automakerDir).toBe(path.join(projectPath, '.automaker'));
|
||||
|
||||
const stats = await fs.stat(automakerDir);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
});
|
||||
|
||||
it('should be idempotent when creating automaker directory', async () => {
|
||||
// Create directory first time
|
||||
const firstResult = await ensureAutomakerDir(projectPath);
|
||||
|
||||
// Create directory second time
|
||||
const secondResult = await ensureAutomakerDir(projectPath);
|
||||
|
||||
expect(firstResult).toBe(secondResult);
|
||||
|
||||
const stats = await fs.stat(firstResult);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
});
|
||||
|
||||
it('should create data directory', async () => {
|
||||
const result = await ensureDataDir(dataDir);
|
||||
|
||||
expect(result).toBe(dataDir);
|
||||
|
||||
const stats = await fs.stat(dataDir);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
});
|
||||
|
||||
it('should be idempotent when creating data directory', async () => {
|
||||
// Create directory first time
|
||||
const firstResult = await ensureDataDir(dataDir);
|
||||
|
||||
// Create directory second time
|
||||
const secondResult = await ensureDataDir(dataDir);
|
||||
|
||||
expect(firstResult).toBe(secondResult);
|
||||
|
||||
const stats = await fs.stat(firstResult);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
});
|
||||
|
||||
it('should create nested directories recursively', async () => {
|
||||
const deepProjectPath = path.join(tempDir, 'nested', 'deep', 'project');
|
||||
await fs.mkdir(deepProjectPath, { recursive: true });
|
||||
|
||||
const automakerDir = await ensureAutomakerDir(deepProjectPath);
|
||||
|
||||
const stats = await fs.stat(automakerDir);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Path handling with special characters', () => {
|
||||
it('should handle feature IDs with special characters', () => {
|
||||
const featureId = 'feature-with-dashes_and_underscores';
|
||||
const result = getFeatureDir(projectPath, featureId);
|
||||
expect(result).toContain(featureId);
|
||||
});
|
||||
|
||||
it('should handle paths with spaces', () => {
|
||||
const pathWithSpaces = path.join(tempDir, 'path with spaces');
|
||||
const result = getAutomakerDir(pathWithSpaces);
|
||||
expect(result).toBe(path.join(pathWithSpaces, '.automaker'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Path relationships', () => {
|
||||
it('should have feature dir as child of features dir', () => {
|
||||
const featuresDir = getFeaturesDir(projectPath);
|
||||
const featureDir = getFeatureDir(projectPath, 'test-feature');
|
||||
|
||||
expect(featureDir.startsWith(featuresDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have all project paths under automaker dir', () => {
|
||||
const automakerDir = getAutomakerDir(projectPath);
|
||||
const paths = [
|
||||
getFeaturesDir(projectPath),
|
||||
getBoardDir(projectPath),
|
||||
getImagesDir(projectPath),
|
||||
getContextDir(projectPath),
|
||||
getWorktreesDir(projectPath),
|
||||
getAppSpecPath(projectPath),
|
||||
getBranchTrackingPath(projectPath),
|
||||
getProjectSettingsPath(projectPath),
|
||||
];
|
||||
|
||||
paths.forEach((p) => {
|
||||
expect(p.startsWith(automakerDir)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
234
libs/platform/tests/security.test.ts
Normal file
234
libs/platform/tests/security.test.ts
Normal file
@@ -0,0 +1,234 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
||||
import path from 'path';
|
||||
|
||||
describe('security.ts', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
// Save original environment
|
||||
originalEnv = { ...process.env };
|
||||
// Reset modules to get fresh state
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
describe('initAllowedPaths', () => {
|
||||
it('should load ALLOWED_ROOT_DIRECTORY if set', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve('/projects'));
|
||||
});
|
||||
|
||||
it('should load DATA_DIR if set', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
process.env.DATA_DIR = '/data/directory';
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve('/data/directory'));
|
||||
});
|
||||
|
||||
it('should load both ALLOWED_ROOT_DIRECTORY and DATA_DIR if both set', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
process.env.DATA_DIR = '/app/data';
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve('/projects'));
|
||||
expect(allowed).toContain(path.resolve('/app/data'));
|
||||
});
|
||||
|
||||
it('should handle missing environment variables gracefully', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths } = await import('../src/security');
|
||||
expect(() => initAllowedPaths()).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isPathAllowed', () => {
|
||||
it('should allow paths within ALLOWED_ROOT_DIRECTORY', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(isPathAllowed('/allowed/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/allowed/subdir/file.txt')).toBe(true);
|
||||
});
|
||||
|
||||
it('should deny paths outside ALLOWED_ROOT_DIRECTORY', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(isPathAllowed('/not-allowed/file.txt')).toBe(false);
|
||||
expect(isPathAllowed('/etc/passwd')).toBe(false);
|
||||
});
|
||||
|
||||
it('should always allow DATA_DIR paths', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
process.env.DATA_DIR = '/app/data';
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
// DATA_DIR paths are always allowed
|
||||
expect(isPathAllowed('/app/data/settings.json')).toBe(true);
|
||||
expect(isPathAllowed('/app/data/credentials.json')).toBe(true);
|
||||
});
|
||||
|
||||
it('should allow all paths when no restrictions configured', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(isPathAllowed('/any/path')).toBe(true);
|
||||
expect(isPathAllowed('/etc/passwd')).toBe(true);
|
||||
});
|
||||
|
||||
it('should allow all paths when only DATA_DIR is configured', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
process.env.DATA_DIR = '/data';
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
// DATA_DIR should be allowed
|
||||
expect(isPathAllowed('/data/file.txt')).toBe(true);
|
||||
// And all other paths should be allowed since no ALLOWED_ROOT_DIRECTORY restriction
|
||||
expect(isPathAllowed('/any/path')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validatePath', () => {
|
||||
it('should return resolved path for allowed paths', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath('/allowed/file.txt');
|
||||
expect(result).toBe(path.resolve('/allowed/file.txt'));
|
||||
});
|
||||
|
||||
it('should throw error for paths outside allowed directories', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, validatePath, PathNotAllowedError } =
|
||||
await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(() => validatePath('/not-allowed/file.txt')).toThrow(PathNotAllowedError);
|
||||
});
|
||||
|
||||
it('should resolve relative paths', async () => {
|
||||
const cwd = process.cwd();
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = cwd;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath('./file.txt');
|
||||
expect(result).toBe(path.resolve(cwd, './file.txt'));
|
||||
});
|
||||
|
||||
it('should not throw when no restrictions configured', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(() => validatePath('/any/path')).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllowedPaths', () => {
|
||||
it('should return empty array when no paths configured', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(Array.isArray(allowed)).toBe(true);
|
||||
expect(allowed).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should return configured paths', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
process.env.DATA_DIR = '/data';
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve('/projects'));
|
||||
expect(allowed).toContain(path.resolve('/data'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllowedRootDirectory', () => {
|
||||
it('should return the configured root directory', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
|
||||
const { initAllowedPaths, getAllowedRootDirectory } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(getAllowedRootDirectory()).toBe(path.resolve('/projects'));
|
||||
});
|
||||
|
||||
it('should return null when not configured', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, getAllowedRootDirectory } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(getAllowedRootDirectory()).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDataDirectory', () => {
|
||||
it('should return the configured data directory', async () => {
|
||||
process.env.DATA_DIR = '/data';
|
||||
|
||||
const { initAllowedPaths, getDataDirectory } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(getDataDirectory()).toBe(path.resolve('/data'));
|
||||
});
|
||||
|
||||
it('should return null when not configured', async () => {
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, getDataDirectory } = await import('../src/security');
|
||||
initAllowedPaths();
|
||||
|
||||
expect(getDataDirectory()).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
502
libs/platform/tests/subprocess.test.ts
Normal file
502
libs/platform/tests/subprocess.test.ts
Normal file
@@ -0,0 +1,502 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { spawnJSONLProcess, spawnProcess, type SubprocessOptions } from '../src/subprocess';
|
||||
import * as cp from 'child_process';
|
||||
import { EventEmitter } from 'events';
|
||||
import { Readable } from 'stream';
|
||||
|
||||
vi.mock('child_process');
|
||||
|
||||
/**
|
||||
* Helper to collect all items from an async generator
|
||||
*/
|
||||
async function collectAsyncGenerator<T>(generator: AsyncGenerator<T>): Promise<T[]> {
|
||||
const results: T[] = [];
|
||||
for await (const item of generator) {
|
||||
results.push(item);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
describe('subprocess.ts', () => {
|
||||
let consoleSpy: {
|
||||
log: ReturnType<typeof vi.spyOn>;
|
||||
error: ReturnType<typeof vi.spyOn>;
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
consoleSpy = {
|
||||
log: vi.spyOn(console, 'log').mockImplementation(() => {}),
|
||||
error: vi.spyOn(console, 'error').mockImplementation(() => {}),
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
consoleSpy.log.mockRestore();
|
||||
consoleSpy.error.mockRestore();
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper to create a mock ChildProcess with stdout/stderr streams
|
||||
*/
|
||||
function createMockProcess(config: {
|
||||
stdoutLines?: string[];
|
||||
stderrLines?: string[];
|
||||
exitCode?: number;
|
||||
error?: Error;
|
||||
delayMs?: number;
|
||||
}) {
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
|
||||
// Create readable streams for stdout and stderr
|
||||
const stdout = new Readable({ read() {} });
|
||||
const stderr = new Readable({ read() {} });
|
||||
|
||||
mockProcess.stdout = stdout;
|
||||
mockProcess.stderr = stderr;
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
// Use process.nextTick to ensure readline interface is set up first
|
||||
process.nextTick(() => {
|
||||
// Emit stderr lines immediately
|
||||
if (config.stderrLines) {
|
||||
for (const line of config.stderrLines) {
|
||||
stderr.emit('data', Buffer.from(line));
|
||||
}
|
||||
}
|
||||
|
||||
// Emit stdout lines with small delays to ensure readline processes them
|
||||
const emitLines = async () => {
|
||||
if (config.stdoutLines) {
|
||||
for (const line of config.stdoutLines) {
|
||||
stdout.push(line + '\n');
|
||||
// Small delay to allow readline to process
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
}
|
||||
}
|
||||
|
||||
// Small delay before ending stream
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
stdout.push(null); // End stdout
|
||||
|
||||
// Small delay before exit
|
||||
await new Promise((resolve) => setTimeout(resolve, config.delayMs ?? 10));
|
||||
|
||||
// Emit exit or error
|
||||
if (config.error) {
|
||||
mockProcess.emit('error', config.error);
|
||||
} else {
|
||||
mockProcess.emit('exit', config.exitCode ?? 0);
|
||||
}
|
||||
};
|
||||
|
||||
emitLines();
|
||||
});
|
||||
|
||||
return mockProcess;
|
||||
}
|
||||
|
||||
describe('spawnJSONLProcess', () => {
|
||||
const baseOptions: SubprocessOptions = {
|
||||
command: 'test-command',
|
||||
args: ['arg1', 'arg2'],
|
||||
cwd: '/test/dir',
|
||||
};
|
||||
|
||||
it('should yield parsed JSONL objects line by line', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: [
|
||||
'{"type":"start","id":1}',
|
||||
'{"type":"progress","value":50}',
|
||||
'{"type":"complete","result":"success"}',
|
||||
],
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(3);
|
||||
expect(results[0]).toEqual({ type: 'start', id: 1 });
|
||||
expect(results[1]).toEqual({ type: 'progress', value: 50 });
|
||||
expect(results[2]).toEqual({ type: 'complete', result: 'success' });
|
||||
});
|
||||
|
||||
it('should skip empty lines', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: ['{"type":"first"}', '', ' ', '{"type":"second"}'],
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results[0]).toEqual({ type: 'first' });
|
||||
expect(results[1]).toEqual({ type: 'second' });
|
||||
});
|
||||
|
||||
it('should yield error for malformed JSON and continue processing', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: ['{"type":"valid"}', '{invalid json}', '{"type":"also_valid"}'],
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(3);
|
||||
expect(results[0]).toEqual({ type: 'valid' });
|
||||
expect(results[1]).toMatchObject({
|
||||
type: 'error',
|
||||
error: expect.stringContaining('Failed to parse output'),
|
||||
});
|
||||
expect(results[2]).toEqual({ type: 'also_valid' });
|
||||
});
|
||||
|
||||
it('should collect stderr output', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: ['{"type":"test"}'],
|
||||
stderrLines: ['Warning: something happened', 'Error: critical issue'],
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
await collectAsyncGenerator(generator);
|
||||
|
||||
expect(consoleSpy.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Warning: something happened')
|
||||
);
|
||||
expect(consoleSpy.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error: critical issue')
|
||||
);
|
||||
});
|
||||
|
||||
it('should yield error on non-zero exit code', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: ['{"type":"started"}'],
|
||||
stderrLines: ['Process failed with error'],
|
||||
exitCode: 1,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results[0]).toEqual({ type: 'started' });
|
||||
expect(results[1]).toMatchObject({
|
||||
type: 'error',
|
||||
error: expect.stringContaining('Process failed with error'),
|
||||
});
|
||||
});
|
||||
|
||||
it('should yield error with exit code when stderr is empty', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: ['{"type":"test"}'],
|
||||
exitCode: 127,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results[1]).toMatchObject({
|
||||
type: 'error',
|
||||
error: 'Process exited with code 127',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle process spawn errors', async () => {
|
||||
const mockProcess = createMockProcess({
|
||||
error: new Error('Command not found'),
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
// When process.on('error') fires, exitCode is null
|
||||
// The generator should handle this gracefully
|
||||
expect(results).toEqual([]);
|
||||
});
|
||||
|
||||
it('should kill process on AbortController signal', async () => {
|
||||
const abortController = new AbortController();
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: ['{"type":"start"}'],
|
||||
exitCode: 0,
|
||||
delayMs: 100, // Delay to allow abort
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess({
|
||||
...baseOptions,
|
||||
abortController,
|
||||
});
|
||||
|
||||
// Start consuming the generator
|
||||
const promise = collectAsyncGenerator(generator);
|
||||
|
||||
// Abort after a short delay
|
||||
setTimeout(() => abortController.abort(), 20);
|
||||
|
||||
await promise;
|
||||
|
||||
expect(mockProcess.kill).toHaveBeenCalledWith('SIGTERM');
|
||||
expect(consoleSpy.log).toHaveBeenCalledWith(expect.stringContaining('Abort signal received'));
|
||||
});
|
||||
|
||||
it('should spawn process with correct arguments', async () => {
|
||||
const mockProcess = createMockProcess({ exitCode: 0 });
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const options: SubprocessOptions = {
|
||||
command: 'my-command',
|
||||
args: ['--flag', 'value'],
|
||||
cwd: '/work/dir',
|
||||
env: { CUSTOM_VAR: 'test' },
|
||||
};
|
||||
|
||||
const generator = spawnJSONLProcess(options);
|
||||
await collectAsyncGenerator(generator);
|
||||
|
||||
expect(cp.spawn).toHaveBeenCalledWith('my-command', ['--flag', 'value'], {
|
||||
cwd: '/work/dir',
|
||||
env: expect.objectContaining({ CUSTOM_VAR: 'test' }),
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge env with process.env', async () => {
|
||||
const mockProcess = createMockProcess({ exitCode: 0 });
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const options: SubprocessOptions = {
|
||||
command: 'test',
|
||||
args: [],
|
||||
cwd: '/test',
|
||||
env: { CUSTOM: 'value' },
|
||||
};
|
||||
|
||||
const generator = spawnJSONLProcess(options);
|
||||
await collectAsyncGenerator(generator);
|
||||
|
||||
expect(cp.spawn).toHaveBeenCalledWith(
|
||||
'test',
|
||||
[],
|
||||
expect.objectContaining({
|
||||
env: expect.objectContaining({
|
||||
CUSTOM: 'value',
|
||||
// Should also include existing process.env
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle complex JSON objects', async () => {
|
||||
const complexObject = {
|
||||
type: 'complex',
|
||||
nested: { deep: { value: [1, 2, 3] } },
|
||||
array: [{ id: 1 }, { id: 2 }],
|
||||
string: 'with "quotes" and \\backslashes',
|
||||
};
|
||||
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: [JSON.stringify(complexObject)],
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]).toEqual(complexObject);
|
||||
});
|
||||
});
|
||||
|
||||
describe('spawnProcess', () => {
|
||||
const baseOptions: SubprocessOptions = {
|
||||
command: 'test-command',
|
||||
args: ['arg1'],
|
||||
cwd: '/test',
|
||||
};
|
||||
|
||||
it('should collect stdout and stderr', async () => {
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
const stdout = new Readable({ read() {} });
|
||||
const stderr = new Readable({ read() {} });
|
||||
|
||||
mockProcess.stdout = stdout;
|
||||
mockProcess.stderr = stderr;
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
stdout.push('line 1\n');
|
||||
stdout.push('line 2\n');
|
||||
stdout.push(null);
|
||||
|
||||
stderr.push('error 1\n');
|
||||
stderr.push('error 2\n');
|
||||
stderr.push(null);
|
||||
|
||||
mockProcess.emit('exit', 0);
|
||||
}, 10);
|
||||
|
||||
const result = await spawnProcess(baseOptions);
|
||||
|
||||
expect(result.stdout).toBe('line 1\nline 2\n');
|
||||
expect(result.stderr).toBe('error 1\nerror 2\n');
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it('should return correct exit code', async () => {
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.stdout.push(null);
|
||||
mockProcess.stderr.push(null);
|
||||
mockProcess.emit('exit', 42);
|
||||
}, 10);
|
||||
|
||||
const result = await spawnProcess(baseOptions);
|
||||
|
||||
expect(result.exitCode).toBe(42);
|
||||
});
|
||||
|
||||
it('should handle process errors', async () => {
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.emit('error', new Error('Spawn failed'));
|
||||
}, 10);
|
||||
|
||||
await expect(spawnProcess(baseOptions)).rejects.toThrow('Spawn failed');
|
||||
});
|
||||
|
||||
it('should handle AbortController signal', async () => {
|
||||
const abortController = new AbortController();
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => abortController.abort(), 20);
|
||||
|
||||
await expect(spawnProcess({ ...baseOptions, abortController })).rejects.toThrow(
|
||||
'Process aborted'
|
||||
);
|
||||
|
||||
expect(mockProcess.kill).toHaveBeenCalledWith('SIGTERM');
|
||||
});
|
||||
|
||||
it('should spawn with correct options', async () => {
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.stdout.push(null);
|
||||
mockProcess.stderr.push(null);
|
||||
mockProcess.emit('exit', 0);
|
||||
}, 10);
|
||||
|
||||
const options: SubprocessOptions = {
|
||||
command: 'my-cmd',
|
||||
args: ['--verbose'],
|
||||
cwd: '/my/dir',
|
||||
env: { MY_VAR: 'value' },
|
||||
};
|
||||
|
||||
await spawnProcess(options);
|
||||
|
||||
expect(cp.spawn).toHaveBeenCalledWith('my-cmd', ['--verbose'], {
|
||||
cwd: '/my/dir',
|
||||
env: expect.objectContaining({ MY_VAR: 'value' }),
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle empty stdout and stderr', async () => {
|
||||
const mockProcess = new EventEmitter() as cp.ChildProcess & {
|
||||
stdout: Readable;
|
||||
stderr: Readable;
|
||||
kill: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn().mockReturnValue(true);
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.stdout.push(null);
|
||||
mockProcess.stderr.push(null);
|
||||
mockProcess.emit('exit', 0);
|
||||
}, 10);
|
||||
|
||||
const result = await spawnProcess(baseOptions);
|
||||
|
||||
expect(result.stdout).toBe('');
|
||||
expect(result.stderr).toBe('');
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
9
libs/platform/tsconfig.json
Normal file
9
libs/platform/tsconfig.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
  "extends": "../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "./dist",
    "rootDir": "./src"
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "dist"]
}
|
||||
23
libs/platform/vitest.config.ts
Normal file
23
libs/platform/vitest.config.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { defineConfig } from 'vitest/config';
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'node',
|
||||
include: ['tests/**/*.test.ts'],
|
||||
coverage: {
|
||||
provider: 'v8',
|
||||
reporter: ['text', 'json', 'html'],
|
||||
include: ['src/**/*.ts'],
|
||||
exclude: ['src/**/*.d.ts', 'src/index.ts'],
|
||||
thresholds: {
|
||||
// Excellent coverage: 94.69% stmts, 80.48% branches, 97.14% funcs, 94.64% lines
|
||||
// All files now have comprehensive tests
|
||||
lines: 90,
|
||||
functions: 95,
|
||||
branches: 75,
|
||||
statements: 90,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
Reference in New Issue
Block a user