refactor: Improve all git operations, add stash support, add improved pull request flow, add worktree file copy options, address code review comments, add cherry pick options

This commit is contained in:
gsxdsm
2026-02-17 22:02:58 -08:00
parent f4e87d4c25
commit 9af63bc1ef
89 changed files with 6811 additions and 351 deletions

View File

@@ -69,6 +69,7 @@ import { CodexModelCacheService } from './services/codex-model-cache-service.js'
import { createZaiRoutes } from './routes/zai/index.js';
import { ZaiUsageService } from './services/zai-usage-service.js';
import { createGeminiRoutes } from './routes/gemini/index.js';
import { GeminiUsageService } from './services/gemini-usage-service.js';
import { createGitHubRoutes } from './routes/github/index.js';
import { createContextRoutes } from './routes/context/index.js';
import { createBacklogPlanRoutes } from './routes/backlog-plan/index.js';
@@ -332,6 +333,7 @@ const codexAppServerService = new CodexAppServerService();
const codexModelCacheService = new CodexModelCacheService(DATA_DIR, codexAppServerService);
const codexUsageService = new CodexUsageService(codexAppServerService);
const zaiUsageService = new ZaiUsageService();
const geminiUsageService = new GeminiUsageService();
const mcpTestService = new MCPTestService(settingsService);
const ideationService = new IdeationService(events, settingsService, featureLoader);
@@ -494,7 +496,7 @@ app.use('/api/settings', createSettingsRoutes(settingsService));
app.use('/api/claude', createClaudeRoutes(claudeUsageService));
app.use('/api/codex', createCodexRoutes(codexUsageService, codexModelCacheService));
app.use('/api/zai', createZaiRoutes(zaiUsageService, settingsService));
app.use('/api/gemini', createGeminiRoutes());
app.use('/api/gemini', createGeminiRoutes(geminiUsageService, events));
app.use('/api/github', createGitHubRoutes(events, settingsService));
app.use('/api/context', createContextRoutes(settingsService));
app.use('/api/backlog-plan', createBacklogPlanRoutes(events, settingsService));

View File

@@ -788,7 +788,7 @@ export class CodexProvider extends BaseProvider {
overrides.push({ key: 'features.web_search_request', value: true });
}
buildConfigOverrides(overrides);
const configOverrideArgs = buildConfigOverrides(overrides);
const preExecArgs: string[] = [];
// Add additional directories with write access
@@ -807,6 +807,7 @@ export class CodexProvider extends BaseProvider {
CODEX_MODEL_FLAG,
options.model,
CODEX_JSON_FLAG,
...configOverrideArgs,
'-', // Read prompt from stdin to avoid shell escaping issues
];

View File

@@ -31,7 +31,7 @@ import type {
} from './types.js';
import { validateBareModelId } from '@automaker/types';
import { validateApiKey } from '../lib/auth-utils.js';
import { getEffectivePermissions } from '../services/cursor-config-service.js';
import { getEffectivePermissions, detectProfile } from '../services/cursor-config-service.js';
import {
type CursorStreamEvent,
type CursorSystemEvent,
@@ -878,8 +878,12 @@ export class CursorProvider extends CliProvider {
logger.debug(`CursorProvider.executeQuery called with model: "${options.model}"`);
// Get effective permissions for this project
await getEffectivePermissions(options.cwd || process.cwd());
// Get effective permissions for this project and detect the active profile
const effectivePermissions = await getEffectivePermissions(options.cwd || process.cwd());
const activeProfile = detectProfile(effectivePermissions);
logger.debug(
`Active permission profile: ${activeProfile ?? 'none'}, permissions: ${JSON.stringify(effectivePermissions)}`
);
// Debug: log raw events when AUTOMAKER_DEBUG_RAW_OUTPUT is enabled
const debugRawEvents =

View File

@@ -58,6 +58,9 @@ export function createApplyHandler() {
if (feature.dependencies?.includes(change.featureId)) {
const newDeps = feature.dependencies.filter((d) => d !== change.featureId);
await featureLoader.update(projectPath, feature.id, { dependencies: newDeps });
// Mutate the in-memory feature object so subsequent deletions use the updated
// dependency list and don't reintroduce already-removed dependency IDs.
feature.dependencies = newDeps;
logger.info(
`[BacklogPlan] Removed dependency ${change.featureId} from ${feature.id}`
);

View File

@@ -19,6 +19,7 @@ import { createBrowseHandler } from './routes/browse.js';
import { createImageHandler } from './routes/image.js';
import { createSaveBoardBackgroundHandler } from './routes/save-board-background.js';
import { createDeleteBoardBackgroundHandler } from './routes/delete-board-background.js';
import { createBrowseProjectFilesHandler } from './routes/browse-project-files.js';
export function createFsRoutes(_events: EventEmitter): Router {
const router = Router();
@@ -37,6 +38,7 @@ export function createFsRoutes(_events: EventEmitter): Router {
router.get('/image', createImageHandler());
router.post('/save-board-background', createSaveBoardBackgroundHandler());
router.post('/delete-board-background', createDeleteBoardBackgroundHandler());
router.post('/browse-project-files', createBrowseProjectFilesHandler());
return router;
}

View File

@@ -0,0 +1,186 @@
/**
* POST /browse-project-files endpoint - Browse files and directories within a project
*
* Unlike /browse which only lists directories (for project folder selection),
* this endpoint lists both files and directories relative to a project root.
* Used by the file selector for "Copy files to worktree" settings.
*
* Features:
* - Lists both files and directories
* - Hides .git, .worktrees, node_modules, and other build artifacts
* - Returns entries relative to the project root
* - Supports navigating into subdirectories
* - Security: prevents path traversal outside project root
*/
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { PathNotAllowedError } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
// Directories to hide from the listing (build artifacts, caches, etc.)
// Membership is checked per entry name (not per path), so a directory with
// one of these names is hidden at every depth of the tree.
const HIDDEN_DIRECTORIES = new Set([
  '.git',
  '.worktrees',
  'node_modules',
  '.automaker',
  '__pycache__',
  '.cache',
  '.next',
  '.nuxt',
  '.svelte-kit',
  '.turbo',
  '.vercel',
  '.output',
  'coverage',
  '.nyc_output',
  'dist',
  'build',
  'out',
  '.tmp',
  'tmp',
  '.venv',
  'venv',
  'target',
  'vendor',
  '.gradle',
  '.idea',
  '.vscode',
]);

// Shape of a single directory entry returned to the client.
interface ProjectFileEntry {
  name: string; // Base name of the file or directory
  relativePath: string; // Path relative to the project root
  isDirectory: boolean;
  isFile: boolean;
}
/**
 * Handle POST /browse-project-files.
 *
 * Lists both files and directories at `relativePath` inside `projectPath`
 * (empty/omitted relativePath = project root), hiding well-known build,
 * cache, and VCS directories. Entry paths are returned relative to the
 * project root so the client never needs to hold an absolute path.
 *
 * Responses:
 * - 400 for a missing projectPath, a path escaping the project, or a
 *   non-directory target
 * - 403 when the path is outside the allowed roots (PathNotAllowedError)
 * - 200 with empty entries and a warning on permission errors, so the UI
 *   can stay on the current directory instead of failing hard
 */
export function createBrowseProjectFilesHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, relativePath } = req.body as {
        projectPath: string;
        relativePath?: string; // Relative path within the project to browse (empty = project root)
      };

      if (!projectPath) {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const resolvedProjectPath = path.resolve(projectPath);

      // Determine the target directory to browse
      let targetPath = resolvedProjectPath;
      let currentRelativePath = '';

      if (relativePath) {
        // Security: normalize and validate the relative path
        const normalized = path.normalize(relativePath);
        if (normalized.startsWith('..') || path.isAbsolute(normalized)) {
          res.status(400).json({
            success: false,
            error: 'Invalid relative path - must be within the project directory',
          });
          return;
        }
        targetPath = path.join(resolvedProjectPath, normalized);
        currentRelativePath = normalized;

        // Double-check the resolved path is within the project. Compare
        // against the project path plus a trailing separator so a sibling
        // directory that merely shares the prefix (e.g. "/proj-backup"
        // next to "/proj") cannot pass a bare startsWith() check.
        const resolvedTarget = path.resolve(targetPath);
        if (
          resolvedTarget !== resolvedProjectPath &&
          !resolvedTarget.startsWith(resolvedProjectPath + path.sep)
        ) {
          res.status(400).json({
            success: false,
            error: 'Path traversal detected',
          });
          return;
        }
      }

      // Determine parent relative path ('' = project root, null = already at root)
      let parentRelativePath: string | null = null;
      if (currentRelativePath) {
        const parent = path.dirname(currentRelativePath);
        parentRelativePath = parent === '.' ? '' : parent;
      }

      try {
        const stat = await secureFs.stat(targetPath);
        if (!stat.isDirectory()) {
          res.status(400).json({ success: false, error: 'Path is not a directory' });
          return;
        }

        // Read directory contents
        const dirEntries = await secureFs.readdir(targetPath, { withFileTypes: true });

        // Filter and map entries
        const entries: ProjectFileEntry[] = dirEntries
          .filter((entry) => {
            // Skip hidden directories (build artifacts, etc.)
            if (entry.isDirectory() && HIDDEN_DIRECTORIES.has(entry.name)) {
              return false;
            }
            // Keep dotfiles visible since users often need .env, .eslintrc, etc.
            return true;
          })
          .map((entry) => {
            const entryRelativePath = currentRelativePath
              ? `${currentRelativePath}/${entry.name}`
              : entry.name;
            return {
              name: entry.name,
              relativePath: entryRelativePath,
              isDirectory: entry.isDirectory(),
              isFile: entry.isFile(),
            };
          })
          // Sort: directories first, then files, alphabetically within each group
          .sort((a, b) => {
            if (a.isDirectory !== b.isDirectory) {
              return a.isDirectory ? -1 : 1;
            }
            return a.name.localeCompare(b.name);
          });

        res.json({
          success: true,
          currentRelativePath,
          parentRelativePath,
          entries,
        });
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : 'Failed to read directory';
        const isPermissionError = errorMessage.includes('EPERM') || errorMessage.includes('EACCES');
        if (isPermissionError) {
          // Soft failure: report an empty listing with a warning rather
          // than an error so the client can keep navigating.
          res.json({
            success: true,
            currentRelativePath,
            parentRelativePath,
            entries: [],
            warning: 'Permission denied - unable to read this directory',
          });
        } else {
          res.status(400).json({
            success: false,
            error: errorMessage,
          });
        }
      }
    } catch (error) {
      if (error instanceof PathNotAllowedError) {
        res.status(403).json({ success: false, error: getErrorMessage(error) });
        return;
      }
      logError(error, 'Browse project files failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -1,17 +1,20 @@
import { Router, Request, Response } from 'express';
import { GeminiProvider } from '../../providers/gemini-provider.js';
import { getGeminiUsageService } from '../../services/gemini-usage-service.js';
import { GeminiUsageService } from '../../services/gemini-usage-service.js';
import { createLogger } from '@automaker/utils';
import type { EventEmitter } from '../../lib/events.js';
const logger = createLogger('Gemini');
export function createGeminiRoutes(): Router {
export function createGeminiRoutes(
usageService: GeminiUsageService,
_events: EventEmitter
): Router {
const router = Router();
// Get current usage/quota data from Google Cloud API
router.get('/usage', async (_req: Request, res: Response) => {
try {
const usageService = getGeminiUsageService();
const usageData = await usageService.fetchUsageData();
res.json(usageData);

View File

@@ -110,6 +110,7 @@ export function createVerifyClaudeAuthHandler() {
let authenticated = false;
let errorMessage = '';
let receivedAnyContent = false;
let cleanupEnv: (() => void) | undefined;
// Create secure auth session
const sessionId = `claude-auth-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
@@ -151,7 +152,7 @@ export function createVerifyClaudeAuthHandler() {
AuthSessionManager.createSession(sessionId, authMethod || 'api_key', apiKey, 'anthropic');
// Create temporary environment override for SDK call
const _cleanupEnv = createTempEnvOverride(authEnv);
cleanupEnv = createTempEnvOverride(authEnv);
// Run a minimal query to verify authentication
const stream = query({
@@ -313,6 +314,8 @@ export function createVerifyClaudeAuthHandler() {
}
} finally {
clearTimeout(timeoutId);
// Restore process.env to its original state
cleanupEnv?.();
// Clean up the auth session
AuthSessionManager.destroySession(sessionId);
}

View File

@@ -51,9 +51,17 @@ import {
createDeleteInitScriptHandler,
createRunInitScriptHandler,
} from './routes/init-script.js';
import { createCommitLogHandler } from './routes/commit-log.js';
import { createDiscardChangesHandler } from './routes/discard-changes.js';
import { createListRemotesHandler } from './routes/list-remotes.js';
import { createAddRemoteHandler } from './routes/add-remote.js';
import { createStashPushHandler } from './routes/stash-push.js';
import { createStashListHandler } from './routes/stash-list.js';
import { createStashApplyHandler } from './routes/stash-apply.js';
import { createStashDropHandler } from './routes/stash-drop.js';
import { createCherryPickHandler } from './routes/cherry-pick.js';
import { createBranchCommitLogHandler } from './routes/branch-commit-log.js';
import { createGeneratePRDescriptionHandler } from './routes/generate-pr-description.js';
import type { SettingsService } from '../../services/settings-service.js';
export function createWorktreeRoutes(
@@ -73,7 +81,11 @@ export function createWorktreeRoutes(
requireValidProject,
createMergeHandler()
);
router.post('/create', validatePathParams('projectPath'), createCreateHandler(events));
router.post(
'/create',
validatePathParams('projectPath'),
createCreateHandler(events, settingsService)
);
router.post('/delete', validatePathParams('projectPath', 'worktreePath'), createDeleteHandler());
router.post('/create-pr', createCreatePRHandler());
router.post('/pr-info', createPRInfoHandler());
@@ -192,5 +204,63 @@ export function createWorktreeRoutes(
createAddRemoteHandler()
);
// Commit log route
router.post(
'/commit-log',
validatePathParams('worktreePath'),
requireValidWorktree,
createCommitLogHandler()
);
// Stash routes
router.post(
'/stash-push',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createStashPushHandler()
);
router.post(
'/stash-list',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createStashListHandler()
);
router.post(
'/stash-apply',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createStashApplyHandler()
);
router.post(
'/stash-drop',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createStashDropHandler()
);
// Cherry-pick route
router.post(
'/cherry-pick',
validatePathParams('worktreePath'),
requireValidWorktree,
createCherryPickHandler()
);
// Generate PR description route
router.post(
'/generate-pr-description',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createGeneratePRDescriptionHandler(settingsService)
);
// Branch commit log route (get commits from a specific branch)
router.post(
'/branch-commit-log',
validatePathParams('worktreePath'),
requireValidWorktree,
createBranchCommitLogHandler()
);
return router;
}

View File

@@ -0,0 +1,123 @@
/**
* POST /branch-commit-log endpoint - Get recent commit history for a specific branch
*
* Similar to commit-log but allows specifying a branch name to get commits from
* any branch, not just the currently checked out one. Useful for cherry-pick workflows
* where you need to browse commits from other branches.
*
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
* the requireValidWorktree middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execGitCommand, getErrorMessage, logError } from '../common.js';
export function createBranchCommitLogHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const {
worktreePath,
branchName,
limit = 20,
} = req.body as {
worktreePath: string;
branchName?: string;
limit?: number;
};
if (!worktreePath) {
res.status(400).json({
success: false,
error: 'worktreePath required',
});
return;
}
// Clamp limit to a reasonable range
const commitLimit = Math.min(Math.max(1, Number(limit) || 20), 100);
// Use the specified branch or default to HEAD
const targetRef = branchName || 'HEAD';
// Get detailed commit log for the specified branch
const logOutput = await execGitCommand(
[
'log',
targetRef,
`--max-count=${commitLimit}`,
'--format=%H%n%h%n%an%n%ae%n%aI%n%s%n%b%n---END---',
],
worktreePath
);
// Parse the output into structured commit objects
const commits: Array<{
hash: string;
shortHash: string;
author: string;
authorEmail: string;
date: string;
subject: string;
body: string;
files: string[];
}> = [];
const commitBlocks = logOutput.split('---END---\n').filter((block) => block.trim());
for (const block of commitBlocks) {
const lines = block.split('\n');
if (lines.length >= 6) {
const hash = lines[0].trim();
// Get list of files changed in this commit
let files: string[] = [];
try {
const filesOutput = await execGitCommand(
['diff-tree', '--no-commit-id', '--name-only', '-r', hash],
worktreePath
);
files = filesOutput
.trim()
.split('\n')
.filter((f) => f.trim());
} catch {
// Ignore errors getting file list
}
commits.push({
hash,
shortHash: lines[1].trim(),
author: lines[2].trim(),
authorEmail: lines[3].trim(),
date: lines[4].trim(),
subject: lines[5].trim(),
body: lines.slice(6).join('\n').trim(),
files,
});
}
}
// If branchName wasn't specified, get current branch for display
let displayBranch = branchName;
if (!displayBranch) {
const branchOutput = await execGitCommand(
['rev-parse', '--abbrev-ref', 'HEAD'],
worktreePath
);
displayBranch = branchOutput.trim();
}
res.json({
success: true,
result: {
branch: displayBranch,
commits,
total: commits.length,
},
});
} catch (error) {
logError(error, 'Get branch commit log failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}

View File

@@ -15,9 +15,10 @@ import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '..
export function createCheckoutBranchHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { worktreePath, branchName } = req.body as {
const { worktreePath, branchName, baseBranch } = req.body as {
worktreePath: string;
branchName: string;
baseBranch?: string; // Optional base branch to create from (defaults to current HEAD)
};
if (!worktreePath) {
@@ -46,6 +47,16 @@ export function createCheckoutBranchHandler() {
return;
}
// Validate base branch if provided
if (baseBranch && !isValidBranchName(baseBranch) && baseBranch !== 'HEAD') {
res.status(400).json({
success: false,
error:
'Invalid base branch name. Must contain only letters, numbers, dots, dashes, underscores, or slashes.',
});
return;
}
// Resolve and validate worktreePath to prevent traversal attacks.
// The validatePathParams middleware checks against ALLOWED_ROOT_DIRECTORY,
// but we also resolve the path and verify it exists as a directory.
@@ -88,7 +99,12 @@ export function createCheckoutBranchHandler() {
}
// Create and checkout the new branch (using argument array to avoid shell injection)
await execGitCommand(['checkout', '-b', branchName], resolvedPath);
// If baseBranch is provided, create the branch from that starting point
const checkoutArgs = ['checkout', '-b', branchName];
if (baseBranch) {
checkoutArgs.push(baseBranch);
}
await execGitCommand(checkoutArgs, resolvedPath);
res.json({
success: true,

View File

@@ -0,0 +1,128 @@
/**
* POST /cherry-pick endpoint - Cherry-pick one or more commits into the current branch
*
* Applies commits from another branch onto the current branch.
* Supports single or multiple commit cherry-picks.
*
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
* the requireValidWorktree middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execGitCommand, getErrorMessage, logError } from '../common.js';
import { createLogger } from '@automaker/utils';
const logger = createLogger('Worktree');
/**
 * Handle POST /cherry-pick.
 *
 * Applies one or more commits onto the current branch. Each hash is
 * validated (hex, plausible length) and verified to resolve to an actual
 * commit object before the cherry-pick runs. On conflict the cherry-pick
 * is aborted so the repository is left clean, and a 409 is returned.
 */
export function createCherryPickHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath, commitHashes, options } = req.body as {
        worktreePath: string;
        commitHashes: string[];
        options?: {
          noCommit?: boolean; // --no-commit: apply changes without committing
        };
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath is required',
        });
        return;
      }

      if (!commitHashes || !Array.isArray(commitHashes) || commitHashes.length === 0) {
        res.status(400).json({
          success: false,
          error: 'commitHashes array is required and must contain at least one commit hash',
        });
        return;
      }

      // Validate each commit hash format: hex, between git's minimum
      // abbreviation length (4) and a full SHA-1 (40). This also rules out
      // anything that could be parsed as a git option.
      for (const hash of commitHashes) {
        if (!/^[a-fA-F0-9]{4,40}$/.test(hash)) {
          res.status(400).json({
            success: false,
            error: `Invalid commit hash format: "${hash}"`,
          });
          return;
        }
      }

      // Verify each hash resolves to a *commit* object. The ^{commit}
      // peeling syntax makes rev-parse fail for trees/blobs/etc., which a
      // bare `--verify <hash>` would accept.
      for (const hash of commitHashes) {
        try {
          await execGitCommand(['rev-parse', '--verify', `${hash}^{commit}`], worktreePath);
        } catch {
          res.status(400).json({
            success: false,
            error: `Commit "${hash}" does not exist`,
          });
          return;
        }
      }

      // Build cherry-pick command args
      const args = ['cherry-pick'];
      if (options?.noCommit) {
        args.push('--no-commit');
      }
      // Add commit hashes in order
      args.push(...commitHashes);

      // Execute the cherry-pick
      try {
        await execGitCommand(args, worktreePath);

        // Get current branch name
        const branchOutput = await execGitCommand(
          ['rev-parse', '--abbrev-ref', 'HEAD'],
          worktreePath
        );

        res.json({
          success: true,
          result: {
            cherryPicked: true,
            commitHashes,
            branch: branchOutput.trim(),
            message: `Successfully cherry-picked ${commitHashes.length} commit(s)`,
          },
        });
      } catch (cherryPickError: unknown) {
        // Check if this is a cherry-pick conflict
        const err = cherryPickError as { stdout?: string; stderr?: string; message?: string };
        const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
        const hasConflicts =
          output.includes('CONFLICT') ||
          output.includes('cherry-pick failed') ||
          output.includes('could not apply');

        if (hasConflicts) {
          // Abort the cherry-pick to leave the repo in a clean state
          try {
            await execGitCommand(['cherry-pick', '--abort'], worktreePath);
          } catch {
            logger.warn('Failed to abort cherry-pick after conflict');
          }

          res.status(409).json({
            success: false,
            error: `Cherry-pick CONFLICT: Could not apply commit(s) cleanly. Conflicts need to be resolved manually.`,
            hasConflicts: true,
          });
          return;
        }

        // Re-throw non-conflict errors
        throw cherryPickError;
      }
    } catch (error) {
      logError(error, 'Cherry-pick failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -0,0 +1,112 @@
/**
* POST /commit-log endpoint - Get recent commit history for a worktree
*
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
* the requireValidWorktree middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execGitCommand, getErrorMessage, logError } from '../common.js';
/**
 * Handle POST /commit-log.
 *
 * Returns the current branch name plus up to `limit` (clamped to 1-100,
 * default 20) most recent commits for the worktree, each with author
 * metadata, message, and the list of files it touched.
 */
export function createCommitLogHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const body = req.body as { worktreePath: string; limit?: number };
      const worktreePath = body.worktreePath;

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath required',
        });
        return;
      }

      // Clamp the requested limit into [1, 100]; non-numeric or zero input
      // falls back to the default of 20.
      const requested = Number(body.limit ?? 20) || 20;
      const commitLimit = Math.min(Math.max(1, requested), 100);

      // Fetch commit metadata in a parseable multi-line format; each record
      // is terminated by a literal ---END--- sentinel line.
      const rawLog = await execGitCommand(
        ['log', `--max-count=${commitLimit}`, '--format=%H%n%h%n%an%n%ae%n%aI%n%s%n%b%n---END---'],
        worktreePath
      );

      type CommitInfo = {
        hash: string;
        shortHash: string;
        author: string;
        authorEmail: string;
        date: string;
        subject: string;
        body: string;
        files: string[];
      };
      const commits: CommitInfo[] = [];

      for (const record of rawLog.split('---END---\n')) {
        // Skip empty/whitespace-only records and records that are too short
        // to contain the six fixed metadata fields.
        if (!record.trim()) continue;
        const fields = record.split('\n');
        if (fields.length < 6) continue;

        const fullHash = fields[0].trim();

        // Collect the files this commit touched. Errors here are non-fatal:
        // the commit is still reported, just without a file list.
        let touchedFiles: string[] = [];
        try {
          // -m makes diff-tree diff merge commits against each parent
          // (otherwise merges produce no output); the Set dedupes files
          // that appear in more than one parent diff.
          const nameOnly = await execGitCommand(
            ['diff-tree', '--no-commit-id', '--name-only', '-r', '-m', fullHash],
            worktreePath
          );
          const seen = new Set<string>();
          for (const line of nameOnly.trim().split('\n')) {
            if (line.trim()) seen.add(line);
          }
          touchedFiles = [...seen];
        } catch {
          // Ignore errors getting the file list
        }

        commits.push({
          hash: fullHash,
          shortHash: fields[1].trim(),
          author: fields[2].trim(),
          authorEmail: fields[3].trim(),
          date: fields[4].trim(),
          subject: fields[5].trim(),
          body: fields.slice(6).join('\n').trim(),
          files: touchedFiles,
        });
      }

      // Resolve the currently checked-out branch name
      const headRef = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);

      res.json({
        success: true,
        result: {
          branch: headRef.trim(),
          commits,
          total: commits.length,
        },
      });
    } catch (error) {
      logError(error, 'Get commit log failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -6,11 +6,12 @@
*/
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { exec, execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
const execFileAsync = promisify(execFile);
export function createCommitHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -48,19 +49,18 @@ export function createCommitHandler() {
// Stage changes - either specific files or all changes
if (files && files.length > 0) {
// Reset any previously staged changes first
await execAsync('git reset HEAD', { cwd: worktreePath }).catch(() => {
await execFileAsync('git', ['reset', 'HEAD'], { cwd: worktreePath }).catch(() => {
// Ignore errors from reset (e.g., if nothing is staged)
});
// Stage only the selected files
const escapedFiles = files.map((f) => `"${f.replace(/"/g, '\\"')}"`).join(' ');
await execAsync(`git add ${escapedFiles}`, { cwd: worktreePath });
// Stage only the selected files (args array avoids shell injection)
await execFileAsync('git', ['add', ...files], { cwd: worktreePath });
} else {
// Stage all changes (original behavior)
await execAsync('git add -A', { cwd: worktreePath });
await execFileAsync('git', ['add', '-A'], { cwd: worktreePath });
}
// Create commit
await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, {
// Create commit (pass message as arg to avoid shell injection)
await execFileAsync('git', ['commit', '-m', message], {
cwd: worktreePath,
});

View File

@@ -20,16 +20,25 @@ const logger = createLogger('CreatePR');
export function createCreatePRHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } =
req.body as {
worktreePath: string;
projectPath?: string;
commitMessage?: string;
prTitle?: string;
prBody?: string;
baseBranch?: string;
draft?: boolean;
};
const {
worktreePath,
projectPath,
commitMessage,
prTitle,
prBody,
baseBranch,
draft,
remote,
} = req.body as {
worktreePath: string;
projectPath?: string;
commitMessage?: string;
prTitle?: string;
prBody?: string;
baseBranch?: string;
draft?: boolean;
remote?: string;
};
if (!worktreePath) {
res.status(400).json({
@@ -110,17 +119,18 @@ export function createCreatePRHandler() {
}
}
// Push the branch to remote
// Push the branch to remote (use selected remote or default to 'origin')
const pushRemote = remote || 'origin';
let pushError: string | null = null;
try {
await execAsync(`git push -u origin ${branchName}`, {
await execAsync(`git push -u ${pushRemote} ${branchName}`, {
cwd: worktreePath,
env: execEnv,
});
} catch {
// If push fails, try with --set-upstream
try {
await execAsync(`git push --set-upstream origin ${branchName}`, {
await execAsync(`git push --set-upstream ${pushRemote} ${branchName}`, {
cwd: worktreePath,
env: execEnv,
});

View File

@@ -11,8 +11,10 @@ import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import fs from 'fs/promises';
import * as secureFs from '../../../lib/secure-fs.js';
import type { EventEmitter } from '../../../lib/events.js';
import type { SettingsService } from '../../../services/settings-service.js';
import { isGitRepo } from '@automaker/git-utils';
import {
getErrorMessage,
@@ -81,7 +83,66 @@ async function findExistingWorktreeForBranch(
}
}
export function createCreateHandler(events: EventEmitter) {
/**
 * Copy configured files from project root into the new worktree.
 * Reads worktreeCopyFiles from project settings and copies each file/directory.
 * Silently skips files that don't exist in the source.
 *
 * @param projectPath - Project root to copy from
 * @param worktreePath - Destination worktree root
 * @param settingsService - Settings source; the function is a no-op when absent
 */
async function copyConfiguredFiles(
  projectPath: string,
  worktreePath: string,
  settingsService?: SettingsService
): Promise<void> {
  if (!settingsService) return;

  try {
    const projectSettings = await settingsService.getProjectSettings(projectPath);
    const copyFiles = projectSettings.worktreeCopyFiles;
    if (!copyFiles || copyFiles.length === 0) return;

    const resolvedProject = path.resolve(projectPath);

    for (const relativePath of copyFiles) {
      // Security: reject absolute paths and anything that resolves outside
      // the project root. Resolving and comparing against the root plus a
      // separator is traversal-safe and, unlike a bare startsWith('..') test
      // on the normalized string, does not falsely reject entries whose
      // names merely begin with ".." (e.g. "..config").
      const normalized = path.normalize(relativePath);
      const resolvedSource = path.resolve(resolvedProject, normalized);
      if (
        path.isAbsolute(normalized) ||
        (resolvedSource !== resolvedProject &&
          !resolvedSource.startsWith(resolvedProject + path.sep))
      ) {
        logger.warn(`Skipping suspicious copy path: ${relativePath}`);
        continue;
      }

      const sourcePath = path.join(resolvedProject, normalized);
      const destPath = path.join(worktreePath, normalized);

      try {
        // Check if source exists (throws ENOENT when it doesn't)
        const stat = await fs.stat(sourcePath);

        // Ensure destination directory exists
        await fs.mkdir(path.dirname(destPath), { recursive: true });

        if (stat.isDirectory()) {
          // Recursively copy directory
          await fs.cp(sourcePath, destPath, { recursive: true, force: true });
          logger.info(`Copied directory "${normalized}" to worktree`);
        } else {
          // Copy single file
          await fs.copyFile(sourcePath, destPath);
          logger.info(`Copied file "${normalized}" to worktree`);
        }
      } catch (err) {
        if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
          logger.debug(`Skipping copy of "${normalized}" - file not found in project root`);
        } else {
          logger.warn(`Failed to copy "${normalized}" to worktree:`, err);
        }
      }
    }
  } catch (error) {
    logger.warn('Failed to read project settings for file copying:', error);
  }
}
export function createCreateHandler(events: EventEmitter, settingsService?: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const { projectPath, branchName, baseBranch } = req.body as {
@@ -200,6 +261,10 @@ export function createCreateHandler(events: EventEmitter) {
// normalizePath converts to forward slashes for API consistency
const absoluteWorktreePath = path.resolve(worktreePath);
// Copy configured files into the new worktree before responding
// This runs synchronously to ensure files are in place before any init script
await copyConfiguredFiles(projectPath, absoluteWorktreePath, settingsService);
// Respond immediately (non-blocking)
res.json({
success: true,

View File

@@ -79,10 +79,12 @@ export function createDiscardChangesHandler() {
const branchName = branchOutput.trim();
// Parse the status output to categorize files
// Git --porcelain format: XY PATH where X=index status, Y=worktree status
// Preserve the exact two-character XY status (no trim) to keep index vs worktree info
const statusLines = status.trim().split('\n').filter(Boolean);
const allFiles = statusLines.map((line) => {
const fileStatus = line.substring(0, 2).trim();
const filePath = line.substring(3).trim();
const fileStatus = line.substring(0, 2);
const filePath = line.slice(3).trim();
return { status: fileStatus, path: filePath };
});
@@ -112,18 +114,21 @@ export function createDiscardChangesHandler() {
for (const file of allFiles) {
if (!filesToDiscard.has(file.path)) continue;
if (file.status === '?') {
// file.status is the raw two-character XY git porcelain status (no trim)
// X = index/staging status, Y = worktree status
const xy = file.status.substring(0, 2);
const indexStatus = xy.charAt(0);
const workTreeStatus = xy.charAt(1);
if (indexStatus === '?' && workTreeStatus === '?') {
untrackedFiles.push(file.path);
} else {
// Check if the file has staged changes (first character of status)
const indexStatus = statusLines
.find((l) => l.substring(3).trim() === file.path)
?.charAt(0);
if (indexStatus && indexStatus !== ' ' && indexStatus !== '?') {
// Check if the file has staged changes (index status X)
if (indexStatus !== ' ' && indexStatus !== '?') {
stagedFiles.push(file.path);
}
// Check for working tree changes (tracked files)
if (file.status === 'M' || file.status === 'D' || file.status === 'A') {
// Check for working tree changes (worktree status Y): handles MM, AM, MD, etc.
if (workTreeStatus === 'M' || workTreeStatus === 'D' || workTreeStatus === 'A') {
trackedModified.push(file.path);
}
}

View File

@@ -0,0 +1,410 @@
/**
* POST /worktree/generate-pr-description endpoint - Generate an AI PR description from git diff
*
* Uses the configured model (via phaseModels.commitMessageModel) to generate a pull request
* title and description from the branch's changes compared to the base branch.
* Defaults to Claude Haiku for speed.
*/
import type { Request, Response } from 'express';
import { exec, execFile } from 'child_process';
import { promisify } from 'util';
import { existsSync } from 'fs';
import { join } from 'path';
import { createLogger } from '@automaker/utils';
import { isCursorModel, stripProviderPrefix } from '@automaker/types';
import { resolvePhaseModel } from '@automaker/model-resolver';
import { ProviderFactory } from '../../../providers/provider-factory.js';
import type { SettingsService } from '../../../services/settings-service.js';
import { getErrorMessage, logError } from '../common.js';
import { getPhaseModelWithOverrides } from '../../../lib/settings-helpers.js';
// Module-scoped logger for this endpoint
const logger = createLogger('GeneratePRDescription');
// Promisified child_process.exec (runs commands through a shell)
const execAsync = promisify(exec);
/** Timeout for AI provider calls in milliseconds (30 seconds) */
const AI_TIMEOUT_MS = 30_000;
/** Max diff size to send to AI (characters) */
const MAX_DIFF_SIZE = 15_000;
/**
 * System prompt instructing the model to emit its answer between
 * ---TITLE--- and ---BODY--- markers, which the handler parses below.
 */
const PR_DESCRIPTION_SYSTEM_PROMPT = `You are a pull request description generator. Your task is to create a clear, well-structured PR title and description based on the git diff and branch information provided.
Output your response in EXACTLY this format (including the markers):
---TITLE---
<a concise PR title, 50-72 chars, imperative mood>
---BODY---
## Summary
<1-3 bullet points describing the key changes>
## Changes
<Detailed list of what was changed and why>
Rules:
- The title should be concise and descriptive (50-72 characters)
- Use imperative mood for the title (e.g., "Add dark mode toggle" not "Added dark mode toggle")
- The description should explain WHAT changed and WHY
- Group related changes together
- Use markdown formatting for the body
- Do NOT include the branch name in the title
- Focus on the user-facing impact when possible
- If there are breaking changes, mention them prominently
- The diff may include both committed changes and uncommitted working directory changes. Treat all changes as part of the PR since uncommitted changes will be committed when the PR is created
- Do NOT distinguish between committed and uncommitted changes in the output - describe all changes as a unified set of PR changes`;
/**
 * Wraps an async iterable with a timeout.
 *
 * Fixes over the naive version: the timeout timer is cleared once iteration
 * finishes (previously it kept running for up to `timeoutMs` after normal
 * completion), and the source iterator is closed on early exit (timeout,
 * consumer break, or error) so upstream work can stop.
 *
 * @param generator Source async iterable to consume.
 * @param timeoutMs Maximum total time allowed for the whole iteration.
 * @throws Error when the source does not produce its next value in time.
 */
async function* withTimeout<T>(
  generator: AsyncIterable<T>,
  timeoutMs: number
): AsyncGenerator<T, void, unknown> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeoutPromise = new Promise<never>((_, reject) => {
    timer = setTimeout(
      () => reject(new Error(`AI provider timed out after ${timeoutMs}ms`)),
      timeoutMs
    );
  });
  const iterator = generator[Symbol.asyncIterator]();
  try {
    let done = false;
    while (!done) {
      const result = await Promise.race([iterator.next(), timeoutPromise]);
      if (result.done) {
        done = true;
      } else {
        yield result.value;
      }
    }
  } finally {
    // Stop the pending timer so it cannot fire (and keep the event loop busy)
    // after iteration has already completed.
    if (timer !== undefined) {
      clearTimeout(timer);
    }
    // Best-effort close of the source iterator on early exit; not awaited so a
    // stalled source cannot hang cleanup.
    try {
      iterator.return?.()?.catch?.(() => {});
    } catch {
      // Ignore close failures
    }
  }
}
/** Request payload for POST /worktree/generate-pr-description */
interface GeneratePRDescriptionRequestBody {
  // Absolute path of the git worktree to describe
  worktreePath: string;
  // Base branch to diff against; the handler defaults to 'main' when omitted
  baseBranch?: string;
}
/** Success response: AI-generated PR title and markdown body */
interface GeneratePRDescriptionSuccessResponse {
  success: true;
  title: string;
  body: string;
}
/** Error response with a human-readable message */
interface GeneratePRDescriptionErrorResponse {
  success: false;
  error: string;
}
/**
 * Factory for the POST /worktree/generate-pr-description handler.
 *
 * Flow: validate the request → collect a diff against the base branch (with
 * several fallbacks) plus uncommitted changes and the commit log → ask the
 * configured AI model for a title/body → parse the ---TITLE---/---BODY---
 * markers out of the response.
 *
 * Security fix: all git invocations use execFile with argument arrays instead
 * of exec with interpolated shell strings, so the request-supplied baseBranch
 * can no longer be used for shell command injection.
 *
 * @param settingsService Optional settings service used to resolve the
 *   commitMessageModel phase model, provider, and credentials.
 * @returns An Express request handler.
 */
export function createGeneratePRDescriptionHandler(
  settingsService?: SettingsService
): (req: Request, res: Response) => Promise<void> {
  // Promisified locally so this handler is self-contained; execFile passes
  // arguments to git directly (no shell).
  const execFileAsync = promisify(execFile);
  /** 5MB stdout buffer for potentially large diffs */
  const DIFF_MAX_BUFFER = 1024 * 1024 * 5;
  /** Conservative allow-list for branch names; also rejects option-like values (e.g. "--output=x") */
  const SAFE_BRANCH_RE = /^[A-Za-z0-9][A-Za-z0-9._/-]*$/;

  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath, baseBranch } = req.body as GeneratePRDescriptionRequestBody;

      if (!worktreePath || typeof worktreePath !== 'string') {
        const response: GeneratePRDescriptionErrorResponse = {
          success: false,
          error: 'worktreePath is required and must be a string',
        };
        res.status(400).json(response);
        return;
      }

      // Validate that the directory exists
      if (!existsSync(worktreePath)) {
        const response: GeneratePRDescriptionErrorResponse = {
          success: false,
          error: 'worktreePath does not exist',
        };
        res.status(400).json(response);
        return;
      }

      // Validate that it's a git repository
      const gitPath = join(worktreePath, '.git');
      if (!existsSync(gitPath)) {
        const response: GeneratePRDescriptionErrorResponse = {
          success: false,
          error: 'worktreePath is not a git repository',
        };
        res.status(400).json(response);
        return;
      }

      // Reject malformed branch names before they ever reach git
      if (
        baseBranch !== undefined &&
        (typeof baseBranch !== 'string' || !SAFE_BRANCH_RE.test(baseBranch))
      ) {
        const response: GeneratePRDescriptionErrorResponse = {
          success: false,
          error: 'baseBranch must be a valid git branch name',
        };
        res.status(400).json(response);
        return;
      }

      logger.info(`Generating PR description for worktree: ${worktreePath}`);

      // Get current branch name
      const { stdout: branchOutput } = await execFileAsync(
        'git',
        ['rev-parse', '--abbrev-ref', 'HEAD'],
        { cwd: worktreePath }
      );
      const branchName = branchOutput.trim();

      // Determine the base branch for comparison
      const base = baseBranch || 'main';

      // Get the diff between current branch and base branch (committed changes).
      // Track whether the diff method used only includes committed changes.
      // `git diff base...HEAD` and `git diff origin/base...HEAD` only show committed changes,
      // while the fallback methods (`git diff HEAD`, `git diff --cached` + `git diff`) already
      // include uncommitted working directory changes.
      let diff = '';
      let diffIncludesUncommitted = false;

      try {
        // First, try to get diff against the base branch
        const { stdout: branchDiff } = await execFileAsync(
          'git',
          ['diff', `${base}...HEAD`],
          { cwd: worktreePath, maxBuffer: DIFF_MAX_BUFFER }
        );
        diff = branchDiff;
        // git diff base...HEAD only shows committed changes
        diffIncludesUncommitted = false;
      } catch {
        // If branch comparison fails (e.g., base branch doesn't exist locally),
        // try comparing against the remote-tracking base
        try {
          const { stdout: remoteDiff } = await execFileAsync(
            'git',
            ['diff', `origin/${base}...HEAD`],
            { cwd: worktreePath, maxBuffer: DIFF_MAX_BUFFER }
          );
          diff = remoteDiff;
          // git diff origin/base...HEAD only shows committed changes
          diffIncludesUncommitted = false;
        } catch {
          // Fall back to getting all uncommitted + committed changes
          try {
            const { stdout: allDiff } = await execFileAsync('git', ['diff', 'HEAD'], {
              cwd: worktreePath,
              maxBuffer: DIFF_MAX_BUFFER,
            });
            diff = allDiff;
            // git diff HEAD includes uncommitted changes
            diffIncludesUncommitted = true;
          } catch {
            // Last resort: get staged + unstaged changes
            const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], {
              cwd: worktreePath,
              maxBuffer: DIFF_MAX_BUFFER,
            });
            const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], {
              cwd: worktreePath,
              maxBuffer: DIFF_MAX_BUFFER,
            });
            diff = stagedDiff + unstagedDiff;
            // These already include uncommitted changes
            diffIncludesUncommitted = true;
          }
        }
      }

      // Check for uncommitted changes (staged + unstaged) to include in the description.
      // When creating a PR, uncommitted changes will be auto-committed, so they should be
      // reflected in the generated description. We only need to fetch uncommitted diffs
      // when the primary diff method (base...HEAD) was used, since it only shows committed changes.
      let hasUncommittedChanges = false;
      try {
        const { stdout: statusOutput } = await execFileAsync('git', ['status', '--porcelain'], {
          cwd: worktreePath,
        });
        hasUncommittedChanges = statusOutput.trim().length > 0;

        if (hasUncommittedChanges && !diffIncludesUncommitted) {
          logger.info('Uncommitted changes detected, including in PR description context');
          let uncommittedDiff = '';

          // Get staged changes
          try {
            const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], {
              cwd: worktreePath,
              maxBuffer: DIFF_MAX_BUFFER,
            });
            if (stagedDiff.trim()) {
              uncommittedDiff += stagedDiff;
            }
          } catch {
            // Ignore staged diff errors
          }

          // Get unstaged changes (tracked files only)
          try {
            const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], {
              cwd: worktreePath,
              maxBuffer: DIFF_MAX_BUFFER,
            });
            if (unstagedDiff.trim()) {
              uncommittedDiff += unstagedDiff;
            }
          } catch {
            // Ignore unstaged diff errors
          }

          // Get list of untracked files for context
          const untrackedFiles = statusOutput
            .split('\n')
            .filter((line) => line.startsWith('??'))
            .map((line) => line.substring(3).trim());
          if (untrackedFiles.length > 0) {
            // Add a summary of untracked (new) files as context
            uncommittedDiff += `\n# New untracked files:\n${untrackedFiles.map((f) => `# + ${f}`).join('\n')}\n`;
          }

          // Append uncommitted changes to the committed diff
          if (uncommittedDiff.trim()) {
            diff = diff + uncommittedDiff;
          }
        }
      } catch {
        // Ignore errors checking for uncommitted changes
      }

      // Also get the commit log for context. Replaces the previous shell
      // `git log base..HEAD ... 2>/dev/null || git log -10 ...` one-liner with
      // explicit, shell-free fallbacks.
      let commitLog = '';
      try {
        const { stdout: logOutput } = await execFileAsync(
          'git',
          ['log', `${base}..HEAD`, '--oneline', '--no-decorate'],
          { cwd: worktreePath, maxBuffer: 1024 * 1024 }
        );
        commitLog = logOutput.trim();
      } catch {
        // Base range failed (e.g. unknown base) - fall back to the last 10 commits
        try {
          const { stdout: logOutput } = await execFileAsync(
            'git',
            ['log', '--oneline', '-10', '--no-decorate'],
            { cwd: worktreePath, maxBuffer: 1024 * 1024 }
          );
          commitLog = logOutput.trim();
        } catch {
          // Ignore commit log errors
        }
      }

      if (!diff.trim() && !commitLog.trim()) {
        const response: GeneratePRDescriptionErrorResponse = {
          success: false,
          error: 'No changes found to generate a PR description from',
        };
        res.status(400).json(response);
        return;
      }

      // Truncate diff if too long
      const truncatedDiff =
        diff.length > MAX_DIFF_SIZE
          ? diff.substring(0, MAX_DIFF_SIZE) + '\n\n[... diff truncated ...]'
          : diff;

      // Build the user prompt
      let userPrompt = `Generate a pull request title and description for the following changes.\n\nBranch: ${branchName}\nBase Branch: ${base}\n`;
      if (commitLog) {
        userPrompt += `\nCommit History:\n${commitLog}\n`;
      }
      if (hasUncommittedChanges) {
        userPrompt += `\nNote: This branch has uncommitted changes that will be included in the PR.\n`;
      }
      if (truncatedDiff) {
        userPrompt += `\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
      }

      // Get model from phase settings with provider info
      const {
        phaseModel: phaseModelEntry,
        provider: claudeCompatibleProvider,
        credentials,
      } = await getPhaseModelWithOverrides(
        'commitMessageModel',
        settingsService,
        worktreePath,
        '[GeneratePRDescription]'
      );
      const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);

      logger.info(
        `Using model for PR description: ${model}`,
        claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
      );

      // Get provider for the model type
      const aiProvider = ProviderFactory.getProviderForModel(model);
      const bareModel = stripProviderPrefix(model);

      // For Cursor models, combine prompts into a single prompt string
      const effectivePrompt = isCursorModel(model)
        ? `${PR_DESCRIPTION_SYSTEM_PROMPT}\n\n${userPrompt}`
        : userPrompt;
      const effectiveSystemPrompt = isCursorModel(model) ? undefined : PR_DESCRIPTION_SYSTEM_PROMPT;

      logger.info(`Using ${aiProvider.getName()} provider for model: ${model}`);

      let responseText = '';
      const stream = aiProvider.executeQuery({
        prompt: effectivePrompt,
        model: bareModel,
        cwd: worktreePath,
        systemPrompt: effectiveSystemPrompt,
        maxTurns: 1,
        allowedTools: [],
        readOnly: true,
        thinkingLevel,
        claudeCompatibleProvider,
        credentials,
      });

      // Accumulate streamed assistant text; a final successful 'result' message
      // replaces the accumulated text. The whole stream is bounded by AI_TIMEOUT_MS.
      for await (const msg of withTimeout(stream, AI_TIMEOUT_MS)) {
        if (msg.type === 'assistant' && msg.message?.content) {
          for (const block of msg.message.content) {
            if (block.type === 'text' && block.text) {
              responseText += block.text;
            }
          }
        } else if (msg.type === 'result' && msg.subtype === 'success' && msg.result) {
          responseText = msg.result;
        }
      }

      const fullResponse = responseText.trim();
      if (!fullResponse || fullResponse.length === 0) {
        logger.warn('Received empty response from model');
        const response: GeneratePRDescriptionErrorResponse = {
          success: false,
          error: 'Failed to generate PR description - empty response',
        };
        res.status(500).json(response);
        return;
      }

      // Parse the response to extract title and body
      let title = '';
      let body = '';
      const titleMatch = fullResponse.match(/---TITLE---\s*\n([\s\S]*?)(?=---BODY---|$)/);
      const bodyMatch = fullResponse.match(/---BODY---\s*\n([\s\S]*?)$/);

      if (titleMatch && bodyMatch) {
        title = titleMatch[1].trim();
        body = bodyMatch[1].trim();
      } else {
        // Fallback: treat first line as title, rest as body
        const lines = fullResponse.split('\n');
        title = lines[0].trim();
        body = lines.slice(1).join('\n').trim();
      }

      // Clean up title - remove any markdown heading markers or surrounding quotes
      title = title.replace(/^#+\s*/, '').replace(/^["']|["']$/g, '');

      logger.info(`Generated PR title: ${title.substring(0, 100)}...`);

      const response: GeneratePRDescriptionSuccessResponse = {
        success: true,
        title,
        body,
      };
      res.json(response);
    } catch (error) {
      logError(error, 'Generate PR description failed');
      const response: GeneratePRDescriptionErrorResponse = {
        success: false,
        error: getErrorMessage(error),
      };
      res.status(500).json(response);
    }
  };
}

View File

@@ -6,11 +6,12 @@
*/
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { exec, execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logWorktreeError } from '../common.js';
const execAsync = promisify(exec);
const execFileAsync = promisify(execFile);
interface BranchInfo {
name: string;
@@ -131,15 +132,17 @@ export function createListBranchesHandler() {
let hasRemoteBranch = false;
try {
// First check if there's a remote tracking branch
const { stdout: upstreamOutput } = await execAsync(
`git rev-parse --abbrev-ref ${currentBranch}@{upstream}`,
const { stdout: upstreamOutput } = await execFileAsync(
'git',
['rev-parse', '--abbrev-ref', `${currentBranch}@{upstream}`],
{ cwd: worktreePath }
);
if (upstreamOutput.trim()) {
hasRemoteBranch = true;
const { stdout: aheadBehindOutput } = await execAsync(
`git rev-list --left-right --count ${currentBranch}@{upstream}...HEAD`,
const { stdout: aheadBehindOutput } = await execFileAsync(
'git',
['rev-list', '--left-right', '--count', `${currentBranch}@{upstream}...HEAD`],
{ cwd: worktreePath }
);
const [behind, ahead] = aheadBehindOutput.trim().split(/\s+/).map(Number);
@@ -150,8 +153,9 @@ export function createListBranchesHandler() {
// No upstream branch set - check if the branch exists on any remote
try {
// Check if there's a matching branch on origin (most common remote)
const { stdout: remoteBranchOutput } = await execAsync(
`git ls-remote --heads origin ${currentBranch}`,
const { stdout: remoteBranchOutput } = await execFileAsync(
'git',
['ls-remote', '--heads', 'origin', currentBranch],
{ cwd: worktreePath, timeout: 5000 }
);
hasRemoteBranch = remoteBranchOutput.trim().length > 0;

View File

@@ -15,8 +15,9 @@ const execAsync = promisify(exec);
export function createPullHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { worktreePath } = req.body as {
const { worktreePath, remote } = req.body as {
worktreePath: string;
remote?: string;
};
if (!worktreePath) {
@@ -33,8 +34,11 @@ export function createPullHandler() {
});
const branchName = branchOutput.trim();
// Use specified remote or default to 'origin'
const targetRemote = remote || 'origin';
// Fetch latest from remote
await execAsync('git fetch origin', { cwd: worktreePath });
await execAsync(`git fetch ${targetRemote}`, { cwd: worktreePath });
// Check if there are local changes that would be overwritten
const { stdout: status } = await execAsync('git status --porcelain', {
@@ -52,7 +56,7 @@ export function createPullHandler() {
// Pull latest changes
try {
const { stdout: pullOutput } = await execAsync(`git pull origin ${branchName}`, {
const { stdout: pullOutput } = await execAsync(`git pull ${targetRemote} ${branchName}`, {
cwd: worktreePath,
});
@@ -75,7 +79,7 @@ export function createPullHandler() {
if (errorMsg.includes('no tracking information')) {
res.status(400).json({
success: false,
error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=origin/${branchName}`,
error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}`,
});
return;
}

View File

@@ -0,0 +1,103 @@
/**
* POST /stash-apply endpoint - Apply or pop a stash in a worktree
*
* Applies a specific stash entry to the working directory.
* Can either "apply" (keep stash) or "pop" (remove stash after applying).
*
* Note: Git repository validation (isGitRepo) is handled by
* the requireGitRepoOnly middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';
const execFileAsync = promisify(execFile);
/**
 * Factory for the POST /stash-apply handler.
 *
 * Applies (`git stash apply`) or pops (`git stash pop`) the stash entry at
 * `stashIndex`. A result with conflicts is reported as success with
 * hasConflicts=true so the client can prompt the user to resolve them.
 *
 * Fix: stashIndex is now validated as a non-negative integer before being
 * interpolated into the `stash@{N}` ref, so non-numeric or negative values
 * produce a clear 400 instead of an opaque git error.
 */
export function createStashApplyHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath, stashIndex, pop } = req.body as {
        worktreePath: string;
        stashIndex: number;
        pop?: boolean;
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath required',
        });
        return;
      }

      if (stashIndex === undefined || stashIndex === null) {
        res.status(400).json({
          success: false,
          error: 'stashIndex required',
        });
        return;
      }

      // Reject non-integer / negative indices up front: "stash@{1.5}" or
      // "stash@{-1}" is not a valid stash ref.
      if (!Number.isInteger(stashIndex) || stashIndex < 0) {
        res.status(400).json({
          success: false,
          error: 'stashIndex must be a non-negative integer',
        });
        return;
      }

      const stashRef = `stash@{${stashIndex}}`;
      // 'pop' removes the stash entry after applying; 'apply' keeps it
      const operation = pop ? 'pop' : 'apply';

      try {
        const { stdout, stderr } = await execFileAsync('git', ['stash', operation, stashRef], {
          cwd: worktreePath,
        });

        const output = `${stdout}\n${stderr}`;

        // Check for conflict markers in the output
        if (output.includes('CONFLICT') || output.includes('Merge conflict')) {
          res.json({
            success: true,
            result: {
              applied: true,
              hasConflicts: true,
              operation,
              stashIndex,
              message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
            },
          });
          return;
        }

        res.json({
          success: true,
          result: {
            applied: true,
            hasConflicts: false,
            operation,
            stashIndex,
            message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} successfully`,
          },
        });
      } catch (error) {
        const errorMsg = getErrorMessage(error);

        // A conflicting apply can exit non-zero; still report applied-with-conflicts
        if (errorMsg.includes('CONFLICT') || errorMsg.includes('Merge conflict')) {
          res.json({
            success: true,
            result: {
              applied: true,
              hasConflicts: true,
              operation,
              stashIndex,
              message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
            },
          });
          return;
        }

        // Not a conflict - let the outer handler convert it to a 500
        throw error;
      }
    } catch (error) {
      logError(error, 'Stash apply failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -0,0 +1,60 @@
/**
* POST /stash-drop endpoint - Drop (delete) a stash entry
*
* Removes a specific stash entry from the stash list.
*
* Note: Git repository validation (isGitRepo) is handled by
* the requireGitRepoOnly middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';
const execFileAsync = promisify(execFile);
/**
 * Factory for the POST /stash-drop handler.
 *
 * Permanently deletes the stash entry at `stashIndex` (this cannot be undone
 * through the API).
 *
 * Fix: stashIndex is now validated as a non-negative integer before being
 * interpolated into the `stash@{N}` ref (consistent with the stash-apply
 * endpoint), so invalid values produce a clear 400 instead of an opaque
 * git error.
 */
export function createStashDropHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath, stashIndex } = req.body as {
        worktreePath: string;
        stashIndex: number;
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath required',
        });
        return;
      }

      if (stashIndex === undefined || stashIndex === null) {
        res.status(400).json({
          success: false,
          error: 'stashIndex required',
        });
        return;
      }

      // Reject non-integer / negative indices up front: "stash@{1.5}" or
      // "stash@{-1}" is not a valid stash ref.
      if (!Number.isInteger(stashIndex) || stashIndex < 0) {
        res.status(400).json({
          success: false,
          error: 'stashIndex must be a non-negative integer',
        });
        return;
      }

      const stashRef = `stash@{${stashIndex}}`;

      await execFileAsync('git', ['stash', 'drop', stashRef], {
        cwd: worktreePath,
      });

      res.json({
        success: true,
        result: {
          dropped: true,
          stashIndex,
          message: `Stash ${stashRef} dropped successfully`,
        },
      });
    } catch (error) {
      logError(error, 'Stash drop failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -0,0 +1,122 @@
/**
* POST /stash-list endpoint - List all stashes in a worktree
*
* Returns a list of all stash entries with their index, message, branch, and date.
* Also includes the list of files changed in each stash.
*
* Note: Git repository validation (isGitRepo) is handled by
* the requireGitRepoOnly middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';
const execFileAsync = promisify(execFile);
/** A single entry from `git stash list`, enriched with the files it touches. */
interface StashEntry {
  // Position in the stash list (the N in stash@{N}); 0 is the most recent
  index: number;
  // Stash subject line, e.g. "WIP on main: abc123 some message"
  message: string;
  // Branch name parsed from the message, or '' if it could not be determined
  branch: string;
  // Author date in strict ISO 8601 (%aI) format
  date: string;
  // Paths of files changed in the stash (empty if listing the files failed)
  files: string[];
}
/**
 * Factory for the POST /stash-list handler.
 *
 * Returns every stash entry with its index, message, parsed branch, ISO date,
 * and the list of files it changes.
 *
 * Improvement: the per-stash `git stash show` calls (one per entry) are now
 * issued in parallel via Promise.all instead of sequentially, removing the
 * N+1 latency for repositories with many stashes. Promise.all preserves input
 * order, so the response stays sorted exactly as `git stash list` emitted it.
 */
export function createStashListHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath } = req.body as {
        worktreePath: string;
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath required',
        });
        return;
      }

      // Get stash list with format: index, message, date
      // Use %aI (strict ISO 8601) instead of %ai to ensure cross-browser compatibility
      const { stdout: stashOutput } = await execFileAsync(
        'git',
        ['stash', 'list', '--format=%gd|||%s|||%aI'],
        { cwd: worktreePath }
      );

      if (!stashOutput.trim()) {
        res.json({
          success: true,
          result: {
            stashes: [],
            total: 0,
          },
        });
        return;
      }

      const stashLines = stashOutput
        .trim()
        .split('\n')
        .filter((l) => l.trim());

      // Split each line on the ||| delimiter, dropping malformed lines
      const parsedLines = stashLines
        .map((line) => line.split('|||'))
        .filter((parts) => parts.length >= 3);

      // Fetch the per-stash file lists in parallel (one `git stash show` each)
      const stashes: StashEntry[] = await Promise.all(
        parsedLines.map(async (parts) => {
          const refSpec = parts[0].trim(); // e.g., "stash@{0}"
          const message = parts[1].trim();
          const date = parts[2].trim();

          // Extract index from stash@{N}
          const indexMatch = refSpec.match(/stash@\{(\d+)\}/);
          const index = indexMatch ? parseInt(indexMatch[1], 10) : 0;

          // Extract branch name from message (format: "WIP on branch: hash message" or "On branch: hash message")
          let branch = '';
          const branchMatch = message.match(/^(?:WIP on|On) ([^:]+):/);
          if (branchMatch) {
            branch = branchMatch[1];
          }

          // Get list of files in this stash
          let files: string[] = [];
          try {
            const { stdout: filesOutput } = await execFileAsync(
              'git',
              ['stash', 'show', refSpec, '--name-only'],
              { cwd: worktreePath }
            );
            files = filesOutput
              .trim()
              .split('\n')
              .filter((f) => f.trim());
          } catch {
            // Ignore errors getting file list
          }

          return { index, message, branch, date, files };
        })
      );

      res.json({
        success: true,
        result: {
          stashes,
          total: stashes.length,
        },
      });
    } catch (error) {
      logError(error, 'Stash list failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -0,0 +1,87 @@
/**
* POST /stash-push endpoint - Stash changes in a worktree
*
* Stashes uncommitted changes (including untracked files) with an optional message.
* Supports selective file stashing when a files array is provided.
*
* Note: Git repository validation (isGitRepo) is handled by
* the requireGitRepoOnly middleware in index.ts
*/
import type { Request, Response } from 'express';
import { execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';
const execFileAsync = promisify(execFile);
/**
 * Factory for the POST /stash-push handler.
 *
 * Stashes uncommitted changes (untracked files included) with an optional
 * message. When a `files` array is supplied, only those pathspecs are stashed.
 * A clean working tree is reported as success with stashed=false.
 */
export function createStashPushHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath, message, files } = req.body as {
        worktreePath: string;
        message?: string;
        files?: string[];
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath required',
        });
        return;
      }

      // Small helper: run a git command inside the target worktree
      const git = (args: string[]) => execFileAsync('git', args, { cwd: worktreePath });

      // Nothing to do when the working tree is already clean
      const { stdout: statusOutput } = await git(['status', '--porcelain']);
      if (statusOutput.trim().length === 0) {
        res.json({
          success: true,
          result: {
            stashed: false,
            message: 'No changes to stash',
          },
        });
        return;
      }

      // Assemble `git stash push` arguments; untracked files are always included
      const trimmedMessage = message?.trim();
      const pushArgs = ['stash', 'push', '--include-untracked'];
      if (trimmedMessage) {
        pushArgs.push('-m', trimmedMessage);
      }
      if (files?.length) {
        // Restrict the stash to the given pathspecs (after the '--' separator)
        pushArgs.push('--', ...files);
      }
      await git(pushArgs);

      // Report which branch the stash was taken from
      const { stdout: headOutput } = await git(['rev-parse', '--abbrev-ref', 'HEAD']);
      const branchName = headOutput.trim();

      res.json({
        success: true,
        result: {
          stashed: true,
          branch: branchName,
          message: trimmedMessage || `WIP on ${branchName}`,
        },
      });
    } catch (error) {
      logError(error, 'Stash push failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -16,47 +16,22 @@
*/
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { execFile } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
const execFileAsync = promisify(execFile);
function isExcludedWorktreeLine(line: string): boolean {
return line.includes('.worktrees/') || line.endsWith('.worktrees');
}
function isUntrackedLine(line: string): boolean {
return line.startsWith('?? ');
}
function isBlockingChangeLine(line: string): boolean {
if (!line.trim()) return false;
if (isExcludedWorktreeLine(line)) return false;
if (isUntrackedLine(line)) return false;
return true;
}
/**
* Check if there are uncommitted changes in the working directory
* Excludes .worktrees/ directory which is created by automaker
*/
async function hasUncommittedChanges(cwd: string): Promise<boolean> {
try {
const { stdout } = await execAsync('git status --porcelain', { cwd });
const lines = stdout.trim().split('\n').filter(isBlockingChangeLine);
return lines.length > 0;
} catch {
return false;
}
}
/**
* Check if there are any changes at all (including untracked) that should be stashed
*/
async function hasAnyChanges(cwd: string): Promise<boolean> {
try {
const { stdout } = await execAsync('git status --porcelain', { cwd });
const { stdout } = await execFileAsync('git', ['status', '--porcelain'], { cwd });
const lines = stdout
.trim()
.split('\n')
@@ -78,17 +53,17 @@ async function hasAnyChanges(cwd: string): Promise<boolean> {
async function stashChanges(cwd: string, message: string): Promise<boolean> {
try {
// Get stash count before
const { stdout: beforeCount } = await execAsync('git stash list', { cwd });
const { stdout: beforeCount } = await execFileAsync('git', ['stash', 'list'], { cwd });
const countBefore = beforeCount
.trim()
.split('\n')
.filter((l) => l.trim()).length;
// Stash including untracked files
await execAsync(`git stash push --include-untracked -m "${message}"`, { cwd });
await execFileAsync('git', ['stash', 'push', '--include-untracked', '-m', message], { cwd });
// Get stash count after to verify something was stashed
const { stdout: afterCount } = await execAsync('git stash list', { cwd });
const { stdout: afterCount } = await execFileAsync('git', ['stash', 'list'], { cwd });
const countAfter = afterCount
.trim()
.split('\n')
@@ -108,7 +83,7 @@ async function popStash(
cwd: string
): Promise<{ success: boolean; hasConflicts: boolean; error?: string }> {
try {
const { stdout, stderr } = await execAsync('git stash pop', { cwd });
const { stdout, stderr } = await execFileAsync('git', ['stash', 'pop'], { cwd });
const output = `${stdout}\n${stderr}`;
// Check for conflict markers in the output
if (output.includes('CONFLICT') || output.includes('Merge conflict')) {
@@ -129,7 +104,7 @@ async function popStash(
*/
async function fetchRemotes(cwd: string): Promise<void> {
try {
await execAsync('git fetch --all --quiet', {
await execFileAsync('git', ['fetch', '--all', '--quiet'], {
cwd,
timeout: 15000, // 15 second timeout
});
@@ -155,7 +130,9 @@ function parseRemoteBranch(branchName: string): { remote: string; branch: string
*/
async function isRemoteBranch(cwd: string, branchName: string): Promise<boolean> {
try {
const { stdout } = await execAsync('git branch -r --format="%(refname:short)"', { cwd });
const { stdout } = await execFileAsync('git', ['branch', '-r', '--format=%(refname:short)'], {
cwd,
});
const remoteBranches = stdout
.trim()
.split('\n')
@@ -172,7 +149,7 @@ async function isRemoteBranch(cwd: string, branchName: string): Promise<boolean>
*/
async function localBranchExists(cwd: string, branchName: string): Promise<boolean> {
try {
await execAsync(`git rev-parse --verify "refs/heads/${branchName}"`, { cwd });
await execFileAsync('git', ['rev-parse', '--verify', `refs/heads/${branchName}`], { cwd });
return true;
} catch {
return false;
@@ -204,9 +181,11 @@ export function createSwitchBranchHandler() {
}
// Get current branch
const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
cwd: worktreePath,
});
const { stdout: currentBranchOutput } = await execFileAsync(
'git',
['rev-parse', '--abbrev-ref', 'HEAD'],
{ cwd: worktreePath }
);
const previousBranch = currentBranchOutput.trim();
// Determine the actual target branch name for checkout
@@ -243,7 +222,7 @@ export function createSwitchBranchHandler() {
// Check if target branch exists (locally or as remote ref)
if (!isRemote) {
try {
await execAsync(`git rev-parse --verify "${branchName}"`, {
await execFileAsync('git', ['rev-parse', '--verify', branchName], {
cwd: worktreePath,
});
} catch {
@@ -271,16 +250,16 @@ export function createSwitchBranchHandler() {
if (parsed) {
if (await localBranchExists(worktreePath, parsed.branch)) {
// Local branch exists, just checkout
await execAsync(`git checkout "${parsed.branch}"`, { cwd: worktreePath });
await execFileAsync('git', ['checkout', parsed.branch], { cwd: worktreePath });
} else {
// Create local tracking branch from remote
await execAsync(`git checkout -b "${parsed.branch}" "${branchName}"`, {
await execFileAsync('git', ['checkout', '-b', parsed.branch, branchName], {
cwd: worktreePath,
});
}
}
} else {
await execAsync(`git checkout "${targetBranch}"`, { cwd: worktreePath });
await execFileAsync('git', ['checkout', targetBranch], { cwd: worktreePath });
}
// Fetch latest from remotes after switching

View File

@@ -64,7 +64,52 @@ export function createZaiRoutes(
router.post('/configure', async (req: Request, res: Response) => {
try {
const { apiToken, apiHost } = req.body;
const result = await usageService.configure({ apiToken, apiHost }, settingsService);
// Validate apiToken: must be present and a string
if (apiToken === undefined || apiToken === null || typeof apiToken !== 'string') {
res.status(400).json({
success: false,
error: 'Invalid request: apiToken is required and must be a string',
});
return;
}
// Validate apiHost if provided: must be a string and a well-formed URL
if (apiHost !== undefined && apiHost !== null) {
if (typeof apiHost !== 'string') {
res.status(400).json({
success: false,
error: 'Invalid request: apiHost must be a string',
});
return;
}
// Validate that apiHost is a well-formed URL
try {
const parsedUrl = new URL(apiHost);
if (parsedUrl.protocol !== 'http:' && parsedUrl.protocol !== 'https:') {
res.status(400).json({
success: false,
error: 'Invalid request: apiHost must be a valid HTTP or HTTPS URL',
});
return;
}
} catch {
res.status(400).json({
success: false,
error: 'Invalid request: apiHost must be a well-formed URL',
});
return;
}
}
// Pass only the sanitized values to the service
const sanitizedToken = apiToken.trim();
const sanitizedHost = typeof apiHost === 'string' ? apiHost.trim() : undefined;
const result = await usageService.configure(
{ apiToken: sanitizedToken, apiHost: sanitizedHost },
settingsService
);
res.json(result);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';

View File

@@ -4,6 +4,7 @@
import type { Feature } from '@automaker/types';
import { createLogger, classifyError } from '@automaker/utils';
import { areDependenciesSatisfied } from '@automaker/dependency-resolver';
import type { TypedEventBus } from './typed-event-bus.js';
import type { ConcurrencyManager } from './concurrency-manager.js';
import type { SettingsService } from './settings-service.js';
@@ -64,6 +65,7 @@ export type ClearExecutionStateFn = (
) => Promise<void>;
export type ResetStuckFeaturesFn = (projectPath: string) => Promise<void>;
export type IsFeatureFinishedFn = (feature: Feature) => boolean;
export type LoadAllFeaturesFn = (projectPath: string) => Promise<Feature[]>;
export class AutoLoopCoordinator {
private autoLoopsByProject = new Map<string, ProjectAutoLoopState>();
@@ -78,7 +80,8 @@ export class AutoLoopCoordinator {
private clearExecutionStateFn: ClearExecutionStateFn,
private resetStuckFeaturesFn: ResetStuckFeaturesFn,
private isFeatureFinishedFn: IsFeatureFinishedFn,
private isFeatureRunningFn: (featureId: string) => boolean,
// Optional loader for ALL features (not only pending ones) so dependency
// checks can see completed features; when omitted, pending features are
// used as the dependency universe (see the feature-selection loop).
private loadAllFeaturesFn?: LoadAllFeaturesFn
) {}
/**
@@ -178,9 +181,31 @@ export class AutoLoopCoordinator {
await this.sleep(10000, projectState.abortController.signal);
continue;
}
const nextFeature = pendingFeatures.find(
(f) => !this.isFeatureRunningFn(f.id) && !this.isFeatureFinishedFn(f)
// Load all features for dependency checking (if callback provided)
const allFeatures = this.loadAllFeaturesFn
? await this.loadAllFeaturesFn(projectPath)
: pendingFeatures;
// Filter to eligible features: not running, not finished, and dependencies satisfied
const eligibleFeatures = pendingFeatures.filter(
(f) =>
!this.isFeatureRunningFn(f.id) &&
!this.isFeatureFinishedFn(f) &&
areDependenciesSatisfied(f, allFeatures)
);
// Sort eligible features by priority (lower number = higher priority, default 2)
eligibleFeatures.sort((a, b) => (a.priority ?? 2) - (b.priority ?? 2));
const nextFeature = eligibleFeatures[0] ?? null;
if (nextFeature) {
logger.info(
`Auto-loop selected feature "${nextFeature.title || nextFeature.id}" ` +
`(priority=${nextFeature.priority ?? 2}) from ${eligibleFeatures.length} eligible features`
);
}
if (nextFeature) {
projectState.hasEmittedIdleEvent = false;
this.executeFeatureFn(

View File

@@ -324,7 +324,8 @@ export class AutoModeServiceFacade {
feature.status === 'completed' ||
feature.status === 'verified' ||
feature.status === 'waiting_approval',
(featureId) => concurrencyManager.isRunning(featureId)
(featureId) => concurrencyManager.isRunning(featureId),
async (pPath) => featureLoader.getAll(pPath)
);
// ExecutionService - runAgentFn calls AgentExecutor.execute

View File

@@ -729,6 +729,7 @@ export class SettingsService {
anthropic: { configured: boolean; masked: string };
google: { configured: boolean; masked: string };
openai: { configured: boolean; masked: string };
zai: { configured: boolean; masked: string };
}> {
const credentials = await this.getCredentials();
@@ -750,6 +751,10 @@ export class SettingsService {
configured: !!credentials.apiKeys.openai,
masked: maskKey(credentials.apiKeys.openai),
},
zai: {
configured: !!credentials.apiKeys.zai,
masked: maskKey(credentials.apiKeys.zai),
},
};
}

View File

@@ -171,7 +171,11 @@ export class ZaiUsageService {
*/
getApiHost(): string {
  // Priority: 1. Z_AI_API_HOST env override, 2. instance host (this.apiHost).
  // (The env var wins — the code below checks it first.)
  if (process.env.Z_AI_API_HOST) {
    const envHost = process.env.Z_AI_API_HOST.trim();
    // The env var may carry a full URL; if it is a bare host, assume https.
    return envHost.startsWith('http') ? envHost : `https://${envHost}`;
  }
  return this.apiHost;
}
/**
@@ -242,8 +246,7 @@ export class ZaiUsageService {
}
// Explicit quota-URL override wins; otherwise derive the endpoint from the
// configured API host (which itself honors the Z_AI_API_HOST override).
const quotaUrl =
  process.env.Z_AI_QUOTA_URL || `${this.getApiHost()}/api/monitor/usage/quota/limit`;
logger.info(`[verify] Testing API key against: ${quotaUrl}`);

View File

@@ -64,7 +64,7 @@ describe('CLI Detection Framework', () => {
});
it('should handle unsupported platform', () => {
  // Cast via NodeJS.Platform rather than `any` to keep type checking honest.
  const instructions = getInstallInstructions('claude', 'unknown-platform' as NodeJS.Platform);
  expect(instructions).toContain('No installation instructions available');
});
});
@@ -339,15 +339,17 @@ describe('Performance Tests', () => {
// Edge Cases
describe('Edge Cases', () => {
it('should handle empty CLI names', async () => {
  // Empty string is not a valid CLI id; detectCli is expected to reject.
  await expect(detectCli('' as unknown as Parameters<typeof detectCli>[0])).rejects.toThrow();
});
it('should handle null CLI names', async () => {
  // null is outside detectCli's parameter type; cast through unknown to pass it.
  await expect(detectCli(null as unknown as Parameters<typeof detectCli>[0])).rejects.toThrow();
});
it('should handle undefined CLI names', async () => {
  // undefined is outside detectCli's parameter type; cast through unknown to pass it.
  await expect(
    detectCli(undefined as unknown as Parameters<typeof detectCli>[0])
  ).rejects.toThrow();
});
it('should handle malformed error objects', () => {