feat: Address review comments, add stage/unstage functionality, conflict resolution improvements, support for Sonnet 4.6

This commit is contained in:
gsxdsm
2026-02-18 18:58:33 -08:00
parent df9a6314da
commit 983eb21faa
66 changed files with 2317 additions and 823 deletions

View File

@@ -6,7 +6,12 @@
* import from here rather than defining their own copy.
*/
import fs from 'fs/promises';
import path from 'path';
import { spawnProcess } from '@automaker/platform';
import { createLogger } from '@automaker/utils';
const logger = createLogger('GitLib');
// ============================================================================
// Secure Command Execution
@@ -80,3 +85,110 @@ export async function getCurrentBranch(worktreePath: string): Promise<string> {
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
return branchOutput.trim();
}
// ============================================================================
// Index Lock Recovery
// ============================================================================
/**
 * Check whether an error message indicates a stale git index lock file.
 *
 * Git operations that write to the index (e.g. `git stash push`) will fail
 * with "could not write index" or "Unable to create ... .lock" when a
 * `.git/index.lock` file exists from a previously interrupted operation.
 *
 * @param errorMessage - The error string from a failed git command
 * @returns true if the error looks like a stale index lock issue
 */
export function isIndexLockError(errorMessage: string): boolean {
  const lower = errorMessage.toLowerCase();
  // Any message mentioning "index.lock" already covers the
  // "unable to create ... index.lock" case, so the previous compound
  // check was redundant.
  return lower.includes('could not write index') || lower.includes('index.lock');
}
/**
 * Attempt to remove a stale `.git/index.lock` file for the given worktree.
 *
 * The `.git` directory is located via `git rev-parse --git-dir`, which
 * yields the correct path for both regular repositories and linked
 * worktrees.
 *
 * @param worktreePath - Path to the git worktree (or main repo)
 * @returns true if a lock file was found and removed, false otherwise
 */
export async function removeStaleIndexLock(worktreePath: string): Promise<boolean> {
  try {
    // `rev-parse --git-dir` may print a relative path; resolve it against
    // the worktree so the lock path is always absolute.
    const gitDirOutput = await execGitCommand(['rev-parse', '--git-dir'], worktreePath);
    const lockFilePath = path.join(path.resolve(worktreePath, gitDirOutput.trim()), 'index.lock');

    const lockExists = await fs
      .access(lockFilePath)
      .then(() => true)
      .catch(() => false);
    if (!lockExists) {
      // No lock file — nothing to clean up.
      return false;
    }

    await fs.unlink(lockFilePath);
    logger.info('Removed stale index.lock file', { worktreePath, lockFilePath });
    return true;
  } catch (err) {
    logger.warn('Failed to remove stale index.lock file', {
      worktreePath,
      error: err instanceof Error ? err.message : String(err),
    });
    return false;
  }
}
/**
 * Execute a git command with automatic retry when a stale index.lock is
 * detected.
 *
 * If the command fails with an error indicating a locked index file, the
 * stale `.git/index.lock` is removed and the command is retried exactly
 * once. This helps commands like `git stash push`, which write to the
 * index and commonly fail after a previously interrupted git operation.
 *
 * @param args - Array of git command arguments
 * @param cwd - Working directory to execute the command in
 * @param env - Optional additional environment variables
 * @returns Promise resolving to stdout output
 * @throws The original error if retry also fails, or a non-lock error
 */
export async function execGitCommandWithLockRetry(
  args: string[],
  cwd: string,
  env?: Record<string, string>
): Promise<string> {
  try {
    return await execGitCommand(args, cwd, env);
  } catch (error: unknown) {
    const failure = error as { message?: string; stderr?: string };
    const details = failure.stderr || failure.message || '';

    // Anything that is not a lock problem is surfaced unchanged.
    if (!isIndexLockError(details)) {
      throw error;
    }

    logger.info('Git command failed due to index lock, attempting cleanup and retry', {
      cwd,
      args: args.join(' '),
    });

    if (!(await removeStaleIndexLock(cwd))) {
      // Cleanup did not succeed — re-throw the original failure.
      throw error;
    }

    // Single retry now that the stale lock has been removed.
    return await execGitCommand(args, cwd, env);
  }
}

View File

@@ -343,6 +343,18 @@ export class ClaudeProvider extends BaseProvider {
tier: 'premium' as const,
default: true,
},
{
id: 'claude-sonnet-4-6',
name: 'Claude Sonnet 4.6',
modelString: 'claude-sonnet-4-6',
provider: 'anthropic',
description: 'Balanced performance and cost with enhanced reasoning',
contextWindow: 200000,
maxOutputTokens: 128000,
supportsVision: true,
supportsTools: true,
tier: 'standard' as const,
},
{
id: 'claude-sonnet-4-20250514',
name: 'Claude Sonnet 4',

View File

@@ -245,15 +245,9 @@ async function resolveCodexExecutionPlan(options: ExecuteOptions): Promise<Codex
throw new Error(ERROR_CODEX_CLI_REQUIRED);
}
if (!cliAuthenticated) {
throw new Error(ERROR_CODEX_AUTH_REQUIRED);
}
return {
mode: CODEX_EXECUTION_MODE_CLI,
cliPath,
openAiApiKey,
};
// At this point, neither hasCliNativeAuth nor hasApiKey is true,
// so authentication is required regardless.
throw new Error(ERROR_CODEX_AUTH_REQUIRED);
}
function getEventType(event: Record<string, unknown>): string | null {

View File

@@ -15,6 +15,9 @@ const SDK_HISTORY_HEADER = 'Current request:\n';
const DEFAULT_RESPONSE_TEXT = '';
const SDK_ERROR_DETAILS_LABEL = 'Details:';
type SdkReasoningEffort = 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';
const SDK_REASONING_EFFORTS = new Set<string>(['minimal', 'low', 'medium', 'high', 'xhigh']);
type PromptBlock = {
type: string;
text?: string;
@@ -103,9 +106,6 @@ export async function* executeCodexSdkQuery(
// The model must be passed to startThread/resumeThread so the SDK
// knows which model to use for the conversation. Without this,
// the SDK may use a default model that the user doesn't have access to.
type SdkReasoningEffort = 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';
const SDK_REASONING_EFFORTS = new Set<string>(['minimal', 'low', 'medium', 'high', 'xhigh']);
const threadOptions: {
model?: string;
modelReasoningEffort?: SdkReasoningEffort;
@@ -118,6 +118,7 @@ export async function* executeCodexSdkQuery(
// Add reasoning effort to thread options if model supports it
if (
options.reasoningEffort &&
options.model &&
supportsReasoningEffort(options.model) &&
options.reasoningEffort !== 'none' &&
SDK_REASONING_EFFORTS.has(options.reasoningEffort)

View File

@@ -42,7 +42,7 @@ import {
const logger = createLogger('CopilotProvider');
// Default bare model (without copilot- prefix) for SDK calls
const DEFAULT_BARE_MODEL = 'claude-sonnet-4.5';
const DEFAULT_BARE_MODEL = 'claude-sonnet-4.6';
// =============================================================================
// SDK Event Types (from @github/copilot-sdk)

View File

@@ -549,8 +549,15 @@ export class OpencodeProvider extends CliProvider {
// sdkSessionId IS set — the CLI will receive `--session <id>`.
// If that session no longer exists, intercept the error and retry fresh.
//
// To avoid buffering the entire stream in memory for long-lived sessions,
// we only buffer an initial window of messages until we observe a healthy
// (non-error) message. Once a healthy message is seen, we flush the buffer
// and switch to direct passthrough, while still watching for session errors
// via isSessionNotFoundError on any subsequent error messages.
const buffered: ProviderMessage[] = [];
let sessionError = false;
let seenHealthyMessage = false;
try {
for await (const msg of super.executeQuery(options)) {
@@ -565,13 +572,30 @@ export class OpencodeProvider extends CliProvider {
break; // stop consuming the failed stream
}
// Non-session error — clean and buffer
// Non-session error — clean it
if (msg.error && typeof msg.error === 'string') {
msg.error = OpencodeProvider.cleanErrorMessage(msg.error);
}
} else {
// A non-error message is a healthy signal — stop buffering after this
seenHealthyMessage = true;
}
buffered.push(msg);
if (seenHealthyMessage && buffered.length > 0) {
// Flush the pre-healthy buffer first, then switch to passthrough
for (const bufferedMsg of buffered) {
yield bufferedMsg;
}
buffered.length = 0;
}
if (seenHealthyMessage) {
// Passthrough mode — yield directly without buffering
yield msg;
} else {
// Still in initial window — buffer until we see a healthy message
buffered.push(msg);
}
}
} catch (error) {
// Also handle thrown exceptions (e.g. from mapError in cli-provider)
@@ -602,12 +626,15 @@ export class OpencodeProvider extends CliProvider {
}
yield retryMsg;
}
} else {
// No session error — flush buffered messages to the consumer
} else if (buffered.length > 0) {
// No session error and still have buffered messages (stream ended before
// any healthy message was observed) — flush them to the consumer
for (const msg of buffered) {
yield msg;
}
}
// If seenHealthyMessage is true, all messages have already been yielded
// directly in passthrough mode — nothing left to flush.
}
/**
@@ -673,7 +700,7 @@ export class OpencodeProvider extends CliProvider {
return {
type: 'error',
session_id: finishEvent.sessionID,
error: finishEvent.part.error,
error: OpencodeProvider.cleanErrorMessage(finishEvent.part.error),
};
}
@@ -682,7 +709,7 @@ export class OpencodeProvider extends CliProvider {
return {
type: 'error',
session_id: finishEvent.sessionID,
error: 'Step execution failed',
error: OpencodeProvider.cleanErrorMessage('Step execution failed'),
};
}
@@ -705,8 +732,10 @@ export class OpencodeProvider extends CliProvider {
case 'tool_error': {
const toolErrorEvent = openCodeEvent as OpenCodeBaseEvent;
// Extract error message from part.error
const errorMessage = toolErrorEvent.part?.error || 'Tool execution failed';
// Extract error message from part.error and clean ANSI codes
const errorMessage = OpencodeProvider.cleanErrorMessage(
toolErrorEvent.part?.error || 'Tool execution failed'
);
return {
type: 'error',
@@ -719,16 +748,8 @@ export class OpencodeProvider extends CliProvider {
// The event format includes the tool name, call ID, and state with input/output.
// Handle both 'tool_use' (actual CLI format) and 'tool_call' (legacy/alternative) for robustness.
case 'tool_use': {
const toolUseEvent = openCodeEvent as OpenCodeBaseEvent;
const part = toolUseEvent.part as OpenCodePart & {
callID?: string;
tool?: string;
state?: {
status?: string;
input?: unknown;
output?: string;
};
};
const toolUseEvent = openCodeEvent as OpenCodeToolUseEvent;
const part = toolUseEvent.part;
// Generate a tool use ID if not provided
const toolUseId = part?.callID || part?.call_id || generateToolUseId();
@@ -898,9 +919,9 @@ export class OpencodeProvider extends CliProvider {
default: true,
},
{
id: 'opencode/glm-4.7-free',
name: 'GLM 4.7 Free',
modelString: 'opencode/glm-4.7-free',
id: 'opencode/glm-5-free',
name: 'GLM 5 Free',
modelString: 'opencode/glm-5-free',
provider: 'opencode',
description: 'OpenCode free tier GLM model',
supportsTools: true,
@@ -918,19 +939,19 @@ export class OpencodeProvider extends CliProvider {
tier: 'basic',
},
{
id: 'opencode/grok-code',
name: 'Grok Code (Free)',
modelString: 'opencode/grok-code',
id: 'opencode/kimi-k2.5-free',
name: 'Kimi K2.5 Free',
modelString: 'opencode/kimi-k2.5-free',
provider: 'opencode',
description: 'OpenCode free tier Grok model for coding',
description: 'OpenCode free tier Kimi model for coding',
supportsTools: true,
supportsVision: false,
tier: 'basic',
},
{
id: 'opencode/minimax-m2.1-free',
name: 'MiniMax M2.1 Free',
modelString: 'opencode/minimax-m2.1-free',
id: 'opencode/minimax-m2.5-free',
name: 'MiniMax M2.5 Free',
modelString: 'opencode/minimax-m2.5-free',
provider: 'opencode',
description: 'OpenCode free tier MiniMax model',
supportsTools: true,
@@ -1052,7 +1073,7 @@ export class OpencodeProvider extends CliProvider {
*
* OpenCode CLI output format (one model per line):
* opencode/big-pickle
* opencode/glm-4.7-free
* opencode/glm-5-free
* anthropic/claude-3-5-haiku-20241022
* github-copilot/claude-3.5-sonnet
* ...

View File

@@ -94,7 +94,7 @@ export interface StreamingQueryOptions extends SimpleQueryOptions {
/**
* Default model to use when none specified
*/
const DEFAULT_MODEL = 'claude-sonnet-4-20250514';
const DEFAULT_MODEL = 'claude-sonnet-4-6';
/**
* Execute a simple query and return the text result

View File

@@ -6,12 +6,14 @@ import { Router } from 'express';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createDiffsHandler } from './routes/diffs.js';
import { createFileDiffHandler } from './routes/file-diff.js';
import { createStageFilesHandler } from './routes/stage-files.js';
/**
 * Build the Express router exposing main-project git endpoints:
 * diff listing, per-file diffs, and stage/unstage operations.
 */
export function createGitRoutes(): Router {
  const gitRouter = Router();

  gitRouter.post('/diffs', validatePathParams('projectPath'), createDiffsHandler());
  gitRouter.post(
    '/file-diff',
    validatePathParams('projectPath', 'filePath'),
    createFileDiffHandler()
  );
  gitRouter.post('/stage-files', validatePathParams('projectPath'), createStageFilesHandler());

  return gitRouter;
}

View File

@@ -0,0 +1,60 @@
/**
* POST /stage-files endpoint - Stage or unstage files in the main project
*/
import type { Request, Response } from 'express';
import { getErrorMessage, logError } from '../common.js';
import { execGitCommand } from '../../../lib/git.js';
/**
 * Create the POST /stage-files handler for the main project.
 *
 * Stages (`git add -- <files>`) or unstages (`git reset HEAD -- <files>`)
 * the given files. Expected body:
 * `{ projectPath: string; files: string[]; operation: 'stage' | 'unstage' }`.
 * Responds 400 on invalid input, 500 on git failure.
 */
export function createStageFilesHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, files, operation } = req.body as {
        projectPath: string;
        files: string[];
        operation: 'stage' | 'unstage';
      };

      if (!projectPath) {
        res.status(400).json({
          success: false,
          error: 'projectPath required',
        });
        return;
      }

      // Reject non-array payloads explicitly: spreading a string into the
      // git argv would pass individual characters as file paths.
      if (!Array.isArray(files) || files.length === 0) {
        res.status(400).json({
          success: false,
          error: 'files array required and must not be empty',
        });
        return;
      }

      if (operation !== 'stage' && operation !== 'unstage') {
        res.status(400).json({
          success: false,
          error: 'operation must be "stage" or "unstage"',
        });
        return;
      }

      if (operation === 'stage') {
        await execGitCommand(['add', '--', ...files], projectPath);
      } else {
        // `--` prevents file names from being parsed as refs or options.
        await execGitCommand(['reset', 'HEAD', '--', ...files], projectPath);
      }

      res.json({
        success: true,
        result: {
          operation,
          filesCount: files.length,
        },
      });
    } catch (error) {
      logError(error, `${(req.body as { operation?: string })?.operation ?? 'stage'} files failed`);
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -158,7 +158,7 @@ export function createVerifyClaudeAuthHandler() {
const stream = query({
prompt: "Reply with only the word 'ok'",
options: {
model: 'claude-sonnet-4-20250514',
model: 'claude-sonnet-4-6',
maxTurns: 1,
allowedTools: [],
abortController,

View File

@@ -63,6 +63,9 @@ import { createCherryPickHandler } from './routes/cherry-pick.js';
import { createBranchCommitLogHandler } from './routes/branch-commit-log.js';
import { createGeneratePRDescriptionHandler } from './routes/generate-pr-description.js';
import { createRebaseHandler } from './routes/rebase.js';
import { createAbortOperationHandler } from './routes/abort-operation.js';
import { createContinueOperationHandler } from './routes/continue-operation.js';
import { createStageFilesHandler } from './routes/stage-files.js';
import type { SettingsService } from '../../services/settings-service.js';
export function createWorktreeRoutes(
@@ -276,5 +279,29 @@ export function createWorktreeRoutes(
createRebaseHandler(events)
);
// Abort in-progress merge/rebase/cherry-pick
router.post(
'/abort-operation',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createAbortOperationHandler(events)
);
// Continue in-progress merge/rebase/cherry-pick after resolving conflicts
router.post(
'/continue-operation',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createContinueOperationHandler(events)
);
// Stage/unstage files route
router.post(
'/stage-files',
validatePathParams('worktreePath'),
requireGitRepoOnly,
createStageFilesHandler()
);
return router;
}

View File

@@ -0,0 +1,117 @@
/**
* POST /abort-operation endpoint - Abort an in-progress merge, rebase, or cherry-pick
*
* Detects which operation (merge, rebase, or cherry-pick) is in progress
* and aborts it, returning the repository to a clean state.
*/
import type { Request, Response } from 'express';
import path from 'path';
import * as fs from 'fs/promises';
import { getErrorMessage, logError, execAsync } from '../common.js';
import type { EventEmitter } from '../../../lib/events.js';
/**
 * Determine which conflict-producing git operation is currently in progress
 * by probing the state files/directories git leaves in its .git directory.
 */
async function detectOperation(
  worktreePath: string
): Promise<'merge' | 'rebase' | 'cherry-pick' | null> {
  try {
    const { stdout: gitDirRaw } = await execAsync('git rev-parse --git-dir', {
      cwd: worktreePath,
    });
    const gitDir = path.resolve(worktreePath, gitDirRaw.trim());

    // Does the named marker exist inside the resolved .git dir?
    const markerExists = (name: string): Promise<boolean> =>
      fs
        .access(path.join(gitDir, name))
        .then(() => true)
        .catch(() => false);

    const [rebaseMerge, rebaseApply, mergeHead, cherryPickHead] = await Promise.all([
      markerExists('rebase-merge'),
      markerExists('rebase-apply'),
      markerExists('MERGE_HEAD'),
      markerExists('CHERRY_PICK_HEAD'),
    ]);

    if (rebaseMerge || rebaseApply) return 'rebase';
    if (mergeHead) return 'merge';
    if (cherryPickHead) return 'cherry-pick';
    return null;
  } catch {
    // Treat any probing failure as "no operation in progress".
    return null;
  }
}
/**
 * Create the POST /abort-operation handler.
 *
 * Detects the in-progress merge, rebase, or cherry-pick in the given
 * worktree and aborts it, emitting `conflict:aborted` on success.
 * Responds 400 when nothing is in progress, 500 on git failure.
 */
export function createAbortOperationHandler(events: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath } = req.body as {
        worktreePath: string;
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath is required',
        });
        return;
      }

      const resolvedWorktreePath = path.resolve(worktreePath);

      // Figure out which operation (if any) needs aborting.
      const operation = await detectOperation(resolvedWorktreePath);
      if (!operation) {
        res.status(400).json({
          success: false,
          error: 'No merge, rebase, or cherry-pick in progress',
        });
        return;
      }

      // Each operation type has its own abort subcommand.
      const abortCommands: Record<'merge' | 'rebase' | 'cherry-pick', string> = {
        merge: 'git merge --abort',
        rebase: 'git rebase --abort',
        'cherry-pick': 'git cherry-pick --abort',
      };
      await execAsync(abortCommands[operation], { cwd: resolvedWorktreePath });

      events.emit('conflict:aborted', {
        worktreePath: resolvedWorktreePath,
        operation,
      });

      res.json({
        success: true,
        result: {
          operation,
          message: `${operation.charAt(0).toUpperCase() + operation.slice(1)} aborted successfully`,
        },
      });
    } catch (error) {
      logError(error, 'Abort operation failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -0,0 +1,151 @@
/**
* POST /continue-operation endpoint - Continue an in-progress merge, rebase, or cherry-pick
*
* After conflicts have been resolved, this endpoint continues the operation.
* For merge: performs git commit (merge is auto-committed after conflict resolution)
* For rebase: runs git rebase --continue
* For cherry-pick: runs git cherry-pick --continue
*/
import type { Request, Response } from 'express';
import path from 'path';
import * as fs from 'fs/promises';
import { getErrorMessage, logError, execAsync } from '../common.js';
import type { EventEmitter } from '../../../lib/events.js';
/**
 * Identify the conflict operation (merge, rebase, or cherry-pick) currently
 * in progress, based on the marker files git maintains in its state dir.
 */
async function detectOperation(
  worktreePath: string
): Promise<'merge' | 'rebase' | 'cherry-pick' | null> {
  try {
    const { stdout } = await execAsync('git rev-parse --git-dir', {
      cwd: worktreePath,
    });
    const stateDir = path.resolve(worktreePath, stdout.trim());

    const probe = (marker: string): Promise<boolean> =>
      fs
        .access(path.join(stateDir, marker))
        .then(() => true)
        .catch(() => false);

    const [inRebaseMerge, inRebaseApply, hasMergeHead, hasCherryPickHead] = await Promise.all([
      probe('rebase-merge'),
      probe('rebase-apply'),
      probe('MERGE_HEAD'),
      probe('CHERRY_PICK_HEAD'),
    ]);

    // Rebase markers take precedence over merge/cherry-pick markers.
    if (inRebaseMerge || inRebaseApply) return 'rebase';
    if (hasMergeHead) return 'merge';
    if (hasCherryPickHead) return 'cherry-pick';
    return null;
  } catch {
    return null;
  }
}
/**
 * Report whether the worktree still contains unmerged (conflicted) paths.
 * Parses `git status --porcelain` for the two-letter unmerged status codes.
 */
async function hasUnmergedPaths(worktreePath: string): Promise<boolean> {
  try {
    const { stdout } = await execAsync('git status --porcelain', {
      cwd: worktreePath,
    });
    const unmergedCodes = /^(UU|AA|DD|AU|UA|DU|UD)/;
    return stdout.split('\n').some((line) => unmergedCodes.test(line));
  } catch {
    // If `git status` itself fails, report no unmerged paths.
    return false;
  }
}
/**
 * Create the POST /continue-operation handler.
 *
 * Continues an in-progress merge, rebase, or cherry-pick once conflicts
 * have been resolved:
 * - merge:       `git commit --no-edit` (the merge completes on commit)
 * - rebase:      `git rebase --continue`
 * - cherry-pick: `git cherry-pick --continue`
 *
 * Responds 400 when nothing is in progress, 409 while unresolved conflicts
 * remain, 500 on git failure. Emits `conflict:resolved` on success.
 */
export function createContinueOperationHandler(events: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath } = req.body as {
        worktreePath: string;
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath is required',
        });
        return;
      }

      const resolvedWorktreePath = path.resolve(worktreePath);

      const operation = await detectOperation(resolvedWorktreePath);
      if (!operation) {
        res.status(400).json({
          success: false,
          error: 'No merge, rebase, or cherry-pick in progress',
        });
        return;
      }

      // Refuse to continue while conflict markers remain unresolved.
      if (await hasUnmergedPaths(resolvedWorktreePath)) {
        res.status(409).json({
          success: false,
          error:
            'There are still unresolved conflicts. Please resolve all conflicts before continuing.',
          hasUnresolvedConflicts: true,
        });
        return;
      }

      // Stage everything touched during conflict resolution.
      await execAsync('git add -A', { cwd: resolvedWorktreePath });

      const continueCommands: Record<'merge' | 'rebase' | 'cherry-pick', string> = {
        // A conflicted merge is completed by committing the resolution.
        merge: 'git commit --no-edit',
        rebase: 'git rebase --continue',
        'cherry-pick': 'git cherry-pick --continue',
      };

      await execAsync(continueCommands[operation], {
        cwd: resolvedWorktreePath,
        env: { ...process.env, GIT_EDITOR: 'true' }, // Prevent editor from opening
      });

      events.emit('conflict:resolved', {
        worktreePath: resolvedWorktreePath,
        operation,
      });

      res.json({
        success: true,
        result: {
          operation,
          message: `${operation.charAt(0).toUpperCase() + operation.slice(1)} continued successfully`,
        },
      });
    } catch (error) {
      logError(error, 'Continue operation failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -58,6 +58,88 @@ interface WorktreeInfo {
hasChanges?: boolean;
changedFilesCount?: number;
pr?: WorktreePRInfo; // PR info if a PR has been created for this branch
/** Whether a merge or rebase is in progress (has conflicts) */
hasConflicts?: boolean;
/** Type of conflict operation in progress */
conflictType?: 'merge' | 'rebase' | 'cherry-pick';
/** List of files with conflicts */
conflictFiles?: string[];
}
/**
 * Inspect a worktree for an in-progress merge, rebase, or cherry-pick by
 * checking the state files/directories git creates during those operations,
 * and collect the list of currently conflicted files.
 */
async function detectConflictState(worktreePath: string): Promise<{
  hasConflicts: boolean;
  conflictType?: 'merge' | 'rebase' | 'cherry-pick';
  conflictFiles?: string[];
}> {
  try {
    // Resolve the canonical .git directory (works for linked worktrees too).
    const { stdout: gitDirRaw } = await execAsync('git rev-parse --git-dir', {
      cwd: worktreePath,
    });
    const gitDir = path.resolve(worktreePath, gitDirRaw.trim());

    const markerExists = (marker: string): Promise<boolean> =>
      secureFs
        .access(path.join(gitDir, marker))
        .then(() => true)
        .catch(() => false);

    const [mergeHeadExists, rebaseMergeExists, rebaseApplyExists, cherryPickHeadExists] =
      await Promise.all([
        markerExists('MERGE_HEAD'),
        markerExists('rebase-merge'),
        markerExists('rebase-apply'),
        markerExists('CHERRY_PICK_HEAD'),
      ]);

    let conflictType: 'merge' | 'rebase' | 'cherry-pick' | undefined;
    if (rebaseMergeExists || rebaseApplyExists) {
      conflictType = 'rebase';
    } else if (mergeHeadExists) {
      conflictType = 'merge';
    } else if (cherryPickHeadExists) {
      conflictType = 'cherry-pick';
    }

    if (!conflictType) {
      return { hasConflicts: false };
    }

    // List unmerged files; --diff-filter=U restricts the diff to conflicts.
    let conflictFiles: string[] = [];
    try {
      const { stdout: diffOutput } = await execAsync('git diff --name-only --diff-filter=U', {
        cwd: worktreePath,
      });
      conflictFiles = diffOutput
        .trim()
        .split('\n')
        .filter((f) => f.trim().length > 0);
    } catch {
      // Fall back to an empty list when the diff itself fails.
    }

    return {
      hasConflicts: true,
      conflictType,
      conflictFiles,
    };
  } catch {
    // Any detection failure is treated as "no conflicts".
    return { hasConflicts: false };
  }
}
async function getCurrentBranch(cwd: string): Promise<string> {
@@ -373,7 +455,7 @@ export function createListHandler() {
// Read all worktree metadata to get PR info
const allMetadata = await readAllWorktreeMetadata(projectPath);
// If includeDetails is requested, fetch change status for each worktree
// If includeDetails is requested, fetch change status and conflict state for each worktree
if (includeDetails) {
for (const worktree of worktrees) {
try {
@@ -390,6 +472,18 @@ export function createListHandler() {
worktree.hasChanges = false;
worktree.changedFilesCount = 0;
}
// Detect merge/rebase/cherry-pick in progress
try {
const conflictState = await detectConflictState(worktree.path);
if (conflictState.hasConflicts) {
worktree.hasConflicts = true;
worktree.conflictType = conflictState.conflictType;
worktree.conflictFiles = conflictState.conflictFiles;
}
} catch {
// Ignore conflict detection errors
}
}
}

View File

@@ -0,0 +1,69 @@
/**
* POST /stage-files endpoint - Stage or unstage files in a worktree
*
* Supports two operations:
* 1. Stage files: `git add <files>` (adds files to the staging area)
* 2. Unstage files: `git reset HEAD -- <files>` (removes files from staging area)
*
* Note: Git repository validation (isGitRepo) is handled by
* the requireGitRepoOnly middleware in index.ts
*/
import type { Request, Response } from 'express';
import { getErrorMessage, logError } from '../common.js';
import { execGitCommand } from '../../../lib/git.js';
/**
 * Create the POST /stage-files handler for a worktree.
 *
 * Stages (`git add -- <files>`) or unstages (`git reset HEAD -- <files>`)
 * the given files. Expected body:
 * `{ worktreePath: string; files: string[]; operation: 'stage' | 'unstage' }`.
 * Git repository validation is handled by the requireGitRepoOnly middleware.
 * Responds 400 on invalid input, 500 on git failure.
 */
export function createStageFilesHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { worktreePath, files, operation } = req.body as {
        worktreePath: string;
        files: string[];
        operation: 'stage' | 'unstage';
      };

      if (!worktreePath) {
        res.status(400).json({
          success: false,
          error: 'worktreePath required',
        });
        return;
      }

      // Reject non-array payloads explicitly: spreading a string into the
      // git argv would pass individual characters as file paths.
      if (!Array.isArray(files) || files.length === 0) {
        res.status(400).json({
          success: false,
          error: 'files array required and must not be empty',
        });
        return;
      }

      if (operation !== 'stage' && operation !== 'unstage') {
        res.status(400).json({
          success: false,
          error: 'operation must be "stage" or "unstage"',
        });
        return;
      }

      if (operation === 'stage') {
        // Stage the specified files; `--` guards against option-like names.
        await execGitCommand(['add', '--', ...files], worktreePath);
      } else {
        // Unstage the specified files without touching the working tree.
        await execGitCommand(['reset', 'HEAD', '--', ...files], worktreePath);
      }

      res.json({
        success: true,
        result: {
          operation,
          filesCount: files.length,
        },
      });
    } catch (error) {
      logError(error, `${(req.body as { operation?: string })?.operation ?? 'stage'} files failed`);
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

View File

@@ -54,7 +54,7 @@ export function createStashApplyHandler(events: EventEmitter) {
const result = await applyOrPop(worktreePath, idx, { pop }, events);
if (!result.success) {
logError(new Error(result.error ?? 'Stash apply failed'), 'Stash apply failed');
// applyOrPop already logs the error internally via logError — no need to double-log here
res.status(500).json({ success: false, error: result.error });
return;
}

View File

@@ -42,6 +42,27 @@ export class AgentExecutor {
private static readonly WRITE_DEBOUNCE_MS = 500;
private static readonly STREAM_HEARTBEAT_MS = 15_000;
/**
 * Sanitize a provider error value into clean text.
 *
 * Coalesces string/object inputs to a string, strips ANSI color codes,
 * removes any leading "Error:" prefixes (CLI providers can stack several,
 * e.g. "Error: Error: Session not found"), trims whitespace, and falls
 * back to 'Unknown error' when nothing remains.
 */
private static sanitizeProviderError(input: string | { error?: string } | undefined): string {
  let raw: string;
  if (typeof input === 'string') {
    raw = input;
  } else if (input && typeof input === 'object' && typeof input.error === 'string') {
    raw = input.error;
  } else {
    raw = '';
  }
  const cleaned = raw
    .replace(/\x1b\[[0-9;]*m/g, '')
    // Strip repeated prefixes so stacked "Error: Error: X" collapses to "X";
    // the previous single-prefix strip left one behind.
    .replace(/^(error:\s*)+/i, '')
    .trim();
  return cleaned || 'Unknown error';
}
constructor(
private eventBus: TypedEventBus,
private featureStateManager: FeatureStateManager,
@@ -255,15 +276,7 @@ export class AgentExecutor {
}
}
} else if (msg.type === 'error') {
// Clean the error: strip ANSI codes and the redundant "Error: " prefix
// that CLI providers add. Without this, wrapping in new Error() produces
// "Error: Error: Session not found" (double-prefixed).
const cleanedError =
(msg.error || 'Unknown error')
.replace(/\x1b\[[0-9;]*m/g, '')
.replace(/^Error:\s*/i, '')
.trim() || 'Unknown error';
throw new Error(cleanedError);
throw new Error(AgentExecutor.sanitizeProviderError(msg.error));
} else if (msg.type === 'result' && msg.subtype === 'success') scheduleWrite();
}
await writeToFile();

View File

@@ -96,6 +96,20 @@ export class AgentService {
await secureFs.mkdir(this.stateDir, { recursive: true });
}
/**
 * Detect provider-side session errors (session not found, expired, etc.).
 * Used to decide whether to clear a stale sdkSessionId.
 */
private isStaleSessionError(rawErrorText: string): boolean {
  const haystack = rawErrorText.toLowerCase();
  const staleMarkers = [
    'session not found',
    'session expired',
    'invalid session',
    'no such session',
  ];
  return staleMarkers.some((marker) => haystack.includes(marker));
}
/**
* Start or resume a conversation
*/
@@ -195,7 +209,15 @@ export class AgentService {
const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory);
// Validate that the working directory is allowed using centralized validation
validateWorkingDirectory(resolvedWorkingDirectory);
try {
validateWorkingDirectory(resolvedWorkingDirectory);
} catch (validationError) {
this.logger.warn(
`Session "${sessionId}": working directory "${resolvedWorkingDirectory}" is not allowed — ` +
`returning null so callers treat it as a missing session. Error: ${(validationError as Error).message}`
);
return null;
}
// Load persisted queue
const promptQueue = await this.loadQueueState(sessionId);
@@ -411,7 +433,7 @@ export class AgentService {
// When using a custom provider (GLM, MiniMax), use resolved Claude model for SDK config
// (thinking level budgets, allowedTools) but we MUST pass the provider's model ID
// (e.g. "GLM-4.7") to the API - not "claude-sonnet-4-20250514" which causes "model not found"
// (e.g. "GLM-4.7") to the API - not "claude-sonnet-4-6" which causes "model not found"
const modelForSdk = providerResolvedModel || model;
const sessionModelForSdk = providerResolvedModel ? undefined : session.model;
@@ -616,14 +638,7 @@ export class AgentService {
// sdkSessionId so the next attempt starts a fresh provider session.
// This handles providers that don't have built-in session recovery
// (unlike OpenCode which auto-retries without the session flag).
const errorLower = rawErrorText.toLowerCase();
if (
session.sdkSessionId &&
(errorLower.includes('session not found') ||
errorLower.includes('session expired') ||
errorLower.includes('invalid session') ||
errorLower.includes('no such session'))
) {
if (session.sdkSessionId && this.isStaleSessionError(rawErrorText)) {
this.logger.info(
`Clearing stale sdkSessionId for session ${sessionId} after provider session error`
);
@@ -699,13 +714,7 @@ export class AgentService {
// Check if the thrown error is a provider-side session error.
// Clear the stale sdkSessionId so the next retry starts fresh.
if (
session.sdkSessionId &&
(thrownErrorMsg.includes('session not found') ||
thrownErrorMsg.includes('session expired') ||
thrownErrorMsg.includes('invalid session') ||
thrownErrorMsg.includes('no such session'))
) {
if (session.sdkSessionId && this.isStaleSessionError(rawThrownMsg)) {
this.logger.info(
`Clearing stale sdkSessionId for session ${sessionId} after thrown session error`
);

View File

@@ -208,7 +208,7 @@ export class AutoModeServiceFacade {
model?: string,
opts?: Record<string, unknown>
) => {
const resolvedModel = model || 'claude-sonnet-4-20250514';
const resolvedModel = model || 'claude-sonnet-4-6';
const provider = ProviderFactory.getProviderForModel(resolvedModel);
const effectiveBareModel = stripProviderPrefix(resolvedModel);
@@ -258,7 +258,7 @@ export class AutoModeServiceFacade {
featureStateManager.saveFeatureSummary(projPath, fId, summary),
buildTaskPrompt: (task, allTasks, taskIndex, _planContent, template, feedback) => {
let taskPrompt = template
.replace(/\{\{taskName\}\}/g, task.description)
.replace(/\{\{taskName\}\}/g, task.description || `Task ${task.id}`)
.replace(/\{\{taskIndex\}\}/g, String(taskIndex + 1))
.replace(/\{\{totalTasks\}\}/g, String(allTasks.length))
.replace(/\{\{taskDescription\}\}/g, task.description || `Task ${task.id}`);
@@ -336,7 +336,7 @@ export class AutoModeServiceFacade {
branchName?: string | null;
}
) => {
const resolvedModel = model || 'claude-sonnet-4-20250514';
const resolvedModel = model || 'claude-sonnet-4-6';
const provider = ProviderFactory.getProviderForModel(resolvedModel);
const effectiveBareModel = stripProviderPrefix(resolvedModel);
@@ -385,7 +385,7 @@ export class AutoModeServiceFacade {
featureStateManager.saveFeatureSummary(projPath, fId, summary),
buildTaskPrompt: (task, allTasks, taskIndex, planContent, template, feedback) => {
let taskPrompt = template
.replace(/\{\{taskName\}\}/g, task.description)
.replace(/\{\{taskName\}\}/g, task.description || `Task ${task.id}`)
.replace(/\{\{taskIndex\}\}/g, String(taskIndex + 1))
.replace(/\{\{totalTasks\}\}/g, String(allTasks.length))
.replace(/\{\{taskDescription\}\}/g, task.description || `Task ${task.id}`);

View File

@@ -35,7 +35,10 @@ export interface MergeServiceResult {
*/
function isValidBranchName(name: string): boolean {
// First char must be alphanumeric, dot, underscore, or slash (not dash)
return /^[a-zA-Z0-9._/][a-zA-Z0-9._\-/]*$/.test(name) && name.length < 250;
// Reject names containing '..' to prevent git ref traversal
return (
/^[a-zA-Z0-9._/][a-zA-Z0-9._\-/]*$/.test(name) && name.length < 250 && !name.includes('..')
);
}
/**

View File

@@ -16,7 +16,7 @@
*/
import { createLogger } from '@automaker/utils';
import { execGitCommand } from '../lib/git.js';
import { execGitCommand, execGitCommandWithLockRetry } from '../lib/git.js';
import { getErrorMessage } from '../routes/worktree/common.js';
const logger = createLogger('PullService');
@@ -106,7 +106,10 @@ export async function getLocalChanges(
*/
export async function stashChanges(worktreePath: string, branchName: string): Promise<void> {
const stashMessage = `automaker-pull-stash: Pre-pull stash on ${branchName}`;
await execGitCommand(['stash', 'push', '--include-untracked', '-m', stashMessage], worktreePath);
await execGitCommandWithLockRetry(
['stash', 'push', '--include-untracked', '-m', stashMessage],
worktreePath
);
}
/**

View File

@@ -16,7 +16,7 @@
import { createLogger } from '@automaker/utils';
import type { EventEmitter } from '../lib/events.js';
import { execGitCommand } from '../lib/git.js';
import { execGitCommand, execGitCommandWithLockRetry } from '../lib/git.js';
import { getErrorMessage, logError } from '../routes/worktree/common.js';
const logger = createLogger('StashService');
@@ -105,6 +105,46 @@ function isConflictOutput(output: string): boolean {
return output.includes('CONFLICT') || output.includes('Merge conflict');
}
/**
* Build a conflict result from stash apply/pop, emit events, and return.
* Extracted to avoid duplicating conflict handling in the try and catch paths.
*/
async function handleStashConflicts(
worktreePath: string,
stashIndex: number,
operation: 'apply' | 'pop',
events?: EventEmitter
): Promise<StashApplyResult> {
const conflictFiles = await getConflictedFiles(worktreePath);
events?.emit('stash:conflicts', {
worktreePath,
stashIndex,
operation,
conflictFiles,
});
const result: StashApplyResult = {
success: true,
applied: true,
hasConflicts: true,
conflictFiles,
operation,
stashIndex,
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
};
events?.emit('stash:success', {
worktreePath,
stashIndex,
operation,
hasConflicts: true,
conflictFiles,
});
return result;
}
// ============================================================================
// Main Service Function
// ============================================================================
@@ -164,34 +204,7 @@ export async function applyOrPop(
// 4. Check if the error is a conflict
if (isConflictOutput(combinedOutput)) {
const conflictFiles = await getConflictedFiles(worktreePath);
events?.emit('stash:conflicts', {
worktreePath,
stashIndex,
operation,
conflictFiles,
});
const result: StashApplyResult = {
success: true,
applied: true,
hasConflicts: true,
conflictFiles,
operation,
stashIndex,
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
};
events?.emit('stash:success', {
worktreePath,
stashIndex,
operation,
hasConflicts: true,
conflictFiles,
});
return result;
return handleStashConflicts(worktreePath, stashIndex, operation, events);
}
// 5. Non-conflict git error re-throw so the outer catch logs and handles it
@@ -205,34 +218,7 @@ export async function applyOrPop(
events?.emit('stash:progress', { worktreePath, stashIndex, operation, output: combinedOutput });
if (isConflictOutput(combinedOutput)) {
const conflictFiles = await getConflictedFiles(worktreePath);
events?.emit('stash:conflicts', {
worktreePath,
stashIndex,
operation,
conflictFiles,
});
const result: StashApplyResult = {
success: true,
applied: true,
hasConflicts: true,
conflictFiles,
operation,
stashIndex,
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
};
events?.emit('stash:success', {
worktreePath,
stashIndex,
operation,
hasConflicts: true,
conflictFiles,
});
return result;
return handleStashConflicts(worktreePath, stashIndex, operation, events);
}
// 7. Clean success
@@ -296,17 +282,20 @@ export async function applyOrPop(
*/
export async function pushStash(
worktreePath: string,
options?: { message?: string; files?: string[] }
options?: { message?: string; files?: string[] },
events?: EventEmitter
): Promise<StashPushResult> {
const message = options?.message;
const files = options?.files;
logger.info(`[StashService] push stash in ${worktreePath}`);
events?.emit('stash:start', { worktreePath, operation: 'push' });
// 1. Check for any changes to stash
const status = await execGitCommand(['status', '--porcelain'], worktreePath);
if (!status.trim()) {
events?.emit('stash:success', { worktreePath, operation: 'push', stashed: false });
return {
success: true,
stashed: false,
@@ -326,13 +315,20 @@ export async function pushStash(
args.push(...files);
}
// 3. Execute stash push
await execGitCommand(args, worktreePath);
// 3. Execute stash push (with automatic index.lock cleanup and retry)
await execGitCommandWithLockRetry(args, worktreePath);
// 4. Get current branch name
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
const branchName = branchOutput.trim();
events?.emit('stash:success', {
worktreePath,
operation: 'push',
stashed: true,
branch: branchName,
});
return {
success: true,
stashed: true,
@@ -445,14 +441,18 @@ export async function listStash(worktreePath: string): Promise<StashListResult>
*/
export async function dropStash(
worktreePath: string,
stashIndex: number
stashIndex: number,
events?: EventEmitter
): Promise<StashDropResult> {
const stashRef = `stash@{${stashIndex}}`;
logger.info(`[StashService] drop ${stashRef} in ${worktreePath}`);
events?.emit('stash:start', { worktreePath, stashIndex, stashRef, operation: 'drop' });
await execGitCommand(['stash', 'drop', stashRef], worktreePath);
events?.emit('stash:success', { worktreePath, stashIndex, stashRef, operation: 'drop' });
return {
success: true,
dropped: true,

View File

@@ -16,9 +16,8 @@
* rebase-service.ts.
*/
import { createLogger } from '@automaker/utils';
import { execGitCommand } from '../lib/git.js';
import { getErrorMessage } from '../routes/worktree/common.js';
import { createLogger, getErrorMessage } from '@automaker/utils';
import { execGitCommand, execGitCommandWithLockRetry } from '../lib/git.js';
import type { EventEmitter } from '../lib/events.js';
const logger = createLogger('WorktreeBranchService');
@@ -66,7 +65,11 @@ async function hasAnyChanges(cwd: string): Promise<boolean> {
return true;
});
return lines.length > 0;
} catch {
} catch (err) {
logger.error('hasAnyChanges: execGitCommand failed — returning false', {
cwd,
error: getErrorMessage(err),
});
return false;
}
}
@@ -78,24 +81,11 @@ async function hasAnyChanges(cwd: string): Promise<boolean> {
*/
async function stashChanges(cwd: string, message: string): Promise<boolean> {
try {
// Get stash count before
const beforeOutput = await execGitCommand(['stash', 'list'], cwd);
const countBefore = beforeOutput
.trim()
.split('\n')
.filter((l) => l.trim()).length;
// Stash including untracked files
await execGitCommand(['stash', 'push', '--include-untracked', '-m', message], cwd);
// Get stash count after to verify something was stashed
const afterOutput = await execGitCommand(['stash', 'list'], cwd);
const countAfter = afterOutput
.trim()
.split('\n')
.filter((l) => l.trim()).length;
return countAfter > countBefore;
// Stash including untracked files — a successful execGitCommand is proof
// the stash was created. No need for a post-push listing which can throw
// and incorrectly report a failed stash.
await execGitCommandWithLockRetry(['stash', 'push', '--include-untracked', '-m', message], cwd);
return true;
} catch (error) {
const errorMsg = getErrorMessage(error);
@@ -127,11 +117,8 @@ async function popStash(
cwd: string
): Promise<{ success: boolean; hasConflicts: boolean; error?: string }> {
try {
const stdout = await execGitCommand(['stash', 'pop'], cwd);
// Check for conflict markers in the output
if (stdout.includes('CONFLICT') || stdout.includes('Merge conflict')) {
return { success: false, hasConflicts: true };
}
await execGitCommand(['stash', 'pop'], cwd);
// If execGitCommand succeeds (zero exit code), there are no conflicts
return { success: true, hasConflicts: false };
} catch (error) {
const errorMsg = getErrorMessage(error);
@@ -274,11 +261,9 @@ export async function performSwitchBranch(
};
}
// 4. Check if target branch exists (locally or as remote ref)
// 4. Check if target branch exists as a local branch
if (!isRemote) {
try {
await execGitCommand(['rev-parse', '--verify', branchName], worktreePath);
} catch {
if (!(await localBranchExists(worktreePath, branchName))) {
events?.emit('switch:error', {
worktreePath,
branchName,