mirror of
https://github.com/AutoMaker-Org/automaker.git
synced 2026-03-22 11:43:07 +00:00
feat: Add GPT-5 model variants and improve Codex execution logic. Addressed code review comments
This commit is contained in:
@@ -15,7 +15,14 @@ export function parseGitLogOutput(output: string): CommitFields[] {
|
||||
const commitBlocks = output.split('\0').filter((block) => block.trim());
|
||||
|
||||
for (const block of commitBlocks) {
|
||||
const fields = block.split('\n');
|
||||
const allLines = block.split('\n');
|
||||
|
||||
// Skip leading empty lines that may appear at block boundaries
|
||||
let startIndex = 0;
|
||||
while (startIndex < allLines.length && allLines[startIndex].trim() === '') {
|
||||
startIndex++;
|
||||
}
|
||||
const fields = allLines.slice(startIndex);
|
||||
|
||||
// Validate we have all expected fields (at least hash, shortHash, author, authorEmail, date, subject)
|
||||
if (fields.length < 6) {
|
||||
|
||||
82
apps/server/src/lib/git.ts
Normal file
82
apps/server/src/lib/git.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
/**
|
||||
* Shared git command execution utilities.
|
||||
*
|
||||
* This module provides the canonical `execGitCommand` helper and common
|
||||
* git utilities used across services and routes. All consumers should
|
||||
* import from here rather than defining their own copy.
|
||||
*/
|
||||
|
||||
import { spawnProcess } from '@automaker/platform';
|
||||
|
||||
// ============================================================================
|
||||
// Secure Command Execution
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Execute git command with array arguments to prevent command injection.
|
||||
* Uses spawnProcess from @automaker/platform for secure, cross-platform execution.
|
||||
*
|
||||
* @param args - Array of git command arguments (e.g., ['worktree', 'add', path])
|
||||
* @param cwd - Working directory to execute the command in
|
||||
* @param env - Optional additional environment variables to pass to the git process.
|
||||
* These are merged on top of the current process environment. Pass
|
||||
* `{ LC_ALL: 'C' }` to force git to emit English output regardless of the
|
||||
* system locale so that text-based output parsing remains reliable.
|
||||
* @returns Promise resolving to stdout output
|
||||
* @throws Error with stderr/stdout message if command fails. The thrown error
|
||||
* also has `stdout` and `stderr` string properties for structured access.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Safe: no injection possible
|
||||
* await execGitCommand(['branch', '-D', branchName], projectPath);
|
||||
*
|
||||
* // Force English output for reliable text parsing:
|
||||
* await execGitCommand(['rebase', '--', 'main'], worktreePath, { LC_ALL: 'C' });
|
||||
*
|
||||
* // Instead of unsafe:
|
||||
* // await execAsync(`git branch -D ${branchName}`, { cwd });
|
||||
* ```
|
||||
*/
|
||||
export async function execGitCommand(
|
||||
args: string[],
|
||||
cwd: string,
|
||||
env?: Record<string, string>
|
||||
): Promise<string> {
|
||||
const result = await spawnProcess({
|
||||
command: 'git',
|
||||
args,
|
||||
cwd,
|
||||
...(env !== undefined ? { env } : {}),
|
||||
});
|
||||
|
||||
// spawnProcess returns { stdout, stderr, exitCode }
|
||||
if (result.exitCode === 0) {
|
||||
return result.stdout;
|
||||
} else {
|
||||
const errorMessage =
|
||||
result.stderr || result.stdout || `Git command failed with code ${result.exitCode}`;
|
||||
throw Object.assign(new Error(errorMessage), {
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Common Git Utilities
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Get the current branch name for the given worktree.
|
||||
*
|
||||
* This is the canonical implementation shared across services. Services
|
||||
* should import this rather than duplicating the logic locally.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns The current branch name (trimmed)
|
||||
*/
|
||||
export async function getCurrentBranch(worktreePath: string): Promise<string> {
|
||||
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
|
||||
return branchOutput.trim();
|
||||
}
|
||||
@@ -32,6 +32,19 @@ export const CODEX_MODELS: ModelDefinition[] = [
|
||||
default: true,
|
||||
hasReasoning: true,
|
||||
},
|
||||
{
|
||||
id: CODEX_MODEL_MAP.gpt53CodexSpark,
|
||||
name: 'GPT-5.3-Codex-Spark',
|
||||
modelString: CODEX_MODEL_MAP.gpt53CodexSpark,
|
||||
provider: 'openai',
|
||||
description: 'Near-instant real-time coding model, 1000+ tokens/sec.',
|
||||
contextWindow: CONTEXT_WINDOW_256K,
|
||||
maxOutputTokens: MAX_OUTPUT_32K,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: 'premium' as const,
|
||||
hasReasoning: true,
|
||||
},
|
||||
{
|
||||
id: CODEX_MODEL_MAP.gpt52Codex,
|
||||
name: 'GPT-5.2-Codex',
|
||||
@@ -71,6 +84,45 @@ export const CODEX_MODELS: ModelDefinition[] = [
|
||||
tier: 'basic' as const,
|
||||
hasReasoning: false,
|
||||
},
|
||||
{
|
||||
id: CODEX_MODEL_MAP.gpt51Codex,
|
||||
name: 'GPT-5.1-Codex',
|
||||
modelString: CODEX_MODEL_MAP.gpt51Codex,
|
||||
provider: 'openai',
|
||||
description: 'Original GPT-5.1 Codex agentic coding model.',
|
||||
contextWindow: CONTEXT_WINDOW_256K,
|
||||
maxOutputTokens: MAX_OUTPUT_32K,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: 'standard' as const,
|
||||
hasReasoning: true,
|
||||
},
|
||||
{
|
||||
id: CODEX_MODEL_MAP.gpt5Codex,
|
||||
name: 'GPT-5-Codex',
|
||||
modelString: CODEX_MODEL_MAP.gpt5Codex,
|
||||
provider: 'openai',
|
||||
description: 'Original GPT-5 Codex model.',
|
||||
contextWindow: CONTEXT_WINDOW_128K,
|
||||
maxOutputTokens: MAX_OUTPUT_16K,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: 'standard' as const,
|
||||
hasReasoning: true,
|
||||
},
|
||||
{
|
||||
id: CODEX_MODEL_MAP.gpt5CodexMini,
|
||||
name: 'GPT-5-Codex-Mini',
|
||||
modelString: CODEX_MODEL_MAP.gpt5CodexMini,
|
||||
provider: 'openai',
|
||||
description: 'Smaller, cheaper GPT-5 Codex variant.',
|
||||
contextWindow: CONTEXT_WINDOW_128K,
|
||||
maxOutputTokens: MAX_OUTPUT_16K,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: 'basic' as const,
|
||||
hasReasoning: false,
|
||||
},
|
||||
|
||||
// ========== General-Purpose GPT Models ==========
|
||||
{
|
||||
@@ -99,6 +151,19 @@ export const CODEX_MODELS: ModelDefinition[] = [
|
||||
tier: 'standard' as const,
|
||||
hasReasoning: true,
|
||||
},
|
||||
{
|
||||
id: CODEX_MODEL_MAP.gpt5,
|
||||
name: 'GPT-5',
|
||||
modelString: CODEX_MODEL_MAP.gpt5,
|
||||
provider: 'openai',
|
||||
description: 'Base GPT-5 model.',
|
||||
contextWindow: CONTEXT_WINDOW_128K,
|
||||
maxOutputTokens: MAX_OUTPUT_16K,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: 'standard' as const,
|
||||
hasReasoning: true,
|
||||
},
|
||||
];
|
||||
|
||||
/**
|
||||
|
||||
@@ -205,10 +205,28 @@ async function resolveCodexExecutionPlan(options: ExecuteOptions): Promise<Codex
|
||||
const authIndicators = await getCodexAuthIndicators();
|
||||
const openAiApiKey = await resolveOpenAiApiKey();
|
||||
const hasApiKey = Boolean(openAiApiKey);
|
||||
const cliAuthenticated = authIndicators.hasOAuthToken || authIndicators.hasApiKey || hasApiKey;
|
||||
const sdkEligible = isSdkEligible(options);
|
||||
const cliAvailable = Boolean(cliPath);
|
||||
// CLI OAuth login takes priority: if the user has logged in via `codex login`,
|
||||
// use the CLI regardless of whether an API key is also stored.
|
||||
// hasOAuthToken = OAuth session from `codex login`
|
||||
// authIndicators.hasApiKey = API key stored in Codex's own auth file (via `codex login --api-key`)
|
||||
// Both are "CLI-native" auth — distinct from an API key stored in Automaker's credentials.
|
||||
const hasCliNativeAuth = authIndicators.hasOAuthToken || authIndicators.hasApiKey;
|
||||
const cliAuthenticated = hasCliNativeAuth || hasApiKey;
|
||||
const sdkEligible = isSdkEligible(options);
|
||||
|
||||
// If CLI is available and the user authenticated via the CLI (`codex login`),
|
||||
// prefer CLI mode over SDK. This ensures `codex login` sessions take priority
|
||||
// over API keys stored in Automaker's credentials.
|
||||
if (cliAvailable && hasCliNativeAuth) {
|
||||
return {
|
||||
mode: CODEX_EXECUTION_MODE_CLI,
|
||||
cliPath,
|
||||
openAiApiKey,
|
||||
};
|
||||
}
|
||||
|
||||
// No CLI-native auth — fall back to API key via SDK if available.
|
||||
if (hasApiKey) {
|
||||
return {
|
||||
mode: CODEX_EXECUTION_MODE_SDK,
|
||||
@@ -854,16 +872,35 @@ export class CodexProvider extends BaseProvider {
|
||||
|
||||
// Enhance error message with helpful context
|
||||
let enhancedError = errorText;
|
||||
if (errorText.toLowerCase().includes('rate limit')) {
|
||||
const errorLower = errorText.toLowerCase();
|
||||
if (errorLower.includes('rate limit')) {
|
||||
enhancedError = `${errorText}\n\nTip: You're being rate limited. Try reducing concurrent tasks or waiting a few minutes before retrying.`;
|
||||
} else if (
|
||||
errorText.toLowerCase().includes('authentication') ||
|
||||
errorText.toLowerCase().includes('unauthorized')
|
||||
) {
|
||||
} else if (errorLower.includes('authentication') || errorLower.includes('unauthorized')) {
|
||||
enhancedError = `${errorText}\n\nTip: Check that your OPENAI_API_KEY is set correctly or run 'codex auth login' to authenticate.`;
|
||||
} else if (
|
||||
errorText.toLowerCase().includes('not found') ||
|
||||
errorText.toLowerCase().includes('command not found')
|
||||
errorLower.includes('does not exist') ||
|
||||
errorLower.includes('do not have access') ||
|
||||
errorLower.includes('model_not_found') ||
|
||||
errorLower.includes('invalid_model')
|
||||
) {
|
||||
enhancedError =
|
||||
`${errorText}\n\nTip: The model '${options.model}' may not be available on your OpenAI plan. ` +
|
||||
`Some models (like gpt-5.3-codex) require a ChatGPT Pro/Plus subscription and OAuth login via 'codex login'. ` +
|
||||
`Try using a different model (e.g., gpt-5.1 or gpt-5.2), or authenticate with 'codex login' instead of an API key.`;
|
||||
} else if (
|
||||
errorLower.includes('stream disconnected') ||
|
||||
errorLower.includes('stream ended') ||
|
||||
errorLower.includes('connection reset')
|
||||
) {
|
||||
enhancedError =
|
||||
`${errorText}\n\nTip: The connection to OpenAI was interrupted. This can happen due to:\n` +
|
||||
`- Network instability\n` +
|
||||
`- The model not being available on your plan\n` +
|
||||
`- Server-side timeouts for long-running requests\n` +
|
||||
`Try again, or switch to a different model.`;
|
||||
} else if (
|
||||
errorLower.includes('command not found') ||
|
||||
(errorLower.includes('not found') && !errorLower.includes('model'))
|
||||
) {
|
||||
enhancedError = `${errorText}\n\nTip: Make sure the Codex CLI is installed. Run 'npm install -g @openai/codex-cli' to install.`;
|
||||
}
|
||||
|
||||
@@ -99,38 +99,54 @@ export async function* executeCodexSdkQuery(
|
||||
const apiKey = resolveApiKey();
|
||||
const codex = new Codex({ apiKey });
|
||||
|
||||
// Build thread options with model
|
||||
// The model must be passed to startThread/resumeThread so the SDK
|
||||
// knows which model to use for the conversation. Without this,
|
||||
// the SDK may use a default model that the user doesn't have access to.
|
||||
type SdkReasoningEffort = 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';
|
||||
const SDK_REASONING_EFFORTS = new Set<string>(['minimal', 'low', 'medium', 'high', 'xhigh']);
|
||||
|
||||
const threadOptions: {
|
||||
model?: string;
|
||||
modelReasoningEffort?: SdkReasoningEffort;
|
||||
} = {};
|
||||
|
||||
if (options.model) {
|
||||
threadOptions.model = options.model;
|
||||
}
|
||||
|
||||
// Add reasoning effort to thread options if model supports it
|
||||
if (
|
||||
options.reasoningEffort &&
|
||||
supportsReasoningEffort(options.model) &&
|
||||
options.reasoningEffort !== 'none' &&
|
||||
SDK_REASONING_EFFORTS.has(options.reasoningEffort)
|
||||
) {
|
||||
threadOptions.modelReasoningEffort = options.reasoningEffort as SdkReasoningEffort;
|
||||
}
|
||||
|
||||
// Resume existing thread or start new one
|
||||
let thread;
|
||||
if (options.sdkSessionId) {
|
||||
try {
|
||||
thread = codex.resumeThread(options.sdkSessionId);
|
||||
thread = codex.resumeThread(options.sdkSessionId, threadOptions);
|
||||
} catch {
|
||||
// If resume fails, start a new thread
|
||||
thread = codex.startThread();
|
||||
thread = codex.startThread(threadOptions);
|
||||
}
|
||||
} else {
|
||||
thread = codex.startThread();
|
||||
thread = codex.startThread(threadOptions);
|
||||
}
|
||||
|
||||
const promptText = buildPromptText(options, systemPrompt);
|
||||
|
||||
// Build run options with reasoning effort if supported
|
||||
// Build run options
|
||||
const runOptions: {
|
||||
signal?: AbortSignal;
|
||||
reasoning?: { effort: string };
|
||||
} = {
|
||||
signal: options.abortController?.signal,
|
||||
};
|
||||
|
||||
// Add reasoning effort if model supports it and reasoningEffort is specified
|
||||
if (
|
||||
options.reasoningEffort &&
|
||||
supportsReasoningEffort(options.model) &&
|
||||
options.reasoningEffort !== 'none'
|
||||
) {
|
||||
runOptions.reasoning = { effort: options.reasoningEffort };
|
||||
}
|
||||
|
||||
// Run the query
|
||||
const result = await thread.run(promptText, runOptions);
|
||||
|
||||
@@ -160,10 +176,40 @@ export async function* executeCodexSdkQuery(
|
||||
} catch (error) {
|
||||
const errorInfo = classifyError(error);
|
||||
const userMessage = getUserFriendlyErrorMessage(error);
|
||||
const combinedMessage = buildSdkErrorMessage(errorInfo.message, userMessage);
|
||||
let combinedMessage = buildSdkErrorMessage(errorInfo.message, userMessage);
|
||||
|
||||
// Enhance error messages with actionable tips for common Codex issues
|
||||
const errorLower = errorInfo.message.toLowerCase();
|
||||
|
||||
if (
|
||||
errorLower.includes('does not exist') ||
|
||||
errorLower.includes('model_not_found') ||
|
||||
errorLower.includes('invalid_model')
|
||||
) {
|
||||
// Model not found - provide helpful guidance
|
||||
combinedMessage +=
|
||||
`\n\nTip: The model '${options.model}' may not be available on your OpenAI plan. ` +
|
||||
`Some models (like gpt-5.3-codex) require a ChatGPT Pro/Plus subscription and OAuth login via 'codex login'. ` +
|
||||
`Try using a different model (e.g., gpt-5.1 or gpt-5.2), or authenticate with 'codex login' instead of an API key.`;
|
||||
} else if (
|
||||
errorLower.includes('stream disconnected') ||
|
||||
errorLower.includes('stream ended') ||
|
||||
errorLower.includes('connection reset') ||
|
||||
errorLower.includes('socket hang up')
|
||||
) {
|
||||
// Stream disconnection - provide helpful guidance
|
||||
combinedMessage +=
|
||||
`\n\nTip: The connection to OpenAI was interrupted. This can happen due to:\n` +
|
||||
`- Network instability\n` +
|
||||
`- The model not being available on your plan (try 'codex login' for OAuth authentication)\n` +
|
||||
`- Server-side timeouts for long-running requests\n` +
|
||||
`Try again, or switch to a different model.`;
|
||||
}
|
||||
|
||||
console.error('[CodexSDK] executeQuery() error during execution:', {
|
||||
type: errorInfo.type,
|
||||
message: errorInfo.message,
|
||||
model: options.model,
|
||||
isRateLimit: errorInfo.isRateLimit,
|
||||
retryAfter: errorInfo.retryAfter,
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
|
||||
@@ -3,57 +3,17 @@
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { spawnProcess } from '@automaker/platform';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
||||
|
||||
// Re-export execGitCommand from the canonical shared module so any remaining
|
||||
// consumers that import from this file continue to work.
|
||||
export { execGitCommand } from '../../lib/git.js';
|
||||
|
||||
const logger = createLogger('Worktree');
|
||||
export const execAsync = promisify(exec);
|
||||
|
||||
// ============================================================================
|
||||
// Secure Command Execution
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Execute git command with array arguments to prevent command injection.
|
||||
* Uses spawnProcess from @automaker/platform for secure, cross-platform execution.
|
||||
*
|
||||
* @param args - Array of git command arguments (e.g., ['worktree', 'add', path])
|
||||
* @param cwd - Working directory to execute the command in
|
||||
* @returns Promise resolving to stdout output
|
||||
* @throws Error with stderr/stdout message if command fails. The thrown error
|
||||
* also has `stdout` and `stderr` string properties for structured access.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Safe: no injection possible
|
||||
* await execGitCommand(['branch', '-D', branchName], projectPath);
|
||||
*
|
||||
* // Instead of unsafe:
|
||||
* // await execAsync(`git branch -D ${branchName}`, { cwd });
|
||||
* ```
|
||||
*/
|
||||
export async function execGitCommand(args: string[], cwd: string): Promise<string> {
|
||||
const result = await spawnProcess({
|
||||
command: 'git',
|
||||
args,
|
||||
cwd,
|
||||
});
|
||||
|
||||
// spawnProcess returns { stdout, stderr, exitCode }
|
||||
if (result.exitCode === 0) {
|
||||
return result.stdout;
|
||||
} else {
|
||||
const errorMessage =
|
||||
result.stderr || result.stdout || `Git command failed with code ${result.exitCode}`;
|
||||
throw Object.assign(new Error(errorMessage), {
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Constants
|
||||
// ============================================================================
|
||||
@@ -111,9 +71,12 @@ export const execEnv = {
|
||||
* Validate branch name to prevent command injection.
|
||||
* Git branch names cannot contain: space, ~, ^, :, ?, *, [, \, or control chars.
|
||||
* We also reject shell metacharacters for safety.
|
||||
* The first character must not be '-' to prevent git argument injection.
|
||||
*/
|
||||
export function isValidBranchName(name: string): boolean {
|
||||
return /^[a-zA-Z0-9._\-/]+$/.test(name) && name.length < MAX_BRANCH_NAME_LENGTH;
|
||||
// First char must be alphanumeric, dot, underscore, or slash (not dash)
|
||||
// to prevent git option injection via names like "-flag" or "--option".
|
||||
return /^[a-zA-Z0-9._/][a-zA-Z0-9._\-/]*$/.test(name) && name.length < MAX_BRANCH_NAME_LENGTH;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -126,7 +126,7 @@ export function createWorktreeRoutes(
|
||||
requireValidWorktree,
|
||||
createListBranchesHandler()
|
||||
);
|
||||
router.post('/switch-branch', requireValidWorktree, createSwitchBranchHandler());
|
||||
router.post('/switch-branch', requireValidWorktree, createSwitchBranchHandler(events));
|
||||
router.post('/open-in-editor', validatePathParams('worktreePath'), createOpenInEditorHandler());
|
||||
router.post(
|
||||
'/open-in-terminal',
|
||||
@@ -210,7 +210,7 @@ export function createWorktreeRoutes(
|
||||
'/commit-log',
|
||||
validatePathParams('worktreePath'),
|
||||
requireValidWorktree,
|
||||
createCommitLogHandler()
|
||||
createCommitLogHandler(events)
|
||||
);
|
||||
|
||||
// Stash routes
|
||||
@@ -218,13 +218,13 @@ export function createWorktreeRoutes(
|
||||
'/stash-push',
|
||||
validatePathParams('worktreePath'),
|
||||
requireGitRepoOnly,
|
||||
createStashPushHandler()
|
||||
createStashPushHandler(events)
|
||||
);
|
||||
router.post(
|
||||
'/stash-list',
|
||||
validatePathParams('worktreePath'),
|
||||
requireGitRepoOnly,
|
||||
createStashListHandler()
|
||||
createStashListHandler(events)
|
||||
);
|
||||
router.post(
|
||||
'/stash-apply',
|
||||
@@ -236,7 +236,7 @@ export function createWorktreeRoutes(
|
||||
'/stash-drop',
|
||||
validatePathParams('worktreePath'),
|
||||
requireGitRepoOnly,
|
||||
createStashDropHandler()
|
||||
createStashDropHandler(events)
|
||||
);
|
||||
|
||||
// Cherry-pick route
|
||||
|
||||
@@ -10,7 +10,8 @@
|
||||
import type { Request, Response } from 'express';
|
||||
import path from 'path';
|
||||
import { stat } from 'fs/promises';
|
||||
import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js';
|
||||
import { getErrorMessage, logError, isValidBranchName } from '../common.js';
|
||||
import { execGitCommand } from '../../../lib/git.js';
|
||||
|
||||
export function createCheckoutBranchHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
|
||||
@@ -6,6 +6,9 @@
|
||||
*
|
||||
* Git business logic is delegated to cherry-pick-service.ts.
|
||||
* Events are emitted at key lifecycle points for WebSocket subscribers.
|
||||
* The global event emitter is passed into the service so all lifecycle
|
||||
* events (started, success, conflict, abort, verify-failed) are broadcast
|
||||
* to WebSocket clients.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidWorktree middleware in index.ts
|
||||
@@ -58,8 +61,8 @@ export function createCherryPickHandler(events: EventEmitter) {
|
||||
}
|
||||
}
|
||||
|
||||
// Verify each commit exists via the service
|
||||
const invalidHash = await verifyCommits(resolvedWorktreePath, commitHashes);
|
||||
// Verify each commit exists via the service; emits cherry-pick:verify-failed if any hash is missing
|
||||
const invalidHash = await verifyCommits(resolvedWorktreePath, commitHashes, events);
|
||||
if (invalidHash !== null) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
@@ -68,24 +71,12 @@ export function createCherryPickHandler(events: EventEmitter) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Emit started event
|
||||
events.emit('cherry-pick:started', {
|
||||
worktreePath: resolvedWorktreePath,
|
||||
commitHashes,
|
||||
options,
|
||||
});
|
||||
|
||||
// Execute the cherry-pick via the service
|
||||
const result = await runCherryPick(resolvedWorktreePath, commitHashes, options);
|
||||
// Execute the cherry-pick via the service.
|
||||
// The service emits: cherry-pick:started, cherry-pick:success, cherry-pick:conflict,
|
||||
// and cherry-pick:abort at the appropriate lifecycle points.
|
||||
const result = await runCherryPick(resolvedWorktreePath, commitHashes, options, events);
|
||||
|
||||
if (result.success) {
|
||||
// Emit success event
|
||||
events.emit('cherry-pick:success', {
|
||||
worktreePath: resolvedWorktreePath,
|
||||
commitHashes,
|
||||
branch: result.branch,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
@@ -96,13 +87,6 @@ export function createCherryPickHandler(events: EventEmitter) {
|
||||
},
|
||||
});
|
||||
} else if (result.hasConflicts) {
|
||||
// Emit conflict event
|
||||
events.emit('cherry-pick:conflict', {
|
||||
worktreePath: resolvedWorktreePath,
|
||||
commitHashes,
|
||||
aborted: result.aborted,
|
||||
});
|
||||
|
||||
res.status(409).json({
|
||||
success: false,
|
||||
error: result.error,
|
||||
@@ -111,7 +95,7 @@ export function createCherryPickHandler(events: EventEmitter) {
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
// Emit failure event
|
||||
// Emit failure event for unexpected (non-conflict) errors
|
||||
events.emit('cherry-pick:failure', {
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
|
||||
@@ -1,29 +1,22 @@
|
||||
/**
|
||||
* POST /commit-log endpoint - Get recent commit history for a worktree
|
||||
*
|
||||
* Uses the same robust parsing approach as branch-commit-log-service:
|
||||
* a single `git log --name-only` call with custom separators to fetch
|
||||
* both commit metadata and file lists, avoiding N+1 git invocations.
|
||||
* The handler only validates input, invokes the service, streams lifecycle
|
||||
* events via the EventEmitter, and sends the final JSON response.
|
||||
*
|
||||
* Git business logic is delegated to commit-log-service.ts.
|
||||
* Events are emitted at key lifecycle points for WebSocket subscribers.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidWorktree middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execGitCommand, getErrorMessage, logError } from '../common.js';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { getCommitLog } from '../../../services/commit-log-service.js';
|
||||
|
||||
interface CommitResult {
|
||||
hash: string;
|
||||
shortHash: string;
|
||||
author: string;
|
||||
authorEmail: string;
|
||||
date: string;
|
||||
subject: string;
|
||||
body: string;
|
||||
files: string[];
|
||||
}
|
||||
|
||||
export function createCommitLogHandler() {
|
||||
export function createCommitLogHandler(events: EventEmitter) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, limit = 20 } = req.body as {
|
||||
@@ -39,112 +32,39 @@ export function createCommitLogHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Clamp limit to a reasonable range
|
||||
const commitLimit = Math.min(Math.max(1, Number(limit) || 20), 100);
|
||||
// Emit start event so the frontend can observe progress
|
||||
events.emit('commitLog:start', {
|
||||
worktreePath,
|
||||
limit,
|
||||
});
|
||||
|
||||
// Use custom separators to parse both metadata and file lists from
|
||||
// a single git log invocation (same approach as branch-commit-log-service).
|
||||
//
|
||||
// -m causes merge commits to be diffed against each parent so all
|
||||
// files touched by the merge are listed (without -m, --name-only
|
||||
// produces no file output for merge commits because they have 2+ parents).
|
||||
// This means merge commits appear multiple times in the output (once per
|
||||
// parent), so we deduplicate by hash below and merge their file lists.
|
||||
// We over-fetch (2x the limit) to compensate for -m duplicating merge
|
||||
// commit entries, then trim the result to the requested limit.
|
||||
const COMMIT_SEP = '---COMMIT---';
|
||||
const META_END = '---META_END---';
|
||||
const fetchLimit = commitLimit * 2;
|
||||
// Delegate all Git work to the service
|
||||
const result = await getCommitLog(worktreePath, limit);
|
||||
|
||||
const logOutput = await execGitCommand(
|
||||
[
|
||||
'log',
|
||||
`--max-count=${fetchLimit}`,
|
||||
'-m',
|
||||
'--name-only',
|
||||
`--format=${COMMIT_SEP}%n%H%n%h%n%an%n%ae%n%aI%n%s%n%b${META_END}`,
|
||||
],
|
||||
worktreePath
|
||||
);
|
||||
// Emit progress with the number of commits fetched
|
||||
events.emit('commitLog:progress', {
|
||||
worktreePath,
|
||||
branch: result.branch,
|
||||
commitsLoaded: result.total,
|
||||
});
|
||||
|
||||
// Split output into per-commit blocks and drop the empty first chunk
|
||||
// (the output starts with ---COMMIT---).
|
||||
const commitBlocks = logOutput.split(COMMIT_SEP).filter((block) => block.trim());
|
||||
|
||||
// Use a Map to deduplicate merge commit entries (which appear once per
|
||||
// parent when -m is used) while preserving insertion order.
|
||||
const commitMap = new Map<string, CommitResult>();
|
||||
|
||||
for (const block of commitBlocks) {
|
||||
const metaEndIdx = block.indexOf(META_END);
|
||||
if (metaEndIdx === -1) continue; // malformed block, skip
|
||||
|
||||
// --- Parse metadata (everything before ---META_END---) ---
|
||||
const metaRaw = block.substring(0, metaEndIdx);
|
||||
const metaLines = metaRaw.split('\n');
|
||||
|
||||
// The first line may be empty (newline right after COMMIT_SEP), skip it
|
||||
const nonEmptyStart = metaLines.findIndex((l) => l.trim() !== '');
|
||||
if (nonEmptyStart === -1) continue;
|
||||
|
||||
const fields = metaLines.slice(nonEmptyStart);
|
||||
if (fields.length < 6) continue; // need at least hash..subject
|
||||
|
||||
const hash = fields[0].trim();
|
||||
const shortHash = fields[1].trim();
|
||||
const author = fields[2].trim();
|
||||
const authorEmail = fields[3].trim();
|
||||
const date = fields[4].trim();
|
||||
const subject = fields[5].trim();
|
||||
const body = fields.slice(6).join('\n').trim();
|
||||
|
||||
// --- Parse file list (everything after ---META_END---) ---
|
||||
const filesRaw = block.substring(metaEndIdx + META_END.length);
|
||||
const blockFiles = filesRaw
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim());
|
||||
|
||||
// Merge file lists for duplicate entries (merge commits with -m)
|
||||
const existing = commitMap.get(hash);
|
||||
if (existing) {
|
||||
// Add new files to the existing entry's file set
|
||||
const fileSet = new Set(existing.files);
|
||||
for (const f of blockFiles) fileSet.add(f);
|
||||
existing.files = [...fileSet];
|
||||
} else {
|
||||
commitMap.set(hash, {
|
||||
hash,
|
||||
shortHash,
|
||||
author,
|
||||
authorEmail,
|
||||
date,
|
||||
subject,
|
||||
body,
|
||||
files: [...new Set(blockFiles)],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Trim to the requested limit (we over-fetched to account for -m duplicates)
|
||||
const commits = [...commitMap.values()].slice(0, commitLimit);
|
||||
|
||||
// Get current branch name
|
||||
const branchOutput = await execGitCommand(
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
worktreePath
|
||||
);
|
||||
const branch = branchOutput.trim();
|
||||
// Emit complete event
|
||||
events.emit('commitLog:complete', {
|
||||
worktreePath,
|
||||
branch: result.branch,
|
||||
total: result.total,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch,
|
||||
commits,
|
||||
total: commits.length,
|
||||
},
|
||||
result,
|
||||
});
|
||||
} catch (error) {
|
||||
// Emit error event so the frontend can react
|
||||
events.emit('commitLog:error', {
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
|
||||
logError(error, 'Get commit log failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
|
||||
@@ -8,11 +8,11 @@ import {
|
||||
logError,
|
||||
execAsync,
|
||||
execEnv,
|
||||
execGitCommand,
|
||||
isValidBranchName,
|
||||
isValidRemoteName,
|
||||
isGhCliAvailable,
|
||||
} from '../common.js';
|
||||
import { execGitCommand } from '../../../lib/git.js';
|
||||
import { spawnProcess } from '@automaker/platform';
|
||||
import { updateWorktreePRInfo } from '../../../lib/worktree-metadata.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
@@ -22,8 +22,8 @@ import {
|
||||
normalizePath,
|
||||
ensureInitialCommit,
|
||||
isValidBranchName,
|
||||
execGitCommand,
|
||||
} from '../common.js';
|
||||
import { execGitCommand } from '../../../lib/git.js';
|
||||
import { trackBranch } from './branch-tracking.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { runInitScript } from '../../../services/init-script-service.js';
|
||||
|
||||
@@ -6,7 +6,8 @@ import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { isGitRepo } from '@automaker/git-utils';
|
||||
import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js';
|
||||
import { getErrorMessage, logError, isValidBranchName } from '../common.js';
|
||||
import { execGitCommand } from '../../../lib/git.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -17,13 +17,10 @@
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
import { getErrorMessage, logError } from '@automaker/utils';
|
||||
import { execGitCommand } from '../../../lib/git.js';
|
||||
|
||||
/**
|
||||
* Validate that a file path does not escape the worktree directory.
|
||||
@@ -72,9 +69,7 @@ export function createDiscardChangesHandler() {
|
||||
}
|
||||
|
||||
// Check for uncommitted changes first
|
||||
const { stdout: status } = await execFileAsync('git', ['status', '--porcelain'], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const status = await execGitCommand(['status', '--porcelain'], worktreePath);
|
||||
|
||||
if (!status.trim()) {
|
||||
res.json({
|
||||
@@ -88,12 +83,9 @@ export function createDiscardChangesHandler() {
|
||||
}
|
||||
|
||||
// Get branch name before discarding
|
||||
const { stdout: branchOutput } = await execFileAsync(
|
||||
'git',
|
||||
const branchOutput = await execGitCommand(
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
{
|
||||
cwd: worktreePath,
|
||||
}
|
||||
worktreePath
|
||||
);
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
@@ -162,9 +154,7 @@ export function createDiscardChangesHandler() {
|
||||
// 1. Unstage selected staged files (using execFile to bypass shell)
|
||||
if (stagedFiles.length > 0) {
|
||||
try {
|
||||
await execFileAsync('git', ['reset', 'HEAD', '--', ...stagedFiles], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
await execGitCommand(['reset', 'HEAD', '--', ...stagedFiles], worktreePath);
|
||||
} catch (error) {
|
||||
const msg = getErrorMessage(error);
|
||||
logError(error, `Failed to unstage files: ${msg}`);
|
||||
@@ -175,9 +165,7 @@ export function createDiscardChangesHandler() {
|
||||
// 2. Revert selected tracked file changes
|
||||
if (trackedModified.length > 0) {
|
||||
try {
|
||||
await execFileAsync('git', ['checkout', '--', ...trackedModified], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
await execGitCommand(['checkout', '--', ...trackedModified], worktreePath);
|
||||
} catch (error) {
|
||||
const msg = getErrorMessage(error);
|
||||
logError(error, `Failed to revert tracked files: ${msg}`);
|
||||
@@ -188,9 +176,7 @@ export function createDiscardChangesHandler() {
|
||||
// 3. Remove selected untracked files
|
||||
if (untrackedFiles.length > 0) {
|
||||
try {
|
||||
await execFileAsync('git', ['clean', '-fd', '--', ...untrackedFiles], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
await execGitCommand(['clean', '-fd', '--', ...untrackedFiles], worktreePath);
|
||||
} catch (error) {
|
||||
const msg = getErrorMessage(error);
|
||||
logError(error, `Failed to clean untracked files: ${msg}`);
|
||||
@@ -201,9 +187,7 @@ export function createDiscardChangesHandler() {
|
||||
const fileCount = files.length;
|
||||
|
||||
// Verify the remaining state
|
||||
const { stdout: finalStatus } = await execFileAsync('git', ['status', '--porcelain'], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const finalStatus = await execGitCommand(['status', '--porcelain'], worktreePath);
|
||||
|
||||
const remainingCount = finalStatus.trim()
|
||||
? finalStatus.trim().split('\n').filter(Boolean).length
|
||||
@@ -233,7 +217,7 @@ export function createDiscardChangesHandler() {
|
||||
|
||||
// 1. Reset any staged changes
|
||||
try {
|
||||
await execFileAsync('git', ['reset', 'HEAD'], { cwd: worktreePath });
|
||||
await execGitCommand(['reset', 'HEAD'], worktreePath);
|
||||
} catch (error) {
|
||||
const msg = getErrorMessage(error);
|
||||
logError(error, `git reset HEAD failed: ${msg}`);
|
||||
@@ -242,7 +226,7 @@ export function createDiscardChangesHandler() {
|
||||
|
||||
// 2. Discard changes in tracked files
|
||||
try {
|
||||
await execFileAsync('git', ['checkout', '.'], { cwd: worktreePath });
|
||||
await execGitCommand(['checkout', '.'], worktreePath);
|
||||
} catch (error) {
|
||||
const msg = getErrorMessage(error);
|
||||
logError(error, `git checkout . failed: ${msg}`);
|
||||
@@ -251,7 +235,7 @@ export function createDiscardChangesHandler() {
|
||||
|
||||
// 3. Remove untracked files and directories
|
||||
try {
|
||||
await execFileAsync('git', ['clean', '-fd'], { cwd: worktreePath });
|
||||
await execGitCommand(['clean', '-fd'], worktreePath);
|
||||
} catch (error) {
|
||||
const msg = getErrorMessage(error);
|
||||
logError(error, `git clean -fd failed: ${msg}`);
|
||||
@@ -259,9 +243,7 @@ export function createDiscardChangesHandler() {
|
||||
}
|
||||
|
||||
// Verify all changes were discarded
|
||||
const { stdout: finalStatus } = await execFileAsync('git', ['status', '--porcelain'], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const finalStatus = await execGitCommand(['status', '--porcelain'], worktreePath);
|
||||
|
||||
if (finalStatus.trim()) {
|
||||
const remainingCount = finalStatus.trim().split('\n').filter(Boolean).length;
|
||||
|
||||
@@ -8,13 +8,8 @@
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
const logger = createLogger('Worktree');
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { performMerge } from '../../../services/merge-service.js';
|
||||
|
||||
export function createMergeHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -38,118 +33,34 @@ export function createMergeHandler() {
|
||||
// Determine the target branch (default to 'main')
|
||||
const mergeTo = targetBranch || 'main';
|
||||
|
||||
// Validate source branch exists
|
||||
try {
|
||||
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
|
||||
} catch {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Branch "${branchName}" does not exist`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate target branch exists
|
||||
try {
|
||||
await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath });
|
||||
} catch {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Target branch "${mergeTo}" does not exist`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Merge the feature branch into the target branch
|
||||
const mergeCmd = options?.squash
|
||||
? `git merge --squash ${branchName}`
|
||||
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`;
|
||||
|
||||
try {
|
||||
await execAsync(mergeCmd, { cwd: projectPath });
|
||||
} catch (mergeError: unknown) {
|
||||
// Check if this is a merge conflict
|
||||
const err = mergeError as { stdout?: string; stderr?: string; message?: string };
|
||||
const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
|
||||
const hasConflicts =
|
||||
output.includes('CONFLICT') || output.includes('Automatic merge failed');
|
||||
|
||||
if (hasConflicts) {
|
||||
// Get list of conflicted files
|
||||
let conflictFiles: string[] = [];
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
projectPath
|
||||
);
|
||||
conflictFiles = diffOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f: string) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we can't get the file list, that's okay
|
||||
}
|
||||
// Delegate all merge logic to the service
|
||||
const result = await performMerge(projectPath, branchName, worktreePath, mergeTo, options);
|
||||
|
||||
if (!result.success) {
|
||||
if (result.hasConflicts) {
|
||||
// Return conflict-specific error message that frontend can detect
|
||||
res.status(409).json({
|
||||
success: false,
|
||||
error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. Please resolve conflicts manually.`,
|
||||
error: result.error,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
conflictFiles: result.conflictFiles,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Re-throw non-conflict errors to be handled by outer catch
|
||||
throw mergeError;
|
||||
}
|
||||
|
||||
// If squash merge, need to commit
|
||||
if (options?.squash) {
|
||||
await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, {
|
||||
cwd: projectPath,
|
||||
// Non-conflict service errors (e.g. branch not found, invalid name)
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: result.error,
|
||||
});
|
||||
}
|
||||
|
||||
// Optionally delete the worktree and branch after merging
|
||||
let worktreeDeleted = false;
|
||||
let branchDeleted = false;
|
||||
|
||||
if (options?.deleteWorktreeAndBranch) {
|
||||
// Remove the worktree
|
||||
try {
|
||||
await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
|
||||
worktreeDeleted = true;
|
||||
} catch {
|
||||
// Try with prune if remove fails
|
||||
try {
|
||||
await execGitCommand(['worktree', 'prune'], projectPath);
|
||||
worktreeDeleted = true;
|
||||
} catch {
|
||||
logger.warn(`Failed to remove worktree: ${worktreePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the branch (but not main/master)
|
||||
if (branchName !== 'main' && branchName !== 'master') {
|
||||
if (!isValidBranchName(branchName)) {
|
||||
logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`);
|
||||
} else {
|
||||
try {
|
||||
await execGitCommand(['branch', '-D', branchName], projectPath);
|
||||
branchDeleted = true;
|
||||
} catch {
|
||||
logger.warn(`Failed to delete branch: ${branchName}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
mergedBranch: branchName,
|
||||
targetBranch: mergeTo,
|
||||
deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined,
|
||||
mergedBranch: result.mergedBranch,
|
||||
targetBranch: result.targetBranch,
|
||||
deleted: result.deleted,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Merge worktree failed');
|
||||
|
||||
@@ -9,12 +9,16 @@
|
||||
* 5. Detects merge conflicts from both pull and stash reapplication
|
||||
* 6. Returns structured conflict information for AI-assisted resolution
|
||||
*
|
||||
* Git business logic is delegated to pull-service.ts.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidWorktree middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execGitCommand, getErrorMessage, logError } from '../common.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { performPull } from '../../../services/pull-service.js';
|
||||
import type { PullResult } from '../../../services/pull-service.js';
|
||||
|
||||
export function createPullHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -34,323 +38,66 @@ export function createPullHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get current branch name
|
||||
const branchOutput = await execGitCommand(
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
worktreePath
|
||||
);
|
||||
const branchName = branchOutput.trim();
|
||||
// Execute the pull via the service
|
||||
const result = await performPull(worktreePath, { remote, stashIfNeeded });
|
||||
|
||||
// Check for detached HEAD state
|
||||
if (branchName === 'HEAD') {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Cannot pull in detached HEAD state. Please checkout a branch first.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Use specified remote or default to 'origin'
|
||||
const targetRemote = remote || 'origin';
|
||||
|
||||
// Fetch latest from remote
|
||||
try {
|
||||
await execGitCommand(['fetch', targetRemote], worktreePath);
|
||||
} catch (fetchError) {
|
||||
const errorMsg = getErrorMessage(fetchError);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: `Failed to fetch from remote '${targetRemote}': ${errorMsg}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if there are local changes that would be overwritten
|
||||
const statusOutput = await execGitCommand(['status', '--porcelain'], worktreePath);
|
||||
const hasLocalChanges = statusOutput.trim().length > 0;
|
||||
|
||||
// Parse changed files for the response
|
||||
let localChangedFiles: string[] = [];
|
||||
if (hasLocalChanges) {
|
||||
localChangedFiles = statusOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => line.trim().length > 0)
|
||||
.map((line) => line.substring(3).trim());
|
||||
}
|
||||
|
||||
// If there are local changes and stashIfNeeded is not requested, return info
|
||||
if (hasLocalChanges && !stashIfNeeded) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: false,
|
||||
hasLocalChanges: true,
|
||||
localChangedFiles,
|
||||
message:
|
||||
'Local changes detected. Use stashIfNeeded to automatically stash and reapply changes.',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Stash local changes if needed
|
||||
let didStash = false;
|
||||
if (hasLocalChanges && stashIfNeeded) {
|
||||
try {
|
||||
const stashMessage = `automaker-pull-stash: Pre-pull stash on ${branchName}`;
|
||||
await execGitCommand(
|
||||
['stash', 'push', '--include-untracked', '-m', stashMessage],
|
||||
worktreePath
|
||||
);
|
||||
didStash = true;
|
||||
} catch (stashError) {
|
||||
const errorMsg = getErrorMessage(stashError);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: `Failed to stash local changes: ${errorMsg}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the branch has upstream tracking
|
||||
let hasUpstream = false;
|
||||
try {
|
||||
await execGitCommand(
|
||||
['rev-parse', '--abbrev-ref', `${branchName}@{upstream}`],
|
||||
worktreePath
|
||||
);
|
||||
hasUpstream = true;
|
||||
} catch {
|
||||
// No upstream tracking - check if the remote branch exists
|
||||
try {
|
||||
await execGitCommand(
|
||||
['rev-parse', '--verify', `${targetRemote}/${branchName}`],
|
||||
worktreePath
|
||||
);
|
||||
hasUpstream = true; // Remote branch exists, we can pull from it
|
||||
} catch {
|
||||
// Remote branch doesn't exist either
|
||||
if (didStash) {
|
||||
// Reapply stash since we won't be pulling
|
||||
try {
|
||||
await execGitCommand(['stash', 'pop'], worktreePath);
|
||||
} catch {
|
||||
// Stash pop failed - leave it in stash list for manual recovery
|
||||
}
|
||||
}
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Branch '${branchName}' has no upstream branch on remote '${targetRemote}'. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Pull latest changes
|
||||
let pullConflict = false;
|
||||
let pullConflictFiles: string[] = [];
|
||||
try {
|
||||
const pullOutput = await execGitCommand(['pull', targetRemote, branchName], worktreePath);
|
||||
|
||||
// Check if we pulled any changes
|
||||
const alreadyUpToDate = pullOutput.includes('Already up to date');
|
||||
|
||||
// If no stash to reapply, return success
|
||||
if (!didStash) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: !alreadyUpToDate,
|
||||
hasLocalChanges: false,
|
||||
stashed: false,
|
||||
stashRestored: false,
|
||||
message: alreadyUpToDate ? 'Already up to date' : 'Pulled latest changes',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
} catch (pullError: unknown) {
|
||||
const err = pullError as { stderr?: string; stdout?: string; message?: string };
|
||||
const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`;
|
||||
|
||||
// Check for merge conflicts from the pull itself
|
||||
if (errorOutput.includes('CONFLICT') || errorOutput.includes('Automatic merge failed')) {
|
||||
pullConflict = true;
|
||||
// Get list of conflicted files
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
worktreePath
|
||||
);
|
||||
pullConflictFiles = diffOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we can't get the file list, that's okay
|
||||
}
|
||||
} else {
|
||||
// Non-conflict pull error
|
||||
if (didStash) {
|
||||
// Try to restore stash since pull failed
|
||||
try {
|
||||
await execGitCommand(['stash', 'pop'], worktreePath);
|
||||
} catch {
|
||||
// Leave stash in place for manual recovery
|
||||
}
|
||||
}
|
||||
|
||||
// Check for common errors
|
||||
const errorMsg = err.stderr || err.message || 'Pull failed';
|
||||
if (errorMsg.includes('no tracking information')) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: errorMsg,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// If pull had conflicts, return conflict info (don't try stash pop)
|
||||
if (pullConflict) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: true,
|
||||
hasConflicts: true,
|
||||
conflictSource: 'pull',
|
||||
conflictFiles: pullConflictFiles,
|
||||
stashed: didStash,
|
||||
stashRestored: false,
|
||||
message:
|
||||
`Pull resulted in merge conflicts. ${didStash ? 'Your local changes are still stashed.' : ''}`.trim(),
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Pull succeeded, now try to reapply stash
|
||||
if (didStash) {
|
||||
try {
|
||||
const stashPopOutput = await execGitCommand(['stash', 'pop'], worktreePath);
|
||||
const stashPopCombined = stashPopOutput || '';
|
||||
|
||||
// Check if stash pop had conflicts
|
||||
if (
|
||||
stashPopCombined.includes('CONFLICT') ||
|
||||
stashPopCombined.includes('Merge conflict')
|
||||
) {
|
||||
// Get conflicted files
|
||||
let stashConflictFiles: string[] = [];
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
worktreePath
|
||||
);
|
||||
stashConflictFiles = diffOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we can't get the file list, that's okay
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: true,
|
||||
hasConflicts: true,
|
||||
conflictSource: 'stash',
|
||||
conflictFiles: stashConflictFiles,
|
||||
stashed: true,
|
||||
stashRestored: true, // Stash was applied but with conflicts
|
||||
message:
|
||||
'Pull succeeded but reapplying your stashed changes resulted in merge conflicts.',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Stash pop succeeded cleanly
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: true,
|
||||
hasConflicts: false,
|
||||
stashed: true,
|
||||
stashRestored: true,
|
||||
message: 'Pulled latest changes and restored your stashed changes.',
|
||||
},
|
||||
});
|
||||
} catch (stashPopError: unknown) {
|
||||
const err = stashPopError as { stderr?: string; stdout?: string; message?: string };
|
||||
const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`;
|
||||
|
||||
// Check if stash pop failed due to conflicts
|
||||
if (errorOutput.includes('CONFLICT') || errorOutput.includes('Merge conflict')) {
|
||||
let stashConflictFiles: string[] = [];
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
worktreePath
|
||||
);
|
||||
stashConflictFiles = diffOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we can't get the file list, that's okay
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: true,
|
||||
hasConflicts: true,
|
||||
conflictSource: 'stash',
|
||||
conflictFiles: stashConflictFiles,
|
||||
stashed: true,
|
||||
stashRestored: true,
|
||||
message:
|
||||
'Pull succeeded but reapplying your stashed changes resulted in merge conflicts.',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Non-conflict stash pop error - stash is still in the stash list
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: true,
|
||||
hasConflicts: false,
|
||||
stashed: true,
|
||||
stashRestored: false,
|
||||
message:
|
||||
'Pull succeeded but failed to reapply stashed changes. Your changes are still in the stash list.',
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
// Map service result to HTTP response
|
||||
mapResultToResponse(res, result);
|
||||
} catch (error) {
|
||||
logError(error, 'Pull failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Map a PullResult from the service to the appropriate HTTP response.
|
||||
*
|
||||
* - Successful results (including local-changes-detected info) → 200
|
||||
* - Validation/state errors (detached HEAD, no upstream) → 400
|
||||
* - Operational errors (fetch/stash/pull failures) → 500
|
||||
*/
|
||||
function mapResultToResponse(res: Response, result: PullResult): void {
|
||||
if (!result.success && result.error) {
|
||||
// Determine the appropriate HTTP status for errors
|
||||
const statusCode = isClientError(result.error) ? 400 : 500;
|
||||
res.status(statusCode).json({
|
||||
success: false,
|
||||
error: result.error,
|
||||
...(result.stashRecoveryFailed && { stashRecoveryFailed: true }),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Success case (includes partial success like local changes detected, conflicts, etc.)
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: result.branch,
|
||||
pulled: result.pulled,
|
||||
hasLocalChanges: result.hasLocalChanges,
|
||||
localChangedFiles: result.localChangedFiles,
|
||||
hasConflicts: result.hasConflicts,
|
||||
conflictSource: result.conflictSource,
|
||||
conflictFiles: result.conflictFiles,
|
||||
stashed: result.stashed,
|
||||
stashRestored: result.stashRestored,
|
||||
message: result.message,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether an error message represents a client error (400)
|
||||
* vs a server error (500).
|
||||
*
|
||||
* Client errors are validation issues or invalid git state that the user
|
||||
* needs to resolve (e.g. detached HEAD, no upstream, no tracking info).
|
||||
*/
|
||||
function isClientError(errorMessage: string): boolean {
|
||||
return (
|
||||
errorMessage.includes('detached HEAD') ||
|
||||
errorMessage.includes('has no upstream branch') ||
|
||||
errorMessage.includes('no tracking information')
|
||||
);
|
||||
}
|
||||
|
||||
@@ -96,6 +96,20 @@ export function createRebaseHandler(events: EventEmitter) {
|
||||
conflictFiles: result.conflictFiles,
|
||||
aborted: result.aborted,
|
||||
});
|
||||
} else {
|
||||
// Emit failure event for non-conflict failures
|
||||
events.emit('rebase:failed', {
|
||||
worktreePath: resolvedWorktreePath,
|
||||
branch: result.branch,
|
||||
ontoBranch: result.ontoBranch,
|
||||
error: result.error,
|
||||
});
|
||||
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: result.error ?? 'Rebase failed',
|
||||
hasConflicts: false,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
// Emit failure event
|
||||
|
||||
@@ -4,34 +4,15 @@
|
||||
* Applies a specific stash entry to the working directory.
|
||||
* Can either "apply" (keep stash) or "pop" (remove stash after applying).
|
||||
*
|
||||
* All git operations and conflict detection are delegated to StashService.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo) is handled by
|
||||
* the requireGitRepoOnly middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
/**
|
||||
* Retrieves the list of files with unmerged (conflicted) entries using git diff.
|
||||
*/
|
||||
async function getConflictedFiles(worktreePath: string): Promise<string[]> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync('git', ['diff', '--name-only', '--diff-filter=U'], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
return stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we can't get the file list, return empty array
|
||||
return [];
|
||||
}
|
||||
}
|
||||
import { applyOrPop } from '../../../services/stash-service.js';
|
||||
|
||||
export function createStashApplyHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -68,65 +49,26 @@ export function createStashApplyHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
const stashRef = `stash@{${idx}}`;
|
||||
const operation = pop ? 'pop' : 'apply';
|
||||
// Delegate all stash apply/pop logic to the service
|
||||
const result = await applyOrPop(worktreePath, idx, { pop });
|
||||
|
||||
try {
|
||||
const { stdout, stderr } = await execFileAsync('git', ['stash', operation, stashRef], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
const output = `${stdout}\n${stderr}`;
|
||||
|
||||
// Check for conflict markers in the output
|
||||
if (output.includes('CONFLICT') || output.includes('Merge conflict')) {
|
||||
const conflictFiles = await getConflictedFiles(worktreePath);
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
applied: true,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
operation,
|
||||
stashIndex,
|
||||
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
applied: true,
|
||||
hasConflicts: false,
|
||||
operation,
|
||||
stashIndex,
|
||||
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} successfully`,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const errorMsg = getErrorMessage(error);
|
||||
|
||||
// Check if the error is due to conflicts
|
||||
if (errorMsg.includes('CONFLICT') || errorMsg.includes('Merge conflict')) {
|
||||
const conflictFiles = await getConflictedFiles(worktreePath);
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
applied: true,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
operation,
|
||||
stashIndex,
|
||||
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
throw error;
|
||||
if (!result.success) {
|
||||
logError(new Error(result.error ?? 'Stash apply failed'), 'Stash apply failed');
|
||||
res.status(500).json({ success: false, error: result.error });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
applied: result.applied,
|
||||
hasConflicts: result.hasConflicts,
|
||||
conflictFiles: result.conflictFiles,
|
||||
operation: result.operation,
|
||||
stashIndex: result.stashIndex,
|
||||
message: result.message,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Stash apply failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
|
||||
@@ -1,20 +1,22 @@
|
||||
/**
|
||||
* POST /stash-drop endpoint - Drop (delete) a stash entry
|
||||
*
|
||||
* Removes a specific stash entry from the stash list.
|
||||
* The handler only validates input, invokes the service, streams lifecycle
|
||||
* events via the EventEmitter, and sends the final JSON response.
|
||||
*
|
||||
* Git business logic is delegated to stash-service.ts.
|
||||
* Events are emitted at key lifecycle points for WebSocket subscribers.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo) is handled by
|
||||
* the requireGitRepoOnly middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { dropStash } from '../../../services/stash-service.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
export function createStashDropHandler() {
|
||||
export function createStashDropHandler(events: EventEmitter) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, stashIndex } = req.body as {
|
||||
@@ -38,21 +40,42 @@ export function createStashDropHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
const stashRef = `stash@{${stashIndex}}`;
|
||||
// Emit start event so the frontend can observe progress
|
||||
events.emit('stash:start', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
stashRef: `stash@{${stashIndex}}`,
|
||||
operation: 'drop',
|
||||
});
|
||||
|
||||
await execFileAsync('git', ['stash', 'drop', stashRef], {
|
||||
cwd: worktreePath,
|
||||
// Delegate all Git work to the service
|
||||
const result = await dropStash(worktreePath, stashIndex);
|
||||
|
||||
// Emit success event
|
||||
events.emit('stash:success', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation: 'drop',
|
||||
dropped: result.dropped,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
dropped: true,
|
||||
stashIndex,
|
||||
message: `Stash ${stashRef} dropped successfully`,
|
||||
dropped: result.dropped,
|
||||
stashIndex: result.stashIndex,
|
||||
message: result.message,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
// Emit error event so the frontend can react
|
||||
events.emit('stash:failure', {
|
||||
worktreePath: req.body?.worktreePath,
|
||||
stashIndex: req.body?.stashIndex,
|
||||
operation: 'drop',
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
|
||||
logError(error, 'Stash drop failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
|
||||
@@ -1,29 +1,22 @@
|
||||
/**
|
||||
* POST /stash-list endpoint - List all stashes in a worktree
|
||||
*
|
||||
* Returns a list of all stash entries with their index, message, branch, and date.
|
||||
* Also includes the list of files changed in each stash.
|
||||
* The handler only validates input, invokes the service, streams lifecycle
|
||||
* events via the EventEmitter, and sends the final JSON response.
|
||||
*
|
||||
* Git business logic is delegated to stash-service.ts.
|
||||
* Events are emitted at key lifecycle points for WebSocket subscribers.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo) is handled by
|
||||
* the requireGitRepoOnly middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { listStash } from '../../../services/stash-service.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
interface StashEntry {
|
||||
index: number;
|
||||
message: string;
|
||||
branch: string;
|
||||
date: string;
|
||||
files: string[];
|
||||
}
|
||||
|
||||
export function createStashListHandler() {
|
||||
export function createStashListHandler(events: EventEmitter) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath } = req.body as {
|
||||
@@ -38,84 +31,44 @@ export function createStashListHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get stash list with format: index, message, date
|
||||
// Use %aI (strict ISO 8601) instead of %ai to ensure cross-browser compatibility
|
||||
const { stdout: stashOutput } = await execFileAsync(
|
||||
'git',
|
||||
['stash', 'list', '--format=%gd|||%s|||%aI'],
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
// Emit start event so the frontend can observe progress
|
||||
events.emit('stash:start', {
|
||||
worktreePath,
|
||||
operation: 'list',
|
||||
});
|
||||
|
||||
if (!stashOutput.trim()) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
stashes: [],
|
||||
total: 0,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
// Delegate all Git work to the service
|
||||
const result = await listStash(worktreePath);
|
||||
|
||||
const stashLines = stashOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((l) => l.trim());
|
||||
const stashes: StashEntry[] = [];
|
||||
// Emit progress with stash count
|
||||
events.emit('stash:progress', {
|
||||
worktreePath,
|
||||
operation: 'list',
|
||||
total: result.total,
|
||||
});
|
||||
|
||||
for (const line of stashLines) {
|
||||
const parts = line.split('|||');
|
||||
if (parts.length < 3) continue;
|
||||
|
||||
const refSpec = parts[0].trim(); // e.g., "stash@{0}"
|
||||
const message = parts[1].trim();
|
||||
const date = parts[2].trim();
|
||||
|
||||
// Extract index from stash@{N}; skip entries that don't match the expected format
|
||||
const indexMatch = refSpec.match(/stash@\{(\d+)\}/);
|
||||
if (!indexMatch) continue;
|
||||
const index = parseInt(indexMatch[1], 10);
|
||||
|
||||
// Extract branch name from message (format: "WIP on branch: hash message" or "On branch: hash message")
|
||||
let branch = '';
|
||||
const branchMatch = message.match(/^(?:WIP on|On) ([^:]+):/);
|
||||
if (branchMatch) {
|
||||
branch = branchMatch[1];
|
||||
}
|
||||
|
||||
// Get list of files in this stash
|
||||
let files: string[] = [];
|
||||
try {
|
||||
const { stdout: filesOutput } = await execFileAsync(
|
||||
'git',
|
||||
['stash', 'show', refSpec, '--name-only'],
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
files = filesOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim());
|
||||
} catch {
|
||||
// Ignore errors getting file list
|
||||
}
|
||||
|
||||
stashes.push({
|
||||
index,
|
||||
message,
|
||||
branch,
|
||||
date,
|
||||
files,
|
||||
});
|
||||
}
|
||||
// Emit success event
|
||||
events.emit('stash:success', {
|
||||
worktreePath,
|
||||
operation: 'list',
|
||||
total: result.total,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
stashes,
|
||||
total: stashes.length,
|
||||
stashes: result.stashes,
|
||||
total: result.total,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
// Emit error event so the frontend can react
|
||||
events.emit('stash:failure', {
|
||||
worktreePath: req.body?.worktreePath,
|
||||
operation: 'list',
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
|
||||
logError(error, 'Stash list failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
/**
|
||||
* POST /stash-push endpoint - Stash changes in a worktree
|
||||
*
|
||||
* Stashes uncommitted changes (including untracked files) with an optional message.
|
||||
* Supports selective file stashing when a files array is provided.
|
||||
* The handler only validates input, invokes the service, streams lifecycle
|
||||
* events via the EventEmitter, and sends the final JSON response.
|
||||
*
|
||||
* Git business logic is delegated to stash-service.ts.
|
||||
* Events are emitted at key lifecycle points for WebSocket subscribers.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo) is handled by
|
||||
* the requireGitRepoOnly middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { pushStash } from '../../../services/stash-service.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
export function createStashPushHandler() {
|
||||
export function createStashPushHandler(events: EventEmitter) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, message, files } = req.body as {
|
||||
@@ -32,54 +33,47 @@ export function createStashPushHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for any changes to stash
|
||||
const { stdout: status } = await execFileAsync('git', ['status', '--porcelain'], {
|
||||
cwd: worktreePath,
|
||||
// Emit start event so the frontend can observe progress
|
||||
events.emit('stash:start', {
|
||||
worktreePath,
|
||||
operation: 'push',
|
||||
});
|
||||
|
||||
if (!status.trim()) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
stashed: false,
|
||||
message: 'No changes to stash',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
// Delegate all Git work to the service
|
||||
const result = await pushStash(worktreePath, { message, files });
|
||||
|
||||
// Build stash push command args
|
||||
const args = ['stash', 'push', '--include-untracked'];
|
||||
if (message && message.trim()) {
|
||||
args.push('-m', message.trim());
|
||||
}
|
||||
// Emit progress with stash result
|
||||
events.emit('stash:progress', {
|
||||
worktreePath,
|
||||
operation: 'push',
|
||||
stashed: result.stashed,
|
||||
branch: result.branch,
|
||||
});
|
||||
|
||||
// If specific files are provided, add them as pathspecs after '--'
|
||||
if (files && files.length > 0) {
|
||||
args.push('--');
|
||||
args.push(...files);
|
||||
}
|
||||
|
||||
// Execute stash push
|
||||
await execFileAsync('git', args, { cwd: worktreePath });
|
||||
|
||||
// Get current branch name
|
||||
const { stdout: branchOutput } = await execFileAsync(
|
||||
'git',
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const branchName = branchOutput.trim();
|
||||
// Emit success event
|
||||
events.emit('stash:success', {
|
||||
worktreePath,
|
||||
operation: 'push',
|
||||
stashed: result.stashed,
|
||||
branch: result.branch,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
stashed: true,
|
||||
branch: branchName,
|
||||
message: message?.trim() || `WIP on ${branchName}`,
|
||||
stashed: result.stashed,
|
||||
branch: result.branch,
|
||||
message: result.message,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
// Emit error event so the frontend can react
|
||||
events.emit('stash:failure', {
|
||||
worktreePath: req.body?.worktreePath,
|
||||
operation: 'push',
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
|
||||
logError(error, 'Stash push failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
|
||||
@@ -11,152 +11,19 @@
|
||||
*
|
||||
* Also fetches the latest remote refs after switching.
|
||||
*
|
||||
* Git business logic is delegated to worktree-branch-service.ts.
|
||||
* Events are emitted at key lifecycle points for WebSocket subscribers.
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidWorktree middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { getErrorMessage, logError, isValidBranchName } from '../common.js';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { performSwitchBranch } from '../../../services/worktree-branch-service.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
function isExcludedWorktreeLine(line: string): boolean {
|
||||
return line.includes('.worktrees/') || line.endsWith('.worktrees');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if there are any changes at all (including untracked) that should be stashed
|
||||
*/
|
||||
async function hasAnyChanges(cwd: string): Promise<boolean> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync('git', ['status', '--porcelain'], { cwd });
|
||||
const lines = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => {
|
||||
if (!line.trim()) return false;
|
||||
if (isExcludedWorktreeLine(line)) return false;
|
||||
return true;
|
||||
});
|
||||
return lines.length > 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stash all local changes (including untracked files)
|
||||
* Returns true if a stash was created, false if there was nothing to stash
|
||||
*/
|
||||
async function stashChanges(cwd: string, message: string): Promise<boolean> {
|
||||
try {
|
||||
// Get stash count before
|
||||
const { stdout: beforeCount } = await execFileAsync('git', ['stash', 'list'], { cwd });
|
||||
const countBefore = beforeCount
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((l) => l.trim()).length;
|
||||
|
||||
// Stash including untracked files
|
||||
await execFileAsync('git', ['stash', 'push', '--include-untracked', '-m', message], { cwd });
|
||||
|
||||
// Get stash count after to verify something was stashed
|
||||
const { stdout: afterCount } = await execFileAsync('git', ['stash', 'list'], { cwd });
|
||||
const countAfter = afterCount
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((l) => l.trim()).length;
|
||||
|
||||
return countAfter > countBefore;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pop the most recent stash entry
|
||||
* Returns an object indicating success and whether there were conflicts
|
||||
*/
|
||||
async function popStash(
|
||||
cwd: string
|
||||
): Promise<{ success: boolean; hasConflicts: boolean; error?: string }> {
|
||||
try {
|
||||
const { stdout, stderr } = await execFileAsync('git', ['stash', 'pop'], { cwd });
|
||||
const output = `${stdout}\n${stderr}`;
|
||||
// Check for conflict markers in the output
|
||||
if (output.includes('CONFLICT') || output.includes('Merge conflict')) {
|
||||
return { success: false, hasConflicts: true };
|
||||
}
|
||||
return { success: true, hasConflicts: false };
|
||||
} catch (error) {
|
||||
const errorMsg = getErrorMessage(error);
|
||||
if (errorMsg.includes('CONFLICT') || errorMsg.includes('Merge conflict')) {
|
||||
return { success: false, hasConflicts: true, error: errorMsg };
|
||||
}
|
||||
return { success: false, hasConflicts: false, error: errorMsg };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch latest from all remotes (silently, with timeout)
|
||||
*/
|
||||
async function fetchRemotes(cwd: string): Promise<void> {
|
||||
try {
|
||||
await execFileAsync('git', ['fetch', '--all', '--quiet'], {
|
||||
cwd,
|
||||
timeout: 15000, // 15 second timeout
|
||||
});
|
||||
} catch {
|
||||
// Ignore fetch errors - we may be offline
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a remote branch name like "origin/feature-branch" into its parts
|
||||
*/
|
||||
function parseRemoteBranch(branchName: string): { remote: string; branch: string } | null {
|
||||
const slashIndex = branchName.indexOf('/');
|
||||
if (slashIndex === -1) return null;
|
||||
return {
|
||||
remote: branchName.substring(0, slashIndex),
|
||||
branch: branchName.substring(slashIndex + 1),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a branch name refers to a remote branch
|
||||
*/
|
||||
async function isRemoteBranch(cwd: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync('git', ['branch', '-r', '--format=%(refname:short)'], {
|
||||
cwd,
|
||||
});
|
||||
const remoteBranches = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.map((b) => b.trim().replace(/^['"]|['"]$/g, ''))
|
||||
.filter((b) => b);
|
||||
return remoteBranches.includes(branchName);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a local branch already exists
|
||||
*/
|
||||
async function localBranchExists(cwd: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
await execFileAsync('git', ['rev-parse', '--verify', `refs/heads/${branchName}`], { cwd });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function createSwitchBranchHandler() {
|
||||
export function createSwitchBranchHandler(events?: EventEmitter) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, branchName } = req.body as {
|
||||
@@ -180,186 +47,58 @@ export function createSwitchBranchHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get current branch
|
||||
const { stdout: currentBranchOutput } = await execFileAsync(
|
||||
'git',
|
||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const previousBranch = currentBranchOutput.trim();
|
||||
|
||||
// Determine the actual target branch name for checkout
|
||||
let targetBranch = branchName;
|
||||
let isRemote = false;
|
||||
|
||||
// Check if this is a remote branch (e.g., "origin/feature-branch")
|
||||
let parsedRemote: { remote: string; branch: string } | null = null;
|
||||
if (await isRemoteBranch(worktreePath, branchName)) {
|
||||
isRemote = true;
|
||||
parsedRemote = parseRemoteBranch(branchName);
|
||||
if (parsedRemote) {
|
||||
targetBranch = parsedRemote.branch;
|
||||
} else {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Failed to parse remote branch name '${branchName}'`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (previousBranch === targetBranch) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
previousBranch,
|
||||
currentBranch: targetBranch,
|
||||
message: `Already on branch '${targetBranch}'`,
|
||||
},
|
||||
// Validate branch name using shared allowlist to prevent Git option injection
|
||||
if (!isValidBranchName(branchName)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid branch name',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if target branch exists (locally or as remote ref)
|
||||
if (!isRemote) {
|
||||
try {
|
||||
await execFileAsync('git', ['rev-parse', '--verify', branchName], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
} catch {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Branch '${branchName}' does not exist`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
// Execute the branch switch via the service
|
||||
const result = await performSwitchBranch(worktreePath, branchName, events);
|
||||
|
||||
// Map service result to HTTP response
|
||||
if (!result.success) {
|
||||
// Determine status code based on error type
|
||||
const statusCode = isBranchNotFoundError(result.error) ? 400 : 500;
|
||||
res.status(statusCode).json({
|
||||
success: false,
|
||||
error: result.error,
|
||||
...(result.stashPopConflicts !== undefined && {
|
||||
stashPopConflicts: result.stashPopConflicts,
|
||||
}),
|
||||
...(result.stashPopConflictMessage && {
|
||||
stashPopConflictMessage: result.stashPopConflictMessage,
|
||||
}),
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Stash local changes if any exist
|
||||
const hadChanges = await hasAnyChanges(worktreePath);
|
||||
let didStash = false;
|
||||
|
||||
if (hadChanges) {
|
||||
const stashMessage = `automaker-branch-switch: ${previousBranch} → ${targetBranch}`;
|
||||
didStash = await stashChanges(worktreePath, stashMessage);
|
||||
}
|
||||
|
||||
try {
|
||||
// Switch to the target branch
|
||||
if (isRemote) {
|
||||
if (!parsedRemote) {
|
||||
throw new Error(`Failed to parse remote branch name '${branchName}'`);
|
||||
}
|
||||
if (await localBranchExists(worktreePath, parsedRemote.branch)) {
|
||||
// Local branch exists, just checkout
|
||||
await execFileAsync('git', ['checkout', parsedRemote.branch], { cwd: worktreePath });
|
||||
} else {
|
||||
// Create local tracking branch from remote
|
||||
await execFileAsync('git', ['checkout', '-b', parsedRemote.branch, branchName], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
await execFileAsync('git', ['checkout', targetBranch], { cwd: worktreePath });
|
||||
}
|
||||
|
||||
// Fetch latest from remotes after switching
|
||||
await fetchRemotes(worktreePath);
|
||||
|
||||
// Reapply stashed changes if we stashed earlier
|
||||
let hasConflicts = false;
|
||||
let conflictMessage = '';
|
||||
let stashReapplied = false;
|
||||
|
||||
if (didStash) {
|
||||
const popResult = await popStash(worktreePath);
|
||||
hasConflicts = popResult.hasConflicts;
|
||||
if (popResult.hasConflicts) {
|
||||
conflictMessage = `Switched to branch '${targetBranch}' but merge conflicts occurred when reapplying your local changes. Please resolve the conflicts.`;
|
||||
} else if (!popResult.success) {
|
||||
// Stash pop failed for a non-conflict reason - the stash is still there
|
||||
conflictMessage = `Switched to branch '${targetBranch}' but failed to reapply stashed changes: ${popResult.error}. Your changes are still in the stash.`;
|
||||
} else {
|
||||
stashReapplied = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasConflicts) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
previousBranch,
|
||||
currentBranch: targetBranch,
|
||||
message: conflictMessage,
|
||||
hasConflicts: true,
|
||||
stashedChanges: true,
|
||||
},
|
||||
});
|
||||
} else if (didStash && !stashReapplied) {
|
||||
// Stash pop failed for a non-conflict reason — stash is still present
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
previousBranch,
|
||||
currentBranch: targetBranch,
|
||||
message: conflictMessage,
|
||||
hasConflicts: false,
|
||||
stashedChanges: true,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
const stashNote = stashReapplied ? ' (local changes stashed and reapplied)' : '';
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
previousBranch,
|
||||
currentBranch: targetBranch,
|
||||
message: `Switched to branch '${targetBranch}'${stashNote}`,
|
||||
hasConflicts: false,
|
||||
stashedChanges: stashReapplied,
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (checkoutError) {
|
||||
// If checkout failed and we stashed, try to restore the stash
|
||||
if (didStash) {
|
||||
const popResult = await popStash(worktreePath);
|
||||
if (popResult.hasConflicts) {
|
||||
// Stash pop itself produced merge conflicts — the working tree is now in a
|
||||
// conflicted state even though the checkout failed. Surface this clearly so
|
||||
// the caller can prompt the user (or AI) to resolve conflicts rather than
|
||||
// simply retrying the branch switch.
|
||||
const checkoutErrorMsg = getErrorMessage(checkoutError);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: checkoutErrorMsg,
|
||||
stashPopConflicts: true,
|
||||
stashPopConflictMessage:
|
||||
'Stash pop resulted in conflicts: your stashed changes were partially reapplied ' +
|
||||
'but produced merge conflicts. Please resolve the conflicts before retrying the branch switch.',
|
||||
});
|
||||
return;
|
||||
} else if (!popResult.success) {
|
||||
// Stash pop failed for a non-conflict reason; the stash entry is still intact.
|
||||
// Include this detail alongside the original checkout error.
|
||||
const checkoutErrorMsg = getErrorMessage(checkoutError);
|
||||
const combinedMessage =
|
||||
`${checkoutErrorMsg}. Additionally, restoring your stashed changes failed: ` +
|
||||
`${popResult.error ?? 'unknown error'} — your changes are still saved in the stash.`;
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: combinedMessage,
|
||||
stashPopConflicts: false,
|
||||
});
|
||||
return;
|
||||
}
|
||||
// popResult.success === true: stash was cleanly restored, re-throw the checkout error
|
||||
}
|
||||
throw checkoutError;
|
||||
}
|
||||
res.json({
|
||||
success: true,
|
||||
result: result.result,
|
||||
});
|
||||
} catch (error) {
|
||||
events?.emit('switch:error', {
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
|
||||
logError(error, 'Switch branch failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether an error message represents a client error (400)
|
||||
* vs a server error (500).
|
||||
*
|
||||
* Client errors are validation issues like non-existent branches or
|
||||
* unparseable remote branch names.
|
||||
*/
|
||||
function isBranchNotFoundError(error?: string): boolean {
|
||||
if (!error) return false;
|
||||
return error.includes('does not exist') || error.includes('Failed to parse remote branch name');
|
||||
}
|
||||
|
||||
@@ -17,10 +17,11 @@ import { promisify } from 'util';
|
||||
import type { Feature, PlanningMode, ThinkingLevel } from '@automaker/types';
|
||||
import { DEFAULT_MAX_CONCURRENCY, stripProviderPrefix } from '@automaker/types';
|
||||
import { createLogger, loadContextFiles, classifyError } from '@automaker/utils';
|
||||
import { getFeatureDir, spawnProcess } from '@automaker/platform';
|
||||
import { getFeatureDir } from '@automaker/platform';
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import { validateWorkingDirectory } from '../../lib/sdk-options.js';
|
||||
import { getPromptCustomization, getProviderByModelId } from '../../lib/settings-helpers.js';
|
||||
import { execGitCommand } from '../../lib/git.js';
|
||||
import { TypedEventBus } from '../typed-event-bus.js';
|
||||
import { ConcurrencyManager } from '../concurrency-manager.js';
|
||||
import { WorktreeResolver } from '../worktree-resolver.js';
|
||||
@@ -49,24 +50,6 @@ import type {
|
||||
const execAsync = promisify(exec);
|
||||
const logger = createLogger('AutoModeServiceFacade');
|
||||
|
||||
/**
|
||||
* Execute git command with array arguments to prevent command injection.
|
||||
*/
|
||||
async function execGitCommand(args: string[], cwd: string): Promise<string> {
|
||||
const result = await spawnProcess({
|
||||
command: 'git',
|
||||
args,
|
||||
cwd,
|
||||
});
|
||||
|
||||
if (result.exitCode === 0) {
|
||||
return result.stdout;
|
||||
} else {
|
||||
const errorMessage = result.stderr || `Git command failed with code ${result.exitCode}`;
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* AutoModeServiceFacade provides a clean interface for auto-mode functionality.
|
||||
*
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
* invokes this service, streams lifecycle events, and sends the response.
|
||||
*/
|
||||
|
||||
import { execGitCommand } from '../routes/worktree/common.js';
|
||||
import { execGitCommand } from '../lib/git.js';
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
@@ -68,8 +68,18 @@ export async function getBranchCommitLog(
|
||||
// parent), so we deduplicate by hash below and merge their file lists.
|
||||
// We over-fetch (2× the limit) to compensate for -m duplicating merge
|
||||
// commit entries, then trim the result to the requested limit.
|
||||
const COMMIT_SEP = '---COMMIT---';
|
||||
const META_END = '---META_END---';
|
||||
// Use ASCII control characters as record separators – these cannot appear in
|
||||
// git commit messages, so these delimiters are safe regardless of commit
|
||||
// body content. %x00 and %x01 in git's format string emit literal NUL /
|
||||
// SOH bytes respectively.
|
||||
//
|
||||
// COMMIT_SEP (\x00) – marks the start of each commit record.
|
||||
// META_END (\x01) – separates commit metadata from the --name-only file list.
|
||||
//
|
||||
// Full per-commit layout emitted by git:
|
||||
// \x00\n<hash>\n<shorthash>\n...\n<subject>\n<body>\x01<files...>
|
||||
const COMMIT_SEP = '\x00';
|
||||
const META_END = '\x01';
|
||||
const fetchLimit = commitLimit * 2;
|
||||
|
||||
const logOutput = await execGitCommand(
|
||||
@@ -79,13 +89,13 @@ export async function getBranchCommitLog(
|
||||
`--max-count=${fetchLimit}`,
|
||||
'-m',
|
||||
'--name-only',
|
||||
`--format=${COMMIT_SEP}%n%H%n%h%n%an%n%ae%n%aI%n%s%n%b${META_END}`,
|
||||
`--format=%x00%n%H%n%h%n%an%n%ae%n%aI%n%s%n%b%x01`,
|
||||
],
|
||||
worktreePath
|
||||
);
|
||||
|
||||
// Split output into per-commit blocks and drop the empty first chunk
|
||||
// (the output starts with ---COMMIT---).
|
||||
// (the output starts with a NUL commit separator).
|
||||
const commitBlocks = logOutput.split(COMMIT_SEP).filter((block) => block.trim());
|
||||
|
||||
// Use a Map to deduplicate merge commit entries (which appear once per
|
||||
@@ -96,7 +106,7 @@ export async function getBranchCommitLog(
|
||||
const metaEndIdx = block.indexOf(META_END);
|
||||
if (metaEndIdx === -1) continue; // malformed block, skip
|
||||
|
||||
// --- Parse metadata (everything before ---META_END---) ---
|
||||
// --- Parse metadata (everything before the META_END delimiter) ---
|
||||
const metaRaw = block.substring(0, metaEndIdx);
|
||||
const metaLines = metaRaw.split('\n');
|
||||
|
||||
@@ -108,14 +118,15 @@ export async function getBranchCommitLog(
|
||||
if (fields.length < 6) continue; // need at least hash..subject
|
||||
|
||||
const hash = fields[0].trim();
|
||||
const shortHash = fields[1].trim();
|
||||
const author = fields[2].trim();
|
||||
const authorEmail = fields[3].trim();
|
||||
const date = fields[4].trim();
|
||||
const subject = fields[5].trim();
|
||||
if (!hash) continue; // defensive: skip if hash is empty
|
||||
const shortHash = fields[1]?.trim() ?? '';
|
||||
const author = fields[2]?.trim() ?? '';
|
||||
const authorEmail = fields[3]?.trim() ?? '';
|
||||
const date = fields[4]?.trim() ?? '';
|
||||
const subject = fields[5]?.trim() ?? '';
|
||||
const body = fields.slice(6).join('\n').trim();
|
||||
|
||||
// --- Parse file list (everything after ---META_END---) ---
|
||||
// --- Parse file list (everything after the META_END delimiter) ---
|
||||
const filesRaw = block.substring(metaEndIdx + META_END.length);
|
||||
const blockFiles = filesRaw
|
||||
.trim()
|
||||
|
||||
@@ -7,7 +7,8 @@
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { execGitCommand } from '../routes/worktree/common.js';
|
||||
import { execGitCommand, getCurrentBranch } from '../lib/git.js';
|
||||
import { type EventEmitter } from '../lib/events.js';
|
||||
|
||||
const logger = createLogger('CherryPickService');
|
||||
|
||||
@@ -39,16 +40,19 @@ export interface CherryPickResult {
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @param commitHashes - Array of commit hashes to verify
|
||||
* @param emitter - Optional event emitter for lifecycle events
|
||||
* @returns The first invalid commit hash, or null if all are valid
|
||||
*/
|
||||
export async function verifyCommits(
|
||||
worktreePath: string,
|
||||
commitHashes: string[]
|
||||
commitHashes: string[],
|
||||
emitter?: EventEmitter
|
||||
): Promise<string | null> {
|
||||
for (const hash of commitHashes) {
|
||||
try {
|
||||
await execGitCommand(['rev-parse', '--verify', hash], worktreePath);
|
||||
} catch {
|
||||
emitter?.emit('cherry-pick:verify-failed', { worktreePath, hash });
|
||||
return hash;
|
||||
}
|
||||
}
|
||||
@@ -61,12 +65,14 @@ export async function verifyCommits(
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @param commitHashes - Array of commit hashes to cherry-pick (in order)
|
||||
* @param options - Cherry-pick options (e.g., noCommit)
|
||||
* @param emitter - Optional event emitter for lifecycle events
|
||||
* @returns CherryPickResult with success/failure information
|
||||
*/
|
||||
export async function runCherryPick(
|
||||
worktreePath: string,
|
||||
commitHashes: string[],
|
||||
options?: CherryPickOptions
|
||||
options?: CherryPickOptions,
|
||||
emitter?: EventEmitter
|
||||
): Promise<CherryPickResult> {
|
||||
const args = ['cherry-pick'];
|
||||
if (options?.noCommit) {
|
||||
@@ -74,28 +80,34 @@ export async function runCherryPick(
|
||||
}
|
||||
args.push(...commitHashes);
|
||||
|
||||
emitter?.emit('cherry-pick:started', { worktreePath, commitHashes });
|
||||
|
||||
try {
|
||||
await execGitCommand(args, worktreePath);
|
||||
|
||||
const branch = await getCurrentBranch(worktreePath);
|
||||
|
||||
if (options?.noCommit) {
|
||||
return {
|
||||
const result: CherryPickResult = {
|
||||
success: true,
|
||||
cherryPicked: false,
|
||||
commitHashes,
|
||||
branch,
|
||||
message: `Staged changes from ${commitHashes.length} commit(s); no commit created due to --no-commit`,
|
||||
};
|
||||
emitter?.emit('cherry-pick:success', { worktreePath, commitHashes, branch });
|
||||
return result;
|
||||
}
|
||||
|
||||
return {
|
||||
const result: CherryPickResult = {
|
||||
success: true,
|
||||
cherryPicked: true,
|
||||
commitHashes,
|
||||
branch,
|
||||
message: `Successfully cherry-picked ${commitHashes.length} commit(s)`,
|
||||
};
|
||||
emitter?.emit('cherry-pick:success', { worktreePath, commitHashes, branch });
|
||||
return result;
|
||||
} catch (cherryPickError: unknown) {
|
||||
// Check if this is a cherry-pick conflict
|
||||
const err = cherryPickError as { stdout?: string; stderr?: string; message?: string };
|
||||
@@ -107,7 +119,7 @@ export async function runCherryPick(
|
||||
|
||||
if (hasConflicts) {
|
||||
// Abort the cherry-pick to leave the repo in a clean state
|
||||
const aborted = await abortCherryPick(worktreePath);
|
||||
const aborted = await abortCherryPick(worktreePath, emitter);
|
||||
|
||||
if (!aborted) {
|
||||
logger.error(
|
||||
@@ -116,6 +128,14 @@ export async function runCherryPick(
|
||||
);
|
||||
}
|
||||
|
||||
emitter?.emit('cherry-pick:conflict', {
|
||||
worktreePath,
|
||||
commitHashes,
|
||||
aborted,
|
||||
stdout: err.stdout,
|
||||
stderr: err.stderr,
|
||||
});
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: aborted
|
||||
@@ -135,25 +155,25 @@ export async function runCherryPick(
|
||||
* Abort an in-progress cherry-pick operation.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @param emitter - Optional event emitter for lifecycle events
|
||||
* @returns true if abort succeeded, false if it failed (logged as warning)
|
||||
*/
|
||||
export async function abortCherryPick(worktreePath: string): Promise<boolean> {
|
||||
export async function abortCherryPick(
|
||||
worktreePath: string,
|
||||
emitter?: EventEmitter
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
await execGitCommand(['cherry-pick', '--abort'], worktreePath);
|
||||
emitter?.emit('cherry-pick:abort', { worktreePath, aborted: true });
|
||||
return true;
|
||||
} catch {
|
||||
} catch (err: unknown) {
|
||||
const error = err as { message?: string };
|
||||
logger.warn('Failed to abort cherry-pick after conflict');
|
||||
emitter?.emit('cherry-pick:abort', {
|
||||
worktreePath,
|
||||
aborted: false,
|
||||
error: error.message ?? 'Unknown error during cherry-pick abort',
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current branch name for the worktree.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns The current branch name
|
||||
*/
|
||||
export async function getCurrentBranch(worktreePath: string): Promise<string> {
|
||||
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
|
||||
return branchOutput.trim();
|
||||
}
|
||||
|
||||
161
apps/server/src/services/commit-log-service.ts
Normal file
161
apps/server/src/services/commit-log-service.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
/**
|
||||
* Service for fetching commit log data from a worktree.
|
||||
*
|
||||
* Extracts the heavy Git command execution and parsing logic from the
|
||||
* commit-log route handler so the handler only validates input,
|
||||
* invokes this service, streams lifecycle events, and sends the response.
|
||||
*
|
||||
* Follows the same approach as branch-commit-log-service: a single
|
||||
* `git log --name-only` call with custom separators to fetch both
|
||||
* commit metadata and file lists, avoiding N+1 git invocations.
|
||||
*/
|
||||
|
||||
import { execGitCommand } from '../lib/git.js';
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
/** A single commit entry produced by getCommitLog. */
export interface CommitLogEntry {
  /** Full commit SHA (git %H). */
  hash: string;
  /** Abbreviated commit SHA (git %h). */
  shortHash: string;
  /** Author name (git %an). */
  author: string;
  /** Author email (git %ae). */
  authorEmail: string;
  /** Author date in strict ISO 8601 format (git %aI). */
  date: string;
  /** First line of the commit message (git %s). */
  subject: string;
  /** Remaining commit message body (git %b), trimmed. */
  body: string;
  /** Deduplicated paths touched by the commit (merge commits combine per-parent lists). */
  files: string[];
}

/** Structured result of a commit-log query for a worktree. */
export interface CommitLogResult {
  /** Current branch name of the worktree (from git rev-parse --abbrev-ref HEAD). */
  branch: string;
  /** Commits, newest first, trimmed to the requested limit. */
  commits: CommitLogEntry[];
  /** Number of commits returned (equals commits.length). */
  total: number;
}
|
||||
|
||||
// ============================================================================
|
||||
// Service
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Fetch the commit log for a worktree (HEAD).
 *
 * Runs a single `git log --name-only` invocation plus `git rev-parse`
 * inside the given worktree path and returns a structured result.
 *
 * @param worktreePath - Absolute path to the worktree / repository
 * @param limit - Maximum number of commits to return (clamped 1-100)
 */
export async function getCommitLog(worktreePath: string, limit: number): Promise<CommitLogResult> {
  // Clamp limit to a reasonable range (non-finite input falls back to 20).
  const parsedLimit = Number(limit);
  const commitLimit = Math.min(Math.max(1, Number.isFinite(parsedLimit) ? parsedLimit : 20), 100);

  // Use custom separators to parse both metadata and file lists from
  // a single git log invocation (same approach as branch-commit-log-service).
  //
  // -m causes merge commits to be diffed against each parent so all
  // files touched by the merge are listed (without -m, --name-only
  // produces no file output for merge commits because they have 2+ parents).
  // This means merge commits appear multiple times in the output (once per
  // parent), so we deduplicate by hash below and merge their file lists.
  // We over-fetch (2x the limit) to compensate for -m duplicating merge
  // commit entries, then trim the result to the requested limit.
  // NOTE(review): 2x assumes two-parent merges; an octopus merge (3+ parents)
  // could in principle still under-fill the final list — confirm acceptable.
  // Use ASCII control characters as record separators – these cannot appear in
  // git commit messages, so these delimiters are safe regardless of commit
  // body content. %x00 and %x01 in git's format string emit literal NUL /
  // SOH bytes respectively.
  //
  // COMMIT_SEP (\x00) – marks the start of each commit record.
  // META_END (\x01) – separates commit metadata from the --name-only file list.
  //
  // Full per-commit layout emitted by git:
  //   \x00\n<hash>\n<shorthash>\n...\n<subject>\n<body>\x01<files...>
  const COMMIT_SEP = '\x00';
  const META_END = '\x01';
  const fetchLimit = commitLimit * 2;

  const logOutput = await execGitCommand(
    [
      'log',
      `--max-count=${fetchLimit}`,
      '-m',
      '--name-only',
      `--format=%x00%n%H%n%h%n%an%n%ae%n%aI%n%s%n%b%x01`,
    ],
    worktreePath
  );

  // Split output into per-commit blocks and drop the empty first chunk
  // (the output starts with a NUL commit separator).
  const commitBlocks = logOutput.split(COMMIT_SEP).filter((block) => block.trim());

  // Use a Map to deduplicate merge commit entries (which appear once per
  // parent when -m is used) while preserving insertion order.
  const commitMap = new Map<string, CommitLogEntry>();

  for (const block of commitBlocks) {
    const metaEndIdx = block.indexOf(META_END);
    if (metaEndIdx === -1) continue; // malformed block, skip

    // --- Parse metadata (everything before the META_END delimiter) ---
    const metaRaw = block.substring(0, metaEndIdx);
    const metaLines = metaRaw.split('\n');

    // The first line may be empty (newline right after COMMIT_SEP), skip it
    const nonEmptyStart = metaLines.findIndex((l) => l.trim() !== '');
    if (nonEmptyStart === -1) continue;

    const fields = metaLines.slice(nonEmptyStart);
    if (fields.length < 6) continue; // need at least hash..subject

    const hash = fields[0].trim();
    if (!hash) continue; // defensive: skip if hash is empty
    const shortHash = fields[1]?.trim() ?? '';
    const author = fields[2]?.trim() ?? '';
    const authorEmail = fields[3]?.trim() ?? '';
    const date = fields[4]?.trim() ?? '';
    const subject = fields[5]?.trim() ?? '';
    // Everything after the 6 fixed fields is the (possibly multi-line) body.
    const body = fields.slice(6).join('\n').trim();

    // --- Parse file list (everything after the META_END delimiter) ---
    const filesRaw = block.substring(metaEndIdx + META_END.length);
    const blockFiles = filesRaw
      .trim()
      .split('\n')
      .filter((f) => f.trim());

    // Merge file lists for duplicate entries (merge commits with -m)
    const existing = commitMap.get(hash);
    if (existing) {
      // Add new files to the existing entry's file set
      const fileSet = new Set(existing.files);
      for (const f of blockFiles) fileSet.add(f);
      existing.files = [...fileSet];
    } else {
      commitMap.set(hash, {
        hash,
        shortHash,
        author,
        authorEmail,
        date,
        subject,
        body,
        files: [...new Set(blockFiles)],
      });
    }
  }

  // Trim to the requested limit (we over-fetched to account for -m duplicates)
  const commits = [...commitMap.values()].slice(0, commitLimit);

  // Get current branch name
  const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
  const branch = branchOutput.trim();

  return {
    branch,
    commits,
    total: commits.length,
  };
}
|
||||
@@ -5,7 +5,8 @@
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { spawnProcess } from '@automaker/platform';
|
||||
import { createEventEmitter } from '../lib/events';
|
||||
import { execGitCommand } from '../lib/git.js';
|
||||
const logger = createLogger('MergeService');
|
||||
|
||||
export interface MergeOptions {
|
||||
@@ -27,33 +28,14 @@ export interface MergeServiceResult {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute git command with array arguments to prevent command injection.
|
||||
*/
|
||||
async function execGitCommand(args: string[], cwd: string): Promise<string> {
|
||||
const result = await spawnProcess({
|
||||
command: 'git',
|
||||
args,
|
||||
cwd,
|
||||
});
|
||||
|
||||
if (result.exitCode === 0) {
|
||||
return result.stdout;
|
||||
} else {
|
||||
const errorMessage =
|
||||
result.stderr || result.stdout || `Git command failed with code ${result.exitCode}`;
|
||||
throw Object.assign(new Error(errorMessage), {
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate branch name to prevent command injection.
|
||||
* The first character must not be '-' to prevent git argument injection
|
||||
* via names like "-flag" or "--option".
|
||||
*/
|
||||
function isValidBranchName(name: string): boolean {
|
||||
return /^[a-zA-Z0-9._\-/]+$/.test(name) && name.length < 250;
|
||||
// First char must be alphanumeric, dot, underscore, or slash (not dash)
|
||||
return /^[a-zA-Z0-9._/][a-zA-Z0-9._\-/]*$/.test(name) && name.length < 250;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -72,6 +54,8 @@ export async function performMerge(
|
||||
targetBranch: string = 'main',
|
||||
options?: MergeOptions
|
||||
): Promise<MergeServiceResult> {
|
||||
const emitter = createEventEmitter();
|
||||
|
||||
if (!projectPath || !branchName || !worktreePath) {
|
||||
return {
|
||||
success: false,
|
||||
@@ -115,6 +99,9 @@ export async function performMerge(
|
||||
};
|
||||
}
|
||||
|
||||
// Emit merge:start after validating inputs
|
||||
emitter.emit('merge:start', { branchName, targetBranch: mergeTo, worktreePath });
|
||||
|
||||
// Merge the feature branch into the target branch (using safe array-based commands)
|
||||
const mergeMessage = options?.message || `Merge ${branchName} into ${mergeTo}`;
|
||||
const mergeArgs = options?.squash
|
||||
@@ -131,7 +118,7 @@ export async function performMerge(
|
||||
|
||||
if (hasConflicts) {
|
||||
// Get list of conflicted files
|
||||
let conflictFiles: string[] = [];
|
||||
let conflictFiles: string[] | undefined;
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
@@ -142,9 +129,13 @@ export async function performMerge(
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we can't get the file list, that's okay - continue without it
|
||||
// If we can't get the file list, leave conflictFiles undefined so callers
|
||||
// can distinguish "no conflicts" (empty array) from "unknown due to diff failure" (undefined)
|
||||
}
|
||||
|
||||
// Emit merge:conflict event with conflict details
|
||||
emitter.emit('merge:conflict', { branchName, targetBranch: mergeTo, conflictFiles });
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. Please resolve conflicts manually.`,
|
||||
@@ -153,6 +144,13 @@ export async function performMerge(
|
||||
};
|
||||
}
|
||||
|
||||
// Emit merge:error for non-conflict errors before re-throwing
|
||||
emitter.emit('merge:error', {
|
||||
branchName,
|
||||
targetBranch: mergeTo,
|
||||
error: err.message || String(mergeError),
|
||||
});
|
||||
|
||||
// Re-throw non-conflict errors
|
||||
throw mergeError;
|
||||
}
|
||||
@@ -197,6 +195,13 @@ export async function performMerge(
|
||||
}
|
||||
}
|
||||
|
||||
// Emit merge:success with merged branch, target branch, and deletion info
|
||||
emitter.emit('merge:success', {
|
||||
mergedBranch: branchName,
|
||||
targetBranch: mergeTo,
|
||||
deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
mergedBranch: branchName,
|
||||
|
||||
457
apps/server/src/services/pull-service.ts
Normal file
457
apps/server/src/services/pull-service.ts
Normal file
@@ -0,0 +1,457 @@
|
||||
/**
|
||||
* PullService - Pull git operations without HTTP
|
||||
*
|
||||
* Encapsulates the full git pull workflow including:
|
||||
* - Branch name and detached HEAD detection
|
||||
* - Fetching from remote
|
||||
* - Status parsing and local change detection
|
||||
* - Stash push/pop logic
|
||||
* - Upstream verification (rev-parse / --verify)
|
||||
* - Pull execution and conflict detection
|
||||
* - Conflict file list collection
|
||||
*
|
||||
* Extracted from the worktree pull route to improve organization
|
||||
* and testability. Follows the same pattern as rebase-service.ts
|
||||
* and cherry-pick-service.ts.
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { execGitCommand } from '../lib/git.js';
|
||||
import { getErrorMessage } from '../routes/worktree/common.js';
|
||||
|
||||
const logger = createLogger('PullService');
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
export interface PullOptions {
  /** Remote name to pull from (defaults to 'origin') */
  remote?: string;
  /** When true, automatically stash local changes before pulling and reapply after */
  stashIfNeeded?: boolean;
}

/** Detailed outcome of a pull workflow run by performPull. */
export interface PullResult {
  /** Overall workflow outcome; may be true even when conflicts occurred (see hasConflicts). */
  success: boolean;
  /** Human-readable failure reason when success is false. */
  error?: string;
  /** Branch the pull operated on. */
  branch?: string;
  /** True when new commits were pulled (false when already up to date or pull was skipped). */
  pulled?: boolean;
  /** True when uncommitted local changes were detected before pulling. */
  hasLocalChanges?: boolean;
  /** Paths of locally changed files (parsed from `git status --porcelain`). */
  localChangedFiles?: string[];
  /** True when local changes were stashed before the pull. */
  stashed?: boolean;
  /** True when the stash was reapplied after the pull (possibly with conflicts). */
  stashRestored?: boolean;
  /** True when a stash could not be reapplied and remains in the stash list. */
  stashRecoveryFailed?: boolean;
  /** True when the pull or the stash reapplication produced merge conflicts. */
  hasConflicts?: boolean;
  /** Which step produced the conflicts. */
  conflictSource?: 'pull' | 'stash';
  /** Files with unresolved conflicts (from `git diff --name-only --diff-filter=U`). */
  conflictFiles?: string[];
  /** Human-readable status message for the caller. */
  message?: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Get the current branch name for the worktree.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns The current branch name (returns 'HEAD' for detached HEAD state)
|
||||
*/
|
||||
export async function getCurrentBranch(worktreePath: string): Promise<string> {
|
||||
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
|
||||
return branchOutput.trim();
|
||||
}
|
||||
|
||||
/**
 * Fetch the latest refs from a remote.
 *
 * @param worktreePath - Path to the git worktree
 * @param remote - Remote name (e.g. 'origin')
 * @throws if the underlying `git fetch` command fails (e.g. unreachable remote)
 */
export async function fetchRemote(worktreePath: string, remote: string): Promise<void> {
  await execGitCommand(['fetch', remote], worktreePath);
}
|
||||
|
||||
/**
|
||||
* Parse `git status --porcelain` output into a list of changed file paths.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns Object with hasLocalChanges flag and list of changed file paths
|
||||
*/
|
||||
export async function getLocalChanges(
|
||||
worktreePath: string
|
||||
): Promise<{ hasLocalChanges: boolean; localChangedFiles: string[] }> {
|
||||
const statusOutput = await execGitCommand(['status', '--porcelain'], worktreePath);
|
||||
const hasLocalChanges = statusOutput.trim().length > 0;
|
||||
|
||||
let localChangedFiles: string[] = [];
|
||||
if (hasLocalChanges) {
|
||||
localChangedFiles = statusOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => line.trim().length > 0)
|
||||
.map((line) => line.substring(3).trim());
|
||||
}
|
||||
|
||||
return { hasLocalChanges, localChangedFiles };
|
||||
}
|
||||
|
||||
/**
 * Stash local changes (including untracked files) with a descriptive message.
 *
 * @param worktreePath - Path to the git worktree
 * @param branchName - Current branch name (used in stash message)
 * @throws if the `git stash push` command fails
 */
export async function stashChanges(worktreePath: string, branchName: string): Promise<void> {
  // The fixed prefix makes automaker-created stashes identifiable in `git stash list`.
  const stashMessage = `automaker-pull-stash: Pre-pull stash on ${branchName}`;
  await execGitCommand(['stash', 'push', '--include-untracked', '-m', stashMessage], worktreePath);
}
|
||||
|
||||
/**
|
||||
* Pop the top stash entry.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns The stdout from stash pop
|
||||
*/
|
||||
export async function popStash(worktreePath: string): Promise<string> {
|
||||
return await execGitCommand(['stash', 'pop'], worktreePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to pop the stash, returning whether the pop succeeded.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns true if stash pop succeeded, false if it failed
|
||||
*/
|
||||
async function tryPopStash(worktreePath: string): Promise<boolean> {
|
||||
try {
|
||||
await execGitCommand(['stash', 'pop'], worktreePath);
|
||||
return true;
|
||||
} catch (stashPopError) {
|
||||
// Stash pop failed - leave it in stash list for manual recovery
|
||||
logger.error('Failed to reapply stash during error recovery', {
|
||||
worktreePath,
|
||||
error: getErrorMessage(stashPopError),
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the branch has an upstream tracking ref, or whether
|
||||
* the remote branch exists.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @param branchName - Current branch name
|
||||
* @param remote - Remote name
|
||||
* @returns true if upstream or remote branch exists
|
||||
*/
|
||||
export async function hasUpstreamOrRemoteBranch(
|
||||
worktreePath: string,
|
||||
branchName: string,
|
||||
remote: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
await execGitCommand(['rev-parse', '--abbrev-ref', `${branchName}@{upstream}`], worktreePath);
|
||||
return true;
|
||||
} catch {
|
||||
// No upstream tracking - check if the remote branch exists
|
||||
try {
|
||||
await execGitCommand(['rev-parse', '--verify', `${remote}/${branchName}`], worktreePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the list of files with unresolved merge conflicts.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns Array of file paths with conflicts
|
||||
*/
|
||||
export async function getConflictFiles(worktreePath: string): Promise<string[]> {
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
worktreePath
|
||||
);
|
||||
return diffOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether an error output string indicates a merge conflict.
|
||||
*/
|
||||
function isConflictError(errorOutput: string): boolean {
|
||||
return errorOutput.includes('CONFLICT') || errorOutput.includes('Automatic merge failed');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether an output string indicates a stash conflict.
|
||||
*/
|
||||
function isStashConflict(output: string): boolean {
|
||||
return output.includes('CONFLICT') || output.includes('Merge conflict');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Main Service Function
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Perform a full git pull workflow on the given worktree.
 *
 * The workflow:
 * 1. Get current branch name (detect detached HEAD)
 * 2. Fetch from remote
 * 3. Check for local changes
 * 4. If local changes and stashIfNeeded, stash them
 * 5. Verify upstream tracking or remote branch exists
 * 6. Execute `git pull`
 * 7. If stash was created and pull succeeded, reapply stash
 * 8. Detect and report conflicts from pull or stash reapplication
 *
 * @param worktreePath - Path to the git worktree
 * @param options - Pull options (remote, stashIfNeeded)
 * @returns PullResult with detailed status information
 */
export async function performPull(
  worktreePath: string,
  options?: PullOptions
): Promise<PullResult> {
  const targetRemote = options?.remote || 'origin';
  const stashIfNeeded = options?.stashIfNeeded ?? false;

  // 1. Get current branch name
  const branchName = await getCurrentBranch(worktreePath);

  // 2. Check for detached HEAD state (rev-parse --abbrev-ref yields 'HEAD' then)
  if (branchName === 'HEAD') {
    return {
      success: false,
      error: 'Cannot pull in detached HEAD state. Please checkout a branch first.',
    };
  }

  // 3. Fetch latest from remote
  try {
    await fetchRemote(worktreePath, targetRemote);
  } catch (fetchError) {
    return {
      success: false,
      error: `Failed to fetch from remote '${targetRemote}': ${getErrorMessage(fetchError)}`,
    };
  }

  // 4. Check for local changes
  const { hasLocalChanges, localChangedFiles } = await getLocalChanges(worktreePath);

  // 5. If there are local changes and stashIfNeeded is not requested, return info
  //    (success=true, pulled=false: the caller decides whether to retry with stashing)
  if (hasLocalChanges && !stashIfNeeded) {
    return {
      success: true,
      branch: branchName,
      pulled: false,
      hasLocalChanges: true,
      localChangedFiles,
      message:
        'Local changes detected. Use stashIfNeeded to automatically stash and reapply changes.',
    };
  }

  // 6. Stash local changes if needed
  let didStash = false;
  if (hasLocalChanges && stashIfNeeded) {
    try {
      await stashChanges(worktreePath, branchName);
      didStash = true;
    } catch (stashError) {
      return {
        success: false,
        error: `Failed to stash local changes: ${getErrorMessage(stashError)}`,
      };
    }
  }

  // 7. Verify upstream tracking or remote branch exists.
  //    If not, try to restore any stash we created before bailing out.
  const hasUpstream = await hasUpstreamOrRemoteBranch(worktreePath, branchName, targetRemote);
  if (!hasUpstream) {
    let stashRecoveryFailed = false;
    if (didStash) {
      const stashPopped = await tryPopStash(worktreePath);
      stashRecoveryFailed = !stashPopped;
    }
    return {
      success: false,
      error: `Branch '${branchName}' has no upstream branch on remote '${targetRemote}'. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}${stashRecoveryFailed ? ' Local changes remain stashed and need manual recovery (run: git stash pop).' : ''}`,
      stashRecoveryFailed: stashRecoveryFailed || undefined,
    };
  }

  // 8. Pull latest changes
  let pullConflict = false;
  let pullConflictFiles: string[] = [];
  try {
    const pullOutput = await execGitCommand(['pull', targetRemote, branchName], worktreePath);

    // NOTE(review): text match assumes English git output — confirm locale handling.
    const alreadyUpToDate = pullOutput.includes('Already up to date');

    // If no stash to reapply, return success
    if (!didStash) {
      return {
        success: true,
        branch: branchName,
        pulled: !alreadyUpToDate,
        hasLocalChanges: false,
        stashed: false,
        stashRestored: false,
        message: alreadyUpToDate ? 'Already up to date' : 'Pulled latest changes',
      };
    }
  } catch (pullError: unknown) {
    const err = pullError as { stderr?: string; stdout?: string; message?: string };
    const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`;

    if (isConflictError(errorOutput)) {
      // Conflict: fall through to step 9 with the conflicted file list.
      pullConflict = true;
      pullConflictFiles = await getConflictFiles(worktreePath);
    } else {
      // Non-conflict pull error: restore the stash (best effort) before failing.
      let stashRecoveryFailed = false;
      if (didStash) {
        const stashPopped = await tryPopStash(worktreePath);
        stashRecoveryFailed = !stashPopped;
      }

      // Check for common errors
      const errorMsg = err.stderr || err.message || 'Pull failed';
      if (errorMsg.includes('no tracking information')) {
        return {
          success: false,
          error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}${stashRecoveryFailed ? ' Local changes remain stashed and need manual recovery (run: git stash pop).' : ''}`,
          stashRecoveryFailed: stashRecoveryFailed || undefined,
        };
      }

      return {
        success: false,
        error: `${errorMsg}${stashRecoveryFailed ? ' Local changes remain stashed and need manual recovery (run: git stash pop).' : ''}`,
        stashRecoveryFailed: stashRecoveryFailed || undefined,
      };
    }
  }

  // 9. If pull had conflicts, return conflict info (don't try stash pop —
  //    reapplying on top of an unresolved merge would compound the conflicts)
  if (pullConflict) {
    return {
      success: true,
      branch: branchName,
      pulled: true,
      hasConflicts: true,
      conflictSource: 'pull',
      conflictFiles: pullConflictFiles,
      stashed: didStash,
      stashRestored: false,
      message:
        `Pull resulted in merge conflicts. ${didStash ? 'Your local changes are still stashed.' : ''}`.trim(),
    };
  }

  // 10. Pull succeeded, now try to reapply stash
  if (didStash) {
    return await reapplyStash(worktreePath, branchName);
  }

  // Shouldn't reach here, but return a safe default
  return {
    success: true,
    branch: branchName,
    pulled: true,
    message: 'Pulled latest changes',
  };
}
|
||||
|
||||
/**
 * Attempt to reapply stashed changes after a successful pull.
 * Handles both clean reapplication and conflict scenarios.
 *
 * All outcomes report success=true: the pull itself already succeeded, and
 * the stash status is conveyed via stashRestored / hasConflicts.
 *
 * @param worktreePath - Path to the git worktree
 * @param branchName - Current branch name
 * @returns PullResult reflecting stash reapplication status
 */
async function reapplyStash(worktreePath: string, branchName: string): Promise<PullResult> {
  try {
    const stashPopOutput = await popStash(worktreePath);
    const stashPopCombined = stashPopOutput || '';

    // Check if stash pop had conflicts (git may report conflicts on stdout
    // while still exiting zero in some versions, so inspect output here too)
    if (isStashConflict(stashPopCombined)) {
      const stashConflictFiles = await getConflictFiles(worktreePath);

      return {
        success: true,
        branch: branchName,
        pulled: true,
        hasConflicts: true,
        conflictSource: 'stash',
        conflictFiles: stashConflictFiles,
        stashed: true,
        stashRestored: true, // Stash was applied but with conflicts
        message: 'Pull succeeded but reapplying your stashed changes resulted in merge conflicts.',
      };
    }

    // Stash pop succeeded cleanly
    return {
      success: true,
      branch: branchName,
      pulled: true,
      hasConflicts: false,
      stashed: true,
      stashRestored: true,
      message: 'Pulled latest changes and restored your stashed changes.',
    };
  } catch (stashPopError: unknown) {
    const err = stashPopError as { stderr?: string; stdout?: string; message?: string };
    const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`;

    // Check if stash pop failed due to conflicts
    if (isStashConflict(errorOutput)) {
      const stashConflictFiles = await getConflictFiles(worktreePath);

      return {
        success: true,
        branch: branchName,
        pulled: true,
        hasConflicts: true,
        conflictSource: 'stash',
        conflictFiles: stashConflictFiles,
        stashed: true,
        stashRestored: true,
        message: 'Pull succeeded but reapplying your stashed changes resulted in merge conflicts.',
      };
    }

    // Non-conflict stash pop error - stash is still in the stash list
    logger.warn('Failed to reapply stash after pull', { worktreePath, error: errorOutput });

    return {
      success: true,
      branch: branchName,
      pulled: true,
      hasConflicts: false,
      stashed: true,
      stashRestored: false,
      message:
        'Pull succeeded but failed to reapply stashed changes. Your changes are still in the stash list.',
    };
  }
}
|
||||
@@ -5,8 +5,10 @@
|
||||
* Follows the same pattern as merge-service.ts and cherry-pick-service.ts.
|
||||
*/
|
||||
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { execGitCommand } from '../routes/worktree/common.js';
|
||||
import { execGitCommand, getCurrentBranch } from '../lib/git.js';
|
||||
|
||||
const logger = createLogger('RebaseService');
|
||||
|
||||
@@ -37,11 +39,23 @@ export interface RebaseResult {
|
||||
* @returns RebaseResult with success/failure information
|
||||
*/
|
||||
export async function runRebase(worktreePath: string, ontoBranch: string): Promise<RebaseResult> {
|
||||
// Reject branch names that start with a dash to prevent them from being
|
||||
// misinterpreted as git options.
|
||||
if (ontoBranch.startsWith('-')) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Invalid branch name: "${ontoBranch}" must not start with a dash.`,
|
||||
};
|
||||
}
|
||||
|
||||
// Get current branch name before rebase
|
||||
const currentBranch = await getCurrentBranch(worktreePath);
|
||||
|
||||
try {
|
||||
await execGitCommand(['rebase', ontoBranch], worktreePath);
|
||||
// Pass ontoBranch after '--' so git treats it as a ref, not an option.
|
||||
// Set LC_ALL=C so git always emits English output regardless of the system
|
||||
// locale, making text-based conflict detection reliable.
|
||||
await execGitCommand(['rebase', '--', ontoBranch], worktreePath, { LC_ALL: 'C' });
|
||||
|
||||
return {
|
||||
success: true,
|
||||
@@ -50,15 +64,82 @@ export async function runRebase(worktreePath: string, ontoBranch: string): Promi
|
||||
message: `Successfully rebased ${currentBranch} onto ${ontoBranch}`,
|
||||
};
|
||||
} catch (rebaseError: unknown) {
|
||||
// Check if this is a rebase conflict
|
||||
// Check if this is a rebase conflict. We use a multi-layer strategy so
|
||||
// that detection is reliable even when locale settings vary or git's text
|
||||
// output changes across versions:
|
||||
//
|
||||
// 1. Primary (text-based): scan the error output for well-known English
|
||||
// conflict markers. Because we pass LC_ALL=C above these strings are
|
||||
// always in English, but we keep the check as one layer among several.
|
||||
//
|
||||
// 2. Repository-state check: run `git rev-parse --git-dir` to find the
|
||||
// actual .git directory, then verify whether the in-progress rebase
|
||||
// state directories (.git/rebase-merge or .git/rebase-apply) exist.
|
||||
// These are created by git at the start of a rebase and are the most
|
||||
// reliable indicator that a rebase is still in progress (i.e. stopped
|
||||
// due to conflicts).
|
||||
//
|
||||
// 3. Unmerged-path check: run `git status --porcelain` (machine-readable,
|
||||
// locale-independent) and look for lines whose first two characters
|
||||
// indicate an unmerged state (UU, AA, DD, AU, UA, DU, UD).
|
||||
//
|
||||
// hasConflicts is true when ANY of the three layers returns positive.
|
||||
const err = rebaseError as { stdout?: string; stderr?: string; message?: string };
|
||||
const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
|
||||
const hasConflicts =
|
||||
|
||||
// Layer 1 – text matching (locale-safe because we set LC_ALL=C above).
|
||||
const textIndicatesConflict =
|
||||
output.includes('CONFLICT') ||
|
||||
output.includes('could not apply') ||
|
||||
output.includes('Resolve all conflicts') ||
|
||||
output.includes('fix conflicts');
|
||||
|
||||
// Layers 2 & 3 – repository state inspection (locale-independent).
|
||||
let rebaseStateExists = false;
|
||||
let hasUnmergedPaths = false;
|
||||
try {
|
||||
// Find the canonical .git directory for this worktree.
|
||||
const gitDir = (await execGitCommand(['rev-parse', '--git-dir'], worktreePath)).trim();
|
||||
// git rev-parse --git-dir returns a path relative to cwd when the repo is
|
||||
// a worktree, so we resolve it against worktreePath.
|
||||
const resolvedGitDir = path.resolve(worktreePath, gitDir);
|
||||
|
||||
// Layer 2: check for rebase state directories.
|
||||
const rebaseMergeDir = path.join(resolvedGitDir, 'rebase-merge');
|
||||
const rebaseApplyDir = path.join(resolvedGitDir, 'rebase-apply');
|
||||
const [rebaseMergeExists, rebaseApplyExists] = await Promise.all([
|
||||
fs
|
||||
.access(rebaseMergeDir)
|
||||
.then(() => true)
|
||||
.catch(() => false),
|
||||
fs
|
||||
.access(rebaseApplyDir)
|
||||
.then(() => true)
|
||||
.catch(() => false),
|
||||
]);
|
||||
rebaseStateExists = rebaseMergeExists || rebaseApplyExists;
|
||||
} catch {
|
||||
// If rev-parse fails the repo may be in an unexpected state; fall back to
|
||||
// text-based detection only.
|
||||
}
|
||||
|
||||
try {
|
||||
// Layer 3: check for unmerged paths via machine-readable git status.
|
||||
const statusOutput = await execGitCommand(['status', '--porcelain'], worktreePath, {
|
||||
LC_ALL: 'C',
|
||||
});
|
||||
// Unmerged status codes occupy the first two characters of each line.
|
||||
// Standard unmerged codes: UU, AA, DD, AU, UA, DU, UD.
|
||||
hasUnmergedPaths = statusOutput
|
||||
.split('\n')
|
||||
.some((line) => /^(UU|AA|DD|AU|UA|DU|UD)/.test(line));
|
||||
} catch {
|
||||
// git status failing is itself a sign something is wrong; leave
|
||||
// hasUnmergedPaths as false and rely on the other layers.
|
||||
}
|
||||
|
||||
const hasConflicts = textIndicatesConflict || rebaseStateExists || hasUnmergedPaths;
|
||||
|
||||
if (hasConflicts) {
|
||||
// Get list of conflicted files
|
||||
const conflictFiles = await getConflictFiles(worktreePath);
|
||||
@@ -100,8 +181,8 @@ export async function abortRebase(worktreePath: string): Promise<boolean> {
|
||||
try {
|
||||
await execGitCommand(['rebase', '--abort'], worktreePath);
|
||||
return true;
|
||||
} catch {
|
||||
logger.warn('Failed to abort rebase after conflict');
|
||||
} catch (err) {
|
||||
logger.warn('Failed to abort rebase after conflict', err instanceof Error ? err.message : err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -126,14 +207,3 @@ export async function getConflictFiles(worktreePath: string): Promise<string[]>
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current branch name for the worktree.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns The current branch name
|
||||
*/
|
||||
export async function getCurrentBranch(worktreePath: string): Promise<string> {
|
||||
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
|
||||
return branchOutput.trim();
|
||||
}
|
||||
|
||||
462
apps/server/src/services/stash-service.ts
Normal file
462
apps/server/src/services/stash-service.ts
Normal file
@@ -0,0 +1,462 @@
|
||||
/**
|
||||
* StashService - Stash operations without HTTP
|
||||
*
|
||||
* Encapsulates stash workflows including:
|
||||
* - Push (create) stashes with optional message and file selection
|
||||
* - List all stash entries with metadata and changed files
|
||||
* - Apply or pop a stash entry with conflict detection
|
||||
* - Drop (delete) a stash entry
|
||||
* - Conflict detection from command output and git diff
|
||||
* - Lifecycle event emission (start, progress, conflicts, success, failure)
|
||||
*
|
||||
* Extracted from the worktree stash route handlers to improve organisation
|
||||
* and testability. Follows the same pattern as pull-service.ts and
|
||||
* merge-service.ts.
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { createEventEmitter } from '../lib/events.js';
|
||||
import { execGitCommand } from '../lib/git.js';
|
||||
import { getErrorMessage, logError } from '../routes/worktree/common.js';
|
||||
|
||||
const logger = createLogger('StashService');
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
/** Options controlling how a stash entry is applied. */
export interface StashApplyOptions {
  /** When true, remove the stash entry after applying (git stash pop) */
  pop?: boolean;
}
|
||||
|
||||
/** Outcome of applying or popping a stash entry. */
export interface StashApplyResult {
  /** True when the stash operation completed (conflicts still count as success). */
  success: boolean;
  /** Error message when the operation failed. */
  error?: string;
  /** Whether the stash content was applied to the working tree. */
  applied?: boolean;
  /** True when applying the stash produced merge conflicts. */
  hasConflicts?: boolean;
  /** Files left in a conflicted state, populated when hasConflicts is true. */
  conflictFiles?: string[];
  /** Which git subcommand was used. */
  operation?: 'apply' | 'pop';
  /** Zero-based stash index (stash@{N}) the operation targeted. */
  stashIndex?: number;
  /** Human-readable summary of the outcome. */
  message?: string;
}
|
||||
|
||||
/** Outcome of creating a stash via `git stash push`. */
export interface StashPushResult {
  /** True when the command completed (including the "nothing to stash" case). */
  success: boolean;
  /** Error message when the operation failed. */
  error?: string;
  /** True when a stash entry was actually created; false when there were no changes. */
  stashed: boolean;
  /** Branch the changes were stashed from. */
  branch?: string;
  /** Stash message used, or a human-readable status note. */
  message?: string;
}
|
||||
|
||||
/** A single parsed entry from `git stash list`. */
export interface StashEntry {
  /** Zero-based index N extracted from the stash@{N} refspec. */
  index: number;
  /** Stash subject line (e.g. "WIP on main: abc123 ..."). */
  message: string;
  /** Branch name parsed from the stash message; empty when it cannot be determined. */
  branch: string;
  /** Author date formatted with %aI (strict ISO 8601). */
  date: string;
  /** Files changed in the stash, from `git stash show --name-only`. */
  files: string[];
}
|
||||
|
||||
/** Outcome of listing all stash entries for a worktree. */
export interface StashListResult {
  success: boolean;
  /** Error message when the operation failed. */
  error?: string;
  /** Parsed stash entries, in `git stash list` order. */
  stashes: StashEntry[];
  /** Total number of entries returned. */
  total: number;
}
|
||||
|
||||
/** Outcome of dropping a stash entry. */
export interface StashDropResult {
  success: boolean;
  /** Error message when the operation failed. */
  error?: string;
  /** True when the stash entry was removed. */
  dropped: boolean;
  /** Zero-based index of the dropped entry. */
  stashIndex?: number;
  /** Human-readable summary of the outcome. */
  message?: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Retrieve the list of files with unmerged (conflicted) entries using git diff.
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @returns Array of file paths that have unresolved conflicts
|
||||
*/
|
||||
export async function getConflictedFiles(worktreePath: string): Promise<string[]> {
|
||||
try {
|
||||
const diffOutput = await execGitCommand(
|
||||
['diff', '--name-only', '--diff-filter=U'],
|
||||
worktreePath
|
||||
);
|
||||
return diffOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim().length > 0);
|
||||
} catch {
|
||||
// If we cannot get the file list, return an empty array
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether command output indicates a merge conflict.
|
||||
*/
|
||||
function isConflictOutput(output: string): boolean {
|
||||
return output.includes('CONFLICT') || output.includes('Merge conflict');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Main Service Function
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Apply or pop a stash entry in the given worktree.
|
||||
*
|
||||
* The workflow:
|
||||
* 1. Validate inputs
|
||||
* 2. Emit stash:start event
|
||||
* 3. Run `git stash apply` or `git stash pop`
|
||||
* 4. Emit stash:progress event with raw command output
|
||||
* 5. Check output for conflict markers; if conflicts found, collect files and
|
||||
* emit stash:conflicts event
|
||||
* 6. Emit stash:success or stash:failure depending on outcome
|
||||
* 7. Return a structured StashApplyResult
|
||||
*
|
||||
* @param worktreePath - Absolute path to the git worktree
|
||||
* @param stashIndex - Zero-based stash index (stash@{N})
|
||||
* @param options - Optional flags (pop)
|
||||
* @returns StashApplyResult with detailed status information
|
||||
*/
|
||||
export async function applyOrPop(
|
||||
worktreePath: string,
|
||||
stashIndex: number,
|
||||
options?: StashApplyOptions
|
||||
): Promise<StashApplyResult> {
|
||||
const emitter = createEventEmitter();
|
||||
const operation: 'apply' | 'pop' = options?.pop ? 'pop' : 'apply';
|
||||
const stashRef = `stash@{${stashIndex}}`;
|
||||
|
||||
logger.info(`[StashService] ${operation} ${stashRef} in ${worktreePath}`);
|
||||
|
||||
// 1. Emit start event
|
||||
emitter.emit('stash:start', { worktreePath, stashIndex, stashRef, operation });
|
||||
|
||||
try {
|
||||
// 2. Run git stash apply / pop
|
||||
let stdout = '';
|
||||
|
||||
try {
|
||||
stdout = await execGitCommand(['stash', operation, stashRef], worktreePath);
|
||||
} catch (gitError: unknown) {
|
||||
const err = gitError as { stdout?: string; stderr?: string; message?: string };
|
||||
const errStdout = err.stdout || '';
|
||||
const errStderr = err.stderr || err.message || '';
|
||||
|
||||
const combinedOutput = `${errStdout}\n${errStderr}`;
|
||||
|
||||
// 3. Emit progress with raw output
|
||||
emitter.emit('stash:progress', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
output: combinedOutput,
|
||||
});
|
||||
|
||||
// 4. Check if the error is a conflict
|
||||
if (isConflictOutput(combinedOutput)) {
|
||||
const conflictFiles = await getConflictedFiles(worktreePath);
|
||||
|
||||
emitter.emit('stash:conflicts', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
conflictFiles,
|
||||
});
|
||||
|
||||
const result: StashApplyResult = {
|
||||
success: true,
|
||||
applied: true,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
operation,
|
||||
stashIndex,
|
||||
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
|
||||
};
|
||||
|
||||
emitter.emit('stash:success', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// 5. Non-conflict git error – re-throw so the outer catch logs and handles it
|
||||
throw gitError;
|
||||
}
|
||||
|
||||
// 6. Command succeeded – check stdout for conflict markers (some git versions
|
||||
// exit 0 even when conflicts occur during apply)
|
||||
const combinedOutput = stdout;
|
||||
|
||||
emitter.emit('stash:progress', { worktreePath, stashIndex, operation, output: combinedOutput });
|
||||
|
||||
if (isConflictOutput(combinedOutput)) {
|
||||
const conflictFiles = await getConflictedFiles(worktreePath);
|
||||
|
||||
emitter.emit('stash:conflicts', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
conflictFiles,
|
||||
});
|
||||
|
||||
const result: StashApplyResult = {
|
||||
success: true,
|
||||
applied: true,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
operation,
|
||||
stashIndex,
|
||||
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
|
||||
};
|
||||
|
||||
emitter.emit('stash:success', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
hasConflicts: true,
|
||||
conflictFiles,
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// 7. Clean success
|
||||
const result: StashApplyResult = {
|
||||
success: true,
|
||||
applied: true,
|
||||
hasConflicts: false,
|
||||
operation,
|
||||
stashIndex,
|
||||
message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} successfully`,
|
||||
};
|
||||
|
||||
emitter.emit('stash:success', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
hasConflicts: false,
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
const errorMessage = getErrorMessage(error);
|
||||
|
||||
logError(error, `Stash ${operation} failed`);
|
||||
|
||||
emitter.emit('stash:failure', {
|
||||
worktreePath,
|
||||
stashIndex,
|
||||
operation,
|
||||
error: errorMessage,
|
||||
});
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
applied: false,
|
||||
operation,
|
||||
stashIndex,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Push Stash
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Stash uncommitted changes (including untracked files) with an optional
|
||||
* message and optional file selection.
|
||||
*
|
||||
* Workflow:
|
||||
* 1. Check for uncommitted changes via `git status --porcelain`
|
||||
* 2. If no changes, return early with stashed: false
|
||||
* 3. Build and run `git stash push --include-untracked [-m message] [-- files]`
|
||||
* 4. Retrieve the current branch name
|
||||
* 5. Return a structured StashPushResult
|
||||
*
|
||||
* @param worktreePath - Absolute path to the git worktree
|
||||
* @param options - Optional message and files to selectively stash
|
||||
* @returns StashPushResult with stash status and branch info
|
||||
*/
|
||||
export async function pushStash(
|
||||
worktreePath: string,
|
||||
options?: { message?: string; files?: string[] }
|
||||
): Promise<StashPushResult> {
|
||||
const message = options?.message;
|
||||
const files = options?.files;
|
||||
|
||||
logger.info(`[StashService] push stash in ${worktreePath}`);
|
||||
|
||||
// 1. Check for any changes to stash
|
||||
const status = await execGitCommand(['status', '--porcelain'], worktreePath);
|
||||
|
||||
if (!status.trim()) {
|
||||
return {
|
||||
success: true,
|
||||
stashed: false,
|
||||
message: 'No changes to stash',
|
||||
};
|
||||
}
|
||||
|
||||
// 2. Build stash push command args
|
||||
const args = ['stash', 'push', '--include-untracked'];
|
||||
if (message && message.trim()) {
|
||||
args.push('-m', message.trim());
|
||||
}
|
||||
|
||||
// If specific files are provided, add them as pathspecs after '--'
|
||||
if (files && files.length > 0) {
|
||||
args.push('--');
|
||||
args.push(...files);
|
||||
}
|
||||
|
||||
// 3. Execute stash push
|
||||
await execGitCommand(args, worktreePath);
|
||||
|
||||
// 4. Get current branch name
|
||||
const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
stashed: true,
|
||||
branch: branchName,
|
||||
message: message?.trim() || `WIP on ${branchName}`,
|
||||
};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// List Stashes
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* List all stash entries for a worktree with metadata and changed files.
|
||||
*
|
||||
* Workflow:
|
||||
* 1. Run `git stash list` with a custom format to get index, message, and date
|
||||
* 2. Parse each stash line into a structured StashEntry
|
||||
* 3. For each entry, fetch the list of files changed via `git stash show`
|
||||
* 4. Return the full list as a StashListResult
|
||||
*
|
||||
* @param worktreePath - Absolute path to the git worktree
|
||||
* @returns StashListResult with all stash entries and their metadata
|
||||
*/
|
||||
export async function listStash(worktreePath: string): Promise<StashListResult> {
|
||||
logger.info(`[StashService] list stashes in ${worktreePath}`);
|
||||
|
||||
// 1. Get stash list with format: index, message, date
|
||||
// Use %aI (strict ISO 8601) instead of %ai to ensure cross-browser compatibility
|
||||
const stashOutput = await execGitCommand(
|
||||
['stash', 'list', '--format=%gd|||%s|||%aI'],
|
||||
worktreePath
|
||||
);
|
||||
|
||||
if (!stashOutput.trim()) {
|
||||
return {
|
||||
success: true,
|
||||
stashes: [],
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const stashLines = stashOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((l) => l.trim());
|
||||
const stashes: StashEntry[] = [];
|
||||
|
||||
for (const line of stashLines) {
|
||||
const parts = line.split('|||');
|
||||
if (parts.length < 3) continue;
|
||||
|
||||
const refSpec = parts[0].trim(); // e.g., "stash@{0}"
|
||||
const stashMessage = parts[1].trim();
|
||||
const date = parts[2].trim();
|
||||
|
||||
// Extract index from stash@{N}; skip entries that don't match the expected format
|
||||
const indexMatch = refSpec.match(/stash@\{(\d+)\}/);
|
||||
if (!indexMatch) continue;
|
||||
const index = parseInt(indexMatch[1], 10);
|
||||
|
||||
// Extract branch name from message (format: "WIP on branch: hash message" or "On branch: hash message")
|
||||
let branch = '';
|
||||
const branchMatch = stashMessage.match(/^(?:WIP on|On) ([^:]+):/);
|
||||
if (branchMatch) {
|
||||
branch = branchMatch[1];
|
||||
}
|
||||
|
||||
// Get list of files in this stash
|
||||
let files: string[] = [];
|
||||
try {
|
||||
const filesOutput = await execGitCommand(
|
||||
['stash', 'show', refSpec, '--name-only'],
|
||||
worktreePath
|
||||
);
|
||||
files = filesOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.trim());
|
||||
} catch {
|
||||
// Ignore errors getting file list
|
||||
}
|
||||
|
||||
stashes.push({
|
||||
index,
|
||||
message: stashMessage,
|
||||
branch,
|
||||
date,
|
||||
files,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
stashes,
|
||||
total: stashes.length,
|
||||
};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Drop Stash
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Drop (delete) a stash entry by index.
|
||||
*
|
||||
* @param worktreePath - Absolute path to the git worktree
|
||||
* @param stashIndex - Zero-based stash index (stash@{N})
|
||||
* @returns StashDropResult with drop status
|
||||
*/
|
||||
export async function dropStash(
|
||||
worktreePath: string,
|
||||
stashIndex: number
|
||||
): Promise<StashDropResult> {
|
||||
const stashRef = `stash@{${stashIndex}}`;
|
||||
|
||||
logger.info(`[StashService] drop ${stashRef} in ${worktreePath}`);
|
||||
|
||||
await execGitCommand(['stash', 'drop', stashRef], worktreePath);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
dropped: true,
|
||||
stashIndex,
|
||||
message: `Stash ${stashRef} dropped successfully`,
|
||||
};
|
||||
}
|
||||
441
apps/server/src/services/worktree-branch-service.ts
Normal file
441
apps/server/src/services/worktree-branch-service.ts
Normal file
@@ -0,0 +1,441 @@
|
||||
/**
|
||||
* WorktreeBranchService - Switch branch operations without HTTP
|
||||
*
|
||||
* Handles branch switching with automatic stash/reapply of local changes.
|
||||
* If there are uncommitted changes, they are stashed before switching and
|
||||
* reapplied after. If the stash pop results in merge conflicts, returns
|
||||
* a special response so the UI can create a conflict resolution task.
|
||||
*
|
||||
* For remote branches (e.g., "origin/feature"), automatically creates a
|
||||
* local tracking branch and checks it out.
|
||||
*
|
||||
* Also fetches the latest remote refs after switching.
|
||||
*
|
||||
* Extracted from the worktree switch-branch route to improve organization
|
||||
* and testability. Follows the same pattern as pull-service.ts and
|
||||
* rebase-service.ts.
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { execGitCommand } from '../lib/git.js';
|
||||
import { getErrorMessage } from '../routes/worktree/common.js';
|
||||
import type { EventEmitter } from '../lib/events.js';
|
||||
|
||||
const logger = createLogger('WorktreeBranchService');
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
/** Outcome of a branch-switch workflow on a worktree. */
export interface SwitchBranchResult {
  /** True when the switch completed (possibly with stash-pop conflicts). */
  success: boolean;
  /** Error message when the switch failed. */
  error?: string;
  /** Details of a completed switch. */
  result?: {
    /** Branch the worktree was on before the switch. */
    previousBranch: string;
    /** Branch the worktree is on after the switch. */
    currentBranch: string;
    /** Human-readable summary of the outcome. */
    message: string;
    /** True when reapplying stashed changes produced merge conflicts. */
    hasConflicts?: boolean;
    /** True when local changes were stashed as part of the switch. */
    stashedChanges?: boolean;
  };
  /** Set when checkout fails and stash pop produced conflicts during recovery */
  stashPopConflicts?: boolean;
  /** Human-readable message when stash pop conflicts occur during error recovery */
  stashPopConflictMessage?: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions
|
||||
// ============================================================================
|
||||
|
||||
function isExcludedWorktreeLine(line: string): boolean {
|
||||
return line.includes('.worktrees/') || line.endsWith('.worktrees');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if there are any changes at all (including untracked) that should be stashed
|
||||
*/
|
||||
async function hasAnyChanges(cwd: string): Promise<boolean> {
|
||||
try {
|
||||
const stdout = await execGitCommand(['status', '--porcelain'], cwd);
|
||||
const lines = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => {
|
||||
if (!line.trim()) return false;
|
||||
if (isExcludedWorktreeLine(line)) return false;
|
||||
return true;
|
||||
});
|
||||
return lines.length > 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stash all local changes (including untracked files)
|
||||
* Returns true if a stash was created, false if there was nothing to stash
|
||||
*/
|
||||
async function stashChanges(cwd: string, message: string): Promise<boolean> {
|
||||
try {
|
||||
// Get stash count before
|
||||
const beforeOutput = await execGitCommand(['stash', 'list'], cwd);
|
||||
const countBefore = beforeOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((l) => l.trim()).length;
|
||||
|
||||
// Stash including untracked files
|
||||
await execGitCommand(['stash', 'push', '--include-untracked', '-m', message], cwd);
|
||||
|
||||
// Get stash count after to verify something was stashed
|
||||
const afterOutput = await execGitCommand(['stash', 'list'], cwd);
|
||||
const countAfter = afterOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((l) => l.trim()).length;
|
||||
|
||||
return countAfter > countBefore;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pop the most recent stash entry
|
||||
* Returns an object indicating success and whether there were conflicts
|
||||
*/
|
||||
async function popStash(
|
||||
cwd: string
|
||||
): Promise<{ success: boolean; hasConflicts: boolean; error?: string }> {
|
||||
try {
|
||||
const stdout = await execGitCommand(['stash', 'pop'], cwd);
|
||||
// Check for conflict markers in the output
|
||||
if (stdout.includes('CONFLICT') || stdout.includes('Merge conflict')) {
|
||||
return { success: false, hasConflicts: true };
|
||||
}
|
||||
return { success: true, hasConflicts: false };
|
||||
} catch (error) {
|
||||
const errorMsg = getErrorMessage(error);
|
||||
if (errorMsg.includes('CONFLICT') || errorMsg.includes('Merge conflict')) {
|
||||
return { success: false, hasConflicts: true, error: errorMsg };
|
||||
}
|
||||
return { success: false, hasConflicts: false, error: errorMsg };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch latest from all remotes (silently, with timeout)
|
||||
*/
|
||||
async function fetchRemotes(cwd: string): Promise<void> {
|
||||
try {
|
||||
await execGitCommand(['fetch', '--all', '--quiet'], cwd);
|
||||
} catch {
|
||||
// Ignore fetch errors - we may be offline
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a remote branch name like "origin/feature-branch" into its parts
|
||||
*/
|
||||
function parseRemoteBranch(branchName: string): { remote: string; branch: string } | null {
|
||||
const slashIndex = branchName.indexOf('/');
|
||||
if (slashIndex === -1) return null;
|
||||
return {
|
||||
remote: branchName.substring(0, slashIndex),
|
||||
branch: branchName.substring(slashIndex + 1),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a branch name refers to a remote branch
|
||||
*/
|
||||
async function isRemoteBranch(cwd: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
const stdout = await execGitCommand(['branch', '-r', '--format=%(refname:short)'], cwd);
|
||||
const remoteBranches = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.map((b) => b.trim().replace(/^['"]|['"]$/g, ''))
|
||||
.filter((b) => b);
|
||||
return remoteBranches.includes(branchName);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a local branch already exists
|
||||
*/
|
||||
async function localBranchExists(cwd: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
await execGitCommand(['rev-parse', '--verify', `refs/heads/${branchName}`], cwd);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Main Service Function
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Perform a full branch switch workflow on the given worktree.
|
||||
*
|
||||
* The workflow:
|
||||
* 1. Get current branch name
|
||||
* 2. Detect remote vs local branch and determine target
|
||||
* 3. Return early if already on target branch
|
||||
* 4. Validate branch existence
|
||||
* 5. Stash local changes if any
|
||||
* 6. Checkout the target branch
|
||||
* 7. Fetch latest from remotes
|
||||
* 8. Reapply stashed changes (detect conflicts)
|
||||
* 9. Handle error recovery (restore stash if checkout fails)
|
||||
*
|
||||
* @param worktreePath - Path to the git worktree
|
||||
* @param branchName - Branch to switch to (can be local or remote like "origin/feature")
|
||||
* @param events - Optional event emitter for lifecycle events
|
||||
* @returns SwitchBranchResult with detailed status information
|
||||
*/
|
||||
export async function performSwitchBranch(
  worktreePath: string,
  branchName: string,
  events?: EventEmitter
): Promise<SwitchBranchResult> {
  // Emit start event
  events?.emit('switch:start', { worktreePath, branchName });

  // 1. Get current branch
  const currentBranchOutput = await execGitCommand(
    ['rev-parse', '--abbrev-ref', 'HEAD'],
    worktreePath
  );
  const previousBranch = currentBranchOutput.trim();

  // 2. Determine the actual target branch name for checkout
  let targetBranch = branchName;
  let isRemote = false;

  // Check if this is a remote branch (e.g., "origin/feature-branch").
  // For remotes, targetBranch becomes the local tracking branch name.
  let parsedRemote: { remote: string; branch: string } | null = null;
  if (await isRemoteBranch(worktreePath, branchName)) {
    isRemote = true;
    parsedRemote = parseRemoteBranch(branchName);
    if (parsedRemote) {
      targetBranch = parsedRemote.branch;
    } else {
      events?.emit('switch:error', {
        worktreePath,
        branchName,
        error: `Failed to parse remote branch name '${branchName}'`,
      });
      return {
        success: false,
        error: `Failed to parse remote branch name '${branchName}'`,
      };
    }
  }

  // 3. Return early if already on the target branch
  if (previousBranch === targetBranch) {
    events?.emit('switch:done', {
      worktreePath,
      previousBranch,
      currentBranch: targetBranch,
      alreadyOnBranch: true,
    });
    return {
      success: true,
      result: {
        previousBranch,
        currentBranch: targetBranch,
        message: `Already on branch '${targetBranch}'`,
      },
    };
  }

  // 4. Check if target branch exists (locally or as remote ref).
  // Remote branches were already validated by isRemoteBranch above.
  if (!isRemote) {
    try {
      await execGitCommand(['rev-parse', '--verify', branchName], worktreePath);
    } catch {
      events?.emit('switch:error', {
        worktreePath,
        branchName,
        error: `Branch '${branchName}' does not exist`,
      });
      return {
        success: false,
        error: `Branch '${branchName}' does not exist`,
      };
    }
  }

  // 5. Stash local changes if any exist (didStash tracks whether a stash
  // entry was actually created — stashChanges can return false even when
  // hadChanges was true, e.g. if the stash command fails).
  const hadChanges = await hasAnyChanges(worktreePath);
  let didStash = false;

  if (hadChanges) {
    events?.emit('switch:stash', {
      worktreePath,
      previousBranch,
      targetBranch,
      action: 'push',
    });
    const stashMessage = `automaker-branch-switch: ${previousBranch} → ${targetBranch}`;
    didStash = await stashChanges(worktreePath, stashMessage);
  }

  try {
    // 6. Switch to the target branch
    events?.emit('switch:checkout', {
      worktreePath,
      targetBranch,
      isRemote,
      previousBranch,
    });

    if (isRemote) {
      if (!parsedRemote) {
        throw new Error(`Failed to parse remote branch name '${branchName}'`);
      }
      if (await localBranchExists(worktreePath, parsedRemote.branch)) {
        // Local branch exists, just checkout
        await execGitCommand(['checkout', parsedRemote.branch], worktreePath);
      } else {
        // Create local tracking branch from remote
        await execGitCommand(['checkout', '-b', parsedRemote.branch, branchName], worktreePath);
      }
    } else {
      await execGitCommand(['checkout', targetBranch], worktreePath);
    }

    // 7. Fetch latest from remotes after switching (best-effort; errors ignored)
    await fetchRemotes(worktreePath);

    // 8. Reapply stashed changes if we stashed earlier
    let hasConflicts = false;
    let conflictMessage = '';
    let stashReapplied = false;

    if (didStash) {
      events?.emit('switch:pop', {
        worktreePath,
        targetBranch,
        action: 'pop',
      });

      const popResult = await popStash(worktreePath);
      hasConflicts = popResult.hasConflicts;
      if (popResult.hasConflicts) {
        conflictMessage = `Switched to branch '${targetBranch}' but merge conflicts occurred when reapplying your local changes. Please resolve the conflicts.`;
      } else if (!popResult.success) {
        // Stash pop failed for a non-conflict reason - the stash is still there
        conflictMessage = `Switched to branch '${targetBranch}' but failed to reapply stashed changes: ${popResult.error}. Your changes are still in the stash.`;
      } else {
        stashReapplied = true;
      }
    }

    // The three outcomes below are all success: true — the checkout itself
    // succeeded; they differ only in what happened to the stashed changes.
    if (hasConflicts) {
      events?.emit('switch:done', {
        worktreePath,
        previousBranch,
        currentBranch: targetBranch,
        hasConflicts: true,
      });
      return {
        success: true,
        result: {
          previousBranch,
          currentBranch: targetBranch,
          message: conflictMessage,
          hasConflicts: true,
          stashedChanges: true,
        },
      };
    } else if (didStash && !stashReapplied) {
      // Stash pop failed for a non-conflict reason — stash is still present
      events?.emit('switch:done', {
        worktreePath,
        previousBranch,
        currentBranch: targetBranch,
        stashPopFailed: true,
      });
      return {
        success: true,
        result: {
          previousBranch,
          currentBranch: targetBranch,
          message: conflictMessage,
          hasConflicts: false,
          stashedChanges: true,
        },
      };
    } else {
      const stashNote = stashReapplied ? ' (local changes stashed and reapplied)' : '';
      events?.emit('switch:done', {
        worktreePath,
        previousBranch,
        currentBranch: targetBranch,
        stashReapplied,
      });
      return {
        success: true,
        result: {
          previousBranch,
          currentBranch: targetBranch,
          message: `Switched to branch '${targetBranch}'${stashNote}`,
          hasConflicts: false,
          stashedChanges: stashReapplied,
        },
      };
    }
  } catch (checkoutError) {
    // 9. If checkout failed and we stashed, try to restore the stash
    if (didStash) {
      const popResult = await popStash(worktreePath);
      if (popResult.hasConflicts) {
        // Stash pop itself produced merge conflicts — the working tree is now in a
        // conflicted state even though the checkout failed. Surface this clearly so
        // the caller can prompt the user (or AI) to resolve conflicts rather than
        // simply retrying the branch switch.
        const checkoutErrorMsg = getErrorMessage(checkoutError);
        events?.emit('switch:error', {
          worktreePath,
          branchName,
          error: checkoutErrorMsg,
          stashPopConflicts: true,
        });
        return {
          success: false,
          error: checkoutErrorMsg,
          stashPopConflicts: true,
          stashPopConflictMessage:
            'Stash pop resulted in conflicts: your stashed changes were partially reapplied ' +
            'but produced merge conflicts. Please resolve the conflicts before retrying the branch switch.',
        };
      } else if (!popResult.success) {
        // Stash pop failed for a non-conflict reason; the stash entry is still intact.
        // Include this detail alongside the original checkout error.
        const checkoutErrorMsg = getErrorMessage(checkoutError);
        const combinedMessage =
          `${checkoutErrorMsg}. Additionally, restoring your stashed changes failed: ` +
          `${popResult.error ?? 'unknown error'} — your changes are still saved in the stash.`;
        events?.emit('switch:error', {
          worktreePath,
          branchName,
          error: combinedMessage,
        });
        return {
          success: false,
          error: combinedMessage,
          stashPopConflicts: false,
        };
      }
      // popResult.success === true: stash was cleanly restored, re-throw the checkout error
    }
    throw checkoutError;
  }
}
|
||||
@@ -1,60 +1,196 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseGitLogOutput } from '../src/lib/git-log-parser.js';
|
||||
|
||||
// Mock data with NUL-based separator
|
||||
const mockGitOutput = `a1b2c3d4e5f67890abcd1234567890abcd1234\x00a1b2c3\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is the commit body\x00e5f6g7h8i9j0klmnoprstuv\x00e5f6g7\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Fix bug\x00Fixed the bug with ---END--- in the message\x00q1w2e3r4t5y6u7i8o9p0asdfghjkl\x00q1w2e3\x00Bob Johnson\x00bob@example.com\x002023-01-03T12:00:00Z\x00Another commit\x00Empty body\x00`;
|
||||
// Mock data: fields within each commit are newline-separated,
|
||||
// commits are NUL-separated (matching the parser contract).
|
||||
const mockGitOutput = [
|
||||
'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body',
|
||||
'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message',
|
||||
'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body',
|
||||
].join('\0');
|
||||
|
||||
// Mock data with problematic ---END--- in commit message
|
||||
const mockOutputWithEndMarker = `a1b2c3d4e5f67890abcd1234567890abcd1234\x00a1b2c3\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is the commit body\x00---END--- is in this message\x00e5f6g7h8i9j0klmnoprstuv\x00e5f6g7\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Fix bug\x00Fixed the bug with ---END--- in the message\x00q1w2e3r4t5y6u7i8o9p0asdfghjkl\x00q1w2e3\x00Bob Johnson\x00bob@example.com\x002023-01-03T12:00:00Z\x00Another commit\x00Empty body\x00`;
|
||||
// Mock data where commit bodies contain ---END--- markers
|
||||
const mockOutputWithEndMarker = [
|
||||
'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body\n---END--- is in this message',
|
||||
'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message',
|
||||
'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body',
|
||||
].join('\0');
|
||||
|
||||
console.log('Testing parseGitLogOutput with NUL-based separator...\n');
|
||||
// Single-commit mock: fields newline-separated, no trailing NUL needed
|
||||
const singleCommitOutput =
|
||||
'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSingle commit\nSingle commit body';
|
||||
|
||||
// Test 1: Normal parsing
|
||||
console.log('Test 1: Normal parsing');
|
||||
try {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
console.log(`✓ Parsed ${commits.length} commits successfully`);
|
||||
console.log('First commit:', commits[0]);
|
||||
console.log('Second commit:', commits[1]);
|
||||
console.log('Third commit:', commits[2]);
|
||||
console.log('');
|
||||
} catch (error) {
|
||||
console.error('✗ Test 1 failed:', error);
|
||||
}
|
||||
describe('parseGitLogOutput', () => {
|
||||
describe('normal parsing (three commits)', () => {
|
||||
it('returns the correct number of commits', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits.length).toBe(3);
|
||||
});
|
||||
|
||||
// Test 2: Parsing with ---END--- in commit messages
|
||||
console.log('Test 2: Parsing with ---END--- in commit messages');
|
||||
try {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
console.log(`✓ Parsed ${commits.length} commits successfully`);
|
||||
console.log('Commits with ---END--- in messages:');
|
||||
commits.forEach((commit, index) => {
|
||||
console.log(`${index + 1}. ${commit.subject}: "${commit.body}"`);
|
||||
it('parses the first commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234');
|
||||
expect(commits[0].shortHash).toBe('a1b2c3');
|
||||
expect(commits[0].author).toBe('John Doe');
|
||||
expect(commits[0].authorEmail).toBe('john@example.com');
|
||||
expect(commits[0].date).toBe('2023-01-01T12:00:00Z');
|
||||
expect(commits[0].subject).toBe('Initial commit');
|
||||
expect(commits[0].body).toBe('This is the commit body');
|
||||
});
|
||||
|
||||
it('parses the second commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits[1].hash).toBe('e5f6g7h8i9j0klmnoprstuv');
|
||||
expect(commits[1].shortHash).toBe('e5f6g7');
|
||||
expect(commits[1].author).toBe('Jane Smith');
|
||||
expect(commits[1].subject).toBe('Fix bug');
|
||||
expect(commits[1].body).toMatch(/---END---/);
|
||||
});
|
||||
|
||||
it('parses the third commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits[2].hash).toBe('q1w2e3r4t5y6u7i8o9p0asdfghjkl');
|
||||
expect(commits[2].shortHash).toBe('q1w2e3');
|
||||
expect(commits[2].author).toBe('Bob Johnson');
|
||||
expect(commits[2].subject).toBe('Another commit');
|
||||
expect(commits[2].body).toBe('Empty body');
|
||||
});
|
||||
});
|
||||
console.log('');
|
||||
} catch (error) {
|
||||
console.error('✗ Test 2 failed:', error);
|
||||
}
|
||||
|
||||
// Test 3: Empty output
|
||||
console.log('Test 3: Empty output');
|
||||
try {
|
||||
const commits = parseGitLogOutput('');
|
||||
console.log(`✓ Parsed ${commits.length} commits from empty output`);
|
||||
console.log('');
|
||||
} catch (error) {
|
||||
console.error('✗ Test 3 failed:', error);
|
||||
}
|
||||
describe('parsing with ---END--- in commit messages', () => {
|
||||
it('returns the correct number of commits', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits.length).toBe(3);
|
||||
});
|
||||
|
||||
// Test 4: Output with only one commit
|
||||
console.log('Test 4: Output with only one commit');
|
||||
const singleCommitOutput = `a1b2c3d4e5f67890abcd1234567890abcd1234\x00a1b2c3\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Single commit\x00Single commit body\x00`;
|
||||
try {
|
||||
const commits = parseGitLogOutput(singleCommitOutput);
|
||||
console.log(`✓ Parsed ${commits.length} commits successfully`);
|
||||
console.log('Single commit:', commits[0]);
|
||||
console.log('');
|
||||
} catch (error) {
|
||||
console.error('✗ Test 4 failed:', error);
|
||||
}
|
||||
it('preserves ---END--- text in the body of the first commit', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits[0].subject).toBe('Initial commit');
|
||||
expect(commits[0].body).toMatch(/---END---/);
|
||||
});
|
||||
|
||||
console.log('All tests completed!');
|
||||
it('preserves ---END--- text in the body of the second commit', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits[1].subject).toBe('Fix bug');
|
||||
expect(commits[1].body).toMatch(/---END---/);
|
||||
});
|
||||
|
||||
it('parses the third commit without ---END--- interference', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits[2].subject).toBe('Another commit');
|
||||
expect(commits[2].body).toBe('Empty body');
|
||||
});
|
||||
});
|
||||
|
||||
describe('empty output', () => {
|
||||
it('returns an empty array for an empty string', () => {
|
||||
const commits = parseGitLogOutput('');
|
||||
expect(commits).toEqual([]);
|
||||
expect(commits.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('single-commit output', () => {
|
||||
it('returns exactly one commit', () => {
|
||||
const commits = parseGitLogOutput(singleCommitOutput);
|
||||
expect(commits.length).toBe(1);
|
||||
});
|
||||
|
||||
it('parses the single commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(singleCommitOutput);
|
||||
expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234');
|
||||
expect(commits[0].shortHash).toBe('a1b2c3');
|
||||
expect(commits[0].author).toBe('John Doe');
|
||||
expect(commits[0].authorEmail).toBe('john@example.com');
|
||||
expect(commits[0].date).toBe('2023-01-01T12:00:00Z');
|
||||
expect(commits[0].subject).toBe('Single commit');
|
||||
expect(commits[0].body).toBe('Single commit body');
|
||||
});
|
||||
});
|
||||
|
||||
describe('multi-line commit body', () => {
|
||||
// Test vector from test-proper-nul-format.js: commit with a 3-line body
|
||||
const multiLineBodyOutput =
|
||||
[
|
||||
'abc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is a normal commit body',
|
||||
'def456\ndef4\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in this message',
|
||||
'ghi789\nghi7\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nThis body has multiple lines\nSecond line\nThird line',
|
||||
].join('\0') + '\0';
|
||||
|
||||
it('returns 3 commits', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits.length).toBe(3);
|
||||
});
|
||||
|
||||
it('parses the first commit correctly', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits[0].hash).toBe('abc123');
|
||||
expect(commits[0].shortHash).toBe('abc1');
|
||||
expect(commits[0].author).toBe('John Doe');
|
||||
expect(commits[0].authorEmail).toBe('john@example.com');
|
||||
expect(commits[0].date).toBe('2023-01-01T12:00:00Z');
|
||||
expect(commits[0].subject).toBe('Initial commit');
|
||||
expect(commits[0].body).toBe('This is a normal commit body');
|
||||
});
|
||||
|
||||
it('parses the second commit with ---END--- in body correctly', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits[1].hash).toBe('def456');
|
||||
expect(commits[1].shortHash).toBe('def4');
|
||||
expect(commits[1].author).toBe('Jane Smith');
|
||||
expect(commits[1].subject).toBe('Fix bug');
|
||||
expect(commits[1].body).toContain('---END---');
|
||||
});
|
||||
|
||||
it('parses the third commit with a multi-line body correctly', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits[2].hash).toBe('ghi789');
|
||||
expect(commits[2].shortHash).toBe('ghi7');
|
||||
expect(commits[2].author).toBe('Bob Johnson');
|
||||
expect(commits[2].subject).toBe('Another commit');
|
||||
expect(commits[2].body).toBe('This body has multiple lines\nSecond line\nThird line');
|
||||
});
|
||||
});
|
||||
|
||||
describe('commit with empty body (trailing blank lines after subject)', () => {
|
||||
// Test vector from test-proper-nul-format.js: empty body commit
|
||||
const emptyBodyOutput =
|
||||
'empty123\nempty1\nAlice Brown\nalice@example.com\n2023-01-04T12:00:00Z\nEmpty body commit\n\n\0';
|
||||
|
||||
it('returns 1 commit', () => {
|
||||
const commits = parseGitLogOutput(emptyBodyOutput);
|
||||
expect(commits.length).toBe(1);
|
||||
});
|
||||
|
||||
it('parses the commit subject correctly', () => {
|
||||
const commits = parseGitLogOutput(emptyBodyOutput);
|
||||
expect(commits[0].hash).toBe('empty123');
|
||||
expect(commits[0].shortHash).toBe('empty1');
|
||||
expect(commits[0].author).toBe('Alice Brown');
|
||||
expect(commits[0].subject).toBe('Empty body commit');
|
||||
});
|
||||
|
||||
it('produces an empty body string when only blank lines follow the subject', () => {
|
||||
const commits = parseGitLogOutput(emptyBodyOutput);
|
||||
expect(commits[0].body).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('leading empty lines in a commit block', () => {
|
||||
// Blocks that start with blank lines before the hash field
|
||||
const outputWithLeadingBlanks =
|
||||
'\n\nabc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSubject here\nBody here';
|
||||
|
||||
it('returns 1 commit despite leading blank lines', () => {
|
||||
const commits = parseGitLogOutput(outputWithLeadingBlanks);
|
||||
expect(commits.length).toBe(1);
|
||||
});
|
||||
|
||||
it('parses the commit fields correctly when block has leading empty lines', () => {
|
||||
const commits = parseGitLogOutput(outputWithLeadingBlanks);
|
||||
expect(commits[0].hash).toBe('abc123');
|
||||
expect(commits[0].subject).toBe('Subject here');
|
||||
expect(commits[0].body).toBe('Body here');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,107 +0,0 @@
|
||||
// Test to verify the NUL-based delimiter functionality
|
||||
// This simulates exactly what git would produce with the new format
|
||||
|
||||
console.log('Testing NUL-based delimiter functionality...\n');
|
||||
|
||||
// Simulate git log output with proper NUL-based separator format
|
||||
// Each commit has 7 fields separated by NUL: hash, shortHash, author, authorEmail, date, subject, body
|
||||
const gitOutput = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is a normal commit body\x00def456\x00def4\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Fix bug\x00Fixed the bug with ---END--- in this message\x00ghi789\x00ghi7\x00Bob Johnson\x00bob@example.com\x002023-01-03T12:00:00Z\x00Another commit\x00This body has multiple lines\nSecond line\nThird line\x00`;
|
||||
|
||||
// Test the parsing logic
|
||||
console.log('1. Testing split on NUL character...');
|
||||
const commitBlocks = gitOutput.split('\0').filter((block) => block.trim());
|
||||
console.log(` ✓ Found ${commitBlocks.length} commit blocks`);
|
||||
|
||||
console.log('\n2. Testing parsing of each commit block...');
|
||||
const commits = [];
|
||||
for (const block of commitBlocks) {
|
||||
const fields = block.split('\n');
|
||||
|
||||
// Validate we have all expected fields
|
||||
if (fields.length >= 6) {
|
||||
const commit = {
|
||||
hash: fields[0].trim(),
|
||||
shortHash: fields[1].trim(),
|
||||
author: fields[2].trim(),
|
||||
authorEmail: fields[3].trim(),
|
||||
date: fields[4].trim(),
|
||||
subject: fields[5].trim(),
|
||||
body: fields.slice(6).join('\n').trim(),
|
||||
};
|
||||
commits.push(commit);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n3. Successfully parsed ${commits.length} commits:`);
|
||||
commits.forEach((commit, index) => {
|
||||
console.log(`\n Commit ${index + 1}:`);
|
||||
console.log(` - Hash: ${commit.hash}`);
|
||||
console.log(` - Short hash: ${commit.shortHash}`);
|
||||
console.log(` - Author: ${commit.author}`);
|
||||
console.log(` - Email: ${commit.authorEmail}`);
|
||||
console.log(` - Date: ${commit.date}`);
|
||||
console.log(` - Subject: ${commit.subject}`);
|
||||
console.log(` - Body: "${commit.body}"`);
|
||||
});
|
||||
|
||||
// Test with problematic ---END--- in commit message
|
||||
console.log('\n4. Testing with ---END--- in commit message...');
|
||||
const problematicOutput = `test123\x00test1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This contains ---END--- but should be parsed correctly\x00`;
|
||||
const problematicCommits = problematicOutput
|
||||
.split('\0')
|
||||
.filter((block) => block.trim())
|
||||
.map((block) => {
|
||||
const fields = block.split('\n');
|
||||
if (fields.length >= 6) {
|
||||
return {
|
||||
hash: fields[0].trim(),
|
||||
shortHash: fields[1].trim(),
|
||||
author: fields[2].trim(),
|
||||
authorEmail: fields[3].trim(),
|
||||
date: fields[4].trim(),
|
||||
subject: fields[5].trim(),
|
||||
body: fields.slice(6).join('\n').trim(),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter((commit) => commit !== null);
|
||||
|
||||
console.log(` ✓ Found ${problematicCommits.length} commits`);
|
||||
console.log(` Subject: "${problematicCommits[0].subject}"`);
|
||||
console.log(` Body: "${problematicCommits[0].body}"`);
|
||||
|
||||
// Test with empty body
|
||||
console.log('\n5. Testing commit with empty body...');
|
||||
const emptyBodyOutput = `empty123\x00empty1\x00Alice Brown\x00alice@example.com\x002023-01-04T12:00:00Z\x00Empty body commit\x00\x00`;
|
||||
const emptyBodyCommits = emptyBodyOutput
|
||||
.split('\0')
|
||||
.filter((block) => block.trim())
|
||||
.map((block) => {
|
||||
const fields = block.split('\n');
|
||||
if (fields.length >= 6) {
|
||||
return {
|
||||
hash: fields[0].trim(),
|
||||
shortHash: fields[1].trim(),
|
||||
author: fields[2].trim(),
|
||||
authorEmail: fields[3].trim(),
|
||||
date: fields[4].trim(),
|
||||
subject: fields[5].trim(),
|
||||
body: fields.slice(6).join('\n').trim(),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter((commit) => commit !== null);
|
||||
|
||||
console.log(` ✓ Found ${emptyBodyCommits.length} commits`);
|
||||
console.log(` Subject: "${emptyBodyCommits[0].subject}"`);
|
||||
console.log(` Body: "${emptyBodyCommits[0].body}" (should be empty)`);
|
||||
|
||||
console.log('\n✅ All tests passed! NUL-based delimiter works correctly.');
|
||||
console.log('\nSummary:');
|
||||
console.log('- NUL character (\\x00) properly separates commits');
|
||||
console.log('- Each commit is split into exactly 7 fields');
|
||||
console.log('- ---END--- in commit messages is handled correctly');
|
||||
console.log('- Empty commit bodies are preserved as empty strings');
|
||||
console.log('- Multi-line commit bodies are preserved correctly');
|
||||
@@ -1,48 +0,0 @@
|
||||
// Simple test to verify the NUL-based delimiter works
|
||||
// This simulates what git would produce with the new format
|
||||
|
||||
console.log('Testing NUL-based delimiter functionality...\n');
|
||||
|
||||
// Simulate git log output with NUL-based separator
|
||||
const gitOutputWithNul = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is a normal commit body\x00def456\x00def4\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Fix bug\x00Fixed the bug with ---END--- in this message\x00ghi789\x00ghi7\x00Bob Johnson\x00bob@example.com\x002023-01-03T12:00:00Z\x00Another commit\x00This body has multiple lines\nSecond line\nThird line\x00`;
|
||||
|
||||
// Test splitting on NUL
|
||||
console.log('1. Testing split on NUL character...');
|
||||
const commits = gitOutputWithNul.split('\0').filter((block) => block.trim());
|
||||
console.log(` ✓ Found ${commits.length} commits`);
|
||||
|
||||
console.log('\n2. Testing parsing of each commit...');
|
||||
commits.forEach((commit, index) => {
|
||||
const fields = commit.split('\n');
|
||||
console.log(`\n Commit ${index + 1}:`);
|
||||
console.log(` - Hash: ${fields[0]}`);
|
||||
console.log(` - Short hash: ${fields[1]}`);
|
||||
console.log(` - Author: ${fields[2]}`);
|
||||
console.log(` - Email: ${fields[3]}`);
|
||||
console.log(` - Date: ${fields[4]}`);
|
||||
console.log(` - Subject: ${fields[5]}`);
|
||||
console.log(` - Body: "${fields.slice(6).join('\n')}"`);
|
||||
});
|
||||
|
||||
// Test with problematic ---END--- in message
|
||||
console.log('\n3. Testing with ---END--- in commit message...');
|
||||
const problematicOutput = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This contains ---END--- but should be parsed correctly\x00`;
|
||||
const problematicCommits = problematicOutput.split('\0').filter((block) => block.trim());
|
||||
console.log(
|
||||
` ✓ Found ${problematicCommits.length} commits (correctly ignoring ---END--- in message)`
|
||||
);
|
||||
|
||||
// Test empty blocks
|
||||
console.log('\n4. Testing with empty blocks...');
|
||||
const outputWithEmptyBlocks = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Valid commit\x00Body here\x00\x00def456\x00def4\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Another valid commit\x00Another body\x00`;
|
||||
const outputWithEmptyBlocksParsed = outputWithEmptyBlocks
|
||||
.split('\0')
|
||||
.filter((block) => block.trim());
|
||||
console.log(` ✓ Found ${outputWithEmptyBlocksParsed.length} commits (empty blocks filtered out)`);
|
||||
|
||||
console.log('\n✅ All tests passed! NUL-based delimiter works correctly.');
|
||||
console.log('\nSummary:');
|
||||
console.log('- NUL character (\\x00) properly separates commits');
|
||||
console.log('- ---END--- in commit messages is handled correctly');
|
||||
console.log('- Empty blocks are filtered out');
|
||||
console.log('- Multi-line commit bodies are preserved');
|
||||
@@ -1,165 +0,0 @@
|
||||
// Test to verify the proper NUL-based delimiter functionality
|
||||
// Each commit: field1\nfield2\nfield3\x00field1\nfield2\nfield3\x00...
|
||||
|
||||
console.log('Testing proper NUL-based delimiter format...\n');
|
||||
|
||||
// Proper git output format with NUL between commits
|
||||
const gitOutput = `abc123
|
||||
abc1
|
||||
John Doe
|
||||
john@example.com
|
||||
2023-01-01T12:00:00Z
|
||||
Initial commit
|
||||
This is a normal commit body\x00def456
|
||||
def4
|
||||
Jane Smith
|
||||
jane@example.com
|
||||
2023-01-02T12:00:00Z
|
||||
Fix bug
|
||||
Fixed the bug with ---END--- in this message\x00ghi789
|
||||
ghi7
|
||||
Bob Johnson
|
||||
bob@example.com
|
||||
2023-01-03T12:00:00Z
|
||||
Another commit
|
||||
This body has multiple lines
|
||||
Second line
|
||||
Third line\x00`;
|
||||
|
||||
console.log('1. Testing split on NUL character...');
|
||||
const commitBlocks = gitOutput.split('\0').filter((block) => block.trim());
|
||||
console.log(` ✓ Found ${commitBlocks.length} commit blocks`);
|
||||
|
||||
console.log('\n2. Testing parsing of each commit block...');
|
||||
const commits = [];
|
||||
for (const block of commitBlocks) {
|
||||
const allLines = block.split('\n');
|
||||
|
||||
// Skip leading empty lines
|
||||
let startIndex = 0;
|
||||
while (startIndex < allLines.length && allLines[startIndex].trim() === '') {
|
||||
startIndex++;
|
||||
}
|
||||
const lines = allLines.slice(startIndex);
|
||||
|
||||
if (lines.length >= 6) {
|
||||
const commit = {
|
||||
hash: lines[0].trim(),
|
||||
shortHash: lines[1].trim(),
|
||||
author: lines[2].trim(),
|
||||
authorEmail: lines[3].trim(),
|
||||
date: lines[4].trim(),
|
||||
subject: lines[5].trim(),
|
||||
body: lines.slice(6).join('\n').trim(),
|
||||
};
|
||||
commits.push(commit);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n3. Successfully parsed ${commits.length} commits:`);
|
||||
commits.forEach((commit, index) => {
|
||||
console.log(`\n Commit ${index + 1}:`);
|
||||
console.log(` - Hash: ${commit.hash}`);
|
||||
console.log(` - Short hash: ${commit.shortHash}`);
|
||||
console.log(` - Author: ${commit.author}`);
|
||||
console.log(` - Email: ${commit.authorEmail}`);
|
||||
console.log(` - Date: ${commit.date}`);
|
||||
console.log(` - Subject: ${commit.subject}`);
|
||||
console.log(` - Body: "${commit.body}"`);
|
||||
});
|
||||
|
||||
// Test with problematic ---END--- in commit message
|
||||
console.log('\n4. Testing with ---END--- in commit message...');
|
||||
const problematicOutput = `test123
|
||||
test1
|
||||
John Doe
|
||||
john@example.com
|
||||
2023-01-01T12:00:00Z
|
||||
Initial commit
|
||||
This contains ---END--- but should be parsed correctly\x00`;
|
||||
const problematicCommits = problematicOutput
|
||||
.split('\0')
|
||||
.filter((block) => block.trim())
|
||||
.map((block) => {
|
||||
const allLines = block.split('\n');
|
||||
|
||||
// Skip leading empty lines
|
||||
let startIndex = 0;
|
||||
while (startIndex < allLines.length && allLines[startIndex].trim() === '') {
|
||||
startIndex++;
|
||||
}
|
||||
const lines = allLines.slice(startIndex);
|
||||
|
||||
if (lines.length >= 6) {
|
||||
return {
|
||||
hash: lines[0].trim(),
|
||||
shortHash: lines[1].trim(),
|
||||
author: lines[2].trim(),
|
||||
authorEmail: lines[3].trim(),
|
||||
date: lines[4].trim(),
|
||||
subject: lines[5].trim(),
|
||||
body: lines.slice(6).join('\n').trim(),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter((commit) => commit !== null);
|
||||
|
||||
console.log(` ✓ Found ${problematicCommits.length} commits`);
|
||||
if (problematicCommits.length > 0) {
|
||||
console.log(` Subject: "${problematicCommits[0].subject}"`);
|
||||
console.log(` Body: "${problematicCommits[0].body}"`);
|
||||
}
|
||||
|
||||
// Test with empty body
|
||||
console.log('\n5. Testing commit with empty body...');
|
||||
const emptyBodyOutput = `empty123
|
||||
empty1
|
||||
Alice Brown
|
||||
alice@example.com
|
||||
2023-01-04T12:00:00Z
|
||||
Empty body commit
|
||||
|
||||
\x00`;
|
||||
const emptyBodyCommits = emptyBodyOutput
|
||||
.split('\0')
|
||||
.filter((block) => block.trim())
|
||||
.map((block) => {
|
||||
const allLines = block.split('\n');
|
||||
|
||||
// Skip leading empty lines
|
||||
let startIndex = 0;
|
||||
while (startIndex < allLines.length && allLines[startIndex].trim() === '') {
|
||||
startIndex++;
|
||||
}
|
||||
const lines = allLines.slice(startIndex);
|
||||
|
||||
if (lines.length >= 6) {
|
||||
return {
|
||||
hash: lines[0].trim(),
|
||||
shortHash: lines[1].trim(),
|
||||
author: lines[2].trim(),
|
||||
authorEmail: lines[3].trim(),
|
||||
date: lines[4].trim(),
|
||||
subject: lines[5].trim(),
|
||||
body: lines.slice(6).join('\n').trim(),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.filter((commit) => commit !== null);
|
||||
|
||||
console.log(` ✓ Found ${emptyBodyCommits.length} commits`);
|
||||
if (emptyBodyCommits.length > 0) {
|
||||
console.log(` Subject: "${emptyBodyCommits[0].subject}"`);
|
||||
console.log(` Body: "${emptyBodyCommits[0].body}" (should be empty)`);
|
||||
}
|
||||
|
||||
console.log('\n✅ All tests passed! NUL-based delimiter works correctly.');
|
||||
console.log('\nKey insights:');
|
||||
console.log('- NUL character (\\x00) separates commits');
|
||||
console.log('- Newlines (\\n) separate fields within a commit');
|
||||
console.log('- The parsing logic handles leading empty lines correctly');
|
||||
console.log('- ---END--- in commit messages is handled correctly');
|
||||
console.log('- Empty commit bodies are preserved as empty strings');
|
||||
console.log('- Multi-line commit bodies are preserved correctly');
|
||||
@@ -1,37 +0,0 @@
|
||||
// Simple test to understand the NUL character behavior
|
||||
|
||||
console.log('Testing NUL character behavior...\n');
|
||||
|
||||
// Create a string with NUL characters
|
||||
const str1 =
|
||||
'abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is a normal commit body\x00';
|
||||
|
||||
console.log('Original string length:', str1.length);
|
||||
console.log('String representation:', str1);
|
||||
|
||||
// Split on NUL
|
||||
console.log('\n1. Split on NUL character:');
|
||||
const parts = str1.split('\0');
|
||||
console.log('Number of parts:', parts.length);
|
||||
parts.forEach((part, index) => {
|
||||
console.log(`Part ${index}: "${part}" (length: ${part.length})`);
|
||||
});
|
||||
|
||||
// Test with actual git format
|
||||
console.log('\n2. Testing with actual git format:');
|
||||
const gitFormat = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00Body text here\x00def456\x00def4\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Second commit\x00Body with ---END--- text\x00`;
|
||||
|
||||
const gitParts = gitFormat.split('\0').filter((block) => block.trim());
|
||||
console.log('Number of commits found:', gitParts.length);
|
||||
|
||||
console.log('\nAnalyzing each commit:');
|
||||
gitParts.forEach((block, index) => {
|
||||
console.log(`\nCommit ${index + 1}:`);
|
||||
console.log(`Block: "${block}"`);
|
||||
const fields = block.split('\n');
|
||||
console.log(`Number of fields: ${fields.length}`);
|
||||
fields.forEach((field, fieldIndex) => {
|
||||
const fieldNames = ['hash', 'shortHash', 'author', 'authorEmail', 'date', 'subject', 'body'];
|
||||
console.log(` ${fieldNames[fieldIndex] || `field${fieldIndex}`}: "${field}"`);
|
||||
});
|
||||
});
|
||||
196
apps/server/tests/unit/lib/git-log-parser.test.ts
Normal file
196
apps/server/tests/unit/lib/git-log-parser.test.ts
Normal file
@@ -0,0 +1,196 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseGitLogOutput } from '../../../src/lib/git-log-parser.js';
|
||||
|
||||
// Mock data: fields within each commit are newline-separated,
|
||||
// commits are NUL-separated (matching the parser contract).
|
||||
const mockGitOutput = [
|
||||
'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body',
|
||||
'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message',
|
||||
'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body',
|
||||
].join('\0');
|
||||
|
||||
// Mock data where commit bodies contain ---END--- markers
|
||||
const mockOutputWithEndMarker = [
|
||||
'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body\n---END--- is in this message',
|
||||
'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message',
|
||||
'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body',
|
||||
].join('\0');
|
||||
|
||||
// Single-commit mock: fields newline-separated, no trailing NUL needed
|
||||
const singleCommitOutput =
|
||||
'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSingle commit\nSingle commit body';
|
||||
|
||||
describe('parseGitLogOutput', () => {
|
||||
describe('normal parsing (three commits)', () => {
|
||||
it('returns the correct number of commits', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits.length).toBe(3);
|
||||
});
|
||||
|
||||
it('parses the first commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234');
|
||||
expect(commits[0].shortHash).toBe('a1b2c3');
|
||||
expect(commits[0].author).toBe('John Doe');
|
||||
expect(commits[0].authorEmail).toBe('john@example.com');
|
||||
expect(commits[0].date).toBe('2023-01-01T12:00:00Z');
|
||||
expect(commits[0].subject).toBe('Initial commit');
|
||||
expect(commits[0].body).toBe('This is the commit body');
|
||||
});
|
||||
|
||||
it('parses the second commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits[1].hash).toBe('e5f6g7h8i9j0klmnoprstuv');
|
||||
expect(commits[1].shortHash).toBe('e5f6g7');
|
||||
expect(commits[1].author).toBe('Jane Smith');
|
||||
expect(commits[1].subject).toBe('Fix bug');
|
||||
expect(commits[1].body).toMatch(/---END---/);
|
||||
});
|
||||
|
||||
it('parses the third commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(mockGitOutput);
|
||||
expect(commits[2].hash).toBe('q1w2e3r4t5y6u7i8o9p0asdfghjkl');
|
||||
expect(commits[2].shortHash).toBe('q1w2e3');
|
||||
expect(commits[2].author).toBe('Bob Johnson');
|
||||
expect(commits[2].subject).toBe('Another commit');
|
||||
expect(commits[2].body).toBe('Empty body');
|
||||
});
|
||||
});
|
||||
|
||||
describe('parsing with ---END--- in commit messages', () => {
|
||||
it('returns the correct number of commits', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits.length).toBe(3);
|
||||
});
|
||||
|
||||
it('preserves ---END--- text in the body of the first commit', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits[0].subject).toBe('Initial commit');
|
||||
expect(commits[0].body).toMatch(/---END---/);
|
||||
});
|
||||
|
||||
it('preserves ---END--- text in the body of the second commit', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits[1].subject).toBe('Fix bug');
|
||||
expect(commits[1].body).toMatch(/---END---/);
|
||||
});
|
||||
|
||||
it('parses the third commit without ---END--- interference', () => {
|
||||
const commits = parseGitLogOutput(mockOutputWithEndMarker);
|
||||
expect(commits[2].subject).toBe('Another commit');
|
||||
expect(commits[2].body).toBe('Empty body');
|
||||
});
|
||||
});
|
||||
|
||||
describe('empty output', () => {
|
||||
it('returns an empty array for an empty string', () => {
|
||||
const commits = parseGitLogOutput('');
|
||||
expect(commits).toEqual([]);
|
||||
expect(commits.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('single-commit output', () => {
|
||||
it('returns exactly one commit', () => {
|
||||
const commits = parseGitLogOutput(singleCommitOutput);
|
||||
expect(commits.length).toBe(1);
|
||||
});
|
||||
|
||||
it('parses the single commit fields correctly', () => {
|
||||
const commits = parseGitLogOutput(singleCommitOutput);
|
||||
expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234');
|
||||
expect(commits[0].shortHash).toBe('a1b2c3');
|
||||
expect(commits[0].author).toBe('John Doe');
|
||||
expect(commits[0].authorEmail).toBe('john@example.com');
|
||||
expect(commits[0].date).toBe('2023-01-01T12:00:00Z');
|
||||
expect(commits[0].subject).toBe('Single commit');
|
||||
expect(commits[0].body).toBe('Single commit body');
|
||||
});
|
||||
});
|
||||
|
||||
describe('multi-line commit body', () => {
|
||||
// Test vector from test-proper-nul-format.js: commit with a 3-line body
|
||||
const multiLineBodyOutput =
|
||||
[
|
||||
'abc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is a normal commit body',
|
||||
'def456\ndef4\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in this message',
|
||||
'ghi789\nghi7\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nThis body has multiple lines\nSecond line\nThird line',
|
||||
].join('\0') + '\0';
|
||||
|
||||
it('returns 3 commits', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits.length).toBe(3);
|
||||
});
|
||||
|
||||
it('parses the first commit correctly', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits[0].hash).toBe('abc123');
|
||||
expect(commits[0].shortHash).toBe('abc1');
|
||||
expect(commits[0].author).toBe('John Doe');
|
||||
expect(commits[0].authorEmail).toBe('john@example.com');
|
||||
expect(commits[0].date).toBe('2023-01-01T12:00:00Z');
|
||||
expect(commits[0].subject).toBe('Initial commit');
|
||||
expect(commits[0].body).toBe('This is a normal commit body');
|
||||
});
|
||||
|
||||
it('parses the second commit with ---END--- in body correctly', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits[1].hash).toBe('def456');
|
||||
expect(commits[1].shortHash).toBe('def4');
|
||||
expect(commits[1].author).toBe('Jane Smith');
|
||||
expect(commits[1].subject).toBe('Fix bug');
|
||||
expect(commits[1].body).toContain('---END---');
|
||||
});
|
||||
|
||||
it('parses the third commit with a multi-line body correctly', () => {
|
||||
const commits = parseGitLogOutput(multiLineBodyOutput);
|
||||
expect(commits[2].hash).toBe('ghi789');
|
||||
expect(commits[2].shortHash).toBe('ghi7');
|
||||
expect(commits[2].author).toBe('Bob Johnson');
|
||||
expect(commits[2].subject).toBe('Another commit');
|
||||
expect(commits[2].body).toBe('This body has multiple lines\nSecond line\nThird line');
|
||||
});
|
||||
});
|
||||
|
||||
describe('commit with empty body (trailing blank lines after subject)', () => {
|
||||
// Test vector from test-proper-nul-format.js: empty body commit
|
||||
const emptyBodyOutput =
|
||||
'empty123\nempty1\nAlice Brown\nalice@example.com\n2023-01-04T12:00:00Z\nEmpty body commit\n\n\0';
|
||||
|
||||
it('returns 1 commit', () => {
|
||||
const commits = parseGitLogOutput(emptyBodyOutput);
|
||||
expect(commits.length).toBe(1);
|
||||
});
|
||||
|
||||
it('parses the commit subject correctly', () => {
|
||||
const commits = parseGitLogOutput(emptyBodyOutput);
|
||||
expect(commits[0].hash).toBe('empty123');
|
||||
expect(commits[0].shortHash).toBe('empty1');
|
||||
expect(commits[0].author).toBe('Alice Brown');
|
||||
expect(commits[0].subject).toBe('Empty body commit');
|
||||
});
|
||||
|
||||
it('produces an empty body string when only blank lines follow the subject', () => {
|
||||
const commits = parseGitLogOutput(emptyBodyOutput);
|
||||
expect(commits[0].body).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('leading empty lines in a commit block', () => {
|
||||
// Blocks that start with blank lines before the hash field
|
||||
const outputWithLeadingBlanks =
|
||||
'\n\nabc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSubject here\nBody here';
|
||||
|
||||
it('returns 1 commit despite leading blank lines', () => {
|
||||
const commits = parseGitLogOutput(outputWithLeadingBlanks);
|
||||
expect(commits.length).toBe(1);
|
||||
});
|
||||
|
||||
it('parses the commit fields correctly when block has leading empty lines', () => {
|
||||
const commits = parseGitLogOutput(outputWithLeadingBlanks);
|
||||
expect(commits[0].hash).toBe('abc123');
|
||||
expect(commits[0].subject).toBe('Subject here');
|
||||
expect(commits[0].body).toBe('Body here');
|
||||
});
|
||||
});
|
||||
});
|
||||
83
apps/server/tests/unit/lib/nul-delimiter.test.ts
Normal file
83
apps/server/tests/unit/lib/nul-delimiter.test.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
// Automated tests for NUL character behavior in git commit parsing
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
|
||||
describe('NUL character behavior', () => {
|
||||
// Create a string with NUL characters
|
||||
const str1 =
|
||||
'abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is a normal commit body\x00';
|
||||
|
||||
describe('split on NUL character', () => {
|
||||
const parts = str1.split('\0');
|
||||
|
||||
it('should produce the expected number of parts', () => {
|
||||
// 7 fields + 1 trailing empty string from the trailing \x00
|
||||
expect(parts.length).toBe(8);
|
||||
});
|
||||
|
||||
it('should contain the expected part values', () => {
|
||||
expect(parts[0]).toBe('abc123');
|
||||
expect(parts[1]).toBe('abc1');
|
||||
expect(parts[2]).toBe('John Doe');
|
||||
expect(parts[3]).toBe('john@example.com');
|
||||
expect(parts[4]).toBe('2023-01-01T12:00:00Z');
|
||||
expect(parts[5]).toBe('Initial commit');
|
||||
expect(parts[6]).toBe('This is a normal commit body');
|
||||
expect(parts[7]).toBe('');
|
||||
});
|
||||
|
||||
it('should have correct lengths for each part', () => {
|
||||
expect(parts[0].length).toBe(6); // 'abc123'
|
||||
expect(parts[1].length).toBe(4); // 'abc1'
|
||||
expect(parts[2].length).toBe(8); // 'John Doe'
|
||||
expect(parts[3].length).toBe(16); // 'john@example.com'
|
||||
expect(parts[4].length).toBe(20); // '2023-01-01T12:00:00Z'
|
||||
expect(parts[5].length).toBe(14); // 'Initial commit'
|
||||
expect(parts[6].length).toBe(28); // 'This is a normal commit body'
|
||||
expect(parts[7].length).toBe(0); // trailing empty
|
||||
});
|
||||
});
|
||||
|
||||
describe('git format split and filter', () => {
|
||||
const gitFormat = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00Body text here\x00def456\x00def4\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Second commit\x00Body with ---END--- text\x00`;
|
||||
|
||||
const gitParts = gitFormat.split('\0').filter((block) => block.trim());
|
||||
|
||||
it('should produce the expected number of non-empty parts after filtering', () => {
|
||||
// 14 non-empty field strings (7 fields per commit × 2 commits); trailing empty is filtered out
|
||||
expect(gitParts.length).toBe(14);
|
||||
});
|
||||
|
||||
it('should contain correct field values for the first commit', () => {
|
||||
const fields = gitParts.slice(0, 7);
|
||||
expect(fields.length).toBe(7);
|
||||
expect(fields[0]).toBe('abc123'); // hash
|
||||
expect(fields[1]).toBe('abc1'); // shortHash
|
||||
expect(fields[2]).toBe('John Doe'); // author
|
||||
expect(fields[3]).toBe('john@example.com'); // authorEmail
|
||||
expect(fields[4]).toBe('2023-01-01T12:00:00Z'); // date
|
||||
expect(fields[5]).toBe('Initial commit'); // subject
|
||||
expect(fields[6]).toBe('Body text here'); // body
|
||||
});
|
||||
|
||||
it('should contain correct field values for the second commit', () => {
|
||||
const fields = gitParts.slice(7, 14);
|
||||
expect(fields.length).toBe(7);
|
||||
expect(fields[0]).toBe('def456'); // hash
|
||||
expect(fields[1]).toBe('def4'); // shortHash
|
||||
expect(fields[2]).toBe('Jane Smith'); // author
|
||||
expect(fields[3]).toBe('jane@example.com'); // authorEmail
|
||||
expect(fields[4]).toBe('2023-01-02T12:00:00Z'); // date
|
||||
expect(fields[5]).toBe('Second commit'); // subject
|
||||
expect(fields[6]).toBe('Body with ---END--- text'); // body (---END--- handled correctly)
|
||||
});
|
||||
|
||||
it('each part should have the expected number of newline-delimited fields', () => {
|
||||
// Each gitPart is a single field value (no internal newlines), so split('\n') yields 1 field
|
||||
gitParts.forEach((block) => {
|
||||
const fields = block.split('\n');
|
||||
expect(fields.length).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -247,6 +247,12 @@ describe('codex-provider.ts', () => {
|
||||
|
||||
it('uses the SDK when no tools are requested and an API key is present', async () => {
|
||||
process.env[OPENAI_API_KEY_ENV] = 'sk-test';
|
||||
// Override auth indicators so CLI-native auth doesn't take priority over API key
|
||||
vi.mocked(getCodexAuthIndicators).mockResolvedValue({
|
||||
hasAuthFile: false,
|
||||
hasOAuthToken: false,
|
||||
hasApiKey: false,
|
||||
});
|
||||
codexRunMock.mockResolvedValue({ finalResponse: 'Hello from SDK' });
|
||||
|
||||
const results = await collectAsyncGenerator<ProviderMessage>(
|
||||
@@ -264,6 +270,12 @@ describe('codex-provider.ts', () => {
|
||||
|
||||
it('uses the SDK when API key is present, even for tool requests (to avoid OAuth issues)', async () => {
|
||||
process.env[OPENAI_API_KEY_ENV] = 'sk-test';
|
||||
// Override auth indicators so CLI-native auth doesn't take priority over API key
|
||||
vi.mocked(getCodexAuthIndicators).mockResolvedValue({
|
||||
hasAuthFile: false,
|
||||
hasOAuthToken: false,
|
||||
hasApiKey: false,
|
||||
});
|
||||
vi.mocked(spawnJSONLProcess).mockReturnValue((async function* () {})());
|
||||
|
||||
await collectAsyncGenerator(
|
||||
|
||||
@@ -1,27 +1,15 @@
|
||||
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import type { Request, Response } from 'express';
|
||||
import { createMockExpressContext } from '../../../utils/mocks.js';
|
||||
|
||||
vi.mock('child_process', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('child_process')>();
|
||||
return {
|
||||
...actual,
|
||||
execFile: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mock('@/services/worktree-branch-service.js', () => ({
|
||||
performSwitchBranch: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('util', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('util')>();
|
||||
return {
|
||||
...actual,
|
||||
promisify: (fn: unknown) => fn,
|
||||
};
|
||||
});
|
||||
|
||||
import { execFile } from 'child_process';
|
||||
import { performSwitchBranch } from '@/services/worktree-branch-service.js';
|
||||
import { createSwitchBranchHandler } from '@/routes/worktree/routes/switch-branch.js';
|
||||
|
||||
const mockExecFile = execFile as Mock;
|
||||
const mockPerformSwitchBranch = vi.mocked(performSwitchBranch);
|
||||
|
||||
describe('switch-branch route', () => {
|
||||
let req: Request;
|
||||
@@ -34,42 +22,77 @@ describe('switch-branch route', () => {
|
||||
res = context.res;
|
||||
});
|
||||
|
||||
it('should return 400 when branchName is missing', async () => {
|
||||
req.body = { worktreePath: '/repo/path' };
|
||||
|
||||
const handler = createSwitchBranchHandler();
|
||||
await handler(req, res);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(400);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
error: 'branchName required',
|
||||
});
|
||||
expect(mockPerformSwitchBranch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 when branchName starts with a dash', async () => {
|
||||
req.body = { worktreePath: '/repo/path', branchName: '-flag' };
|
||||
|
||||
const handler = createSwitchBranchHandler();
|
||||
await handler(req, res);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(400);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
error: 'Invalid branch name',
|
||||
});
|
||||
expect(mockPerformSwitchBranch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 when branchName starts with double dash', async () => {
|
||||
req.body = { worktreePath: '/repo/path', branchName: '--option' };
|
||||
|
||||
const handler = createSwitchBranchHandler();
|
||||
await handler(req, res);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(400);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
error: 'Invalid branch name',
|
||||
});
|
||||
expect(mockPerformSwitchBranch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 when branchName contains invalid characters', async () => {
|
||||
req.body = { worktreePath: '/repo/path', branchName: 'branch name with spaces' };
|
||||
|
||||
const handler = createSwitchBranchHandler();
|
||||
await handler(req, res);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(400);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
error: 'Invalid branch name',
|
||||
});
|
||||
expect(mockPerformSwitchBranch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should allow switching when only untracked files exist', async () => {
|
||||
req.body = {
|
||||
worktreePath: '/repo/path',
|
||||
branchName: 'feature/test',
|
||||
};
|
||||
|
||||
mockExecFile.mockImplementation(async (file: string, args: string[]) => {
|
||||
const command = `${file} ${args.join(' ')}`;
|
||||
if (command === 'git rev-parse --abbrev-ref HEAD') {
|
||||
return { stdout: 'main\n', stderr: '' };
|
||||
}
|
||||
if (command === 'git rev-parse --verify feature/test') {
|
||||
return { stdout: 'abc123\n', stderr: '' };
|
||||
}
|
||||
if (command === 'git branch -r --format=%(refname:short)') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git status --porcelain') {
|
||||
return { stdout: '?? .automaker/\n?? notes.txt\n', stderr: '' };
|
||||
}
|
||||
if (command === 'git checkout feature/test') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git fetch --all --quiet') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git stash list') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command.startsWith('git stash push')) {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git stash pop') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
return { stdout: '', stderr: '' };
|
||||
mockPerformSwitchBranch.mockResolvedValue({
|
||||
success: true,
|
||||
result: {
|
||||
previousBranch: 'main',
|
||||
currentBranch: 'feature/test',
|
||||
message: "Switched to branch 'feature/test'",
|
||||
hasConflicts: false,
|
||||
stashedChanges: false,
|
||||
},
|
||||
});
|
||||
|
||||
const handler = createSwitchBranchHandler();
|
||||
@@ -85,11 +108,7 @@ describe('switch-branch route', () => {
|
||||
stashedChanges: false,
|
||||
},
|
||||
});
|
||||
expect(mockExecFile).toHaveBeenCalledWith(
|
||||
'git',
|
||||
['checkout', 'feature/test'],
|
||||
expect.objectContaining({ cwd: '/repo/path' })
|
||||
);
|
||||
expect(mockPerformSwitchBranch).toHaveBeenCalledWith('/repo/path', 'feature/test', undefined);
|
||||
});
|
||||
|
||||
it('should stash changes and switch when tracked files are modified', async () => {
|
||||
@@ -98,42 +117,15 @@ describe('switch-branch route', () => {
|
||||
branchName: 'feature/test',
|
||||
};
|
||||
|
||||
let stashListCallCount = 0;
|
||||
|
||||
mockExecFile.mockImplementation(async (file: string, args: string[]) => {
|
||||
const command = `${file} ${args.join(' ')}`;
|
||||
if (command === 'git rev-parse --abbrev-ref HEAD') {
|
||||
return { stdout: 'main\n', stderr: '' };
|
||||
}
|
||||
if (command === 'git rev-parse --verify feature/test') {
|
||||
return { stdout: 'abc123\n', stderr: '' };
|
||||
}
|
||||
if (command === 'git status --porcelain') {
|
||||
return { stdout: ' M src/index.ts\n?? notes.txt\n', stderr: '' };
|
||||
}
|
||||
if (command === 'git branch -r --format=%(refname:short)') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git stash list') {
|
||||
stashListCallCount++;
|
||||
if (stashListCallCount === 1) {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
return { stdout: 'stash@{0}: automaker-branch-switch\n', stderr: '' };
|
||||
}
|
||||
if (command.startsWith('git stash push')) {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git checkout feature/test') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git fetch --all --quiet') {
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
if (command === 'git stash pop') {
|
||||
return { stdout: 'Already applied.\n', stderr: '' };
|
||||
}
|
||||
return { stdout: '', stderr: '' };
|
||||
mockPerformSwitchBranch.mockResolvedValue({
|
||||
success: true,
|
||||
result: {
|
||||
previousBranch: 'main',
|
||||
currentBranch: 'feature/test',
|
||||
message: "Switched to branch 'feature/test' (local changes stashed and reapplied)",
|
||||
hasConflicts: false,
|
||||
stashedChanges: true,
|
||||
},
|
||||
});
|
||||
|
||||
const handler = createSwitchBranchHandler();
|
||||
|
||||
Reference in New Issue
Block a user