mirror of
https://github.com/AutoMaker-Org/automaker.git
synced 2026-03-17 10:03:08 +00:00
Compare commits
1 Commits
v1.0.0
...
feature/pu
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aa345a50ac |
19
.github/workflows/e2e-tests.yml
vendored
19
.github/workflows/e2e-tests.yml
vendored
@@ -46,8 +46,7 @@ jobs:
|
|||||||
echo "SERVER_PID=$SERVER_PID" >> $GITHUB_ENV
|
echo "SERVER_PID=$SERVER_PID" >> $GITHUB_ENV
|
||||||
|
|
||||||
env:
|
env:
|
||||||
PORT: 3108
|
PORT: 3008
|
||||||
TEST_SERVER_PORT: 3108
|
|
||||||
NODE_ENV: test
|
NODE_ENV: test
|
||||||
# Use a deterministic API key so Playwright can log in reliably
|
# Use a deterministic API key so Playwright can log in reliably
|
||||||
AUTOMAKER_API_KEY: test-api-key-for-e2e-tests
|
AUTOMAKER_API_KEY: test-api-key-for-e2e-tests
|
||||||
@@ -82,13 +81,13 @@ jobs:
|
|||||||
|
|
||||||
# Wait for health endpoint
|
# Wait for health endpoint
|
||||||
for i in {1..60}; do
|
for i in {1..60}; do
|
||||||
if curl -s -f http://localhost:3108/api/health > /dev/null 2>&1; then
|
if curl -s -f http://localhost:3008/api/health > /dev/null 2>&1; then
|
||||||
echo "Backend server is ready!"
|
echo "Backend server is ready!"
|
||||||
echo "=== Backend logs ==="
|
echo "=== Backend logs ==="
|
||||||
cat backend.log
|
cat backend.log
|
||||||
echo ""
|
echo ""
|
||||||
echo "Health check response:"
|
echo "Health check response:"
|
||||||
curl -s http://localhost:3108/api/health | jq . 2>/dev/null || echo "Health check: $(curl -s http://localhost:3108/api/health 2>/dev/null || echo 'No response')"
|
curl -s http://localhost:3008/api/health | jq . 2>/dev/null || echo "Health check: $(curl -s http://localhost:3008/api/health 2>/dev/null || echo 'No response')"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -112,11 +111,11 @@ jobs:
|
|||||||
ps aux | grep -E "(node|tsx)" | grep -v grep || echo "No node processes found"
|
ps aux | grep -E "(node|tsx)" | grep -v grep || echo "No node processes found"
|
||||||
echo ""
|
echo ""
|
||||||
echo "=== Port status ==="
|
echo "=== Port status ==="
|
||||||
netstat -tlnp 2>/dev/null | grep :3108 || echo "Port 3108 not listening"
|
netstat -tlnp 2>/dev/null | grep :3008 || echo "Port 3008 not listening"
|
||||||
lsof -i :3108 2>/dev/null || echo "lsof not available or port not in use"
|
lsof -i :3008 2>/dev/null || echo "lsof not available or port not in use"
|
||||||
echo ""
|
echo ""
|
||||||
echo "=== Health endpoint test ==="
|
echo "=== Health endpoint test ==="
|
||||||
curl -v http://localhost:3108/api/health 2>&1 || echo "Health endpoint failed"
|
curl -v http://localhost:3008/api/health 2>&1 || echo "Health endpoint failed"
|
||||||
|
|
||||||
# Kill the server process if it's still hanging
|
# Kill the server process if it's still hanging
|
||||||
if kill -0 $SERVER_PID 2>/dev/null; then
|
if kill -0 $SERVER_PID 2>/dev/null; then
|
||||||
@@ -133,8 +132,8 @@ jobs:
|
|||||||
run: npm run test --workspace=apps/ui
|
run: npm run test --workspace=apps/ui
|
||||||
env:
|
env:
|
||||||
CI: true
|
CI: true
|
||||||
VITE_SERVER_URL: http://localhost:3108
|
VITE_SERVER_URL: http://localhost:3008
|
||||||
SERVER_URL: http://localhost:3108
|
SERVER_URL: http://localhost:3008
|
||||||
VITE_SKIP_SETUP: 'true'
|
VITE_SKIP_SETUP: 'true'
|
||||||
# Keep UI-side login/defaults consistent
|
# Keep UI-side login/defaults consistent
|
||||||
AUTOMAKER_API_KEY: test-api-key-for-e2e-tests
|
AUTOMAKER_API_KEY: test-api-key-for-e2e-tests
|
||||||
@@ -149,7 +148,7 @@ jobs:
|
|||||||
ps aux | grep -E "(node|tsx)" | grep -v grep || echo "No node processes found"
|
ps aux | grep -E "(node|tsx)" | grep -v grep || echo "No node processes found"
|
||||||
echo ""
|
echo ""
|
||||||
echo "=== Port status ==="
|
echo "=== Port status ==="
|
||||||
netstat -tlnp 2>/dev/null | grep :3108 || echo "Port 3108 not listening"
|
netstat -tlnp 2>/dev/null | grep :3008 || echo "Port 3008 not listening"
|
||||||
|
|
||||||
- name: Upload Playwright report
|
- name: Upload Playwright report
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
|
|||||||
@@ -209,10 +209,9 @@ COPY libs ./libs
|
|||||||
COPY apps/ui ./apps/ui
|
COPY apps/ui ./apps/ui
|
||||||
|
|
||||||
# Build packages in dependency order, then build UI
|
# Build packages in dependency order, then build UI
|
||||||
# When VITE_SERVER_URL is empty, the UI uses relative URLs (e.g., /api/...) which nginx proxies
|
# VITE_SERVER_URL tells the UI where to find the API server
|
||||||
# to the server container. This avoids CORS issues entirely in Docker Compose setups.
|
# Use ARG to allow overriding at build time: --build-arg VITE_SERVER_URL=http://api.example.com
|
||||||
# Override at build time if needed: --build-arg VITE_SERVER_URL=http://api.example.com
|
ARG VITE_SERVER_URL=http://localhost:3008
|
||||||
ARG VITE_SERVER_URL=
|
|
||||||
ENV VITE_SKIP_ELECTRON=true
|
ENV VITE_SKIP_ELECTRON=true
|
||||||
ENV VITE_SERVER_URL=${VITE_SERVER_URL}
|
ENV VITE_SERVER_URL=${VITE_SERVER_URL}
|
||||||
RUN npm run build:packages && npm run build --workspace=apps/ui
|
RUN npm run build:packages && npm run build --workspace=apps/ui
|
||||||
|
|||||||
@@ -52,12 +52,6 @@ HOST=0.0.0.0
|
|||||||
# Port to run the server on
|
# Port to run the server on
|
||||||
PORT=3008
|
PORT=3008
|
||||||
|
|
||||||
# Port to run the server on for testing
|
|
||||||
TEST_SERVER_PORT=3108
|
|
||||||
|
|
||||||
# Port to run the UI on for testing
|
|
||||||
TEST_PORT=3107
|
|
||||||
|
|
||||||
# Data directory for sessions and metadata
|
# Data directory for sessions and metadata
|
||||||
DATA_DIR=./data
|
DATA_DIR=./data
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@automaker/server",
|
"name": "@automaker/server",
|
||||||
"version": "0.15.0",
|
"version": "0.13.0",
|
||||||
"description": "Backend server for Automaker - provides API for both web and Electron modes",
|
"description": "Backend server for Automaker - provides API for both web and Electron modes",
|
||||||
"author": "AutoMaker Team",
|
"author": "AutoMaker Team",
|
||||||
"license": "SEE LICENSE IN LICENSE",
|
"license": "SEE LICENSE IN LICENSE",
|
||||||
|
|||||||
@@ -267,26 +267,6 @@ app.use(
|
|||||||
// CORS configuration
|
// CORS configuration
|
||||||
// When using credentials (cookies), origin cannot be '*'
|
// When using credentials (cookies), origin cannot be '*'
|
||||||
// We dynamically allow the requesting origin for local development
|
// We dynamically allow the requesting origin for local development
|
||||||
|
|
||||||
// Check if origin is a local/private network address
|
|
||||||
function isLocalOrigin(origin: string): boolean {
|
|
||||||
try {
|
|
||||||
const url = new URL(origin);
|
|
||||||
const hostname = url.hostname;
|
|
||||||
return (
|
|
||||||
hostname === 'localhost' ||
|
|
||||||
hostname === '127.0.0.1' ||
|
|
||||||
hostname === '[::1]' ||
|
|
||||||
hostname === '0.0.0.0' ||
|
|
||||||
hostname.startsWith('192.168.') ||
|
|
||||||
hostname.startsWith('10.') ||
|
|
||||||
/^172\.(1[6-9]|2[0-9]|3[0-1])\./.test(hostname)
|
|
||||||
);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
app.use(
|
app.use(
|
||||||
cors({
|
cors({
|
||||||
origin: (origin, callback) => {
|
origin: (origin, callback) => {
|
||||||
@@ -297,26 +277,36 @@ app.use(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// If CORS_ORIGIN is set, use it (can be comma-separated list)
|
// If CORS_ORIGIN is set, use it (can be comma-separated list)
|
||||||
const allowedOrigins = process.env.CORS_ORIGIN?.split(',')
|
const allowedOrigins = process.env.CORS_ORIGIN?.split(',').map((o) => o.trim());
|
||||||
.map((o) => o.trim())
|
if (allowedOrigins && allowedOrigins.length > 0 && allowedOrigins[0] !== '*') {
|
||||||
.filter(Boolean);
|
|
||||||
if (allowedOrigins && allowedOrigins.length > 0) {
|
|
||||||
if (allowedOrigins.includes('*')) {
|
|
||||||
callback(null, true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (allowedOrigins.includes(origin)) {
|
if (allowedOrigins.includes(origin)) {
|
||||||
callback(null, origin);
|
callback(null, origin);
|
||||||
return;
|
} else {
|
||||||
|
callback(new Error('Not allowed by CORS'));
|
||||||
}
|
}
|
||||||
// Fall through to local network check below
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Allow all localhost/loopback/private network origins (any port)
|
// For local development, allow all localhost/loopback origins (any port)
|
||||||
if (isLocalOrigin(origin)) {
|
try {
|
||||||
|
const url = new URL(origin);
|
||||||
|
const hostname = url.hostname;
|
||||||
|
|
||||||
|
if (
|
||||||
|
hostname === 'localhost' ||
|
||||||
|
hostname === '127.0.0.1' ||
|
||||||
|
hostname === '::1' ||
|
||||||
|
hostname === '0.0.0.0' ||
|
||||||
|
hostname.startsWith('192.168.') ||
|
||||||
|
hostname.startsWith('10.') ||
|
||||||
|
hostname.startsWith('172.')
|
||||||
|
) {
|
||||||
callback(null, origin);
|
callback(null, origin);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore URL parsing errors
|
||||||
|
}
|
||||||
|
|
||||||
// Reject other origins by default for security
|
// Reject other origins by default for security
|
||||||
callback(new Error('Not allowed by CORS'));
|
callback(new Error('Not allowed by CORS'));
|
||||||
|
|||||||
@@ -1,37 +0,0 @@
|
|||||||
/**
|
|
||||||
* Shared execution utilities
|
|
||||||
*
|
|
||||||
* Common helpers for spawning child processes with the correct environment.
|
|
||||||
* Used by both route handlers and service layers.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
|
|
||||||
const logger = createLogger('ExecUtils');
|
|
||||||
|
|
||||||
// Extended PATH to include common tool installation locations
|
|
||||||
export const extendedPath = [
|
|
||||||
process.env.PATH,
|
|
||||||
'/opt/homebrew/bin',
|
|
||||||
'/usr/local/bin',
|
|
||||||
'/home/linuxbrew/.linuxbrew/bin',
|
|
||||||
`${process.env.HOME}/.local/bin`,
|
|
||||||
]
|
|
||||||
.filter(Boolean)
|
|
||||||
.join(':');
|
|
||||||
|
|
||||||
export const execEnv = {
|
|
||||||
...process.env,
|
|
||||||
PATH: extendedPath,
|
|
||||||
};
|
|
||||||
|
|
||||||
export function getErrorMessage(error: unknown): string {
|
|
||||||
if (error instanceof Error) {
|
|
||||||
return error.message;
|
|
||||||
}
|
|
||||||
return String(error);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function logError(error: unknown, context: string): void {
|
|
||||||
logger.error(`${context}:`, error);
|
|
||||||
}
|
|
||||||
@@ -133,16 +133,12 @@ export const TOOL_PRESETS = {
|
|||||||
'Read',
|
'Read',
|
||||||
'Write',
|
'Write',
|
||||||
'Edit',
|
'Edit',
|
||||||
'MultiEdit',
|
|
||||||
'Glob',
|
'Glob',
|
||||||
'Grep',
|
'Grep',
|
||||||
'LS',
|
|
||||||
'Bash',
|
'Bash',
|
||||||
'WebSearch',
|
'WebSearch',
|
||||||
'WebFetch',
|
'WebFetch',
|
||||||
'TodoWrite',
|
'TodoWrite',
|
||||||
'Task',
|
|
||||||
'Skill',
|
|
||||||
] as const,
|
] as const,
|
||||||
|
|
||||||
/** Tools for chat/interactive mode */
|
/** Tools for chat/interactive mode */
|
||||||
@@ -150,16 +146,12 @@ export const TOOL_PRESETS = {
|
|||||||
'Read',
|
'Read',
|
||||||
'Write',
|
'Write',
|
||||||
'Edit',
|
'Edit',
|
||||||
'MultiEdit',
|
|
||||||
'Glob',
|
'Glob',
|
||||||
'Grep',
|
'Grep',
|
||||||
'LS',
|
|
||||||
'Bash',
|
'Bash',
|
||||||
'WebSearch',
|
'WebSearch',
|
||||||
'WebFetch',
|
'WebFetch',
|
||||||
'TodoWrite',
|
'TodoWrite',
|
||||||
'Task',
|
|
||||||
'Skill',
|
|
||||||
] as const,
|
] as const,
|
||||||
} as const;
|
} as const;
|
||||||
|
|
||||||
@@ -290,15 +282,11 @@ function buildThinkingOptions(thinkingLevel?: ThinkingLevel): Partial<Options> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Build system prompt and settingSources based on two independent settings:
|
* Build system prompt configuration based on autoLoadClaudeMd setting.
|
||||||
* - useClaudeCodeSystemPrompt: controls whether to use the 'claude_code' preset as the base prompt
|
* When autoLoadClaudeMd is true:
|
||||||
* - autoLoadClaudeMd: controls whether to add settingSources for SDK to load CLAUDE.md files
|
* - Uses preset mode with 'claude_code' to enable CLAUDE.md auto-loading
|
||||||
*
|
* - If there's a custom systemPrompt, appends it to the preset
|
||||||
* These combine independently (4 possible states):
|
* - Sets settingSources to ['project'] for SDK to load CLAUDE.md files
|
||||||
* 1. Both ON: preset + settingSources (full Claude Code experience)
|
|
||||||
* 2. useClaudeCodeSystemPrompt ON, autoLoadClaudeMd OFF: preset only (no CLAUDE.md auto-loading)
|
|
||||||
* 3. useClaudeCodeSystemPrompt OFF, autoLoadClaudeMd ON: plain string + settingSources
|
|
||||||
* 4. Both OFF: plain string only
|
|
||||||
*
|
*
|
||||||
* @param config - The SDK options config
|
* @param config - The SDK options config
|
||||||
* @returns Object with systemPrompt and settingSources for SDK options
|
* @returns Object with systemPrompt and settingSources for SDK options
|
||||||
@@ -307,34 +295,27 @@ function buildClaudeMdOptions(config: CreateSdkOptionsConfig): {
|
|||||||
systemPrompt?: string | SystemPromptConfig;
|
systemPrompt?: string | SystemPromptConfig;
|
||||||
settingSources?: Array<'user' | 'project' | 'local'>;
|
settingSources?: Array<'user' | 'project' | 'local'>;
|
||||||
} {
|
} {
|
||||||
const result: {
|
if (!config.autoLoadClaudeMd) {
|
||||||
systemPrompt?: string | SystemPromptConfig;
|
// Standard mode - just pass through the system prompt as-is
|
||||||
settingSources?: Array<'user' | 'project' | 'local'>;
|
return config.systemPrompt ? { systemPrompt: config.systemPrompt } : {};
|
||||||
} = {};
|
}
|
||||||
|
|
||||||
// Determine system prompt format based on useClaudeCodeSystemPrompt
|
// Auto-load CLAUDE.md mode - use preset with settingSources
|
||||||
if (config.useClaudeCodeSystemPrompt) {
|
const result: {
|
||||||
// Use Claude Code's built-in system prompt as the base
|
systemPrompt: SystemPromptConfig;
|
||||||
const presetConfig: SystemPromptConfig = {
|
settingSources: Array<'user' | 'project' | 'local'>;
|
||||||
|
} = {
|
||||||
|
systemPrompt: {
|
||||||
type: 'preset',
|
type: 'preset',
|
||||||
preset: 'claude_code',
|
preset: 'claude_code',
|
||||||
|
},
|
||||||
|
// Load both user (~/.claude/CLAUDE.md) and project (.claude/CLAUDE.md) settings
|
||||||
|
settingSources: ['user', 'project'],
|
||||||
};
|
};
|
||||||
|
|
||||||
// If there's a custom system prompt, append it to the preset
|
// If there's a custom system prompt, append it to the preset
|
||||||
if (config.systemPrompt) {
|
if (config.systemPrompt) {
|
||||||
presetConfig.append = config.systemPrompt;
|
result.systemPrompt.append = config.systemPrompt;
|
||||||
}
|
|
||||||
result.systemPrompt = presetConfig;
|
|
||||||
} else {
|
|
||||||
// Standard mode - just pass through the system prompt as-is
|
|
||||||
if (config.systemPrompt) {
|
|
||||||
result.systemPrompt = config.systemPrompt;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine settingSources based on autoLoadClaudeMd
|
|
||||||
if (config.autoLoadClaudeMd) {
|
|
||||||
// Load both user (~/.claude/CLAUDE.md) and project (.claude/CLAUDE.md) settings
|
|
||||||
result.settingSources = ['user', 'project'];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
@@ -342,14 +323,12 @@ function buildClaudeMdOptions(config: CreateSdkOptionsConfig): {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* System prompt configuration for SDK options
|
* System prompt configuration for SDK options
|
||||||
* The 'claude_code' preset provides the system prompt only — it does NOT auto-load
|
* When using preset mode with claude_code, CLAUDE.md files are automatically loaded
|
||||||
* CLAUDE.md files. CLAUDE.md auto-loading is controlled independently by
|
|
||||||
* settingSources (set via autoLoadClaudeMd). These two settings are orthogonal.
|
|
||||||
*/
|
*/
|
||||||
export interface SystemPromptConfig {
|
export interface SystemPromptConfig {
|
||||||
/** Use preset mode to select the base system prompt */
|
/** Use preset mode with claude_code to enable CLAUDE.md auto-loading */
|
||||||
type: 'preset';
|
type: 'preset';
|
||||||
/** The preset to use - 'claude_code' uses the Claude Code system prompt */
|
/** The preset to use - 'claude_code' enables CLAUDE.md loading */
|
||||||
preset: 'claude_code';
|
preset: 'claude_code';
|
||||||
/** Optional additional prompt to append to the preset */
|
/** Optional additional prompt to append to the preset */
|
||||||
append?: string;
|
append?: string;
|
||||||
@@ -383,19 +362,11 @@ export interface CreateSdkOptionsConfig {
|
|||||||
/** Enable auto-loading of CLAUDE.md files via SDK's settingSources */
|
/** Enable auto-loading of CLAUDE.md files via SDK's settingSources */
|
||||||
autoLoadClaudeMd?: boolean;
|
autoLoadClaudeMd?: boolean;
|
||||||
|
|
||||||
/** Use Claude Code's built-in system prompt (claude_code preset) as the base prompt */
|
|
||||||
useClaudeCodeSystemPrompt?: boolean;
|
|
||||||
|
|
||||||
/** MCP servers to make available to the agent */
|
/** MCP servers to make available to the agent */
|
||||||
mcpServers?: Record<string, McpServerConfig>;
|
mcpServers?: Record<string, McpServerConfig>;
|
||||||
|
|
||||||
/** Extended thinking level for Claude models */
|
/** Extended thinking level for Claude models */
|
||||||
thinkingLevel?: ThinkingLevel;
|
thinkingLevel?: ThinkingLevel;
|
||||||
|
|
||||||
/** Optional user-configured max turns override (from settings).
|
|
||||||
* When provided, overrides the preset MAX_TURNS for the use case.
|
|
||||||
* Range: 1-2000. */
|
|
||||||
maxTurns?: number;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Re-export MCP types from @automaker/types for convenience
|
// Re-export MCP types from @automaker/types for convenience
|
||||||
@@ -432,7 +403,7 @@ export function createSpecGenerationOptions(config: CreateSdkOptionsConfig): Opt
|
|||||||
// See: https://github.com/AutoMaker-Org/automaker/issues/149
|
// See: https://github.com/AutoMaker-Org/automaker/issues/149
|
||||||
permissionMode: 'default',
|
permissionMode: 'default',
|
||||||
model: getModelForUseCase('spec', config.model),
|
model: getModelForUseCase('spec', config.model),
|
||||||
maxTurns: config.maxTurns ?? MAX_TURNS.maximum,
|
maxTurns: MAX_TURNS.maximum,
|
||||||
cwd: config.cwd,
|
cwd: config.cwd,
|
||||||
allowedTools: [...TOOL_PRESETS.specGeneration],
|
allowedTools: [...TOOL_PRESETS.specGeneration],
|
||||||
...claudeMdOptions,
|
...claudeMdOptions,
|
||||||
@@ -466,7 +437,7 @@ export function createFeatureGenerationOptions(config: CreateSdkOptionsConfig):
|
|||||||
// Override permissionMode - feature generation only needs read-only tools
|
// Override permissionMode - feature generation only needs read-only tools
|
||||||
permissionMode: 'default',
|
permissionMode: 'default',
|
||||||
model: getModelForUseCase('features', config.model),
|
model: getModelForUseCase('features', config.model),
|
||||||
maxTurns: config.maxTurns ?? MAX_TURNS.quick,
|
maxTurns: MAX_TURNS.quick,
|
||||||
cwd: config.cwd,
|
cwd: config.cwd,
|
||||||
allowedTools: [...TOOL_PRESETS.readOnly],
|
allowedTools: [...TOOL_PRESETS.readOnly],
|
||||||
...claudeMdOptions,
|
...claudeMdOptions,
|
||||||
@@ -497,7 +468,7 @@ export function createSuggestionsOptions(config: CreateSdkOptionsConfig): Option
|
|||||||
return {
|
return {
|
||||||
...getBaseOptions(),
|
...getBaseOptions(),
|
||||||
model: getModelForUseCase('suggestions', config.model),
|
model: getModelForUseCase('suggestions', config.model),
|
||||||
maxTurns: config.maxTurns ?? MAX_TURNS.extended,
|
maxTurns: MAX_TURNS.extended,
|
||||||
cwd: config.cwd,
|
cwd: config.cwd,
|
||||||
allowedTools: [...TOOL_PRESETS.readOnly],
|
allowedTools: [...TOOL_PRESETS.readOnly],
|
||||||
...claudeMdOptions,
|
...claudeMdOptions,
|
||||||
@@ -535,7 +506,7 @@ export function createChatOptions(config: CreateSdkOptionsConfig): Options {
|
|||||||
return {
|
return {
|
||||||
...getBaseOptions(),
|
...getBaseOptions(),
|
||||||
model: getModelForUseCase('chat', effectiveModel),
|
model: getModelForUseCase('chat', effectiveModel),
|
||||||
maxTurns: config.maxTurns ?? MAX_TURNS.standard,
|
maxTurns: MAX_TURNS.standard,
|
||||||
cwd: config.cwd,
|
cwd: config.cwd,
|
||||||
allowedTools: [...TOOL_PRESETS.chat],
|
allowedTools: [...TOOL_PRESETS.chat],
|
||||||
...claudeMdOptions,
|
...claudeMdOptions,
|
||||||
@@ -570,7 +541,7 @@ export function createAutoModeOptions(config: CreateSdkOptionsConfig): Options {
|
|||||||
return {
|
return {
|
||||||
...getBaseOptions(),
|
...getBaseOptions(),
|
||||||
model: getModelForUseCase('auto', config.model),
|
model: getModelForUseCase('auto', config.model),
|
||||||
maxTurns: config.maxTurns ?? MAX_TURNS.maximum,
|
maxTurns: MAX_TURNS.maximum,
|
||||||
cwd: config.cwd,
|
cwd: config.cwd,
|
||||||
allowedTools: [...TOOL_PRESETS.fullAccess],
|
allowedTools: [...TOOL_PRESETS.fullAccess],
|
||||||
...claudeMdOptions,
|
...claudeMdOptions,
|
||||||
|
|||||||
@@ -33,16 +33,9 @@ import {
|
|||||||
|
|
||||||
const logger = createLogger('SettingsHelper');
|
const logger = createLogger('SettingsHelper');
|
||||||
|
|
||||||
/** Default number of agent turns used when no value is configured. */
|
|
||||||
export const DEFAULT_MAX_TURNS = 10000;
|
|
||||||
|
|
||||||
/** Upper bound for the max-turns clamp; values above this are capped here. */
|
|
||||||
export const MAX_ALLOWED_TURNS = 10000;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the autoLoadClaudeMd setting, with project settings taking precedence over global.
|
* Get the autoLoadClaudeMd setting, with project settings taking precedence over global.
|
||||||
* Falls back to global settings and defaults to true when unset.
|
* Returns false if settings service is not available.
|
||||||
* Returns true if settings service is not available.
|
|
||||||
*
|
*
|
||||||
* @param projectPath - Path to the project
|
* @param projectPath - Path to the project
|
||||||
* @param settingsService - Optional settings service instance
|
* @param settingsService - Optional settings service instance
|
||||||
@@ -55,8 +48,8 @@ export async function getAutoLoadClaudeMdSetting(
|
|||||||
logPrefix = '[SettingsHelper]'
|
logPrefix = '[SettingsHelper]'
|
||||||
): Promise<boolean> {
|
): Promise<boolean> {
|
||||||
if (!settingsService) {
|
if (!settingsService) {
|
||||||
logger.info(`${logPrefix} SettingsService not available, autoLoadClaudeMd defaulting to true`);
|
logger.info(`${logPrefix} SettingsService not available, autoLoadClaudeMd disabled`);
|
||||||
return true;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -71,7 +64,7 @@ export async function getAutoLoadClaudeMdSetting(
|
|||||||
|
|
||||||
// Fall back to global settings
|
// Fall back to global settings
|
||||||
const globalSettings = await settingsService.getGlobalSettings();
|
const globalSettings = await settingsService.getGlobalSettings();
|
||||||
const result = globalSettings.autoLoadClaudeMd ?? true;
|
const result = globalSettings.autoLoadClaudeMd ?? false;
|
||||||
logger.info(`${logPrefix} autoLoadClaudeMd from global settings: ${result}`);
|
logger.info(`${logPrefix} autoLoadClaudeMd from global settings: ${result}`);
|
||||||
return result;
|
return result;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -80,84 +73,6 @@ export async function getAutoLoadClaudeMdSetting(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the useClaudeCodeSystemPrompt setting, with project settings taking precedence over global.
|
|
||||||
* Falls back to global settings and defaults to true when unset.
|
|
||||||
* Returns true if settings service is not available.
|
|
||||||
*
|
|
||||||
* @param projectPath - Path to the project
|
|
||||||
* @param settingsService - Optional settings service instance
|
|
||||||
* @param logPrefix - Prefix for log messages (e.g., '[AgentService]')
|
|
||||||
* @returns Promise resolving to the useClaudeCodeSystemPrompt setting value
|
|
||||||
*/
|
|
||||||
export async function getUseClaudeCodeSystemPromptSetting(
|
|
||||||
projectPath: string,
|
|
||||||
settingsService?: SettingsService | null,
|
|
||||||
logPrefix = '[SettingsHelper]'
|
|
||||||
): Promise<boolean> {
|
|
||||||
if (!settingsService) {
|
|
||||||
logger.info(
|
|
||||||
`${logPrefix} SettingsService not available, useClaudeCodeSystemPrompt defaulting to true`
|
|
||||||
);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Check project settings first (takes precedence)
|
|
||||||
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
|
||||||
if (projectSettings.useClaudeCodeSystemPrompt !== undefined) {
|
|
||||||
logger.info(
|
|
||||||
`${logPrefix} useClaudeCodeSystemPrompt from project settings: ${projectSettings.useClaudeCodeSystemPrompt}`
|
|
||||||
);
|
|
||||||
return projectSettings.useClaudeCodeSystemPrompt;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fall back to global settings
|
|
||||||
const globalSettings = await settingsService.getGlobalSettings();
|
|
||||||
const result = globalSettings.useClaudeCodeSystemPrompt ?? true;
|
|
||||||
logger.info(`${logPrefix} useClaudeCodeSystemPrompt from global settings: ${result}`);
|
|
||||||
return result;
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`${logPrefix} Failed to load useClaudeCodeSystemPrompt setting:`, error);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the default max turns setting from global settings.
|
|
||||||
*
|
|
||||||
* Reads the user's configured `defaultMaxTurns` setting, which controls the maximum
|
|
||||||
* number of agent turns (tool-call round-trips) for feature execution.
|
|
||||||
*
|
|
||||||
* @param settingsService - Settings service instance (may be null)
|
|
||||||
* @param logPrefix - Logging prefix for debugging
|
|
||||||
* @returns The user's configured max turns, or {@link DEFAULT_MAX_TURNS} as default
|
|
||||||
*/
|
|
||||||
export async function getDefaultMaxTurnsSetting(
|
|
||||||
settingsService?: SettingsService | null,
|
|
||||||
logPrefix = '[SettingsHelper]'
|
|
||||||
): Promise<number> {
|
|
||||||
if (!settingsService) {
|
|
||||||
logger.info(
|
|
||||||
`${logPrefix} SettingsService not available, using default maxTurns=${DEFAULT_MAX_TURNS}`
|
|
||||||
);
|
|
||||||
return DEFAULT_MAX_TURNS;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const globalSettings = await settingsService.getGlobalSettings();
|
|
||||||
const raw = globalSettings.defaultMaxTurns;
|
|
||||||
const result = Number.isFinite(raw) ? (raw as number) : DEFAULT_MAX_TURNS;
|
|
||||||
// Clamp to valid range
|
|
||||||
const clamped = Math.max(1, Math.min(MAX_ALLOWED_TURNS, Math.floor(result)));
|
|
||||||
logger.debug(`${logPrefix} defaultMaxTurns from global settings: ${clamped}`);
|
|
||||||
return clamped;
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`${logPrefix} Failed to load defaultMaxTurns setting:`, error);
|
|
||||||
return DEFAULT_MAX_TURNS;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Filters out CLAUDE.md from context files when autoLoadClaudeMd is enabled
|
* Filters out CLAUDE.md from context files when autoLoadClaudeMd is enabled
|
||||||
* and rebuilds the formatted prompt without it.
|
* and rebuilds the formatted prompt without it.
|
||||||
|
|||||||
@@ -33,23 +33,8 @@ const logger = createLogger('ClaudeProvider');
|
|||||||
*/
|
*/
|
||||||
type ProviderConfig = ClaudeApiProfile | ClaudeCompatibleProvider;
|
type ProviderConfig = ClaudeApiProfile | ClaudeCompatibleProvider;
|
||||||
|
|
||||||
// System vars are always passed from process.env regardless of profile.
|
// System vars are always passed from process.env regardless of profile
|
||||||
// Includes filesystem, locale, and temp directory vars that the Claude CLI
|
const SYSTEM_ENV_VARS = ['PATH', 'HOME', 'SHELL', 'TERM', 'USER', 'LANG', 'LC_ALL'];
|
||||||
// needs internally for config resolution and temp file creation.
|
|
||||||
const SYSTEM_ENV_VARS = [
|
|
||||||
'PATH',
|
|
||||||
'HOME',
|
|
||||||
'SHELL',
|
|
||||||
'TERM',
|
|
||||||
'USER',
|
|
||||||
'LANG',
|
|
||||||
'LC_ALL',
|
|
||||||
'TMPDIR',
|
|
||||||
'XDG_CONFIG_HOME',
|
|
||||||
'XDG_DATA_HOME',
|
|
||||||
'XDG_CACHE_HOME',
|
|
||||||
'XDG_STATE_HOME',
|
|
||||||
];
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if the config is a ClaudeCompatibleProvider (new system)
|
* Check if the config is a ClaudeCompatibleProvider (new system)
|
||||||
@@ -195,7 +180,7 @@ export class ClaudeProvider extends BaseProvider {
|
|||||||
model,
|
model,
|
||||||
cwd,
|
cwd,
|
||||||
systemPrompt,
|
systemPrompt,
|
||||||
maxTurns = 1000,
|
maxTurns = 100,
|
||||||
allowedTools,
|
allowedTools,
|
||||||
abortController,
|
abortController,
|
||||||
conversationHistory,
|
conversationHistory,
|
||||||
@@ -228,8 +213,6 @@ export class ClaudeProvider extends BaseProvider {
|
|||||||
env: buildEnv(providerConfig, credentials),
|
env: buildEnv(providerConfig, credentials),
|
||||||
// Pass through allowedTools if provided by caller (decided by sdk-options.ts)
|
// Pass through allowedTools if provided by caller (decided by sdk-options.ts)
|
||||||
...(allowedTools && { allowedTools }),
|
...(allowedTools && { allowedTools }),
|
||||||
// Restrict available built-in tools if specified (tools: [] disables all tools)
|
|
||||||
...(options.tools && { tools: options.tools }),
|
|
||||||
// AUTONOMOUS MODE: Always bypass permissions for fully autonomous operation
|
// AUTONOMOUS MODE: Always bypass permissions for fully autonomous operation
|
||||||
permissionMode: 'bypassPermissions',
|
permissionMode: 'bypassPermissions',
|
||||||
allowDangerouslySkipPermissions: true,
|
allowDangerouslySkipPermissions: true,
|
||||||
|
|||||||
@@ -33,6 +33,7 @@ import {
|
|||||||
supportsReasoningEffort,
|
supportsReasoningEffort,
|
||||||
validateBareModelId,
|
validateBareModelId,
|
||||||
calculateReasoningTimeout,
|
calculateReasoningTimeout,
|
||||||
|
DEFAULT_TIMEOUT_MS,
|
||||||
type CodexApprovalPolicy,
|
type CodexApprovalPolicy,
|
||||||
type CodexSandboxMode,
|
type CodexSandboxMode,
|
||||||
type CodexAuthStatus,
|
type CodexAuthStatus,
|
||||||
@@ -51,7 +52,6 @@ import { CODEX_MODELS } from './codex-models.js';
|
|||||||
|
|
||||||
const CODEX_COMMAND = 'codex';
|
const CODEX_COMMAND = 'codex';
|
||||||
const CODEX_EXEC_SUBCOMMAND = 'exec';
|
const CODEX_EXEC_SUBCOMMAND = 'exec';
|
||||||
const CODEX_RESUME_SUBCOMMAND = 'resume';
|
|
||||||
const CODEX_JSON_FLAG = '--json';
|
const CODEX_JSON_FLAG = '--json';
|
||||||
const CODEX_MODEL_FLAG = '--model';
|
const CODEX_MODEL_FLAG = '--model';
|
||||||
const CODEX_VERSION_FLAG = '--version';
|
const CODEX_VERSION_FLAG = '--version';
|
||||||
@@ -98,7 +98,7 @@ const TEXT_ENCODING = 'utf-8';
|
|||||||
*
|
*
|
||||||
* @see calculateReasoningTimeout from @automaker/types
|
* @see calculateReasoningTimeout from @automaker/types
|
||||||
*/
|
*/
|
||||||
const CODEX_CLI_TIMEOUT_MS = 120000; // 2 minutes — matches CLI provider base timeout
|
const CODEX_CLI_TIMEOUT_MS = DEFAULT_TIMEOUT_MS;
|
||||||
const CODEX_FEATURE_GENERATION_BASE_TIMEOUT_MS = 300000; // 5 minutes for feature generation
|
const CODEX_FEATURE_GENERATION_BASE_TIMEOUT_MS = 300000; // 5 minutes for feature generation
|
||||||
const SYSTEM_PROMPT_SEPARATOR = '\n\n';
|
const SYSTEM_PROMPT_SEPARATOR = '\n\n';
|
||||||
const CODEX_INSTRUCTIONS_DIR = '.codex';
|
const CODEX_INSTRUCTIONS_DIR = '.codex';
|
||||||
@@ -127,16 +127,11 @@ const DEFAULT_ALLOWED_TOOLS = [
|
|||||||
'Read',
|
'Read',
|
||||||
'Write',
|
'Write',
|
||||||
'Edit',
|
'Edit',
|
||||||
'MultiEdit',
|
|
||||||
'Glob',
|
'Glob',
|
||||||
'Grep',
|
'Grep',
|
||||||
'LS',
|
|
||||||
'Bash',
|
'Bash',
|
||||||
'WebSearch',
|
'WebSearch',
|
||||||
'WebFetch',
|
'WebFetch',
|
||||||
'TodoWrite',
|
|
||||||
'Task',
|
|
||||||
'Skill',
|
|
||||||
] as const;
|
] as const;
|
||||||
const SEARCH_TOOL_NAMES = new Set(['WebSearch', 'WebFetch']);
|
const SEARCH_TOOL_NAMES = new Set(['WebSearch', 'WebFetch']);
|
||||||
const MIN_MAX_TURNS = 1;
|
const MIN_MAX_TURNS = 1;
|
||||||
@@ -361,14 +356,9 @@ function resolveSystemPrompt(systemPrompt?: unknown): string | null {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildPromptText(options: ExecuteOptions): string {
|
|
||||||
return typeof options.prompt === 'string'
|
|
||||||
? options.prompt
|
|
||||||
: extractTextFromContent(options.prompt);
|
|
||||||
}
|
|
||||||
|
|
||||||
function buildCombinedPrompt(options: ExecuteOptions, systemPromptText?: string | null): string {
|
function buildCombinedPrompt(options: ExecuteOptions, systemPromptText?: string | null): string {
|
||||||
const promptText = buildPromptText(options);
|
const promptText =
|
||||||
|
typeof options.prompt === 'string' ? options.prompt : extractTextFromContent(options.prompt);
|
||||||
const historyText = options.conversationHistory
|
const historyText = options.conversationHistory
|
||||||
? formatHistoryAsText(options.conversationHistory)
|
? formatHistoryAsText(options.conversationHistory)
|
||||||
: '';
|
: '';
|
||||||
@@ -381,11 +371,6 @@ function buildCombinedPrompt(options: ExecuteOptions, systemPromptText?: string
|
|||||||
return `${historyText}${systemSection}${HISTORY_HEADER}${promptText}`;
|
return `${historyText}${systemSection}${HISTORY_HEADER}${promptText}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildResumePrompt(options: ExecuteOptions): string {
|
|
||||||
const promptText = buildPromptText(options);
|
|
||||||
return `${HISTORY_HEADER}${promptText}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function formatConfigValue(value: string | number | boolean): string {
|
function formatConfigValue(value: string | number | boolean): string {
|
||||||
return String(value);
|
return String(value);
|
||||||
}
|
}
|
||||||
@@ -753,16 +738,6 @@ export class CodexProvider extends BaseProvider {
|
|||||||
);
|
);
|
||||||
const baseSystemPrompt = resolveSystemPrompt(options.systemPrompt);
|
const baseSystemPrompt = resolveSystemPrompt(options.systemPrompt);
|
||||||
const resolvedMaxTurns = resolveMaxTurns(options.maxTurns);
|
const resolvedMaxTurns = resolveMaxTurns(options.maxTurns);
|
||||||
if (resolvedMaxTurns === null && options.maxTurns === undefined) {
|
|
||||||
logger.warn(
|
|
||||||
`[executeQuery] maxTurns not provided — Codex CLI will use its internal default. ` +
|
|
||||||
`This may cause premature completion. Model: ${options.model}`
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
logger.info(
|
|
||||||
`[executeQuery] maxTurns: requested=${options.maxTurns}, resolved=${resolvedMaxTurns}, model=${options.model}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const resolvedAllowedTools = options.allowedTools ?? Array.from(DEFAULT_ALLOWED_TOOLS);
|
const resolvedAllowedTools = options.allowedTools ?? Array.from(DEFAULT_ALLOWED_TOOLS);
|
||||||
const restrictTools = !hasMcpServers || options.mcpUnrestrictedTools === false;
|
const restrictTools = !hasMcpServers || options.mcpUnrestrictedTools === false;
|
||||||
const wantsOutputSchema = Boolean(
|
const wantsOutputSchema = Boolean(
|
||||||
@@ -809,22 +784,16 @@ export class CodexProvider extends BaseProvider {
|
|||||||
}
|
}
|
||||||
const searchEnabled =
|
const searchEnabled =
|
||||||
codexSettings.enableWebSearch || resolveSearchEnabled(resolvedAllowedTools, restrictTools);
|
codexSettings.enableWebSearch || resolveSearchEnabled(resolvedAllowedTools, restrictTools);
|
||||||
const isResumeQuery = Boolean(options.sdkSessionId);
|
const schemaPath = await writeOutputSchemaFile(options.cwd, options.outputFormat);
|
||||||
const schemaPath = isResumeQuery
|
const imageBlocks = codexSettings.enableImages ? extractImageBlocks(options.prompt) : [];
|
||||||
? null
|
const imagePaths = await writeImageFiles(options.cwd, imageBlocks);
|
||||||
: await writeOutputSchemaFile(options.cwd, options.outputFormat);
|
|
||||||
const imageBlocks =
|
|
||||||
!isResumeQuery && codexSettings.enableImages ? extractImageBlocks(options.prompt) : [];
|
|
||||||
const imagePaths = isResumeQuery ? [] : await writeImageFiles(options.cwd, imageBlocks);
|
|
||||||
const approvalPolicy =
|
const approvalPolicy =
|
||||||
hasMcpServers && options.mcpAutoApproveTools !== undefined
|
hasMcpServers && options.mcpAutoApproveTools !== undefined
|
||||||
? options.mcpAutoApproveTools
|
? options.mcpAutoApproveTools
|
||||||
? 'never'
|
? 'never'
|
||||||
: 'on-request'
|
: 'on-request'
|
||||||
: codexSettings.approvalPolicy;
|
: codexSettings.approvalPolicy;
|
||||||
const promptText = isResumeQuery
|
const promptText = buildCombinedPrompt(options, combinedSystemPrompt);
|
||||||
? buildResumePrompt(options)
|
|
||||||
: buildCombinedPrompt(options, combinedSystemPrompt);
|
|
||||||
const commandPath = executionPlan.cliPath || CODEX_COMMAND;
|
const commandPath = executionPlan.cliPath || CODEX_COMMAND;
|
||||||
|
|
||||||
// Build config overrides for max turns and reasoning effort
|
// Build config overrides for max turns and reasoning effort
|
||||||
@@ -854,30 +823,21 @@ export class CodexProvider extends BaseProvider {
|
|||||||
const preExecArgs: string[] = [];
|
const preExecArgs: string[] = [];
|
||||||
|
|
||||||
// Add additional directories with write access
|
// Add additional directories with write access
|
||||||
if (
|
if (codexSettings.additionalDirs && codexSettings.additionalDirs.length > 0) {
|
||||||
!isResumeQuery &&
|
|
||||||
codexSettings.additionalDirs &&
|
|
||||||
codexSettings.additionalDirs.length > 0
|
|
||||||
) {
|
|
||||||
for (const dir of codexSettings.additionalDirs) {
|
for (const dir of codexSettings.additionalDirs) {
|
||||||
preExecArgs.push(CODEX_ADD_DIR_FLAG, dir);
|
preExecArgs.push(CODEX_ADD_DIR_FLAG, dir);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If images were written to disk, add the image directory so the CLI can access them.
|
// If images were written to disk, add the image directory so the CLI can access them
|
||||||
// Note: imagePaths is set to [] when isResumeQuery is true, so this check is sufficient.
|
|
||||||
if (imagePaths.length > 0) {
|
if (imagePaths.length > 0) {
|
||||||
const imageDir = path.join(options.cwd, CODEX_INSTRUCTIONS_DIR, IMAGE_TEMP_DIR);
|
const imageDir = path.join(options.cwd, CODEX_INSTRUCTIONS_DIR, IMAGE_TEMP_DIR);
|
||||||
preExecArgs.push(CODEX_ADD_DIR_FLAG, imageDir);
|
preExecArgs.push(CODEX_ADD_DIR_FLAG, imageDir);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Model is already bare (no prefix) - validated by executeQuery
|
// Model is already bare (no prefix) - validated by executeQuery
|
||||||
const codexCommand = isResumeQuery
|
|
||||||
? [CODEX_EXEC_SUBCOMMAND, CODEX_RESUME_SUBCOMMAND]
|
|
||||||
: [CODEX_EXEC_SUBCOMMAND];
|
|
||||||
|
|
||||||
const args = [
|
const args = [
|
||||||
...codexCommand,
|
CODEX_EXEC_SUBCOMMAND,
|
||||||
CODEX_YOLO_FLAG,
|
CODEX_YOLO_FLAG,
|
||||||
CODEX_SKIP_GIT_REPO_CHECK_FLAG,
|
CODEX_SKIP_GIT_REPO_CHECK_FLAG,
|
||||||
...preExecArgs,
|
...preExecArgs,
|
||||||
@@ -886,7 +846,6 @@ export class CodexProvider extends BaseProvider {
|
|||||||
CODEX_JSON_FLAG,
|
CODEX_JSON_FLAG,
|
||||||
...configOverrideArgs,
|
...configOverrideArgs,
|
||||||
...(schemaPath ? [CODEX_OUTPUT_SCHEMA_FLAG, schemaPath] : []),
|
...(schemaPath ? [CODEX_OUTPUT_SCHEMA_FLAG, schemaPath] : []),
|
||||||
...(options.sdkSessionId ? [options.sdkSessionId] : []),
|
|
||||||
'-', // Read prompt from stdin to avoid shell escaping issues
|
'-', // Read prompt from stdin to avoid shell escaping issues
|
||||||
];
|
];
|
||||||
|
|
||||||
|
|||||||
@@ -30,7 +30,6 @@ import {
|
|||||||
type CopilotRuntimeModel,
|
type CopilotRuntimeModel,
|
||||||
} from '@automaker/types';
|
} from '@automaker/types';
|
||||||
import { createLogger, isAbortError } from '@automaker/utils';
|
import { createLogger, isAbortError } from '@automaker/utils';
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
|
||||||
import { CopilotClient, type PermissionRequest } from '@github/copilot-sdk';
|
import { CopilotClient, type PermissionRequest } from '@github/copilot-sdk';
|
||||||
import {
|
import {
|
||||||
normalizeTodos,
|
normalizeTodos,
|
||||||
@@ -117,12 +116,6 @@ export interface CopilotError extends Error {
|
|||||||
suggestion?: string;
|
suggestion?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
type CopilotSession = Awaited<ReturnType<CopilotClient['createSession']>>;
|
|
||||||
type CopilotSessionOptions = Parameters<CopilotClient['createSession']>[0];
|
|
||||||
type ResumableCopilotClient = CopilotClient & {
|
|
||||||
resumeSession?: (sessionId: string, options: CopilotSessionOptions) => Promise<CopilotSession>;
|
|
||||||
};
|
|
||||||
|
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
// Tool Name Normalization
|
// Tool Name Normalization
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
@@ -389,14 +382,9 @@ export class CopilotProvider extends CliProvider {
|
|||||||
|
|
||||||
case 'session.error': {
|
case 'session.error': {
|
||||||
const errorEvent = sdkEvent as SdkSessionErrorEvent;
|
const errorEvent = sdkEvent as SdkSessionErrorEvent;
|
||||||
const enrichedError =
|
|
||||||
errorEvent.data.message ||
|
|
||||||
(errorEvent.data.code
|
|
||||||
? `Copilot agent error (code: ${errorEvent.data.code})`
|
|
||||||
: 'Copilot agent error');
|
|
||||||
return {
|
return {
|
||||||
type: 'error',
|
type: 'error',
|
||||||
error: enrichedError,
|
error: errorEvent.data.message || 'Unknown error',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -528,11 +516,7 @@ export class CopilotProvider extends CliProvider {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const promptText = this.extractPromptText(options);
|
const promptText = this.extractPromptText(options);
|
||||||
// resolveModelString may return dash-separated canonical names (e.g. "claude-sonnet-4-6"),
|
const bareModel = options.model || DEFAULT_BARE_MODEL;
|
||||||
// but the Copilot SDK expects dot-separated version suffixes (e.g. "claude-sonnet-4.6").
|
|
||||||
// Normalize by converting the last dash-separated numeric pair to dot notation.
|
|
||||||
const resolvedModel = resolveModelString(options.model || DEFAULT_BARE_MODEL);
|
|
||||||
const bareModel = resolvedModel.replace(/-(\d+)-(\d+)$/, '-$1.$2');
|
|
||||||
const workingDirectory = options.cwd || process.cwd();
|
const workingDirectory = options.cwd || process.cwd();
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
@@ -570,14 +554,12 @@ export class CopilotProvider extends CliProvider {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
// Declare session outside try so it's accessible in the catch block for cleanup.
|
|
||||||
let session: CopilotSession | undefined;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await client.start();
|
await client.start();
|
||||||
logger.debug(`CopilotClient started with cwd: ${workingDirectory}`);
|
logger.debug(`CopilotClient started with cwd: ${workingDirectory}`);
|
||||||
|
|
||||||
const sessionOptions: CopilotSessionOptions = {
|
// Create session with streaming enabled for real-time events
|
||||||
|
const session = await client.createSession({
|
||||||
model: bareModel,
|
model: bareModel,
|
||||||
streaming: true,
|
streaming: true,
|
||||||
// AUTONOMOUS MODE: Auto-approve all permission requests.
|
// AUTONOMOUS MODE: Auto-approve all permission requests.
|
||||||
@@ -590,33 +572,13 @@ export class CopilotProvider extends CliProvider {
|
|||||||
logger.debug(`Permission request: ${request.kind}`);
|
logger.debug(`Permission request: ${request.kind}`);
|
||||||
return { kind: 'approved' };
|
return { kind: 'approved' };
|
||||||
},
|
},
|
||||||
};
|
});
|
||||||
|
|
||||||
// Resume the previous Copilot session when possible; otherwise create a fresh one.
|
const sessionId = session.sessionId;
|
||||||
const resumableClient = client as ResumableCopilotClient;
|
logger.debug(`Session created: ${sessionId}`);
|
||||||
let sessionResumed = false;
|
|
||||||
if (options.sdkSessionId && typeof resumableClient.resumeSession === 'function') {
|
|
||||||
try {
|
|
||||||
session = await resumableClient.resumeSession(options.sdkSessionId, sessionOptions);
|
|
||||||
sessionResumed = true;
|
|
||||||
logger.debug(`Resumed Copilot session: ${session.sessionId}`);
|
|
||||||
} catch (resumeError) {
|
|
||||||
logger.warn(
|
|
||||||
`Failed to resume Copilot session "${options.sdkSessionId}", creating a new session: ${resumeError}`
|
|
||||||
);
|
|
||||||
session = await client.createSession(sessionOptions);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
session = await client.createSession(sessionOptions);
|
|
||||||
}
|
|
||||||
|
|
||||||
// session is always assigned by this point (both branches above assign it)
|
|
||||||
const activeSession = session!;
|
|
||||||
const sessionId = activeSession.sessionId;
|
|
||||||
logger.debug(`Session ${sessionResumed ? 'resumed' : 'created'}: ${sessionId}`);
|
|
||||||
|
|
||||||
// Set up event handler to push events to queue
|
// Set up event handler to push events to queue
|
||||||
activeSession.on((event: SdkEvent) => {
|
session.on((event: SdkEvent) => {
|
||||||
logger.debug(`SDK event: ${event.type}`);
|
logger.debug(`SDK event: ${event.type}`);
|
||||||
|
|
||||||
if (event.type === 'session.idle') {
|
if (event.type === 'session.idle') {
|
||||||
@@ -634,7 +596,7 @@ export class CopilotProvider extends CliProvider {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Send the prompt (non-blocking)
|
// Send the prompt (non-blocking)
|
||||||
await activeSession.send({ prompt: promptText });
|
await session.send({ prompt: promptText });
|
||||||
|
|
||||||
// Process events as they arrive
|
// Process events as they arrive
|
||||||
while (!sessionComplete || eventQueue.length > 0) {
|
while (!sessionComplete || eventQueue.length > 0) {
|
||||||
@@ -642,7 +604,7 @@ export class CopilotProvider extends CliProvider {
|
|||||||
|
|
||||||
// Check for errors first (before processing events to avoid race condition)
|
// Check for errors first (before processing events to avoid race condition)
|
||||||
if (sessionError) {
|
if (sessionError) {
|
||||||
await activeSession.destroy();
|
await session.destroy();
|
||||||
await client.stop();
|
await client.stop();
|
||||||
throw sessionError;
|
throw sessionError;
|
||||||
}
|
}
|
||||||
@@ -662,19 +624,11 @@ export class CopilotProvider extends CliProvider {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Cleanup
|
// Cleanup
|
||||||
await activeSession.destroy();
|
await session.destroy();
|
||||||
await client.stop();
|
await client.stop();
|
||||||
logger.debug('CopilotClient stopped successfully');
|
logger.debug('CopilotClient stopped successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Ensure session is destroyed and client is stopped on error to prevent leaks.
|
// Ensure client is stopped on error
|
||||||
// The session may have been created/resumed before the error occurred.
|
|
||||||
if (session) {
|
|
||||||
try {
|
|
||||||
await session.destroy();
|
|
||||||
} catch (sessionCleanupError) {
|
|
||||||
logger.debug(`Failed to destroy session during cleanup: ${sessionCleanupError}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
try {
|
try {
|
||||||
await client.stop();
|
await client.stop();
|
||||||
} catch (cleanupError) {
|
} catch (cleanupError) {
|
||||||
|
|||||||
@@ -450,11 +450,6 @@ export class CursorProvider extends CliProvider {
|
|||||||
cliArgs.push('--model', model);
|
cliArgs.push('--model', model);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resume an existing chat when a provider session ID is available
|
|
||||||
if (options.sdkSessionId) {
|
|
||||||
cliArgs.push('--resume', options.sdkSessionId);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use '-' to indicate reading prompt from stdin
|
// Use '-' to indicate reading prompt from stdin
|
||||||
cliArgs.push('-');
|
cliArgs.push('-');
|
||||||
|
|
||||||
@@ -562,14 +557,10 @@ export class CursorProvider extends CliProvider {
|
|||||||
const resultEvent = cursorEvent as CursorResultEvent;
|
const resultEvent = cursorEvent as CursorResultEvent;
|
||||||
|
|
||||||
if (resultEvent.is_error) {
|
if (resultEvent.is_error) {
|
||||||
const errorText = resultEvent.error || resultEvent.result || '';
|
|
||||||
const enrichedError =
|
|
||||||
errorText ||
|
|
||||||
`Cursor agent failed (duration: ${resultEvent.duration_ms}ms, subtype: ${resultEvent.subtype}, session: ${resultEvent.session_id ?? 'none'})`;
|
|
||||||
return {
|
return {
|
||||||
type: 'error',
|
type: 'error',
|
||||||
session_id: resultEvent.session_id,
|
session_id: resultEvent.session_id,
|
||||||
error: enrichedError,
|
error: resultEvent.error || resultEvent.result || 'Unknown error',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ import type {
|
|||||||
import { validateBareModelId } from '@automaker/types';
|
import { validateBareModelId } from '@automaker/types';
|
||||||
import { GEMINI_MODEL_MAP, type GeminiAuthStatus } from '@automaker/types';
|
import { GEMINI_MODEL_MAP, type GeminiAuthStatus } from '@automaker/types';
|
||||||
import { createLogger, isAbortError } from '@automaker/utils';
|
import { createLogger, isAbortError } from '@automaker/utils';
|
||||||
import { spawnJSONLProcess, type SubprocessOptions } from '@automaker/platform';
|
import { spawnJSONLProcess } from '@automaker/platform';
|
||||||
import { normalizeTodos } from './tool-normalization.js';
|
import { normalizeTodos } from './tool-normalization.js';
|
||||||
|
|
||||||
// Create logger for this module
|
// Create logger for this module
|
||||||
@@ -263,14 +263,6 @@ export class GeminiProvider extends CliProvider {
|
|||||||
// Use explicit approval-mode for clearer semantics
|
// Use explicit approval-mode for clearer semantics
|
||||||
cliArgs.push('--approval-mode', 'yolo');
|
cliArgs.push('--approval-mode', 'yolo');
|
||||||
|
|
||||||
// Force headless (non-interactive) mode with --prompt flag.
|
|
||||||
// The actual prompt content is passed via stdin (see buildSubprocessOptions()),
|
|
||||||
// but we MUST include -p to trigger headless mode. Without it, Gemini CLI
|
|
||||||
// starts in interactive mode which adds significant startup overhead
|
|
||||||
// (interactive REPL setup, extra context loading, etc.).
|
|
||||||
// Per Gemini CLI docs: stdin content is "appended to" the -p value.
|
|
||||||
cliArgs.push('--prompt', '');
|
|
||||||
|
|
||||||
// Explicitly include the working directory in allowed workspace directories
|
// Explicitly include the working directory in allowed workspace directories
|
||||||
// This ensures Gemini CLI allows file operations in the project directory,
|
// This ensures Gemini CLI allows file operations in the project directory,
|
||||||
// even if it has a different workspace cached from a previous session
|
// even if it has a different workspace cached from a previous session
|
||||||
@@ -278,15 +270,13 @@ export class GeminiProvider extends CliProvider {
|
|||||||
cliArgs.push('--include-directories', options.cwd);
|
cliArgs.push('--include-directories', options.cwd);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resume an existing Gemini session when one is available
|
|
||||||
if (options.sdkSessionId) {
|
|
||||||
cliArgs.push('--resume', options.sdkSessionId);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Note: Gemini CLI doesn't have a --thinking-level flag.
|
// Note: Gemini CLI doesn't have a --thinking-level flag.
|
||||||
// Thinking capabilities are determined by the model selection (e.g., gemini-2.5-pro).
|
// Thinking capabilities are determined by the model selection (e.g., gemini-2.5-pro).
|
||||||
// The model handles thinking internally based on the task complexity.
|
// The model handles thinking internally based on the task complexity.
|
||||||
|
|
||||||
|
// The prompt will be passed as the last positional argument
|
||||||
|
// We'll append it in executeQuery after extracting the text
|
||||||
|
|
||||||
return cliArgs;
|
return cliArgs;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -381,13 +371,10 @@ export class GeminiProvider extends CliProvider {
|
|||||||
const resultEvent = geminiEvent as GeminiResultEvent;
|
const resultEvent = geminiEvent as GeminiResultEvent;
|
||||||
|
|
||||||
if (resultEvent.status === 'error') {
|
if (resultEvent.status === 'error') {
|
||||||
const enrichedError =
|
|
||||||
resultEvent.error ||
|
|
||||||
`Gemini agent failed (duration: ${resultEvent.stats?.duration_ms ?? 'unknown'}ms, session: ${resultEvent.session_id ?? 'none'})`;
|
|
||||||
return {
|
return {
|
||||||
type: 'error',
|
type: 'error',
|
||||||
session_id: resultEvent.session_id,
|
session_id: resultEvent.session_id,
|
||||||
error: enrichedError,
|
error: resultEvent.error || 'Unknown error',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -404,12 +391,10 @@ export class GeminiProvider extends CliProvider {
|
|||||||
|
|
||||||
case 'error': {
|
case 'error': {
|
||||||
const errorEvent = geminiEvent as GeminiResultEvent;
|
const errorEvent = geminiEvent as GeminiResultEvent;
|
||||||
const enrichedError =
|
|
||||||
errorEvent.error || `Gemini agent failed (session: ${errorEvent.session_id ?? 'none'})`;
|
|
||||||
return {
|
return {
|
||||||
type: 'error',
|
type: 'error',
|
||||||
session_id: errorEvent.session_id,
|
session_id: errorEvent.session_id,
|
||||||
error: enrichedError,
|
error: errorEvent.error || 'Unknown error',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -423,32 +408,6 @@ export class GeminiProvider extends CliProvider {
|
|||||||
// CliProvider Overrides
|
// CliProvider Overrides
|
||||||
// ==========================================================================
|
// ==========================================================================
|
||||||
|
|
||||||
/**
|
|
||||||
* Build subprocess options with stdin data for prompt and speed-optimized env vars.
|
|
||||||
*
|
|
||||||
* Passes the prompt via stdin instead of --prompt CLI arg to:
|
|
||||||
* - Avoid shell argument size limits with large prompts (system prompt + context)
|
|
||||||
* - Avoid shell escaping issues with special characters in prompts
|
|
||||||
* - Match the pattern used by Cursor, OpenCode, and Codex providers
|
|
||||||
*
|
|
||||||
* Also injects environment variables to reduce Gemini CLI startup overhead:
|
|
||||||
* - GEMINI_TELEMETRY_ENABLED=false: Disables OpenTelemetry collection
|
|
||||||
*/
|
|
||||||
protected buildSubprocessOptions(options: ExecuteOptions, cliArgs: string[]): SubprocessOptions {
|
|
||||||
const subprocessOptions = super.buildSubprocessOptions(options, cliArgs);
|
|
||||||
|
|
||||||
// Pass prompt via stdin to avoid shell interpretation of special characters
|
|
||||||
// and shell argument size limits with large system prompts + context files
|
|
||||||
subprocessOptions.stdinData = this.extractPromptText(options);
|
|
||||||
|
|
||||||
// Disable telemetry to reduce startup overhead
|
|
||||||
if (subprocessOptions.env) {
|
|
||||||
subprocessOptions.env['GEMINI_TELEMETRY_ENABLED'] = 'false';
|
|
||||||
}
|
|
||||||
|
|
||||||
return subprocessOptions;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Override error mapping for Gemini-specific error codes
|
* Override error mapping for Gemini-specific error codes
|
||||||
*/
|
*/
|
||||||
@@ -558,21 +517,14 @@ export class GeminiProvider extends CliProvider {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure .geminiignore exists in the working directory to prevent Gemini CLI
|
// Extract prompt text to pass as positional argument
|
||||||
// from scanning .git and node_modules directories during startup. This reduces
|
const promptText = this.extractPromptText(options);
|
||||||
// startup time significantly (reported: 35s → 11s) by skipping large directories
|
|
||||||
// that Gemini CLI would otherwise traverse for context discovery.
|
|
||||||
await this.ensureGeminiIgnore(options.cwd || process.cwd());
|
|
||||||
|
|
||||||
// Embed system prompt into the user prompt so Gemini CLI receives
|
// Build CLI args and append the prompt as the last positional argument
|
||||||
// project context (CLAUDE.md, CODE_QUALITY.md, etc.) that would
|
const cliArgs = this.buildCliArgs(options);
|
||||||
// otherwise be silently dropped since Gemini CLI has no --system-prompt flag.
|
cliArgs.push(promptText); // Gemini CLI uses positional args for the prompt
|
||||||
const effectiveOptions = this.embedSystemPromptIntoPrompt(options);
|
|
||||||
|
|
||||||
// Build CLI args for headless execution.
|
const subprocessOptions = this.buildSubprocessOptions(options, cliArgs);
|
||||||
const cliArgs = this.buildCliArgs(effectiveOptions);
|
|
||||||
|
|
||||||
const subprocessOptions = this.buildSubprocessOptions(effectiveOptions, cliArgs);
|
|
||||||
|
|
||||||
let sessionId: string | undefined;
|
let sessionId: string | undefined;
|
||||||
|
|
||||||
@@ -625,49 +577,6 @@ export class GeminiProvider extends CliProvider {
|
|||||||
// Gemini-Specific Methods
|
// Gemini-Specific Methods
|
||||||
// ==========================================================================
|
// ==========================================================================
|
||||||
|
|
||||||
/**
|
|
||||||
* Ensure a .geminiignore file exists in the working directory.
|
|
||||||
*
|
|
||||||
* Gemini CLI scans the working directory for context discovery during startup.
|
|
||||||
* Excluding .git and node_modules dramatically reduces startup time by preventing
|
|
||||||
* traversal of large directories (reported improvement: 35s → 11s).
|
|
||||||
*
|
|
||||||
* Only creates the file if it doesn't already exist to avoid overwriting user config.
|
|
||||||
*/
|
|
||||||
private async ensureGeminiIgnore(cwd: string): Promise<void> {
|
|
||||||
const ignorePath = path.join(cwd, '.geminiignore');
|
|
||||||
const content = [
|
|
||||||
'# Auto-generated by Automaker to speed up Gemini CLI startup',
|
|
||||||
'# Prevents Gemini CLI from scanning large directories during context discovery',
|
|
||||||
'.git',
|
|
||||||
'node_modules',
|
|
||||||
'dist',
|
|
||||||
'build',
|
|
||||||
'.next',
|
|
||||||
'.nuxt',
|
|
||||||
'coverage',
|
|
||||||
'.automaker',
|
|
||||||
'.worktrees',
|
|
||||||
'.vscode',
|
|
||||||
'.idea',
|
|
||||||
'*.lock',
|
|
||||||
'',
|
|
||||||
].join('\n');
|
|
||||||
try {
|
|
||||||
// Use 'wx' flag for atomic creation - fails if file exists (EEXIST)
|
|
||||||
await fs.writeFile(ignorePath, content, { encoding: 'utf-8', flag: 'wx' });
|
|
||||||
logger.debug(`Created .geminiignore at ${ignorePath}`);
|
|
||||||
} catch (writeError) {
|
|
||||||
// EEXIST means file already exists - that's fine, preserve user's file
|
|
||||||
if ((writeError as NodeJS.ErrnoException).code === 'EEXIST') {
|
|
||||||
logger.debug(`.geminiignore already exists at ${ignorePath}, preserving existing file`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
// Non-fatal: startup will just be slower without the ignore file
|
|
||||||
logger.debug(`Failed to create .geminiignore: ${writeError}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a GeminiError with details
|
* Create a GeminiError with details
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -19,11 +19,10 @@ export function createAnalyzeProjectHandler(autoModeService: AutoModeServiceComp
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Kick off analysis in the background; attach a rejection handler so
|
// Start analysis in background
|
||||||
// unhandled-promise warnings don't surface and errors are at least logged.
|
autoModeService.analyzeProject(projectPath).catch((error) => {
|
||||||
// Synchronous throws (e.g. "not implemented") still propagate here.
|
logger.error(`[AutoMode] Project analysis error:`, error);
|
||||||
const analysisPromise = autoModeService.analyzeProject(projectPath);
|
});
|
||||||
analysisPromise.catch((err) => logError(err, 'Background analyzeProject failed'));
|
|
||||||
|
|
||||||
res.json({ success: true, message: 'Project analysis started' });
|
res.json({ success: true, message: 'Project analysis started' });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -114,20 +114,9 @@ export function mapBacklogPlanError(rawMessage: string): string {
|
|||||||
return 'Claude CLI could not be launched. Make sure the Claude CLI is installed and available in PATH, or check that Node.js is correctly installed. Try running "which claude" or "claude --version" in your terminal to verify.';
|
return 'Claude CLI could not be launched. Make sure the Claude CLI is installed and available in PATH, or check that Node.js is correctly installed. Try running "which claude" or "claude --version" in your terminal to verify.';
|
||||||
}
|
}
|
||||||
|
|
||||||
// Claude Code process crash - extract exit code for diagnostics
|
// Claude Code process crash
|
||||||
if (rawMessage.includes('Claude Code process exited')) {
|
if (rawMessage.includes('Claude Code process exited')) {
|
||||||
const exitCodeMatch = rawMessage.match(/exited with code (\d+)/);
|
return 'Claude exited unexpectedly. Try again. If it keeps happening, re-run `claude login` or update your API key in Setup.';
|
||||||
const exitCode = exitCodeMatch ? exitCodeMatch[1] : 'unknown';
|
|
||||||
logger.error(`[BacklogPlan] Claude process exit code: ${exitCode}`);
|
|
||||||
return `Claude exited unexpectedly (exit code: ${exitCode}). This is usually a transient issue. Try again. If it keeps happening, re-run \`claude login\` or update your API key in Setup.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Claude Code process killed by signal
|
|
||||||
if (rawMessage.includes('Claude Code process terminated by signal')) {
|
|
||||||
const signalMatch = rawMessage.match(/terminated by signal (\w+)/);
|
|
||||||
const signal = signalMatch ? signalMatch[1] : 'unknown';
|
|
||||||
logger.error(`[BacklogPlan] Claude process terminated by signal: ${signal}`);
|
|
||||||
return `Claude was terminated by signal ${signal}. This may indicate a resource issue. Try again.`;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rate limiting
|
// Rate limiting
|
||||||
|
|||||||
@@ -3,9 +3,6 @@
|
|||||||
*
|
*
|
||||||
* Model is configurable via phaseModels.backlogPlanningModel in settings
|
* Model is configurable via phaseModels.backlogPlanningModel in settings
|
||||||
* (defaults to Sonnet). Can be overridden per-call via model parameter.
|
* (defaults to Sonnet). Can be overridden per-call via model parameter.
|
||||||
*
|
|
||||||
* Includes automatic retry for transient CLI failures (e.g., "Claude Code
|
|
||||||
* process exited unexpectedly") to improve reliability.
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { EventEmitter } from '../../lib/events.js';
|
import type { EventEmitter } from '../../lib/events.js';
|
||||||
@@ -15,10 +12,8 @@ import {
|
|||||||
isCursorModel,
|
isCursorModel,
|
||||||
stripProviderPrefix,
|
stripProviderPrefix,
|
||||||
type ThinkingLevel,
|
type ThinkingLevel,
|
||||||
type SystemPromptPreset,
|
|
||||||
} from '@automaker/types';
|
} from '@automaker/types';
|
||||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||||
import { getCurrentBranch } from '@automaker/git-utils';
|
|
||||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||||
import { ProviderFactory } from '../../providers/provider-factory.js';
|
import { ProviderFactory } from '../../providers/provider-factory.js';
|
||||||
import { extractJsonWithArray } from '../../lib/json-extractor.js';
|
import { extractJsonWithArray } from '../../lib/json-extractor.js';
|
||||||
@@ -32,28 +27,10 @@ import {
|
|||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
import {
|
import {
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getUseClaudeCodeSystemPromptSetting,
|
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
getPhaseModelWithOverrides,
|
getPhaseModelWithOverrides,
|
||||||
getProviderByModelId,
|
|
||||||
} from '../../lib/settings-helpers.js';
|
} from '../../lib/settings-helpers.js';
|
||||||
|
|
||||||
/** Maximum number of retry attempts for transient CLI failures */
|
|
||||||
const MAX_RETRIES = 2;
|
|
||||||
/** Delay between retries in milliseconds */
|
|
||||||
const RETRY_DELAY_MS = 2000;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if an error is retryable (transient CLI process failure)
|
|
||||||
*/
|
|
||||||
function isRetryableError(error: unknown): boolean {
|
|
||||||
const message = error instanceof Error ? error.message : String(error);
|
|
||||||
return (
|
|
||||||
message.includes('Claude Code process exited') ||
|
|
||||||
message.includes('Claude Code process terminated by signal')
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const featureLoader = new FeatureLoader();
|
const featureLoader = new FeatureLoader();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -107,53 +84,6 @@ function parsePlanResponse(response: string): BacklogPlanResult {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Try to parse a valid plan response without fallback behavior.
|
|
||||||
* Returns null if parsing fails.
|
|
||||||
*/
|
|
||||||
function tryParsePlanResponse(response: string): BacklogPlanResult | null {
|
|
||||||
if (!response || response.trim().length === 0) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return extractJsonWithArray<BacklogPlanResult>(response, 'changes', { logger });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Choose the most reliable response text between streamed assistant chunks
|
|
||||||
* and provider final result payload.
|
|
||||||
*/
|
|
||||||
function selectBestResponseText(accumulatedText: string, providerResultText: string): string {
|
|
||||||
const hasAccumulated = accumulatedText.trim().length > 0;
|
|
||||||
const hasProviderResult = providerResultText.trim().length > 0;
|
|
||||||
|
|
||||||
if (!hasProviderResult) {
|
|
||||||
return accumulatedText;
|
|
||||||
}
|
|
||||||
if (!hasAccumulated) {
|
|
||||||
return providerResultText;
|
|
||||||
}
|
|
||||||
|
|
||||||
const accumulatedParsed = tryParsePlanResponse(accumulatedText);
|
|
||||||
const providerParsed = tryParsePlanResponse(providerResultText);
|
|
||||||
|
|
||||||
if (providerParsed && !accumulatedParsed) {
|
|
||||||
logger.info('[BacklogPlan] Using provider result (parseable JSON)');
|
|
||||||
return providerResultText;
|
|
||||||
}
|
|
||||||
if (accumulatedParsed && !providerParsed) {
|
|
||||||
logger.info('[BacklogPlan] Keeping accumulated text (parseable JSON)');
|
|
||||||
return accumulatedText;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (providerResultText.length > accumulatedText.length) {
|
|
||||||
logger.info('[BacklogPlan] Using provider result (longer content)');
|
|
||||||
return providerResultText;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('[BacklogPlan] Keeping accumulated text (longer content)');
|
|
||||||
return accumulatedText;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generate a backlog modification plan based on user prompt
|
* Generate a backlog modification plan based on user prompt
|
||||||
*/
|
*/
|
||||||
@@ -163,40 +93,11 @@ export async function generateBacklogPlan(
|
|||||||
events: EventEmitter,
|
events: EventEmitter,
|
||||||
abortController: AbortController,
|
abortController: AbortController,
|
||||||
settingsService?: SettingsService,
|
settingsService?: SettingsService,
|
||||||
model?: string,
|
model?: string
|
||||||
branchName?: string
|
|
||||||
): Promise<BacklogPlanResult> {
|
): Promise<BacklogPlanResult> {
|
||||||
try {
|
try {
|
||||||
// Load current features
|
// Load current features
|
||||||
const allFeatures = await featureLoader.getAll(projectPath);
|
const features = await featureLoader.getAll(projectPath);
|
||||||
|
|
||||||
// Filter features by branch if specified (worktree-scoped backlog)
|
|
||||||
let features: Feature[];
|
|
||||||
if (branchName) {
|
|
||||||
// Determine the primary branch so unassigned features show for the main worktree
|
|
||||||
let primaryBranch: string | null = null;
|
|
||||||
try {
|
|
||||||
primaryBranch = await getCurrentBranch(projectPath);
|
|
||||||
} catch {
|
|
||||||
// If git fails, fall back to 'main' so unassigned features are visible
|
|
||||||
// when branchName matches a common default branch name
|
|
||||||
primaryBranch = 'main';
|
|
||||||
}
|
|
||||||
const isMainBranch = branchName === primaryBranch;
|
|
||||||
|
|
||||||
features = allFeatures.filter((f) => {
|
|
||||||
if (!f.branchName) {
|
|
||||||
// Unassigned features belong to the main/primary worktree
|
|
||||||
return isMainBranch;
|
|
||||||
}
|
|
||||||
return f.branchName === branchName;
|
|
||||||
});
|
|
||||||
logger.info(
|
|
||||||
`[BacklogPlan] Filtered to ${features.length}/${allFeatures.length} features for branch: ${branchName}`
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
features = allFeatures;
|
|
||||||
}
|
|
||||||
|
|
||||||
events.emit('backlog-plan:event', {
|
events.emit('backlog-plan:event', {
|
||||||
type: 'backlog_plan_progress',
|
type: 'backlog_plan_progress',
|
||||||
@@ -232,35 +133,6 @@ export async function generateBacklogPlan(
|
|||||||
effectiveModel = resolved.model;
|
effectiveModel = resolved.model;
|
||||||
thinkingLevel = resolved.thinkingLevel;
|
thinkingLevel = resolved.thinkingLevel;
|
||||||
credentials = await settingsService?.getCredentials();
|
credentials = await settingsService?.getCredentials();
|
||||||
// Resolve Claude-compatible provider when client sends a model (e.g. MiniMax, GLM)
|
|
||||||
if (settingsService) {
|
|
||||||
const providerResult = await getProviderByModelId(
|
|
||||||
effectiveModel,
|
|
||||||
settingsService,
|
|
||||||
'[BacklogPlan]'
|
|
||||||
);
|
|
||||||
if (providerResult.provider) {
|
|
||||||
claudeCompatibleProvider = providerResult.provider;
|
|
||||||
if (providerResult.credentials) {
|
|
||||||
credentials = providerResult.credentials;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Fallback: use phase settings provider if model lookup found nothing (e.g. model
|
|
||||||
// string format differs from provider's model id, but backlog planning phase has providerId).
|
|
||||||
if (!claudeCompatibleProvider) {
|
|
||||||
const phaseResult = await getPhaseModelWithOverrides(
|
|
||||||
'backlogPlanningModel',
|
|
||||||
settingsService,
|
|
||||||
projectPath,
|
|
||||||
'[BacklogPlan]'
|
|
||||||
);
|
|
||||||
const phaseResolved = resolvePhaseModel(phaseResult.phaseModel);
|
|
||||||
if (phaseResult.provider && phaseResolved.model === effectiveModel) {
|
|
||||||
claudeCompatibleProvider = phaseResult.provider;
|
|
||||||
credentials = phaseResult.credentials ?? credentials;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if (settingsService) {
|
} else if (settingsService) {
|
||||||
// Use settings-based model with provider info
|
// Use settings-based model with provider info
|
||||||
const phaseResult = await getPhaseModelWithOverrides(
|
const phaseResult = await getPhaseModelWithOverrides(
|
||||||
@@ -290,23 +162,17 @@ export async function generateBacklogPlan(
|
|||||||
// Strip provider prefix - providers expect bare model IDs
|
// Strip provider prefix - providers expect bare model IDs
|
||||||
const bareModel = stripProviderPrefix(effectiveModel);
|
const bareModel = stripProviderPrefix(effectiveModel);
|
||||||
|
|
||||||
// Get autoLoadClaudeMd and useClaudeCodeSystemPrompt settings
|
// Get autoLoadClaudeMd setting
|
||||||
const autoLoadClaudeMd = await getAutoLoadClaudeMdSetting(
|
const autoLoadClaudeMd = await getAutoLoadClaudeMdSetting(
|
||||||
projectPath,
|
projectPath,
|
||||||
settingsService,
|
settingsService,
|
||||||
'[BacklogPlan]'
|
'[BacklogPlan]'
|
||||||
);
|
);
|
||||||
const useClaudeCodeSystemPrompt = await getUseClaudeCodeSystemPromptSetting(
|
|
||||||
projectPath,
|
|
||||||
settingsService,
|
|
||||||
'[BacklogPlan]'
|
|
||||||
);
|
|
||||||
|
|
||||||
// For Cursor models, we need to combine prompts with explicit instructions
|
// For Cursor models, we need to combine prompts with explicit instructions
|
||||||
// because Cursor doesn't support systemPrompt separation like Claude SDK
|
// because Cursor doesn't support systemPrompt separation like Claude SDK
|
||||||
let finalPrompt = userPrompt;
|
let finalPrompt = userPrompt;
|
||||||
let finalSystemPrompt: string | SystemPromptPreset | undefined = systemPrompt;
|
let finalSystemPrompt: string | undefined = systemPrompt;
|
||||||
let finalSettingSources: Array<'user' | 'project' | 'local'> | undefined;
|
|
||||||
|
|
||||||
if (isCursorModel(effectiveModel)) {
|
if (isCursorModel(effectiveModel)) {
|
||||||
logger.info('[BacklogPlan] Using Cursor model - adding explicit no-file-write instructions');
|
logger.info('[BacklogPlan] Using Cursor model - adding explicit no-file-write instructions');
|
||||||
@@ -321,65 +187,25 @@ CRITICAL INSTRUCTIONS:
|
|||||||
|
|
||||||
${userPrompt}`;
|
${userPrompt}`;
|
||||||
finalSystemPrompt = undefined; // System prompt is now embedded in the user prompt
|
finalSystemPrompt = undefined; // System prompt is now embedded in the user prompt
|
||||||
} else if (claudeCompatibleProvider) {
|
|
||||||
// Claude-compatible providers (MiniMax, GLM, etc.) use a plain API; do not use
|
|
||||||
// the claude_code preset (which is for Claude CLI/subprocess and can break the request).
|
|
||||||
finalSystemPrompt = systemPrompt;
|
|
||||||
} else if (useClaudeCodeSystemPrompt) {
|
|
||||||
// Use claude_code preset for native Claude so the SDK subprocess
|
|
||||||
// authenticates via CLI OAuth or API key the same way all other SDK calls do.
|
|
||||||
finalSystemPrompt = {
|
|
||||||
type: 'preset',
|
|
||||||
preset: 'claude_code',
|
|
||||||
append: systemPrompt,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
// Include settingSources when autoLoadClaudeMd is enabled
|
|
||||||
if (autoLoadClaudeMd) {
|
|
||||||
finalSettingSources = ['user', 'project'];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Execute the query with retry logic for transient CLI failures
|
// Execute the query
|
||||||
const queryOptions = {
|
const stream = provider.executeQuery({
|
||||||
prompt: finalPrompt,
|
prompt: finalPrompt,
|
||||||
model: bareModel,
|
model: bareModel,
|
||||||
cwd: projectPath,
|
cwd: projectPath,
|
||||||
systemPrompt: finalSystemPrompt,
|
systemPrompt: finalSystemPrompt,
|
||||||
maxTurns: 1,
|
maxTurns: 1,
|
||||||
tools: [] as string[], // Disable all built-in tools - plan generation only needs text output
|
allowedTools: [], // No tools needed for this
|
||||||
abortController,
|
abortController,
|
||||||
settingSources: finalSettingSources,
|
settingSources: autoLoadClaudeMd ? ['user', 'project'] : undefined,
|
||||||
|
readOnly: true, // Plan generation only generates text, doesn't write files
|
||||||
thinkingLevel, // Pass thinking level for extended thinking
|
thinkingLevel, // Pass thinking level for extended thinking
|
||||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
};
|
});
|
||||||
|
|
||||||
let responseText = '';
|
let responseText = '';
|
||||||
let bestResponseText = ''; // Preserve best response across all retry attempts
|
|
||||||
let recoveredResult: BacklogPlanResult | null = null;
|
|
||||||
let lastError: unknown = null;
|
|
||||||
|
|
||||||
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
|
|
||||||
if (abortController.signal.aborted) {
|
|
||||||
throw new Error('Generation aborted');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (attempt > 0) {
|
|
||||||
logger.info(
|
|
||||||
`[BacklogPlan] Retry attempt ${attempt}/${MAX_RETRIES} after transient failure`
|
|
||||||
);
|
|
||||||
events.emit('backlog-plan:event', {
|
|
||||||
type: 'backlog_plan_progress',
|
|
||||||
content: `Retrying... (attempt ${attempt + 1}/${MAX_RETRIES + 1})`,
|
|
||||||
});
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, RETRY_DELAY_MS));
|
|
||||||
}
|
|
||||||
|
|
||||||
let accumulatedText = '';
|
|
||||||
let providerResultText = '';
|
|
||||||
|
|
||||||
try {
|
|
||||||
const stream = provider.executeQuery(queryOptions);
|
|
||||||
|
|
||||||
for await (const msg of stream) {
|
for await (const msg of stream) {
|
||||||
if (abortController.signal.aborted) {
|
if (abortController.signal.aborted) {
|
||||||
@@ -390,76 +216,25 @@ ${userPrompt}`;
|
|||||||
if (msg.message?.content) {
|
if (msg.message?.content) {
|
||||||
for (const block of msg.message.content) {
|
for (const block of msg.message.content) {
|
||||||
if (block.type === 'text') {
|
if (block.type === 'text') {
|
||||||
accumulatedText += block.text;
|
responseText += block.text;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if (msg.type === 'result' && msg.subtype === 'success' && msg.result) {
|
} else if (msg.type === 'result' && msg.subtype === 'success' && msg.result) {
|
||||||
providerResultText = msg.result;
|
// Use result if it's a final accumulated message (from Cursor provider)
|
||||||
logger.info(
|
logger.info('[BacklogPlan] Received result from Cursor, length:', msg.result.length);
|
||||||
'[BacklogPlan] Received result from provider, length:',
|
logger.info('[BacklogPlan] Previous responseText length:', responseText.length);
|
||||||
providerResultText.length
|
if (msg.result.length > responseText.length) {
|
||||||
);
|
logger.info('[BacklogPlan] Using Cursor result (longer than accumulated text)');
|
||||||
logger.info('[BacklogPlan] Accumulated response length:', accumulatedText.length);
|
responseText = msg.result;
|
||||||
|
} else {
|
||||||
|
logger.info('[BacklogPlan] Keeping accumulated text (longer than Cursor result)');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
responseText = selectBestResponseText(accumulatedText, providerResultText);
|
|
||||||
|
|
||||||
// If we got here, the stream completed successfully
|
|
||||||
lastError = null;
|
|
||||||
break;
|
|
||||||
} catch (error) {
|
|
||||||
lastError = error;
|
|
||||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
||||||
responseText = selectBestResponseText(accumulatedText, providerResultText);
|
|
||||||
|
|
||||||
// Preserve the best response text across all attempts so that if a retry
|
|
||||||
// crashes immediately (empty response), we can still recover from an earlier attempt
|
|
||||||
bestResponseText = selectBestResponseText(bestResponseText, responseText);
|
|
||||||
|
|
||||||
// Claude SDK can occasionally exit non-zero after emitting a complete response.
|
|
||||||
// If we already have valid JSON, recover instead of failing the entire planning flow.
|
|
||||||
if (isRetryableError(error)) {
|
|
||||||
const parsed = tryParsePlanResponse(bestResponseText);
|
|
||||||
if (parsed) {
|
|
||||||
logger.warn(
|
|
||||||
'[BacklogPlan] Recovered from transient CLI exit using accumulated valid response'
|
|
||||||
);
|
|
||||||
recoveredResult = parsed;
|
|
||||||
lastError = null;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// On final retryable failure, degrade gracefully if we have text from any attempt.
|
|
||||||
if (attempt >= MAX_RETRIES && bestResponseText.trim().length > 0) {
|
|
||||||
logger.warn(
|
|
||||||
'[BacklogPlan] Final retryable CLI failure with non-empty response, attempting fallback parse'
|
|
||||||
);
|
|
||||||
recoveredResult = parsePlanResponse(bestResponseText);
|
|
||||||
lastError = null;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Only retry on transient CLI failures, not on user aborts or other errors
|
|
||||||
if (!isRetryableError(error) || attempt >= MAX_RETRIES) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.warn(
|
|
||||||
`[BacklogPlan] Transient CLI failure (attempt ${attempt + 1}/${MAX_RETRIES + 1}): ${errorMessage}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we exhausted retries, throw the last error
|
|
||||||
if (lastError) {
|
|
||||||
throw lastError;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse the response
|
// Parse the response
|
||||||
const result = recoveredResult ?? parsePlanResponse(responseText);
|
const result = parsePlanResponse(responseText);
|
||||||
|
|
||||||
await saveBacklogPlan(projectPath, {
|
await saveBacklogPlan(projectPath, {
|
||||||
savedAt: new Date().toISOString(),
|
savedAt: new Date().toISOString(),
|
||||||
|
|||||||
@@ -17,11 +17,10 @@ import type { SettingsService } from '../../../services/settings-service.js';
|
|||||||
export function createGenerateHandler(events: EventEmitter, settingsService?: SettingsService) {
|
export function createGenerateHandler(events: EventEmitter, settingsService?: SettingsService) {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { projectPath, prompt, model, branchName } = req.body as {
|
const { projectPath, prompt, model } = req.body as {
|
||||||
projectPath: string;
|
projectPath: string;
|
||||||
prompt: string;
|
prompt: string;
|
||||||
model?: string;
|
model?: string;
|
||||||
branchName?: string;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!projectPath) {
|
if (!projectPath) {
|
||||||
@@ -43,29 +42,27 @@ export function createGenerateHandler(events: EventEmitter, settingsService?: Se
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const abortController = new AbortController();
|
setRunningState(true);
|
||||||
setRunningState(true, abortController);
|
|
||||||
setRunningDetails({
|
setRunningDetails({
|
||||||
projectPath,
|
projectPath,
|
||||||
prompt,
|
prompt,
|
||||||
model,
|
model,
|
||||||
startedAt: new Date().toISOString(),
|
startedAt: new Date().toISOString(),
|
||||||
});
|
});
|
||||||
|
const abortController = new AbortController();
|
||||||
|
setRunningState(true, abortController);
|
||||||
|
|
||||||
// Start generation in background
|
// Start generation in background
|
||||||
// Note: generateBacklogPlan handles its own error event emission
|
// Note: generateBacklogPlan handles its own error event emission,
|
||||||
// and state cleanup in its finally block, so we only log here
|
// so we only log here to avoid duplicate error toasts
|
||||||
generateBacklogPlan(
|
generateBacklogPlan(projectPath, prompt, events, abortController, settingsService, model)
|
||||||
projectPath,
|
.catch((error) => {
|
||||||
prompt,
|
|
||||||
events,
|
|
||||||
abortController,
|
|
||||||
settingsService,
|
|
||||||
model,
|
|
||||||
branchName
|
|
||||||
).catch((error) => {
|
|
||||||
// Just log - error event already emitted by generateBacklogPlan
|
// Just log - error event already emitted by generateBacklogPlan
|
||||||
logError(error, 'Generate backlog plan failed (background)');
|
logError(error, 'Generate backlog plan failed (background)');
|
||||||
|
})
|
||||||
|
.finally(() => {
|
||||||
|
setRunningState(false, null);
|
||||||
|
setRunningDetails(null);
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({ success: true });
|
res.json({ success: true });
|
||||||
|
|||||||
@@ -142,33 +142,11 @@ function mapDescribeImageError(rawMessage: string | undefined): {
|
|||||||
|
|
||||||
if (!rawMessage) return baseResponse;
|
if (!rawMessage) return baseResponse;
|
||||||
|
|
||||||
if (
|
if (rawMessage.includes('Claude Code process exited')) {
|
||||||
rawMessage.includes('Claude Code process exited') ||
|
|
||||||
rawMessage.includes('Claude Code process terminated by signal')
|
|
||||||
) {
|
|
||||||
const exitCodeMatch = rawMessage.match(/exited with code (\d+)/);
|
|
||||||
const signalMatch = rawMessage.match(/terminated by signal (\w+)/);
|
|
||||||
const detail = exitCodeMatch
|
|
||||||
? ` (exit code: ${exitCodeMatch[1]})`
|
|
||||||
: signalMatch
|
|
||||||
? ` (signal: ${signalMatch[1]})`
|
|
||||||
: '';
|
|
||||||
|
|
||||||
// Crash/OS-kill signals suggest a process crash, not an auth failure —
|
|
||||||
// omit auth recovery advice and suggest retry/reporting instead.
|
|
||||||
const crashSignals = ['SIGSEGV', 'SIGABRT', 'SIGKILL', 'SIGBUS', 'SIGTRAP'];
|
|
||||||
const isCrashSignal = signalMatch ? crashSignals.includes(signalMatch[1]) : false;
|
|
||||||
|
|
||||||
if (isCrashSignal) {
|
|
||||||
return {
|
return {
|
||||||
statusCode: 503,
|
statusCode: 503,
|
||||||
userMessage: `Claude crashed unexpectedly${detail} while describing the image. This may be a transient condition. Please try again. If the problem persists, collect logs and report the issue.`,
|
userMessage:
|
||||||
};
|
'Claude exited unexpectedly while describing the image. Try again. If it keeps happening, re-run `claude login` or update your API key in Setup so Claude can restart cleanly.',
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
statusCode: 503,
|
|
||||||
userMessage: `Claude exited unexpectedly${detail} while describing the image. This is usually a transient issue. Try again. If it keeps happening, re-run \`claude login\` or update your API key in Setup.`,
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -44,11 +44,7 @@ export function createFeaturesRoutes(
|
|||||||
validatePathParams('projectPath'),
|
validatePathParams('projectPath'),
|
||||||
createCreateHandler(featureLoader, events)
|
createCreateHandler(featureLoader, events)
|
||||||
);
|
);
|
||||||
router.post(
|
router.post('/update', validatePathParams('projectPath'), createUpdateHandler(featureLoader));
|
||||||
'/update',
|
|
||||||
validatePathParams('projectPath'),
|
|
||||||
createUpdateHandler(featureLoader, events)
|
|
||||||
);
|
|
||||||
router.post(
|
router.post(
|
||||||
'/bulk-update',
|
'/bulk-update',
|
||||||
validatePathParams('projectPath'),
|
validatePathParams('projectPath'),
|
||||||
|
|||||||
@@ -5,7 +5,6 @@
|
|||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { FeatureLoader } from '../../../services/feature-loader.js';
|
import { FeatureLoader } from '../../../services/feature-loader.js';
|
||||||
import type { Feature, FeatureStatus } from '@automaker/types';
|
import type { Feature, FeatureStatus } from '@automaker/types';
|
||||||
import type { EventEmitter } from '../../../lib/events.js';
|
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
import { createLogger } from '@automaker/utils';
|
import { createLogger } from '@automaker/utils';
|
||||||
|
|
||||||
@@ -14,7 +13,7 @@ const logger = createLogger('features/update');
|
|||||||
// Statuses that should trigger syncing to app_spec.txt
|
// Statuses that should trigger syncing to app_spec.txt
|
||||||
const SYNC_TRIGGER_STATUSES: FeatureStatus[] = ['verified', 'completed'];
|
const SYNC_TRIGGER_STATUSES: FeatureStatus[] = ['verified', 'completed'];
|
||||||
|
|
||||||
export function createUpdateHandler(featureLoader: FeatureLoader, events?: EventEmitter) {
|
export function createUpdateHandler(featureLoader: FeatureLoader) {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const {
|
const {
|
||||||
@@ -55,18 +54,8 @@ export function createUpdateHandler(featureLoader: FeatureLoader, events?: Event
|
|||||||
preEnhancementDescription
|
preEnhancementDescription
|
||||||
);
|
);
|
||||||
|
|
||||||
// Emit completion event and sync to app_spec.txt when status transitions to verified/completed
|
// Trigger sync to app_spec.txt when status changes to verified or completed
|
||||||
if (newStatus && SYNC_TRIGGER_STATUSES.includes(newStatus) && previousStatus !== newStatus) {
|
if (newStatus && SYNC_TRIGGER_STATUSES.includes(newStatus) && previousStatus !== newStatus) {
|
||||||
events?.emit('feature:completed', {
|
|
||||||
featureId,
|
|
||||||
featureName: updated.title,
|
|
||||||
projectPath,
|
|
||||||
passes: true,
|
|
||||||
message:
|
|
||||||
newStatus === 'verified' ? 'Feature verified manually' : 'Feature completed manually',
|
|
||||||
executionMode: 'manual',
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const synced = await featureLoader.syncFeatureToAppSpec(projectPath, updated);
|
const synced = await featureLoader.syncFeatureToAppSpec(projectPath, updated);
|
||||||
if (synced) {
|
if (synced) {
|
||||||
|
|||||||
@@ -24,9 +24,7 @@ export function createWriteHandler() {
|
|||||||
|
|
||||||
// Ensure parent directory exists (symlink-safe)
|
// Ensure parent directory exists (symlink-safe)
|
||||||
await mkdirSafe(path.dirname(path.resolve(filePath)));
|
await mkdirSafe(path.dirname(path.resolve(filePath)));
|
||||||
// Default content to empty string if undefined/null to prevent writing
|
await secureFs.writeFile(filePath, content, 'utf-8');
|
||||||
// "undefined" as literal text (e.g. when content field is missing from request)
|
|
||||||
await secureFs.writeFile(filePath, content ?? '', 'utf-8');
|
|
||||||
|
|
||||||
res.json({ success: true });
|
res.json({ success: true });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -1,14 +1,38 @@
|
|||||||
/**
|
/**
|
||||||
* Common utilities for GitHub routes
|
* Common utilities for GitHub routes
|
||||||
*
|
|
||||||
* Re-exports shared utilities from lib/exec-utils so route consumers
|
|
||||||
* can continue importing from this module unchanged.
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { exec } from 'child_process';
|
import { exec } from 'child_process';
|
||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
|
||||||
|
const logger = createLogger('GitHub');
|
||||||
|
|
||||||
export const execAsync = promisify(exec);
|
export const execAsync = promisify(exec);
|
||||||
|
|
||||||
// Re-export shared utilities from the canonical location
|
// Extended PATH to include common tool installation locations
|
||||||
export { extendedPath, execEnv, getErrorMessage, logError } from '../../../lib/exec-utils.js';
|
export const extendedPath = [
|
||||||
|
process.env.PATH,
|
||||||
|
'/opt/homebrew/bin',
|
||||||
|
'/usr/local/bin',
|
||||||
|
'/home/linuxbrew/.linuxbrew/bin',
|
||||||
|
`${process.env.HOME}/.local/bin`,
|
||||||
|
]
|
||||||
|
.filter(Boolean)
|
||||||
|
.join(':');
|
||||||
|
|
||||||
|
export const execEnv = {
|
||||||
|
...process.env,
|
||||||
|
PATH: extendedPath,
|
||||||
|
};
|
||||||
|
|
||||||
|
export function getErrorMessage(error: unknown): string {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
return error.message;
|
||||||
|
}
|
||||||
|
return String(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function logError(error: unknown, context: string): void {
|
||||||
|
logger.error(`${context}:`, error);
|
||||||
|
}
|
||||||
|
|||||||
@@ -5,26 +5,287 @@
|
|||||||
* for a specific pull request, providing file path and line context.
|
* for a specific pull request, providing file path and line context.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { spawn } from 'child_process';
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { getErrorMessage, logError } from './common.js';
|
import { execAsync, execEnv, getErrorMessage, logError } from './common.js';
|
||||||
import { checkGitHubRemote } from './check-github-remote.js';
|
import { checkGitHubRemote } from './check-github-remote.js';
|
||||||
import {
|
|
||||||
fetchPRReviewComments,
|
|
||||||
fetchReviewThreadResolvedStatus,
|
|
||||||
type PRReviewComment,
|
|
||||||
type ListPRReviewCommentsResult,
|
|
||||||
} from '../../../services/pr-review-comments.service.js';
|
|
||||||
|
|
||||||
// Re-export types so existing callers continue to work
|
export interface PRReviewComment {
|
||||||
export type { PRReviewComment, ListPRReviewCommentsResult };
|
id: string;
|
||||||
// Re-export service functions so existing callers continue to work
|
author: string;
|
||||||
export { fetchPRReviewComments, fetchReviewThreadResolvedStatus };
|
avatarUrl?: string;
|
||||||
|
body: string;
|
||||||
|
path?: string;
|
||||||
|
line?: number;
|
||||||
|
createdAt: string;
|
||||||
|
updatedAt?: string;
|
||||||
|
isReviewComment: boolean;
|
||||||
|
/** Whether this is an outdated review comment (code has changed since) */
|
||||||
|
isOutdated?: boolean;
|
||||||
|
/** Whether the review thread containing this comment has been resolved */
|
||||||
|
isResolved?: boolean;
|
||||||
|
/** The GraphQL node ID of the review thread (used for resolve/unresolve mutations) */
|
||||||
|
threadId?: string;
|
||||||
|
/** The diff hunk context for the comment */
|
||||||
|
diffHunk?: string;
|
||||||
|
/** The side of the diff (LEFT or RIGHT) */
|
||||||
|
side?: string;
|
||||||
|
/** The commit ID the comment was made on */
|
||||||
|
commitId?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ListPRReviewCommentsResult {
|
||||||
|
success: boolean;
|
||||||
|
comments?: PRReviewComment[];
|
||||||
|
totalCount?: number;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
interface ListPRReviewCommentsRequest {
|
interface ListPRReviewCommentsRequest {
|
||||||
projectPath: string;
|
projectPath: string;
|
||||||
prNumber: number;
|
prNumber: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Timeout for GitHub GraphQL API requests in milliseconds */
|
||||||
|
const GITHUB_API_TIMEOUT_MS = 30000;
|
||||||
|
|
||||||
|
interface GraphQLReviewThreadComment {
|
||||||
|
databaseId: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GraphQLReviewThread {
|
||||||
|
id: string;
|
||||||
|
isResolved: boolean;
|
||||||
|
comments: {
|
||||||
|
nodes: GraphQLReviewThreadComment[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GraphQLResponse {
|
||||||
|
data?: {
|
||||||
|
repository?: {
|
||||||
|
pullRequest?: {
|
||||||
|
reviewThreads?: {
|
||||||
|
nodes: GraphQLReviewThread[];
|
||||||
|
};
|
||||||
|
} | null;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
errors?: Array<{ message: string }>;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ReviewThreadInfo {
|
||||||
|
isResolved: boolean;
|
||||||
|
threadId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch review thread resolved status and thread IDs using GitHub GraphQL API.
|
||||||
|
* Returns a map of comment ID (string) -> { isResolved, threadId }.
|
||||||
|
*/
|
||||||
|
async function fetchReviewThreadResolvedStatus(
|
||||||
|
projectPath: string,
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
prNumber: number
|
||||||
|
): Promise<Map<string, ReviewThreadInfo>> {
|
||||||
|
const resolvedMap = new Map<string, ReviewThreadInfo>();
|
||||||
|
|
||||||
|
const query = `
|
||||||
|
query GetPRReviewThreads(
|
||||||
|
$owner: String!
|
||||||
|
$repo: String!
|
||||||
|
$prNumber: Int!
|
||||||
|
) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequest(number: $prNumber) {
|
||||||
|
reviewThreads(first: 100) {
|
||||||
|
nodes {
|
||||||
|
id
|
||||||
|
isResolved
|
||||||
|
comments(first: 100) {
|
||||||
|
nodes {
|
||||||
|
databaseId
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`;
|
||||||
|
|
||||||
|
const variables = { owner, repo, prNumber };
|
||||||
|
const requestBody = JSON.stringify({ query, variables });
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await new Promise<GraphQLResponse>((resolve, reject) => {
|
||||||
|
const gh = spawn('gh', ['api', 'graphql', '--input', '-'], {
|
||||||
|
cwd: projectPath,
|
||||||
|
env: execEnv,
|
||||||
|
});
|
||||||
|
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
gh.kill();
|
||||||
|
reject(new Error('GitHub GraphQL API request timed out'));
|
||||||
|
}, GITHUB_API_TIMEOUT_MS);
|
||||||
|
|
||||||
|
let stdout = '';
|
||||||
|
let stderr = '';
|
||||||
|
gh.stdout.on('data', (data: Buffer) => (stdout += data.toString()));
|
||||||
|
gh.stderr.on('data', (data: Buffer) => (stderr += data.toString()));
|
||||||
|
|
||||||
|
gh.on('close', (code) => {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
if (code !== 0) {
|
||||||
|
return reject(new Error(`gh process exited with code ${code}: ${stderr}`));
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
resolve(JSON.parse(stdout));
|
||||||
|
} catch (e) {
|
||||||
|
reject(e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
gh.stdin.write(requestBody);
|
||||||
|
gh.stdin.end();
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.errors && response.errors.length > 0) {
|
||||||
|
throw new Error(response.errors[0].message);
|
||||||
|
}
|
||||||
|
|
||||||
|
const threads = response.data?.repository?.pullRequest?.reviewThreads?.nodes ?? [];
|
||||||
|
for (const thread of threads) {
|
||||||
|
const info: ReviewThreadInfo = { isResolved: thread.isResolved, threadId: thread.id };
|
||||||
|
for (const comment of thread.comments.nodes) {
|
||||||
|
resolvedMap.set(String(comment.databaseId), info);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Log but don't fail — resolved status is best-effort
|
||||||
|
logError(error, 'Failed to fetch PR review thread resolved status');
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolvedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch all comments for a PR (both regular and inline review comments)
|
||||||
|
*/
|
||||||
|
async function fetchPRReviewComments(
|
||||||
|
projectPath: string,
|
||||||
|
owner: string,
|
||||||
|
repo: string,
|
||||||
|
prNumber: number
|
||||||
|
): Promise<PRReviewComment[]> {
|
||||||
|
const allComments: PRReviewComment[] = [];
|
||||||
|
|
||||||
|
// Fetch review thread resolved status in parallel with comment fetching
|
||||||
|
const resolvedStatusPromise = fetchReviewThreadResolvedStatus(projectPath, owner, repo, prNumber);
|
||||||
|
|
||||||
|
// 1. Fetch regular PR comments (issue-level comments)
|
||||||
|
try {
|
||||||
|
const { stdout: commentsOutput } = await execAsync(
|
||||||
|
`gh pr view ${prNumber} -R ${owner}/${repo} --json comments`,
|
||||||
|
{
|
||||||
|
cwd: projectPath,
|
||||||
|
env: execEnv,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
const commentsData = JSON.parse(commentsOutput);
|
||||||
|
const regularComments = (commentsData.comments || []).map(
|
||||||
|
(c: {
|
||||||
|
id: string;
|
||||||
|
author: { login: string; avatarUrl?: string };
|
||||||
|
body: string;
|
||||||
|
createdAt: string;
|
||||||
|
updatedAt?: string;
|
||||||
|
}) => ({
|
||||||
|
id: String(c.id),
|
||||||
|
author: c.author?.login || 'unknown',
|
||||||
|
avatarUrl: c.author?.avatarUrl,
|
||||||
|
body: c.body,
|
||||||
|
createdAt: c.createdAt,
|
||||||
|
updatedAt: c.updatedAt,
|
||||||
|
isReviewComment: false,
|
||||||
|
isOutdated: false,
|
||||||
|
// Regular PR comments are not part of review threads, so not resolvable
|
||||||
|
isResolved: false,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
allComments.push(...regularComments);
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Failed to fetch regular PR comments');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Fetch inline review comments (code-level comments with file/line info)
|
||||||
|
try {
|
||||||
|
const reviewsEndpoint = `repos/${owner}/${repo}/pulls/${prNumber}/comments`;
|
||||||
|
const { stdout: reviewsOutput } = await execAsync(`gh api ${reviewsEndpoint} --paginate`, {
|
||||||
|
cwd: projectPath,
|
||||||
|
env: execEnv,
|
||||||
|
});
|
||||||
|
|
||||||
|
const reviewsData = JSON.parse(reviewsOutput);
|
||||||
|
const reviewComments = (Array.isArray(reviewsData) ? reviewsData : []).map(
|
||||||
|
(c: {
|
||||||
|
id: number;
|
||||||
|
user: { login: string; avatar_url?: string };
|
||||||
|
body: string;
|
||||||
|
path: string;
|
||||||
|
line?: number;
|
||||||
|
original_line?: number;
|
||||||
|
created_at: string;
|
||||||
|
updated_at?: string;
|
||||||
|
diff_hunk?: string;
|
||||||
|
side?: string;
|
||||||
|
commit_id?: string;
|
||||||
|
position?: number | null;
|
||||||
|
}) => ({
|
||||||
|
id: String(c.id),
|
||||||
|
author: c.user?.login || 'unknown',
|
||||||
|
avatarUrl: c.user?.avatar_url,
|
||||||
|
body: c.body,
|
||||||
|
path: c.path,
|
||||||
|
line: c.line || c.original_line,
|
||||||
|
createdAt: c.created_at,
|
||||||
|
updatedAt: c.updated_at,
|
||||||
|
isReviewComment: true,
|
||||||
|
// A review comment is "outdated" if position is null (code has changed)
|
||||||
|
isOutdated: c.position === null && !c.line,
|
||||||
|
// isResolved will be filled in below from GraphQL data
|
||||||
|
isResolved: false,
|
||||||
|
diffHunk: c.diff_hunk,
|
||||||
|
side: c.side,
|
||||||
|
commitId: c.commit_id,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
allComments.push(...reviewComments);
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Failed to fetch inline review comments');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for resolved status and apply to inline review comments
|
||||||
|
const resolvedMap = await resolvedStatusPromise;
|
||||||
|
if (resolvedMap.size > 0) {
|
||||||
|
for (const comment of allComments) {
|
||||||
|
if (comment.isReviewComment && resolvedMap.has(comment.id)) {
|
||||||
|
const info = resolvedMap.get(comment.id);
|
||||||
|
comment.isResolved = info?.isResolved ?? false;
|
||||||
|
comment.threadId = info?.threadId;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by createdAt descending (newest first)
|
||||||
|
allComments.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
|
||||||
|
|
||||||
|
return allComments;
|
||||||
|
}
|
||||||
|
|
||||||
export function createListPRReviewCommentsHandler() {
|
export function createListPRReviewCommentsHandler() {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -5,10 +5,10 @@
|
|||||||
* identified by its GraphQL node ID (threadId).
|
* identified by its GraphQL node ID (threadId).
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { spawn } from 'child_process';
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { getErrorMessage, logError } from './common.js';
|
import { execEnv, getErrorMessage, logError } from './common.js';
|
||||||
import { checkGitHubRemote } from './check-github-remote.js';
|
import { checkGitHubRemote } from './check-github-remote.js';
|
||||||
import { executeReviewThreadMutation } from '../../../services/github-pr-comment.service.js';
|
|
||||||
|
|
||||||
export interface ResolvePRCommentResult {
|
export interface ResolvePRCommentResult {
|
||||||
success: boolean;
|
success: boolean;
|
||||||
@@ -22,6 +22,91 @@ interface ResolvePRCommentRequest {
|
|||||||
resolve: boolean;
|
resolve: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Timeout for GitHub GraphQL API requests in milliseconds */
|
||||||
|
const GITHUB_API_TIMEOUT_MS = 30000;
|
||||||
|
|
||||||
|
interface GraphQLMutationResponse {
|
||||||
|
data?: {
|
||||||
|
resolveReviewThread?: {
|
||||||
|
thread?: { isResolved: boolean; id: string } | null;
|
||||||
|
} | null;
|
||||||
|
unresolveReviewThread?: {
|
||||||
|
thread?: { isResolved: boolean; id: string } | null;
|
||||||
|
} | null;
|
||||||
|
};
|
||||||
|
errors?: Array<{ message: string }>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a GraphQL mutation to resolve or unresolve a review thread.
|
||||||
|
*/
|
||||||
|
async function executeReviewThreadMutation(
|
||||||
|
projectPath: string,
|
||||||
|
threadId: string,
|
||||||
|
resolve: boolean
|
||||||
|
): Promise<{ isResolved: boolean }> {
|
||||||
|
const mutationName = resolve ? 'resolveReviewThread' : 'unresolveReviewThread';
|
||||||
|
|
||||||
|
const mutation = `
|
||||||
|
mutation ${resolve ? 'ResolveThread' : 'UnresolveThread'}($threadId: ID!) {
|
||||||
|
${mutationName}(input: { threadId: $threadId }) {
|
||||||
|
thread {
|
||||||
|
id
|
||||||
|
isResolved
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`;
|
||||||
|
|
||||||
|
const variables = { threadId };
|
||||||
|
const requestBody = JSON.stringify({ query: mutation, variables });
|
||||||
|
|
||||||
|
const response = await new Promise<GraphQLMutationResponse>((res, rej) => {
|
||||||
|
const gh = spawn('gh', ['api', 'graphql', '--input', '-'], {
|
||||||
|
cwd: projectPath,
|
||||||
|
env: execEnv,
|
||||||
|
});
|
||||||
|
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
gh.kill();
|
||||||
|
rej(new Error('GitHub GraphQL API request timed out'));
|
||||||
|
}, GITHUB_API_TIMEOUT_MS);
|
||||||
|
|
||||||
|
let stdout = '';
|
||||||
|
let stderr = '';
|
||||||
|
gh.stdout.on('data', (data: Buffer) => (stdout += data.toString()));
|
||||||
|
gh.stderr.on('data', (data: Buffer) => (stderr += data.toString()));
|
||||||
|
|
||||||
|
gh.on('close', (code) => {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
if (code !== 0) {
|
||||||
|
return rej(new Error(`gh process exited with code ${code}: ${stderr}`));
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
res(JSON.parse(stdout));
|
||||||
|
} catch (e) {
|
||||||
|
rej(e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
gh.stdin.write(requestBody);
|
||||||
|
gh.stdin.end();
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.errors && response.errors.length > 0) {
|
||||||
|
throw new Error(response.errors[0].message);
|
||||||
|
}
|
||||||
|
|
||||||
|
const threadData = resolve
|
||||||
|
? response.data?.resolveReviewThread?.thread
|
||||||
|
: response.data?.unresolveReviewThread?.thread;
|
||||||
|
|
||||||
|
if (!threadData) {
|
||||||
|
throw new Error('No thread data returned from GitHub API');
|
||||||
|
}
|
||||||
|
|
||||||
|
return { isResolved: threadData.isResolved };
|
||||||
|
}
|
||||||
|
|
||||||
export function createResolvePRCommentHandler() {
|
export function createResolvePRCommentHandler() {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -67,9 +67,6 @@ import { createAbortOperationHandler } from './routes/abort-operation.js';
|
|||||||
import { createContinueOperationHandler } from './routes/continue-operation.js';
|
import { createContinueOperationHandler } from './routes/continue-operation.js';
|
||||||
import { createStageFilesHandler } from './routes/stage-files.js';
|
import { createStageFilesHandler } from './routes/stage-files.js';
|
||||||
import { createCheckChangesHandler } from './routes/check-changes.js';
|
import { createCheckChangesHandler } from './routes/check-changes.js';
|
||||||
import { createSetTrackingHandler } from './routes/set-tracking.js';
|
|
||||||
import { createSyncHandler } from './routes/sync.js';
|
|
||||||
import { createUpdatePRNumberHandler } from './routes/update-pr-number.js';
|
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
|
|
||||||
export function createWorktreeRoutes(
|
export function createWorktreeRoutes(
|
||||||
@@ -97,12 +94,6 @@ export function createWorktreeRoutes(
|
|||||||
router.post('/delete', validatePathParams('projectPath', 'worktreePath'), createDeleteHandler());
|
router.post('/delete', validatePathParams('projectPath', 'worktreePath'), createDeleteHandler());
|
||||||
router.post('/create-pr', createCreatePRHandler());
|
router.post('/create-pr', createCreatePRHandler());
|
||||||
router.post('/pr-info', createPRInfoHandler());
|
router.post('/pr-info', createPRInfoHandler());
|
||||||
router.post(
|
|
||||||
'/update-pr-number',
|
|
||||||
validatePathParams('worktreePath', 'projectPath?'),
|
|
||||||
requireValidWorktree,
|
|
||||||
createUpdatePRNumberHandler()
|
|
||||||
);
|
|
||||||
router.post(
|
router.post(
|
||||||
'/commit',
|
'/commit',
|
||||||
validatePathParams('worktreePath'),
|
validatePathParams('worktreePath'),
|
||||||
@@ -127,18 +118,6 @@ export function createWorktreeRoutes(
|
|||||||
requireValidWorktree,
|
requireValidWorktree,
|
||||||
createPullHandler()
|
createPullHandler()
|
||||||
);
|
);
|
||||||
router.post(
|
|
||||||
'/sync',
|
|
||||||
validatePathParams('worktreePath'),
|
|
||||||
requireValidWorktree,
|
|
||||||
createSyncHandler()
|
|
||||||
);
|
|
||||||
router.post(
|
|
||||||
'/set-tracking',
|
|
||||||
validatePathParams('worktreePath'),
|
|
||||||
requireValidWorktree,
|
|
||||||
createSetTrackingHandler()
|
|
||||||
);
|
|
||||||
router.post(
|
router.post(
|
||||||
'/checkout-branch',
|
'/checkout-branch',
|
||||||
validatePathParams('worktreePath'),
|
validatePathParams('worktreePath'),
|
||||||
|
|||||||
@@ -4,8 +4,7 @@
|
|||||||
* This endpoint handles worktree creation with proper checks:
|
* This endpoint handles worktree creation with proper checks:
|
||||||
* 1. First checks if git already has a worktree for the branch (anywhere)
|
* 1. First checks if git already has a worktree for the branch (anywhere)
|
||||||
* 2. If found, returns the existing worktree (no error)
|
* 2. If found, returns the existing worktree (no error)
|
||||||
* 3. Syncs the base branch from its remote tracking branch (fast-forward only)
|
* 3. Only creates a new worktree if none exists for the branch
|
||||||
* 4. Only creates a new worktree if none exists for the branch
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
@@ -28,10 +27,6 @@ import { execGitCommand } from '../../../lib/git.js';
|
|||||||
import { trackBranch } from './branch-tracking.js';
|
import { trackBranch } from './branch-tracking.js';
|
||||||
import { createLogger } from '@automaker/utils';
|
import { createLogger } from '@automaker/utils';
|
||||||
import { runInitScript } from '../../../services/init-script-service.js';
|
import { runInitScript } from '../../../services/init-script-service.js';
|
||||||
import {
|
|
||||||
syncBaseBranch,
|
|
||||||
type BaseBranchSyncResult,
|
|
||||||
} from '../../../services/branch-sync-service.js';
|
|
||||||
|
|
||||||
const logger = createLogger('Worktree');
|
const logger = createLogger('Worktree');
|
||||||
|
|
||||||
@@ -198,52 +193,6 @@ export function createCreateHandler(events: EventEmitter, settingsService?: Sett
|
|||||||
logger.warn(`Failed to fetch from remotes: ${getErrorMessage(fetchErr)}`);
|
logger.warn(`Failed to fetch from remotes: ${getErrorMessage(fetchErr)}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sync the base branch with its remote tracking branch (fast-forward only).
|
|
||||||
// This ensures the new worktree starts from an up-to-date state rather than
|
|
||||||
// a potentially stale local copy. If the sync fails or the branch has diverged,
|
|
||||||
// we proceed with the local copy and inform the user.
|
|
||||||
const effectiveBase = baseBranch || 'HEAD';
|
|
||||||
let syncResult: BaseBranchSyncResult = { attempted: false, synced: false };
|
|
||||||
|
|
||||||
// Only sync if the base is a real branch (not 'HEAD')
|
|
||||||
// Pass skipFetch=true because we already fetched all remotes above.
|
|
||||||
if (effectiveBase !== 'HEAD') {
|
|
||||||
logger.info(`Syncing base branch '${effectiveBase}' before creating worktree`);
|
|
||||||
syncResult = await syncBaseBranch(projectPath, effectiveBase, true);
|
|
||||||
if (syncResult.attempted) {
|
|
||||||
if (syncResult.synced) {
|
|
||||||
logger.info(`Base branch sync result: ${syncResult.message}`);
|
|
||||||
} else {
|
|
||||||
logger.warn(`Base branch sync result: ${syncResult.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// When using HEAD, try to sync the currently checked-out branch
|
|
||||||
// Pass skipFetch=true because we already fetched all remotes above.
|
|
||||||
try {
|
|
||||||
const currentBranch = await execGitCommand(
|
|
||||||
['rev-parse', '--abbrev-ref', 'HEAD'],
|
|
||||||
projectPath
|
|
||||||
);
|
|
||||||
const trimmedBranch = currentBranch.trim();
|
|
||||||
if (trimmedBranch && trimmedBranch !== 'HEAD') {
|
|
||||||
logger.info(
|
|
||||||
`Syncing current branch '${trimmedBranch}' (HEAD) before creating worktree`
|
|
||||||
);
|
|
||||||
syncResult = await syncBaseBranch(projectPath, trimmedBranch, true);
|
|
||||||
if (syncResult.attempted) {
|
|
||||||
if (syncResult.synced) {
|
|
||||||
logger.info(`HEAD branch sync result: ${syncResult.message}`);
|
|
||||||
} else {
|
|
||||||
logger.warn(`HEAD branch sync result: ${syncResult.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// Could not determine HEAD branch — skip sync
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if branch exists (using array arguments to prevent injection)
|
// Check if branch exists (using array arguments to prevent injection)
|
||||||
let branchExists = false;
|
let branchExists = false;
|
||||||
try {
|
try {
|
||||||
@@ -277,19 +226,6 @@ export function createCreateHandler(events: EventEmitter, settingsService?: Sett
|
|||||||
// normalizePath converts to forward slashes for API consistency
|
// normalizePath converts to forward slashes for API consistency
|
||||||
const absoluteWorktreePath = path.resolve(worktreePath);
|
const absoluteWorktreePath = path.resolve(worktreePath);
|
||||||
|
|
||||||
// Get the commit hash the new worktree is based on for logging
|
|
||||||
let baseCommitHash: string | undefined;
|
|
||||||
try {
|
|
||||||
const hash = await execGitCommand(['rev-parse', '--short', 'HEAD'], absoluteWorktreePath);
|
|
||||||
baseCommitHash = hash.trim();
|
|
||||||
} catch {
|
|
||||||
// Non-critical — just for logging
|
|
||||||
}
|
|
||||||
|
|
||||||
if (baseCommitHash) {
|
|
||||||
logger.info(`New worktree for '${branchName}' based on commit ${baseCommitHash}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Copy configured files into the new worktree before responding
|
// Copy configured files into the new worktree before responding
|
||||||
// This runs synchronously to ensure files are in place before any init script
|
// This runs synchronously to ensure files are in place before any init script
|
||||||
try {
|
try {
|
||||||
@@ -311,17 +247,6 @@ export function createCreateHandler(events: EventEmitter, settingsService?: Sett
|
|||||||
path: normalizePath(absoluteWorktreePath),
|
path: normalizePath(absoluteWorktreePath),
|
||||||
branch: branchName,
|
branch: branchName,
|
||||||
isNew: !branchExists,
|
isNew: !branchExists,
|
||||||
baseCommitHash,
|
|
||||||
...(syncResult.attempted
|
|
||||||
? {
|
|
||||||
syncResult: {
|
|
||||||
synced: syncResult.synced,
|
|
||||||
remote: syncResult.remote,
|
|
||||||
message: syncResult.message,
|
|
||||||
diverged: syncResult.diverged,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
: {}),
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,6 @@
|
|||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { exec } from 'child_process';
|
import { exec } from 'child_process';
|
||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
import fs from 'fs/promises';
|
|
||||||
import { isGitRepo } from '@automaker/git-utils';
|
import { isGitRepo } from '@automaker/git-utils';
|
||||||
import { getErrorMessage, logError, isValidBranchName } from '../common.js';
|
import { getErrorMessage, logError, isValidBranchName } from '../common.js';
|
||||||
import { execGitCommand } from '../../../lib/git.js';
|
import { execGitCommand } from '../../../lib/git.js';
|
||||||
@@ -47,79 +46,20 @@ export function createDeleteHandler() {
|
|||||||
});
|
});
|
||||||
branchName = stdout.trim();
|
branchName = stdout.trim();
|
||||||
} catch {
|
} catch {
|
||||||
// Could not get branch name - worktree directory may already be gone
|
// Could not get branch name
|
||||||
logger.debug('Could not determine branch for worktree, directory may be missing');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remove the worktree (using array arguments to prevent injection)
|
// Remove the worktree (using array arguments to prevent injection)
|
||||||
let removeSucceeded = false;
|
|
||||||
try {
|
try {
|
||||||
await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
|
await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
|
||||||
removeSucceeded = true;
|
|
||||||
} catch (removeError) {
|
|
||||||
// `git worktree remove` can fail if the directory is already missing
|
|
||||||
// or in a bad state. Try pruning stale worktree entries as a fallback.
|
|
||||||
logger.debug('git worktree remove failed, trying prune', {
|
|
||||||
error: getErrorMessage(removeError),
|
|
||||||
});
|
|
||||||
try {
|
|
||||||
await execGitCommand(['worktree', 'prune'], projectPath);
|
|
||||||
|
|
||||||
// Verify the specific worktree is no longer registered after prune.
|
|
||||||
// `git worktree prune` exits 0 even if worktreePath was never registered,
|
|
||||||
// so we must explicitly check the worktree list to avoid false positives.
|
|
||||||
const { stdout: listOut } = await execAsync('git worktree list --porcelain', {
|
|
||||||
cwd: projectPath,
|
|
||||||
});
|
|
||||||
// Parse porcelain output and check for an exact path match.
|
|
||||||
// Using substring .includes() can produce false positives when one
|
|
||||||
// worktree path is a prefix of another (e.g. /foo vs /foobar).
|
|
||||||
const stillRegistered = listOut
|
|
||||||
.split('\n')
|
|
||||||
.filter((line) => line.startsWith('worktree '))
|
|
||||||
.map((line) => line.slice('worktree '.length).trim())
|
|
||||||
.some((registeredPath) => registeredPath === worktreePath);
|
|
||||||
if (stillRegistered) {
|
|
||||||
// Prune didn't clean up our entry - treat as failure
|
|
||||||
throw removeError;
|
|
||||||
}
|
|
||||||
removeSucceeded = true;
|
|
||||||
} catch (pruneError) {
|
|
||||||
// If pruneError is the original removeError re-thrown, propagate it
|
|
||||||
if (pruneError === removeError) {
|
|
||||||
throw removeError;
|
|
||||||
}
|
|
||||||
logger.warn('git worktree prune also failed', {
|
|
||||||
error: getErrorMessage(pruneError),
|
|
||||||
});
|
|
||||||
// If both remove and prune fail, still try to return success
|
|
||||||
// if the worktree directory no longer exists (it may have been
|
|
||||||
// manually deleted already).
|
|
||||||
let dirExists = false;
|
|
||||||
try {
|
|
||||||
await fs.access(worktreePath);
|
|
||||||
dirExists = true;
|
|
||||||
} catch {
|
} catch {
|
||||||
// Directory doesn't exist
|
// Try with prune if remove fails
|
||||||
}
|
await execGitCommand(['worktree', 'prune'], projectPath);
|
||||||
if (dirExists) {
|
|
||||||
// Directory still exists - this is a real failure
|
|
||||||
throw removeError;
|
|
||||||
}
|
|
||||||
// Directory is gone, treat as success
|
|
||||||
removeSucceeded = true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Optionally delete the branch (only if worktree was successfully removed)
|
// Optionally delete the branch
|
||||||
let branchDeleted = false;
|
let branchDeleted = false;
|
||||||
if (
|
if (deleteBranch && branchName && branchName !== 'main' && branchName !== 'master') {
|
||||||
removeSucceeded &&
|
|
||||||
deleteBranch &&
|
|
||||||
branchName &&
|
|
||||||
branchName !== 'main' &&
|
|
||||||
branchName !== 'master'
|
|
||||||
) {
|
|
||||||
// Validate branch name to prevent command injection
|
// Validate branch name to prevent command injection
|
||||||
if (!isValidBranchName(branchName)) {
|
if (!isValidBranchName(branchName)) {
|
||||||
logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`);
|
logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`);
|
||||||
|
|||||||
@@ -5,12 +5,12 @@
|
|||||||
* 1. Discard ALL changes (when no files array is provided)
|
* 1. Discard ALL changes (when no files array is provided)
|
||||||
* - Resets staged changes (git reset HEAD)
|
* - Resets staged changes (git reset HEAD)
|
||||||
* - Discards modified tracked files (git checkout .)
|
* - Discards modified tracked files (git checkout .)
|
||||||
* - Removes untracked files and directories (git clean -ffd)
|
* - Removes untracked files and directories (git clean -fd)
|
||||||
*
|
*
|
||||||
* 2. Discard SELECTED files (when files array is provided)
|
* 2. Discard SELECTED files (when files array is provided)
|
||||||
* - Unstages selected staged files (git reset HEAD -- <files>)
|
* - Unstages selected staged files (git reset HEAD -- <files>)
|
||||||
* - Reverts selected tracked file changes (git checkout -- <files>)
|
* - Reverts selected tracked file changes (git checkout -- <files>)
|
||||||
* - Removes selected untracked files (git clean -ffd -- <files>)
|
* - Removes selected untracked files (git clean -fd -- <files>)
|
||||||
*
|
*
|
||||||
* Note: Git repository validation (isGitRepo) is handled by
|
* Note: Git repository validation (isGitRepo) is handled by
|
||||||
* the requireGitRepoOnly middleware in index.ts
|
* the requireGitRepoOnly middleware in index.ts
|
||||||
@@ -52,22 +52,6 @@ function validateFilePath(filePath: string, worktreePath: string): boolean {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse a file path from git status --porcelain output, handling renames.
|
|
||||||
* For renamed files (R status), git reports "old_path -> new_path" and
|
|
||||||
* we need the new path to match what parseGitStatus() returns in git-utils.
|
|
||||||
*/
|
|
||||||
function parseFilePath(rawPath: string, indexStatus: string, workTreeStatus: string): string {
|
|
||||||
const trimmedPath = rawPath.trim();
|
|
||||||
if (indexStatus === 'R' || workTreeStatus === 'R') {
|
|
||||||
const arrowIndex = trimmedPath.indexOf(' -> ');
|
|
||||||
if (arrowIndex !== -1) {
|
|
||||||
return trimmedPath.slice(arrowIndex + 4);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return trimmedPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createDiscardChangesHandler() {
|
export function createDiscardChangesHandler() {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
@@ -107,16 +91,11 @@ export function createDiscardChangesHandler() {
|
|||||||
|
|
||||||
// Parse the status output to categorize files
|
// Parse the status output to categorize files
|
||||||
// Git --porcelain format: XY PATH where X=index status, Y=worktree status
|
// Git --porcelain format: XY PATH where X=index status, Y=worktree status
|
||||||
// For renamed files: XY OLD_PATH -> NEW_PATH
|
// Preserve the exact two-character XY status (no trim) to keep index vs worktree info
|
||||||
const statusLines = status.trim().split('\n').filter(Boolean);
|
const statusLines = status.trim().split('\n').filter(Boolean);
|
||||||
const allFiles = statusLines.map((line) => {
|
const allFiles = statusLines.map((line) => {
|
||||||
const fileStatus = line.substring(0, 2);
|
const fileStatus = line.substring(0, 2);
|
||||||
const rawPath = line.slice(3);
|
const filePath = line.slice(3).trim();
|
||||||
const indexStatus = fileStatus.charAt(0);
|
|
||||||
const workTreeStatus = fileStatus.charAt(1);
|
|
||||||
// Parse path consistently with parseGitStatus() in git-utils,
|
|
||||||
// which extracts the new path for renames
|
|
||||||
const filePath = parseFilePath(rawPath, indexStatus, workTreeStatus);
|
|
||||||
return { status: fileStatus, path: filePath };
|
return { status: fileStatus, path: filePath };
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -143,12 +122,8 @@ export function createDiscardChangesHandler() {
|
|||||||
const untrackedFiles: string[] = []; // Untracked files (?)
|
const untrackedFiles: string[] = []; // Untracked files (?)
|
||||||
const warnings: string[] = [];
|
const warnings: string[] = [];
|
||||||
|
|
||||||
// Track which requested files were matched so we can handle unmatched ones
|
|
||||||
const matchedFiles = new Set<string>();
|
|
||||||
|
|
||||||
for (const file of allFiles) {
|
for (const file of allFiles) {
|
||||||
if (!filesToDiscard.has(file.path)) continue;
|
if (!filesToDiscard.has(file.path)) continue;
|
||||||
matchedFiles.add(file.path);
|
|
||||||
|
|
||||||
// file.status is the raw two-character XY git porcelain status (no trim)
|
// file.status is the raw two-character XY git porcelain status (no trim)
|
||||||
// X = index/staging status, Y = worktree status
|
// X = index/staging status, Y = worktree status
|
||||||
@@ -176,16 +151,6 @@ export function createDiscardChangesHandler() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle files from the UI that didn't match any entry in allFiles.
|
|
||||||
// This can happen due to timing differences between the UI loading diffs
|
|
||||||
// and the discard request, or path format differences.
|
|
||||||
// Attempt to clean unmatched files directly as untracked files.
|
|
||||||
for (const requestedFile of files) {
|
|
||||||
if (!matchedFiles.has(requestedFile)) {
|
|
||||||
untrackedFiles.push(requestedFile);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 1. Unstage selected staged files (using execFile to bypass shell)
|
// 1. Unstage selected staged files (using execFile to bypass shell)
|
||||||
if (stagedFiles.length > 0) {
|
if (stagedFiles.length > 0) {
|
||||||
try {
|
try {
|
||||||
@@ -209,10 +174,9 @@ export function createDiscardChangesHandler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 3. Remove selected untracked files
|
// 3. Remove selected untracked files
|
||||||
// Use -ffd (double force) to also handle nested git repositories
|
|
||||||
if (untrackedFiles.length > 0) {
|
if (untrackedFiles.length > 0) {
|
||||||
try {
|
try {
|
||||||
await execGitCommand(['clean', '-ffd', '--', ...untrackedFiles], worktreePath);
|
await execGitCommand(['clean', '-fd', '--', ...untrackedFiles], worktreePath);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const msg = getErrorMessage(error);
|
const msg = getErrorMessage(error);
|
||||||
logError(error, `Failed to clean untracked files: ${msg}`);
|
logError(error, `Failed to clean untracked files: ${msg}`);
|
||||||
@@ -270,12 +234,11 @@ export function createDiscardChangesHandler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 3. Remove untracked files and directories
|
// 3. Remove untracked files and directories
|
||||||
// Use -ffd (double force) to also handle nested git repositories
|
|
||||||
try {
|
try {
|
||||||
await execGitCommand(['clean', '-ffd', '--'], worktreePath);
|
await execGitCommand(['clean', '-fd'], worktreePath);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const msg = getErrorMessage(error);
|
const msg = getErrorMessage(error);
|
||||||
logError(error, `git clean -ffd failed: ${msg}`);
|
logError(error, `git clean -fd failed: ${msg}`);
|
||||||
warnings.push(`Failed to remove untracked files: ${msg}`);
|
warnings.push(`Failed to remove untracked files: ${msg}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { execFile } from 'child_process';
|
import { exec } from 'child_process';
|
||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
import { existsSync } from 'fs';
|
import { existsSync } from 'fs';
|
||||||
import { join } from 'path';
|
import { join } from 'path';
|
||||||
@@ -20,7 +20,7 @@ import { getErrorMessage, logError } from '../common.js';
|
|||||||
import { getPhaseModelWithOverrides } from '../../../lib/settings-helpers.js';
|
import { getPhaseModelWithOverrides } from '../../../lib/settings-helpers.js';
|
||||||
|
|
||||||
const logger = createLogger('GenerateCommitMessage');
|
const logger = createLogger('GenerateCommitMessage');
|
||||||
const execFileAsync = promisify(execFile);
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
/** Timeout for AI provider calls in milliseconds (30 seconds) */
|
/** Timeout for AI provider calls in milliseconds (30 seconds) */
|
||||||
const AI_TIMEOUT_MS = 30_000;
|
const AI_TIMEOUT_MS = 30_000;
|
||||||
@@ -33,40 +33,21 @@ async function* withTimeout<T>(
|
|||||||
generator: AsyncIterable<T>,
|
generator: AsyncIterable<T>,
|
||||||
timeoutMs: number
|
timeoutMs: number
|
||||||
): AsyncGenerator<T, void, unknown> {
|
): AsyncGenerator<T, void, unknown> {
|
||||||
let timerId: ReturnType<typeof setTimeout> | undefined;
|
|
||||||
|
|
||||||
const timeoutPromise = new Promise<never>((_, reject) => {
|
const timeoutPromise = new Promise<never>((_, reject) => {
|
||||||
timerId = setTimeout(
|
setTimeout(() => reject(new Error(`AI provider timed out after ${timeoutMs}ms`)), timeoutMs);
|
||||||
() => reject(new Error(`AI provider timed out after ${timeoutMs}ms`)),
|
|
||||||
timeoutMs
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const iterator = generator[Symbol.asyncIterator]();
|
const iterator = generator[Symbol.asyncIterator]();
|
||||||
let done = false;
|
let done = false;
|
||||||
|
|
||||||
try {
|
|
||||||
while (!done) {
|
while (!done) {
|
||||||
const result = await Promise.race([iterator.next(), timeoutPromise]).catch(async (err) => {
|
const result = await Promise.race([iterator.next(), timeoutPromise]);
|
||||||
// Capture the original error, then attempt to close the iterator.
|
|
||||||
// If iterator.return() throws, log it but rethrow the original error
|
|
||||||
// so the timeout error (not the teardown error) is preserved.
|
|
||||||
try {
|
|
||||||
await iterator.return?.();
|
|
||||||
} catch (teardownErr) {
|
|
||||||
logger.warn('Error during iterator cleanup after timeout:', teardownErr);
|
|
||||||
}
|
|
||||||
throw err;
|
|
||||||
});
|
|
||||||
if (result.done) {
|
if (result.done) {
|
||||||
done = true;
|
done = true;
|
||||||
} else {
|
} else {
|
||||||
yield result.value;
|
yield result.value;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} finally {
|
|
||||||
clearTimeout(timerId);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -136,14 +117,14 @@ export function createGenerateCommitMessageHandler(
|
|||||||
let diff = '';
|
let diff = '';
|
||||||
try {
|
try {
|
||||||
// First try to get staged changes
|
// First try to get staged changes
|
||||||
const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], {
|
const { stdout: stagedDiff } = await execAsync('git diff --cached', {
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
||||||
});
|
});
|
||||||
|
|
||||||
// If no staged changes, get unstaged changes
|
// If no staged changes, get unstaged changes
|
||||||
if (!stagedDiff.trim()) {
|
if (!stagedDiff.trim()) {
|
||||||
const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], {
|
const { stdout: unstagedDiff } = await execAsync('git diff', {
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
||||||
});
|
});
|
||||||
@@ -241,7 +222,7 @@ export function createGenerateCommitMessageHandler(
|
|||||||
|
|
||||||
const message = responseText.trim();
|
const message = responseText.trim();
|
||||||
|
|
||||||
if (!message) {
|
if (!message || message.trim().length === 0) {
|
||||||
logger.warn('Received empty response from model');
|
logger.warn('Received empty response from model');
|
||||||
const response: GenerateCommitMessageErrorResponse = {
|
const response: GenerateCommitMessageErrorResponse = {
|
||||||
success: false,
|
success: false,
|
||||||
|
|||||||
@@ -53,9 +53,7 @@ Rules:
|
|||||||
- Focus on the user-facing impact when possible
|
- Focus on the user-facing impact when possible
|
||||||
- If there are breaking changes, mention them prominently
|
- If there are breaking changes, mention them prominently
|
||||||
- The diff may include both committed changes and uncommitted working directory changes. Treat all changes as part of the PR since uncommitted changes will be committed when the PR is created
|
- The diff may include both committed changes and uncommitted working directory changes. Treat all changes as part of the PR since uncommitted changes will be committed when the PR is created
|
||||||
- Do NOT distinguish between committed and uncommitted changes in the output - describe all changes as a unified set of PR changes
|
- Do NOT distinguish between committed and uncommitted changes in the output - describe all changes as a unified set of PR changes`;
|
||||||
- EXCLUDE any files that are gitignored (e.g., node_modules, dist, build, .env files, lock files, generated files, binary artifacts, coverage reports, cache directories). These should not be mentioned in the description even if they appear in the diff
|
|
||||||
- Focus only on meaningful source code changes that are tracked by git and relevant to reviewers`;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Wraps an async generator with a timeout.
|
* Wraps an async generator with a timeout.
|
||||||
@@ -170,125 +168,127 @@ export function createGeneratePRDescriptionHandler(
|
|||||||
// Determine the base branch for comparison
|
// Determine the base branch for comparison
|
||||||
const base = baseBranch || 'main';
|
const base = baseBranch || 'main';
|
||||||
|
|
||||||
// Collect diffs in three layers and combine them:
|
// Get the diff between current branch and base branch (committed changes)
|
||||||
// 1. Committed changes on the branch: `git diff base...HEAD`
|
// Track whether the diff method used only includes committed changes.
|
||||||
// 2. Staged (cached) changes not yet committed: `git diff --cached`
|
// `git diff base...HEAD` and `git diff origin/base...HEAD` only show committed changes,
|
||||||
// 3. Unstaged changes to tracked files: `git diff` (no --cached flag)
|
// while the fallback methods (`git diff HEAD`, `git diff --cached + git diff`) already
|
||||||
//
|
// include uncommitted working directory changes.
|
||||||
// Untracked files are intentionally excluded — they are typically build artifacts,
|
let diff = '';
|
||||||
// planning files, hidden dotfiles, or other files unrelated to the PR.
|
let diffIncludesUncommitted = false;
|
||||||
// `git diff` and `git diff --cached` only show changes to files already tracked by git,
|
|
||||||
// which is exactly the correct scope.
|
|
||||||
//
|
|
||||||
// We combine all three sources and deduplicate by file path so that a file modified
|
|
||||||
// in commits AND with additional uncommitted changes is not double-counted.
|
|
||||||
|
|
||||||
/** Parse a unified diff into per-file hunks keyed by file path */
|
|
||||||
function parseDiffIntoFileHunks(diffText: string): Map<string, string> {
|
|
||||||
const fileHunks = new Map<string, string>();
|
|
||||||
if (!diffText.trim()) return fileHunks;
|
|
||||||
|
|
||||||
// Split on "diff --git" boundaries (keep the delimiter)
|
|
||||||
const sections = diffText.split(/(?=^diff --git )/m);
|
|
||||||
for (const section of sections) {
|
|
||||||
if (!section.trim()) continue;
|
|
||||||
// Use a back-reference pattern so the "b/" side must match the "a/" capture,
|
|
||||||
// correctly handling paths that contain " b/" in their name.
|
|
||||||
// Falls back to a two-capture pattern to handle renames (a/ and b/ differ).
|
|
||||||
const backrefMatch = section.match(/^diff --git a\/(.+) b\/\1$/m);
|
|
||||||
const renameMatch = !backrefMatch ? section.match(/^diff --git a\/(.+) b\/(.+)$/m) : null;
|
|
||||||
const match = backrefMatch || renameMatch;
|
|
||||||
if (match) {
|
|
||||||
// Prefer the backref capture (identical paths); for renames use the destination (match[2])
|
|
||||||
const filePath = backrefMatch ? match[1] : match[2];
|
|
||||||
// Merge hunks if the same file appears in multiple diff sources
|
|
||||||
const existing = fileHunks.get(filePath) ?? '';
|
|
||||||
fileHunks.set(filePath, existing + section);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return fileHunks;
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Step 1: committed changes (branch vs base) ---
|
|
||||||
let committedDiff = '';
|
|
||||||
try {
|
try {
|
||||||
const { stdout } = await execFileAsync('git', ['diff', `${base}...HEAD`], {
|
// First, try to get diff against the base branch
|
||||||
|
const { stdout: branchDiff } = await execFileAsync('git', ['diff', `${base}...HEAD`], {
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
maxBuffer: 1024 * 1024 * 5,
|
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
||||||
});
|
});
|
||||||
committedDiff = stdout;
|
diff = branchDiff;
|
||||||
|
// git diff base...HEAD only shows committed changes
|
||||||
|
diffIncludesUncommitted = false;
|
||||||
} catch {
|
} catch {
|
||||||
// Base branch may not exist locally; try the remote tracking branch
|
// If branch comparison fails (e.g., base branch doesn't exist locally),
|
||||||
|
// try fetching and comparing against remote base
|
||||||
try {
|
try {
|
||||||
const { stdout } = await execFileAsync('git', ['diff', `origin/${base}...HEAD`], {
|
const { stdout: remoteDiff } = await execFileAsync(
|
||||||
|
'git',
|
||||||
|
['diff', `origin/${base}...HEAD`],
|
||||||
|
{
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
maxBuffer: 1024 * 1024 * 5,
|
maxBuffer: 1024 * 1024 * 5,
|
||||||
});
|
|
||||||
committedDiff = stdout;
|
|
||||||
} catch {
|
|
||||||
// Cannot compare against base — leave committedDiff empty; the uncommitted
|
|
||||||
// changes gathered below will still be included.
|
|
||||||
logger.warn(`Could not get committed diff against ${base} or origin/${base}`);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// --- Step 2: staged changes (tracked files only) ---
|
|
||||||
let stagedDiff = '';
|
|
||||||
try {
|
|
||||||
const { stdout } = await execFileAsync('git', ['diff', '--cached'], {
|
|
||||||
cwd: worktreePath,
|
|
||||||
maxBuffer: 1024 * 1024 * 5,
|
|
||||||
});
|
|
||||||
stagedDiff = stdout;
|
|
||||||
} catch (err) {
|
|
||||||
// Non-fatal — staged diff is a best-effort supplement
|
|
||||||
logger.debug('Failed to get staged diff', err);
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Step 3: unstaged changes (tracked files only) ---
|
|
||||||
let unstagedDiff = '';
|
|
||||||
try {
|
|
||||||
const { stdout } = await execFileAsync('git', ['diff'], {
|
|
||||||
cwd: worktreePath,
|
|
||||||
maxBuffer: 1024 * 1024 * 5,
|
|
||||||
});
|
|
||||||
unstagedDiff = stdout;
|
|
||||||
} catch (err) {
|
|
||||||
// Non-fatal — unstaged diff is a best-effort supplement
|
|
||||||
logger.debug('Failed to get unstaged diff', err);
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Combine and deduplicate ---
|
|
||||||
// Build a map of filePath → diff content by concatenating hunks from all sources
|
|
||||||
// in chronological order (committed → staged → unstaged) so that no changes
|
|
||||||
// are lost when a file appears in multiple diff sources.
|
|
||||||
const combinedFileHunks = new Map<string, string>();
|
|
||||||
|
|
||||||
for (const source of [committedDiff, stagedDiff, unstagedDiff]) {
|
|
||||||
const hunks = parseDiffIntoFileHunks(source);
|
|
||||||
for (const [filePath, hunk] of hunks) {
|
|
||||||
if (combinedFileHunks.has(filePath)) {
|
|
||||||
combinedFileHunks.set(filePath, combinedFileHunks.get(filePath)! + hunk);
|
|
||||||
} else {
|
|
||||||
combinedFileHunks.set(filePath, hunk);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const diff = Array.from(combinedFileHunks.values()).join('');
|
|
||||||
|
|
||||||
// Log what files were included for observability
|
|
||||||
if (combinedFileHunks.size > 0) {
|
|
||||||
logger.info(`PR description scope: ${combinedFileHunks.size} file(s)`);
|
|
||||||
logger.debug(
|
|
||||||
`PR description scope files: ${Array.from(combinedFileHunks.keys()).join(', ')}`
|
|
||||||
);
|
);
|
||||||
|
diff = remoteDiff;
|
||||||
|
// git diff origin/base...HEAD only shows committed changes
|
||||||
|
diffIncludesUncommitted = false;
|
||||||
|
} catch {
|
||||||
|
// Fall back to getting all uncommitted + committed changes
|
||||||
|
try {
|
||||||
|
const { stdout: allDiff } = await execFileAsync('git', ['diff', 'HEAD'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
maxBuffer: 1024 * 1024 * 5,
|
||||||
|
});
|
||||||
|
diff = allDiff;
|
||||||
|
// git diff HEAD includes uncommitted changes
|
||||||
|
diffIncludesUncommitted = true;
|
||||||
|
} catch {
|
||||||
|
// Last resort: get staged + unstaged changes
|
||||||
|
const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
maxBuffer: 1024 * 1024 * 5,
|
||||||
|
});
|
||||||
|
const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
maxBuffer: 1024 * 1024 * 5,
|
||||||
|
});
|
||||||
|
diff = stagedDiff + unstagedDiff;
|
||||||
|
// These already include uncommitted changes
|
||||||
|
diffIncludesUncommitted = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Also get the commit log for context — always scoped to the selected base branch
|
// Check for uncommitted changes (staged + unstaged) to include in the description.
|
||||||
// so the log only contains commits that are part of this PR.
|
// When creating a PR, uncommitted changes will be auto-committed, so they should be
|
||||||
// We do NOT fall back to an unscoped `git log` because that would include commits
|
// reflected in the generated description. We only need to fetch uncommitted diffs
|
||||||
// from the base branch itself and produce misleading AI context.
|
// when the primary diff method (base...HEAD) was used, since it only shows committed changes.
|
||||||
|
let hasUncommittedChanges = false;
|
||||||
|
try {
|
||||||
|
const { stdout: statusOutput } = await execFileAsync('git', ['status', '--porcelain'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
hasUncommittedChanges = statusOutput.trim().length > 0;
|
||||||
|
|
||||||
|
if (hasUncommittedChanges && !diffIncludesUncommitted) {
|
||||||
|
logger.info('Uncommitted changes detected, including in PR description context');
|
||||||
|
|
||||||
|
let uncommittedDiff = '';
|
||||||
|
|
||||||
|
// Get staged changes
|
||||||
|
try {
|
||||||
|
const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
maxBuffer: 1024 * 1024 * 5,
|
||||||
|
});
|
||||||
|
if (stagedDiff.trim()) {
|
||||||
|
uncommittedDiff += stagedDiff;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore staged diff errors
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get unstaged changes (tracked files only)
|
||||||
|
try {
|
||||||
|
const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
maxBuffer: 1024 * 1024 * 5,
|
||||||
|
});
|
||||||
|
if (unstagedDiff.trim()) {
|
||||||
|
uncommittedDiff += unstagedDiff;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore unstaged diff errors
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get list of untracked files for context
|
||||||
|
const untrackedFiles = statusOutput
|
||||||
|
.split('\n')
|
||||||
|
.filter((line) => line.startsWith('??'))
|
||||||
|
.map((line) => line.substring(3).trim());
|
||||||
|
|
||||||
|
if (untrackedFiles.length > 0) {
|
||||||
|
// Add a summary of untracked (new) files as context
|
||||||
|
uncommittedDiff += `\n# New untracked files:\n${untrackedFiles.map((f) => `# + ${f}`).join('\n')}\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append uncommitted changes to the committed diff
|
||||||
|
if (uncommittedDiff.trim()) {
|
||||||
|
diff = diff + uncommittedDiff;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore errors checking for uncommitted changes
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also get the commit log for context
|
||||||
let commitLog = '';
|
let commitLog = '';
|
||||||
try {
|
try {
|
||||||
const { stdout: logOutput } = await execFileAsync(
|
const { stdout: logOutput } = await execFileAsync(
|
||||||
@@ -301,11 +301,11 @@ export function createGeneratePRDescriptionHandler(
|
|||||||
);
|
);
|
||||||
commitLog = logOutput.trim();
|
commitLog = logOutput.trim();
|
||||||
} catch {
|
} catch {
|
||||||
// Base branch not available locally — try the remote tracking branch
|
// If comparing against base fails, fall back to recent commits
|
||||||
try {
|
try {
|
||||||
const { stdout: logOutput } = await execFileAsync(
|
const { stdout: logOutput } = await execFileAsync(
|
||||||
'git',
|
'git',
|
||||||
['log', `origin/${base}..HEAD`, '--oneline', '--no-decorate'],
|
['log', '--oneline', '-10', '--no-decorate'],
|
||||||
{
|
{
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
maxBuffer: 1024 * 1024,
|
maxBuffer: 1024 * 1024,
|
||||||
@@ -313,9 +313,7 @@ export function createGeneratePRDescriptionHandler(
|
|||||||
);
|
);
|
||||||
commitLog = logOutput.trim();
|
commitLog = logOutput.trim();
|
||||||
} catch {
|
} catch {
|
||||||
// Cannot scope commit log to base branch — leave empty rather than
|
// Ignore commit log errors
|
||||||
// including unscoped commits that would pollute the AI context.
|
|
||||||
logger.warn(`Could not get commit log against ${base} or origin/${base}`);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -341,6 +339,10 @@ export function createGeneratePRDescriptionHandler(
|
|||||||
userPrompt += `\nCommit History:\n${commitLog}\n`;
|
userPrompt += `\nCommit History:\n${commitLog}\n`;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (hasUncommittedChanges) {
|
||||||
|
userPrompt += `\nNote: This branch has uncommitted changes that will be included in the PR.\n`;
|
||||||
|
}
|
||||||
|
|
||||||
if (truncatedDiff) {
|
if (truncatedDiff) {
|
||||||
userPrompt += `\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
|
userPrompt += `\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import type { Request, Response } from 'express';
|
|||||||
import { exec, execFile } from 'child_process';
|
import { exec, execFile } from 'child_process';
|
||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
import { getErrorMessage, logWorktreeError } from '../common.js';
|
import { getErrorMessage, logWorktreeError } from '../common.js';
|
||||||
import { getRemotesWithBranch } from '../../../services/worktree-service.js';
|
|
||||||
|
|
||||||
const execAsync = promisify(exec);
|
const execAsync = promisify(exec);
|
||||||
const execFileAsync = promisify(execFile);
|
const execFileAsync = promisify(execFile);
|
||||||
@@ -132,8 +131,6 @@ export function createListBranchesHandler() {
|
|||||||
let behindCount = 0;
|
let behindCount = 0;
|
||||||
let hasRemoteBranch = false;
|
let hasRemoteBranch = false;
|
||||||
let trackingRemote: string | undefined;
|
let trackingRemote: string | undefined;
|
||||||
// List of remote names that have a branch matching the current branch name
|
|
||||||
let remotesWithBranch: string[] = [];
|
|
||||||
try {
|
try {
|
||||||
// First check if there's a remote tracking branch
|
// First check if there's a remote tracking branch
|
||||||
const { stdout: upstreamOutput } = await execFileAsync(
|
const { stdout: upstreamOutput } = await execFileAsync(
|
||||||
@@ -175,12 +172,6 @@ export function createListBranchesHandler() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check which remotes have a branch matching the current branch name.
|
|
||||||
// This helps the UI distinguish between "branch exists on tracking remote" vs
|
|
||||||
// "branch was pushed to a different remote" (e.g., pushed to 'upstream' but tracking 'origin').
|
|
||||||
// Use for-each-ref to check cached remote refs (already fetched above if includeRemote was true)
|
|
||||||
remotesWithBranch = await getRemotesWithBranch(worktreePath, currentBranch, hasAnyRemotes);
|
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
result: {
|
result: {
|
||||||
@@ -191,7 +182,6 @@ export function createListBranchesHandler() {
|
|||||||
hasRemoteBranch,
|
hasRemoteBranch,
|
||||||
hasAnyRemotes,
|
hasAnyRemotes,
|
||||||
trackingRemote,
|
trackingRemote,
|
||||||
remotesWithBranch,
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -1,24 +1,24 @@
|
|||||||
/**
|
/**
|
||||||
* POST /push endpoint - Push a worktree branch to remote
|
* POST /push endpoint - Push a worktree branch to remote
|
||||||
*
|
*
|
||||||
* Git business logic is delegated to push-service.ts.
|
|
||||||
*
|
|
||||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||||
* the requireValidWorktree middleware in index.ts
|
* the requireValidWorktree middleware in index.ts
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
|
import { exec } from 'child_process';
|
||||||
|
import { promisify } from 'util';
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
import { performPush } from '../../../services/push-service.js';
|
|
||||||
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
export function createPushHandler() {
|
export function createPushHandler() {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { worktreePath, force, remote, autoResolve } = req.body as {
|
const { worktreePath, force, remote } = req.body as {
|
||||||
worktreePath: string;
|
worktreePath: string;
|
||||||
force?: boolean;
|
force?: boolean;
|
||||||
remote?: string;
|
remote?: string;
|
||||||
autoResolve?: boolean;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!worktreePath) {
|
if (!worktreePath) {
|
||||||
@@ -29,28 +29,34 @@ export function createPushHandler() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await performPush(worktreePath, { remote, force, autoResolve });
|
// Get branch name
|
||||||
|
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||||
if (!result.success) {
|
cwd: worktreePath,
|
||||||
const statusCode = isClientError(result.error ?? '') ? 400 : 500;
|
});
|
||||||
res.status(statusCode).json({
|
const branchName = branchOutput.trim();
|
||||||
success: false,
|
|
||||||
error: result.error,
|
// Use specified remote or default to 'origin'
|
||||||
diverged: result.diverged,
|
const targetRemote = remote || 'origin';
|
||||||
hasConflicts: result.hasConflicts,
|
|
||||||
conflictFiles: result.conflictFiles,
|
// Push the branch
|
||||||
|
const forceFlag = force ? '--force' : '';
|
||||||
|
try {
|
||||||
|
await execAsync(`git push -u ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// Try setting upstream
|
||||||
|
await execAsync(`git push --set-upstream ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||||
|
cwd: worktreePath,
|
||||||
});
|
});
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
result: {
|
result: {
|
||||||
branch: result.branch,
|
branch: branchName,
|
||||||
pushed: result.pushed,
|
pushed: true,
|
||||||
diverged: result.diverged,
|
message: `Successfully pushed ${branchName} to ${targetRemote}`,
|
||||||
autoResolved: result.autoResolved,
|
|
||||||
message: result.message,
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -59,15 +65,3 @@ export function createPushHandler() {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Determine whether an error message represents a client error (400)
|
|
||||||
* vs a server error (500).
|
|
||||||
*/
|
|
||||||
function isClientError(errorMessage: string): boolean {
|
|
||||||
return (
|
|
||||||
errorMessage.includes('detached HEAD') ||
|
|
||||||
errorMessage.includes('rejected') ||
|
|
||||||
errorMessage.includes('diverged')
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,76 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /set-tracking endpoint - Set the upstream tracking branch for a worktree
|
|
||||||
*
|
|
||||||
* Sets `git branch --set-upstream-to=<remote>/<branch>` for the current branch.
|
|
||||||
*
|
|
||||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
|
||||||
* the requireValidWorktree middleware in index.ts
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { execGitCommand } from '@automaker/git-utils';
|
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
|
||||||
import { getCurrentBranch } from '../../../lib/git.js';
|
|
||||||
|
|
||||||
export function createSetTrackingHandler() {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { worktreePath, remote, branch } = req.body as {
|
|
||||||
worktreePath: string;
|
|
||||||
remote: string;
|
|
||||||
branch?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!worktreePath) {
|
|
||||||
res.status(400).json({ success: false, error: 'worktreePath required' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!remote) {
|
|
||||||
res.status(400).json({ success: false, error: 'remote required' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get current branch if not provided
|
|
||||||
let targetBranch = branch;
|
|
||||||
if (!targetBranch) {
|
|
||||||
try {
|
|
||||||
targetBranch = await getCurrentBranch(worktreePath);
|
|
||||||
} catch (err) {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: `Failed to get current branch: ${getErrorMessage(err)}`,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (targetBranch === 'HEAD') {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: 'Cannot set tracking in detached HEAD state.',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set upstream tracking (pass local branch name as final arg to be explicit)
|
|
||||||
await execGitCommand(
|
|
||||||
['branch', '--set-upstream-to', `${remote}/${targetBranch}`, targetBranch],
|
|
||||||
worktreePath
|
|
||||||
);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
result: {
|
|
||||||
branch: targetBranch,
|
|
||||||
remote,
|
|
||||||
upstream: `${remote}/${targetBranch}`,
|
|
||||||
message: `Set tracking branch to ${remote}/${targetBranch}`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Set tracking branch failed');
|
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /sync endpoint - Pull then push a worktree branch
|
|
||||||
*
|
|
||||||
* Performs a full sync operation: pull latest from remote, then push
|
|
||||||
* local commits. Handles divergence automatically.
|
|
||||||
*
|
|
||||||
* Git business logic is delegated to sync-service.ts.
|
|
||||||
*
|
|
||||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
|
||||||
* the requireValidWorktree middleware in index.ts
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
|
||||||
import { performSync } from '../../../services/sync-service.js';
|
|
||||||
|
|
||||||
export function createSyncHandler() {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { worktreePath, remote } = req.body as {
|
|
||||||
worktreePath: string;
|
|
||||||
remote?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!worktreePath) {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: 'worktreePath required',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await performSync(worktreePath, { remote });
|
|
||||||
|
|
||||||
if (!result.success) {
|
|
||||||
const statusCode = result.hasConflicts ? 409 : 500;
|
|
||||||
res.status(statusCode).json({
|
|
||||||
success: false,
|
|
||||||
error: result.error,
|
|
||||||
hasConflicts: result.hasConflicts,
|
|
||||||
conflictFiles: result.conflictFiles,
|
|
||||||
conflictSource: result.conflictSource,
|
|
||||||
pulled: result.pulled,
|
|
||||||
pushed: result.pushed,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
result: {
|
|
||||||
branch: result.branch,
|
|
||||||
pulled: result.pulled,
|
|
||||||
pushed: result.pushed,
|
|
||||||
isFastForward: result.isFastForward,
|
|
||||||
isMerge: result.isMerge,
|
|
||||||
autoResolved: result.autoResolved,
|
|
||||||
message: result.message,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Sync worktree failed');
|
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,163 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /update-pr-number endpoint - Update the tracked PR number for a worktree
|
|
||||||
*
|
|
||||||
* Allows users to manually change which PR number is tracked for a worktree branch.
|
|
||||||
* Fetches updated PR info from GitHub when available, or updates metadata with the
|
|
||||||
* provided number only if GitHub CLI is unavailable.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { getErrorMessage, logError, execAsync, execEnv, isGhCliAvailable } from '../common.js';
|
|
||||||
import { updateWorktreePRInfo } from '../../../lib/worktree-metadata.js';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { validatePRState } from '@automaker/types';
|
|
||||||
|
|
||||||
const logger = createLogger('UpdatePRNumber');
|
|
||||||
|
|
||||||
export function createUpdatePRNumberHandler() {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { worktreePath, projectPath, prNumber } = req.body as {
|
|
||||||
worktreePath: string;
|
|
||||||
projectPath?: string;
|
|
||||||
prNumber: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!worktreePath) {
|
|
||||||
res.status(400).json({ success: false, error: 'worktreePath required' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
!prNumber ||
|
|
||||||
typeof prNumber !== 'number' ||
|
|
||||||
prNumber <= 0 ||
|
|
||||||
!Number.isInteger(prNumber)
|
|
||||||
) {
|
|
||||||
res.status(400).json({ success: false, error: 'prNumber must be a positive integer' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const effectiveProjectPath = projectPath || worktreePath;
|
|
||||||
|
|
||||||
// Get current branch name
|
|
||||||
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
|
||||||
cwd: worktreePath,
|
|
||||||
env: execEnv,
|
|
||||||
});
|
|
||||||
const branchName = branchOutput.trim();
|
|
||||||
|
|
||||||
if (!branchName || branchName === 'HEAD') {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: 'Cannot update PR number in detached HEAD state',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to fetch PR info from GitHub for the given PR number
|
|
||||||
const ghCliAvailable = await isGhCliAvailable();
|
|
||||||
|
|
||||||
if (ghCliAvailable) {
|
|
||||||
try {
|
|
||||||
// Detect repository for gh CLI
|
|
||||||
let repoFlag = '';
|
|
||||||
try {
|
|
||||||
const { stdout: remotes } = await execAsync('git remote -v', {
|
|
||||||
cwd: worktreePath,
|
|
||||||
env: execEnv,
|
|
||||||
});
|
|
||||||
const lines = remotes.split(/\r?\n/);
|
|
||||||
let upstreamRepo: string | null = null;
|
|
||||||
let originOwner: string | null = null;
|
|
||||||
let originRepo: string | null = null;
|
|
||||||
|
|
||||||
for (const line of lines) {
|
|
||||||
const match =
|
|
||||||
line.match(/^(\w+)\s+.*[:/]([^/]+)\/([^/\s]+?)(?:\.git)?\s+\(fetch\)/) ||
|
|
||||||
line.match(/^(\w+)\s+git@[^:]+:([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/) ||
|
|
||||||
line.match(/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/);
|
|
||||||
|
|
||||||
if (match) {
|
|
||||||
const [, remoteName, owner, repo] = match;
|
|
||||||
if (remoteName === 'upstream') {
|
|
||||||
upstreamRepo = `${owner}/${repo}`;
|
|
||||||
} else if (remoteName === 'origin') {
|
|
||||||
originOwner = owner;
|
|
||||||
originRepo = repo;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const targetRepo =
|
|
||||||
upstreamRepo || (originOwner && originRepo ? `${originOwner}/${originRepo}` : null);
|
|
||||||
if (targetRepo) {
|
|
||||||
repoFlag = ` --repo "${targetRepo}"`;
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// Ignore remote parsing errors
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fetch PR info from GitHub using the PR number
|
|
||||||
const viewCmd = `gh pr view ${prNumber}${repoFlag} --json number,title,url,state,createdAt`;
|
|
||||||
const { stdout: prOutput } = await execAsync(viewCmd, {
|
|
||||||
cwd: worktreePath,
|
|
||||||
env: execEnv,
|
|
||||||
});
|
|
||||||
|
|
||||||
const prData = JSON.parse(prOutput);
|
|
||||||
|
|
||||||
const prInfo = {
|
|
||||||
number: prData.number,
|
|
||||||
url: prData.url,
|
|
||||||
title: prData.title,
|
|
||||||
state: validatePRState(prData.state),
|
|
||||||
createdAt: prData.createdAt || new Date().toISOString(),
|
|
||||||
};
|
|
||||||
|
|
||||||
await updateWorktreePRInfo(effectiveProjectPath, branchName, prInfo);
|
|
||||||
|
|
||||||
logger.info(`Updated PR tracking to #${prNumber} for branch ${branchName}`);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
result: {
|
|
||||||
branch: branchName,
|
|
||||||
prInfo,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`Failed to fetch PR #${prNumber} from GitHub:`, error);
|
|
||||||
// Fall through to simple update below
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback: update with just the number, preserving existing PR info structure
|
|
||||||
// or creating minimal info if no GitHub data available
|
|
||||||
const prInfo = {
|
|
||||||
number: prNumber,
|
|
||||||
url: `https://github.com/pulls/${prNumber}`,
|
|
||||||
title: `PR #${prNumber}`,
|
|
||||||
state: validatePRState('OPEN'),
|
|
||||||
createdAt: new Date().toISOString(),
|
|
||||||
};
|
|
||||||
|
|
||||||
await updateWorktreePRInfo(effectiveProjectPath, branchName, prInfo);
|
|
||||||
|
|
||||||
logger.info(`Updated PR tracking to #${prNumber} for branch ${branchName} (no GitHub data)`);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
result: {
|
|
||||||
branch: branchName,
|
|
||||||
prInfo,
|
|
||||||
ghCliUnavailable: !ghCliAvailable,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Update PR number failed');
|
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -5,7 +5,6 @@
|
|||||||
import type {
|
import type {
|
||||||
PlanningMode,
|
PlanningMode,
|
||||||
ThinkingLevel,
|
ThinkingLevel,
|
||||||
ReasoningEffort,
|
|
||||||
ParsedTask,
|
ParsedTask,
|
||||||
ClaudeCompatibleProvider,
|
ClaudeCompatibleProvider,
|
||||||
Credentials,
|
Credentials,
|
||||||
@@ -25,14 +24,11 @@ export interface AgentExecutionOptions {
|
|||||||
previousContent?: string;
|
previousContent?: string;
|
||||||
systemPrompt?: string;
|
systemPrompt?: string;
|
||||||
autoLoadClaudeMd?: boolean;
|
autoLoadClaudeMd?: boolean;
|
||||||
useClaudeCodeSystemPrompt?: boolean;
|
|
||||||
thinkingLevel?: ThinkingLevel;
|
thinkingLevel?: ThinkingLevel;
|
||||||
reasoningEffort?: ReasoningEffort;
|
|
||||||
branchName?: string | null;
|
branchName?: string | null;
|
||||||
credentials?: Credentials;
|
credentials?: Credentials;
|
||||||
claudeCompatibleProvider?: ClaudeCompatibleProvider;
|
claudeCompatibleProvider?: ClaudeCompatibleProvider;
|
||||||
mcpServers?: Record<string, unknown>;
|
mcpServers?: Record<string, unknown>;
|
||||||
sdkSessionId?: string;
|
|
||||||
sdkOptions?: {
|
sdkOptions?: {
|
||||||
maxTurns?: number;
|
maxTurns?: number;
|
||||||
allowedTools?: string[];
|
allowedTools?: string[];
|
||||||
|
|||||||
@@ -38,8 +38,6 @@ export type {
|
|||||||
|
|
||||||
const logger = createLogger('AgentExecutor');
|
const logger = createLogger('AgentExecutor');
|
||||||
|
|
||||||
const DEFAULT_MAX_TURNS = 10000;
|
|
||||||
|
|
||||||
export class AgentExecutor {
|
export class AgentExecutor {
|
||||||
private static readonly WRITE_DEBOUNCE_MS = 500;
|
private static readonly WRITE_DEBOUNCE_MS = 500;
|
||||||
private static readonly STREAM_HEARTBEAT_MS = 15_000;
|
private static readonly STREAM_HEARTBEAT_MS = 15_000;
|
||||||
@@ -93,7 +91,6 @@ export class AgentExecutor {
|
|||||||
credentials,
|
credentials,
|
||||||
claudeCompatibleProvider,
|
claudeCompatibleProvider,
|
||||||
mcpServers,
|
mcpServers,
|
||||||
sdkSessionId,
|
|
||||||
sdkOptions,
|
sdkOptions,
|
||||||
} = options;
|
} = options;
|
||||||
const { content: promptContent } = await buildPromptWithImages(
|
const { content: promptContent } = await buildPromptWithImages(
|
||||||
@@ -102,22 +99,10 @@ export class AgentExecutor {
|
|||||||
workDir,
|
workDir,
|
||||||
false
|
false
|
||||||
);
|
);
|
||||||
const resolvedMaxTurns = sdkOptions?.maxTurns ?? DEFAULT_MAX_TURNS;
|
|
||||||
if (sdkOptions?.maxTurns == null) {
|
|
||||||
logger.info(
|
|
||||||
`[execute] Feature ${featureId}: sdkOptions.maxTurns is not set, defaulting to ${resolvedMaxTurns}. ` +
|
|
||||||
`Model: ${effectiveBareModel}`
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
logger.info(
|
|
||||||
`[execute] Feature ${featureId}: maxTurns=${resolvedMaxTurns}, model=${effectiveBareModel}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const executeOptions: ExecuteOptions = {
|
const executeOptions: ExecuteOptions = {
|
||||||
prompt: promptContent,
|
prompt: promptContent,
|
||||||
model: effectiveBareModel,
|
model: effectiveBareModel,
|
||||||
maxTurns: resolvedMaxTurns,
|
maxTurns: sdkOptions?.maxTurns,
|
||||||
cwd: workDir,
|
cwd: workDir,
|
||||||
allowedTools: sdkOptions?.allowedTools as string[] | undefined,
|
allowedTools: sdkOptions?.allowedTools as string[] | undefined,
|
||||||
abortController,
|
abortController,
|
||||||
@@ -128,10 +113,8 @@ export class AgentExecutor {
|
|||||||
? (mcpServers as Record<string, { command: string }>)
|
? (mcpServers as Record<string, { command: string }>)
|
||||||
: undefined,
|
: undefined,
|
||||||
thinkingLevel: options.thinkingLevel,
|
thinkingLevel: options.thinkingLevel,
|
||||||
reasoningEffort: options.reasoningEffort,
|
|
||||||
credentials,
|
credentials,
|
||||||
claudeCompatibleProvider,
|
claudeCompatibleProvider,
|
||||||
sdkSessionId,
|
|
||||||
};
|
};
|
||||||
const featureDirForOutput = getFeatureDir(projectPath, featureId);
|
const featureDirForOutput = getFeatureDir(projectPath, featureId);
|
||||||
const outputPath = path.join(featureDirForOutput, 'agent-output.md');
|
const outputPath = path.join(featureDirForOutput, 'agent-output.md');
|
||||||
@@ -220,9 +203,6 @@ export class AgentExecutor {
|
|||||||
try {
|
try {
|
||||||
const stream = provider.executeQuery(executeOptions);
|
const stream = provider.executeQuery(executeOptions);
|
||||||
streamLoop: for await (const msg of stream) {
|
streamLoop: for await (const msg of stream) {
|
||||||
if (msg.session_id && msg.session_id !== options.sdkSessionId) {
|
|
||||||
options.sdkSessionId = msg.session_id;
|
|
||||||
}
|
|
||||||
receivedAnyStreamMessage = true;
|
receivedAnyStreamMessage = true;
|
||||||
appendRawEvent(msg);
|
appendRawEvent(msg);
|
||||||
if (abortController.signal.aborted) {
|
if (abortController.signal.aborted) {
|
||||||
@@ -296,40 +276,9 @@ export class AgentExecutor {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if (msg.type === 'error') {
|
} else if (msg.type === 'error') {
|
||||||
const sanitized = AgentExecutor.sanitizeProviderError(msg.error);
|
throw new Error(AgentExecutor.sanitizeProviderError(msg.error));
|
||||||
logger.error(
|
} else if (msg.type === 'result' && msg.subtype === 'success') scheduleWrite();
|
||||||
`[execute] Feature ${featureId} received error from provider. ` +
|
|
||||||
`raw="${msg.error}", sanitized="${sanitized}", session_id=${msg.session_id ?? 'none'}`
|
|
||||||
);
|
|
||||||
throw new Error(sanitized);
|
|
||||||
} else if (msg.type === 'result') {
|
|
||||||
if (msg.subtype === 'success') {
|
|
||||||
scheduleWrite();
|
|
||||||
} else if (msg.subtype?.startsWith('error')) {
|
|
||||||
// Non-success result subtypes from the SDK (error_max_turns, error_during_execution, etc.)
|
|
||||||
logger.error(
|
|
||||||
`[execute] Feature ${featureId} ended with error subtype: ${msg.subtype}. ` +
|
|
||||||
`session_id=${msg.session_id ?? 'none'}`
|
|
||||||
);
|
|
||||||
throw new Error(`Agent execution ended with: ${msg.subtype}`);
|
|
||||||
} else {
|
|
||||||
logger.warn(
|
|
||||||
`[execute] Feature ${featureId} received unhandled result subtype: ${msg.subtype}`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
clearInterval(streamHeartbeat);
|
|
||||||
if (writeTimeout) clearTimeout(writeTimeout);
|
|
||||||
if (rawWriteTimeout) clearTimeout(rawWriteTimeout);
|
|
||||||
|
|
||||||
const streamElapsedMs = Date.now() - streamStartTime;
|
|
||||||
logger.info(
|
|
||||||
`[execute] Stream ended for feature ${featureId} after ${Math.round(streamElapsedMs / 1000)}s. ` +
|
|
||||||
`aborted=${aborted}, specDetected=${specDetected}, responseLength=${responseText.length}`
|
|
||||||
);
|
|
||||||
|
|
||||||
await writeToFile();
|
await writeToFile();
|
||||||
if (enableRawOutput && rawOutputLines.length > 0) {
|
if (enableRawOutput && rawOutputLines.length > 0) {
|
||||||
try {
|
try {
|
||||||
@@ -339,6 +288,10 @@ export class AgentExecutor {
|
|||||||
/* ignore */
|
/* ignore */
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
|
clearInterval(streamHeartbeat);
|
||||||
|
if (writeTimeout) clearTimeout(writeTimeout);
|
||||||
|
if (rawWriteTimeout) clearTimeout(rawWriteTimeout);
|
||||||
}
|
}
|
||||||
return { responseText, specDetected, tasksCompleted, aborted };
|
return { responseText, specDetected, tasksCompleted, aborted };
|
||||||
}
|
}
|
||||||
@@ -398,22 +351,14 @@ export class AgentExecutor {
|
|||||||
taskPrompts.taskExecution.taskPromptTemplate,
|
taskPrompts.taskExecution.taskPromptTemplate,
|
||||||
userFeedback
|
userFeedback
|
||||||
);
|
);
|
||||||
const taskMaxTurns = sdkOptions?.maxTurns ?? DEFAULT_MAX_TURNS;
|
|
||||||
logger.info(
|
|
||||||
`[executeTasksLoop] Feature ${featureId}, task ${task.id} (${taskIndex + 1}/${tasks.length}): ` +
|
|
||||||
`maxTurns=${taskMaxTurns} (sdkOptions.maxTurns=${sdkOptions?.maxTurns ?? 'undefined'})`
|
|
||||||
);
|
|
||||||
const taskStream = provider.executeQuery(
|
const taskStream = provider.executeQuery(
|
||||||
this.buildExecOpts(options, taskPrompt, taskMaxTurns)
|
this.buildExecOpts(options, taskPrompt, Math.min(sdkOptions?.maxTurns ?? 100, 100))
|
||||||
);
|
);
|
||||||
let taskOutput = '',
|
let taskOutput = '',
|
||||||
taskStartDetected = false,
|
taskStartDetected = false,
|
||||||
taskCompleteDetected = false;
|
taskCompleteDetected = false;
|
||||||
|
|
||||||
for await (const msg of taskStream) {
|
for await (const msg of taskStream) {
|
||||||
if (msg.session_id && msg.session_id !== options.sdkSessionId) {
|
|
||||||
options.sdkSessionId = msg.session_id;
|
|
||||||
}
|
|
||||||
if (msg.type === 'assistant' && msg.message?.content) {
|
if (msg.type === 'assistant' && msg.message?.content) {
|
||||||
for (const b of msg.message.content) {
|
for (const b of msg.message.content) {
|
||||||
if (b.type === 'text') {
|
if (b.type === 'text') {
|
||||||
@@ -467,28 +412,16 @@ export class AgentExecutor {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else if (msg.type === 'error') {
|
} else if (msg.type === 'error') {
|
||||||
const fallback = `Error during task ${task.id}`;
|
// Clean the error: strip ANSI codes and redundant "Error: " prefix
|
||||||
const sanitized = AgentExecutor.sanitizeProviderError(msg.error || fallback);
|
const cleanedError =
|
||||||
logger.error(
|
(msg.error || `Error during task ${task.id}`)
|
||||||
`[executeTasksLoop] Feature ${featureId} task ${task.id} received error from provider. ` +
|
.replace(/\x1b\[[0-9;]*m/g, '')
|
||||||
`raw="${msg.error}", sanitized="${sanitized}", session_id=${msg.session_id ?? 'none'}`
|
.replace(/^Error:\s*/i, '')
|
||||||
);
|
.trim() || `Error during task ${task.id}`;
|
||||||
throw new Error(sanitized);
|
throw new Error(cleanedError);
|
||||||
} else if (msg.type === 'result') {
|
} else if (msg.type === 'result' && msg.subtype === 'success') {
|
||||||
if (msg.subtype === 'success') {
|
|
||||||
taskOutput += msg.result || '';
|
taskOutput += msg.result || '';
|
||||||
responseText += msg.result || '';
|
responseText += msg.result || '';
|
||||||
} else if (msg.subtype?.startsWith('error')) {
|
|
||||||
logger.error(
|
|
||||||
`[executeTasksLoop] Feature ${featureId} task ${task.id} ended with error subtype: ${msg.subtype}. ` +
|
|
||||||
`session_id=${msg.session_id ?? 'none'}`
|
|
||||||
);
|
|
||||||
throw new Error(`Agent execution ended with: ${msg.subtype}`);
|
|
||||||
} else {
|
|
||||||
logger.warn(
|
|
||||||
`[executeTasksLoop] Feature ${featureId} task ${task.id} received unhandled result subtype: ${msg.subtype}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!taskCompleteDetected)
|
if (!taskCompleteDetected)
|
||||||
@@ -638,11 +571,8 @@ export class AgentExecutor {
|
|||||||
});
|
});
|
||||||
let revText = '';
|
let revText = '';
|
||||||
for await (const msg of provider.executeQuery(
|
for await (const msg of provider.executeQuery(
|
||||||
this.buildExecOpts(options, revPrompt, sdkOptions?.maxTurns ?? DEFAULT_MAX_TURNS)
|
this.buildExecOpts(options, revPrompt, sdkOptions?.maxTurns ?? 100)
|
||||||
)) {
|
)) {
|
||||||
if (msg.session_id && msg.session_id !== options.sdkSessionId) {
|
|
||||||
options.sdkSessionId = msg.session_id;
|
|
||||||
}
|
|
||||||
if (msg.type === 'assistant' && msg.message?.content)
|
if (msg.type === 'assistant' && msg.message?.content)
|
||||||
for (const b of msg.message.content)
|
for (const b of msg.message.content)
|
||||||
if (b.type === 'text') {
|
if (b.type === 'text') {
|
||||||
@@ -727,7 +657,7 @@ export class AgentExecutor {
|
|||||||
return { responseText, tasksCompleted };
|
return { responseText, tasksCompleted };
|
||||||
}
|
}
|
||||||
|
|
||||||
private buildExecOpts(o: AgentExecutionOptions, prompt: string, maxTurns: number) {
|
private buildExecOpts(o: AgentExecutionOptions, prompt: string, maxTurns?: number) {
|
||||||
return {
|
return {
|
||||||
prompt,
|
prompt,
|
||||||
model: o.effectiveBareModel,
|
model: o.effectiveBareModel,
|
||||||
@@ -736,14 +666,12 @@ export class AgentExecutor {
|
|||||||
allowedTools: o.sdkOptions?.allowedTools as string[] | undefined,
|
allowedTools: o.sdkOptions?.allowedTools as string[] | undefined,
|
||||||
abortController: o.abortController,
|
abortController: o.abortController,
|
||||||
thinkingLevel: o.thinkingLevel,
|
thinkingLevel: o.thinkingLevel,
|
||||||
reasoningEffort: o.reasoningEffort,
|
|
||||||
mcpServers:
|
mcpServers:
|
||||||
o.mcpServers && Object.keys(o.mcpServers).length > 0
|
o.mcpServers && Object.keys(o.mcpServers).length > 0
|
||||||
? (o.mcpServers as Record<string, { command: string }>)
|
? (o.mcpServers as Record<string, { command: string }>)
|
||||||
: undefined,
|
: undefined,
|
||||||
credentials: o.credentials,
|
credentials: o.credentials,
|
||||||
claudeCompatibleProvider: o.claudeCompatibleProvider,
|
claudeCompatibleProvider: o.claudeCompatibleProvider,
|
||||||
sdkSessionId: o.sdkSessionId,
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -761,11 +689,8 @@ export class AgentExecutor {
|
|||||||
.replace(/\{\{approvedPlan\}\}/g, planContent);
|
.replace(/\{\{approvedPlan\}\}/g, planContent);
|
||||||
let responseText = initialResponseText;
|
let responseText = initialResponseText;
|
||||||
for await (const msg of provider.executeQuery(
|
for await (const msg of provider.executeQuery(
|
||||||
this.buildExecOpts(options, contPrompt, options.sdkOptions?.maxTurns ?? DEFAULT_MAX_TURNS)
|
this.buildExecOpts(options, contPrompt, options.sdkOptions?.maxTurns)
|
||||||
)) {
|
)) {
|
||||||
if (msg.session_id && msg.session_id !== options.sdkSessionId) {
|
|
||||||
options.sdkSessionId = msg.session_id;
|
|
||||||
}
|
|
||||||
if (msg.type === 'assistant' && msg.message?.content)
|
if (msg.type === 'assistant' && msg.message?.content)
|
||||||
for (const b of msg.message.content) {
|
for (const b of msg.message.content) {
|
||||||
if (b.type === 'text') {
|
if (b.type === 'text') {
|
||||||
|
|||||||
@@ -21,7 +21,6 @@ import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.
|
|||||||
import type { SettingsService } from './settings-service.js';
|
import type { SettingsService } from './settings-service.js';
|
||||||
import {
|
import {
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getUseClaudeCodeSystemPromptSetting,
|
|
||||||
filterClaudeMdFromContext,
|
filterClaudeMdFromContext,
|
||||||
getMCPServersFromSettings,
|
getMCPServersFromSettings,
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
@@ -29,7 +28,6 @@ import {
|
|||||||
getSubagentsConfiguration,
|
getSubagentsConfiguration,
|
||||||
getCustomSubagents,
|
getCustomSubagents,
|
||||||
getProviderByModelId,
|
getProviderByModelId,
|
||||||
getDefaultMaxTurnsSetting,
|
|
||||||
} from '../lib/settings-helpers.js';
|
} from '../lib/settings-helpers.js';
|
||||||
|
|
||||||
interface Message {
|
interface Message {
|
||||||
@@ -330,6 +328,12 @@ export class AgentService {
|
|||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Build conversation history from existing messages BEFORE adding current message
|
||||||
|
const conversationHistory = session.messages.map((msg) => ({
|
||||||
|
role: msg.role,
|
||||||
|
content: msg.content,
|
||||||
|
}));
|
||||||
|
|
||||||
session.messages.push(userMessage);
|
session.messages.push(userMessage);
|
||||||
session.isRunning = true;
|
session.isRunning = true;
|
||||||
session.abortController = new AbortController();
|
session.abortController = new AbortController();
|
||||||
@@ -358,22 +362,6 @@ export class AgentService {
|
|||||||
'[AgentService]'
|
'[AgentService]'
|
||||||
);
|
);
|
||||||
|
|
||||||
// Load useClaudeCodeSystemPrompt setting (project setting takes precedence over global)
|
|
||||||
// Wrap in try/catch so transient settingsService errors don't abort message processing
|
|
||||||
let useClaudeCodeSystemPrompt = true;
|
|
||||||
try {
|
|
||||||
useClaudeCodeSystemPrompt = await getUseClaudeCodeSystemPromptSetting(
|
|
||||||
effectiveWorkDir,
|
|
||||||
this.settingsService,
|
|
||||||
'[AgentService]'
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
this.logger.error(
|
|
||||||
'[AgentService] getUseClaudeCodeSystemPromptSetting failed, defaulting to true',
|
|
||||||
err
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Load MCP servers from settings (global setting only)
|
// Load MCP servers from settings (global setting only)
|
||||||
const mcpServers = await getMCPServersFromSettings(this.settingsService, '[AgentService]');
|
const mcpServers = await getMCPServersFromSettings(this.settingsService, '[AgentService]');
|
||||||
|
|
||||||
@@ -417,7 +405,6 @@ export class AgentService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let combinedSystemPrompt: string | undefined;
|
|
||||||
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) and memory files
|
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) and memory files
|
||||||
// Use the user's message as task context for smart memory selection
|
// Use the user's message as task context for smart memory selection
|
||||||
const contextResult = await loadContextFiles({
|
const contextResult = await loadContextFiles({
|
||||||
@@ -435,7 +422,7 @@ export class AgentService {
|
|||||||
|
|
||||||
// Build combined system prompt with base prompt and context files
|
// Build combined system prompt with base prompt and context files
|
||||||
const baseSystemPrompt = await this.getSystemPrompt();
|
const baseSystemPrompt = await this.getSystemPrompt();
|
||||||
combinedSystemPrompt = contextFilesPrompt
|
const combinedSystemPrompt = contextFilesPrompt
|
||||||
? `${contextFilesPrompt}\n\n${baseSystemPrompt}`
|
? `${contextFilesPrompt}\n\n${baseSystemPrompt}`
|
||||||
: baseSystemPrompt;
|
: baseSystemPrompt;
|
||||||
|
|
||||||
@@ -450,9 +437,6 @@ export class AgentService {
|
|||||||
const modelForSdk = providerResolvedModel || model;
|
const modelForSdk = providerResolvedModel || model;
|
||||||
const sessionModelForSdk = providerResolvedModel ? undefined : session.model;
|
const sessionModelForSdk = providerResolvedModel ? undefined : session.model;
|
||||||
|
|
||||||
// Read user-configured max turns from settings
|
|
||||||
const userMaxTurns = await getDefaultMaxTurnsSetting(this.settingsService, '[AgentService]');
|
|
||||||
|
|
||||||
const sdkOptions = createChatOptions({
|
const sdkOptions = createChatOptions({
|
||||||
cwd: effectiveWorkDir,
|
cwd: effectiveWorkDir,
|
||||||
model: modelForSdk,
|
model: modelForSdk,
|
||||||
@@ -460,9 +444,7 @@ export class AgentService {
|
|||||||
systemPrompt: combinedSystemPrompt,
|
systemPrompt: combinedSystemPrompt,
|
||||||
abortController: session.abortController!,
|
abortController: session.abortController!,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
thinkingLevel: effectiveThinkingLevel, // Pass thinking level for Claude models
|
thinkingLevel: effectiveThinkingLevel, // Pass thinking level for Claude models
|
||||||
maxTurns: userMaxTurns, // User-configured max turns from settings
|
|
||||||
mcpServers: Object.keys(mcpServers).length > 0 ? mcpServers : undefined,
|
mcpServers: Object.keys(mcpServers).length > 0 ? mcpServers : undefined,
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -487,19 +469,7 @@ export class AgentService {
|
|||||||
Object.keys(customSubagents).length > 0;
|
Object.keys(customSubagents).length > 0;
|
||||||
|
|
||||||
// Base tools that match the provider's default set
|
// Base tools that match the provider's default set
|
||||||
const baseTools = [
|
const baseTools = ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'Bash', 'WebSearch', 'WebFetch'];
|
||||||
'Read',
|
|
||||||
'Write',
|
|
||||||
'Edit',
|
|
||||||
'MultiEdit',
|
|
||||||
'Glob',
|
|
||||||
'Grep',
|
|
||||||
'LS',
|
|
||||||
'Bash',
|
|
||||||
'WebSearch',
|
|
||||||
'WebFetch',
|
|
||||||
'TodoWrite',
|
|
||||||
];
|
|
||||||
|
|
||||||
if (allowedTools) {
|
if (allowedTools) {
|
||||||
allowedTools = [...allowedTools]; // Create a copy to avoid mutating SDK options
|
allowedTools = [...allowedTools]; // Create a copy to avoid mutating SDK options
|
||||||
@@ -538,14 +508,6 @@ export class AgentService {
|
|||||||
: stripProviderPrefix(effectiveModel);
|
: stripProviderPrefix(effectiveModel);
|
||||||
|
|
||||||
// Build options for provider
|
// Build options for provider
|
||||||
const conversationHistory = session.messages
|
|
||||||
.slice(0, -1)
|
|
||||||
.map((msg) => ({
|
|
||||||
role: msg.role,
|
|
||||||
content: msg.content,
|
|
||||||
}))
|
|
||||||
.filter((msg) => msg.content.trim().length > 0);
|
|
||||||
|
|
||||||
const options: ExecuteOptions = {
|
const options: ExecuteOptions = {
|
||||||
prompt: '', // Will be set below based on images
|
prompt: '', // Will be set below based on images
|
||||||
model: bareModel, // Bare model ID (e.g., "gpt-5.1-codex-max", "composer-1")
|
model: bareModel, // Bare model ID (e.g., "gpt-5.1-codex-max", "composer-1")
|
||||||
@@ -555,8 +517,7 @@ export class AgentService {
|
|||||||
maxTurns: maxTurns,
|
maxTurns: maxTurns,
|
||||||
allowedTools: allowedTools,
|
allowedTools: allowedTools,
|
||||||
abortController: session.abortController!,
|
abortController: session.abortController!,
|
||||||
conversationHistory:
|
conversationHistory: conversationHistory.length > 0 ? conversationHistory : undefined,
|
||||||
conversationHistory && conversationHistory.length > 0 ? conversationHistory : undefined,
|
|
||||||
settingSources: settingSources.length > 0 ? settingSources : undefined,
|
settingSources: settingSources.length > 0 ? settingSources : undefined,
|
||||||
sdkSessionId: session.sdkSessionId, // Pass SDK session ID for resuming
|
sdkSessionId: session.sdkSessionId, // Pass SDK session ID for resuming
|
||||||
mcpServers: Object.keys(mcpServers).length > 0 ? mcpServers : undefined, // Pass MCP servers configuration
|
mcpServers: Object.keys(mcpServers).length > 0 ? mcpServers : undefined, // Pass MCP servers configuration
|
||||||
@@ -584,7 +545,6 @@ export class AgentService {
|
|||||||
let currentAssistantMessage: Message | null = null;
|
let currentAssistantMessage: Message | null = null;
|
||||||
let responseText = '';
|
let responseText = '';
|
||||||
const toolUses: Array<{ name: string; input: unknown }> = [];
|
const toolUses: Array<{ name: string; input: unknown }> = [];
|
||||||
const toolNamesById = new Map<string, string>();
|
|
||||||
|
|
||||||
for await (const msg of stream) {
|
for await (const msg of stream) {
|
||||||
// Capture SDK session ID from any message and persist it.
|
// Capture SDK session ID from any message and persist it.
|
||||||
@@ -629,50 +589,11 @@ export class AgentService {
|
|||||||
input: block.input,
|
input: block.input,
|
||||||
};
|
};
|
||||||
toolUses.push(toolUse);
|
toolUses.push(toolUse);
|
||||||
if (block.tool_use_id) {
|
|
||||||
toolNamesById.set(block.tool_use_id, toolUse.name);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.emitAgentEvent(sessionId, {
|
this.emitAgentEvent(sessionId, {
|
||||||
type: 'tool_use',
|
type: 'tool_use',
|
||||||
tool: toolUse,
|
tool: toolUse,
|
||||||
});
|
});
|
||||||
} else if (block.type === 'tool_result') {
|
|
||||||
const toolUseId = block.tool_use_id;
|
|
||||||
const toolName = toolUseId ? toolNamesById.get(toolUseId) : undefined;
|
|
||||||
|
|
||||||
// Normalize block.content to a string for the emitted event
|
|
||||||
const rawContent: unknown = block.content;
|
|
||||||
let contentString: string;
|
|
||||||
if (typeof rawContent === 'string') {
|
|
||||||
contentString = rawContent;
|
|
||||||
} else if (Array.isArray(rawContent)) {
|
|
||||||
// Extract text from content blocks (TextBlock, ImageBlock, etc.)
|
|
||||||
contentString = rawContent
|
|
||||||
.map((part: { text?: string; type?: string }) => {
|
|
||||||
if (typeof part === 'string') return part;
|
|
||||||
if (part.text) return part.text;
|
|
||||||
// For non-text blocks (e.g., images), represent as type indicator
|
|
||||||
if (part.type) return `[${part.type}]`;
|
|
||||||
return JSON.stringify(part);
|
|
||||||
})
|
|
||||||
.join('\n');
|
|
||||||
} else if (rawContent !== undefined && rawContent !== null) {
|
|
||||||
contentString = JSON.stringify(rawContent);
|
|
||||||
} else {
|
|
||||||
contentString = '';
|
|
||||||
}
|
|
||||||
|
|
||||||
this.emitAgentEvent(sessionId, {
|
|
||||||
type: 'tool_result',
|
|
||||||
tool: {
|
|
||||||
name: toolName || 'unknown',
|
|
||||||
input: {
|
|
||||||
toolUseId,
|
|
||||||
content: contentString,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,19 +14,14 @@
|
|||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { exec } from 'child_process';
|
import { exec } from 'child_process';
|
||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
import type { Feature, PlanningMode, ThinkingLevel, ReasoningEffort } from '@automaker/types';
|
import type { Feature, PlanningMode, ThinkingLevel } from '@automaker/types';
|
||||||
import { DEFAULT_MAX_CONCURRENCY, DEFAULT_MODELS, stripProviderPrefix } from '@automaker/types';
|
import { DEFAULT_MAX_CONCURRENCY, DEFAULT_MODELS, stripProviderPrefix } from '@automaker/types';
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
import { resolveModelString } from '@automaker/model-resolver';
|
||||||
import { createLogger, loadContextFiles, classifyError } from '@automaker/utils';
|
import { createLogger, loadContextFiles, classifyError } from '@automaker/utils';
|
||||||
import { getFeatureDir } from '@automaker/platform';
|
import { getFeatureDir } from '@automaker/platform';
|
||||||
import * as secureFs from '../../lib/secure-fs.js';
|
import * as secureFs from '../../lib/secure-fs.js';
|
||||||
import { validateWorkingDirectory, createAutoModeOptions } from '../../lib/sdk-options.js';
|
import { validateWorkingDirectory } from '../../lib/sdk-options.js';
|
||||||
import {
|
import { getPromptCustomization, getProviderByModelId } from '../../lib/settings-helpers.js';
|
||||||
getPromptCustomization,
|
|
||||||
getProviderByModelId,
|
|
||||||
getMCPServersFromSettings,
|
|
||||||
getDefaultMaxTurnsSetting,
|
|
||||||
} from '../../lib/settings-helpers.js';
|
|
||||||
import { execGitCommand } from '@automaker/git-utils';
|
import { execGitCommand } from '@automaker/git-utils';
|
||||||
import { TypedEventBus } from '../typed-event-bus.js';
|
import { TypedEventBus } from '../typed-event-bus.js';
|
||||||
import { ConcurrencyManager } from '../concurrency-manager.js';
|
import { ConcurrencyManager } from '../concurrency-manager.js';
|
||||||
@@ -213,9 +208,7 @@ export class AutoModeServiceFacade {
|
|||||||
previousContent?: string;
|
previousContent?: string;
|
||||||
systemPrompt?: string;
|
systemPrompt?: string;
|
||||||
autoLoadClaudeMd?: boolean;
|
autoLoadClaudeMd?: boolean;
|
||||||
useClaudeCodeSystemPrompt?: boolean;
|
|
||||||
thinkingLevel?: ThinkingLevel;
|
thinkingLevel?: ThinkingLevel;
|
||||||
reasoningEffort?: ReasoningEffort;
|
|
||||||
branchName?: string | null;
|
branchName?: string | null;
|
||||||
[key: string]: unknown;
|
[key: string]: unknown;
|
||||||
}
|
}
|
||||||
@@ -241,47 +234,6 @@ export class AutoModeServiceFacade {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build sdkOptions with proper maxTurns and allowedTools for auto-mode.
|
|
||||||
// Without this, maxTurns would be undefined, causing providers to use their
|
|
||||||
// internal defaults which may be much lower than intended (e.g., Codex CLI's
|
|
||||||
// default turn limit can cause feature runs to stop prematurely).
|
|
||||||
const autoLoadClaudeMd = opts?.autoLoadClaudeMd ?? false;
|
|
||||||
const useClaudeCodeSystemPrompt = opts?.useClaudeCodeSystemPrompt ?? true;
|
|
||||||
let mcpServers: Record<string, unknown> | undefined;
|
|
||||||
try {
|
|
||||||
if (settingsService) {
|
|
||||||
const servers = await getMCPServersFromSettings(settingsService, '[AutoModeFacade]');
|
|
||||||
if (Object.keys(servers).length > 0) {
|
|
||||||
mcpServers = servers;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// MCP servers are optional - continue without them
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read user-configured max turns from settings
|
|
||||||
const userMaxTurns = await getDefaultMaxTurnsSetting(settingsService, '[AutoModeFacade]');
|
|
||||||
|
|
||||||
const sdkOpts = createAutoModeOptions({
|
|
||||||
cwd: workDir,
|
|
||||||
model: resolvedModel,
|
|
||||||
systemPrompt: opts?.systemPrompt,
|
|
||||||
abortController,
|
|
||||||
autoLoadClaudeMd,
|
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
thinkingLevel: opts?.thinkingLevel,
|
|
||||||
maxTurns: userMaxTurns,
|
|
||||||
mcpServers: mcpServers as
|
|
||||||
| Record<string, import('@automaker/types').McpServerConfig>
|
|
||||||
| undefined,
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[createRunAgentFn] Feature ${featureId}: model=${resolvedModel}, ` +
|
|
||||||
`maxTurns=${sdkOpts.maxTurns}, allowedTools=${(sdkOpts.allowedTools as string[])?.length ?? 'default'}, ` +
|
|
||||||
`provider=${provider.getName()}`
|
|
||||||
);
|
|
||||||
|
|
||||||
await agentExecutor.execute(
|
await agentExecutor.execute(
|
||||||
{
|
{
|
||||||
workDir,
|
workDir,
|
||||||
@@ -296,23 +248,12 @@ export class AutoModeServiceFacade {
|
|||||||
previousContent: opts?.previousContent as string | undefined,
|
previousContent: opts?.previousContent as string | undefined,
|
||||||
systemPrompt: opts?.systemPrompt as string | undefined,
|
systemPrompt: opts?.systemPrompt as string | undefined,
|
||||||
autoLoadClaudeMd: opts?.autoLoadClaudeMd as boolean | undefined,
|
autoLoadClaudeMd: opts?.autoLoadClaudeMd as boolean | undefined,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
thinkingLevel: opts?.thinkingLevel as ThinkingLevel | undefined,
|
thinkingLevel: opts?.thinkingLevel as ThinkingLevel | undefined,
|
||||||
reasoningEffort: opts?.reasoningEffort as ReasoningEffort | undefined,
|
|
||||||
branchName: opts?.branchName as string | null | undefined,
|
branchName: opts?.branchName as string | null | undefined,
|
||||||
provider,
|
provider,
|
||||||
effectiveBareModel,
|
effectiveBareModel,
|
||||||
credentials,
|
credentials,
|
||||||
claudeCompatibleProvider,
|
claudeCompatibleProvider,
|
||||||
mcpServers,
|
|
||||||
sdkOptions: {
|
|
||||||
maxTurns: sdkOpts.maxTurns,
|
|
||||||
allowedTools: sdkOpts.allowedTools as string[] | undefined,
|
|
||||||
systemPrompt: sdkOpts.systemPrompt,
|
|
||||||
settingSources: sdkOpts.settingSources as
|
|
||||||
| Array<'user' | 'project' | 'local'>
|
|
||||||
| undefined,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
waitForApproval: (fId, projPath) => planApprovalService.waitForApproval(fId, projPath),
|
waitForApproval: (fId, projPath) => planApprovalService.waitForApproval(fId, projPath),
|
||||||
@@ -761,20 +702,16 @@ export class AutoModeServiceFacade {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const runningEntryForVerify = this.concurrencyManager.getRunningFeature(featureId);
|
|
||||||
if (runningEntryForVerify?.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature?.title,
|
featureName: feature?.title,
|
||||||
branchName: feature?.branchName ?? null,
|
branchName: feature?.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: allPassed,
|
passes: allPassed,
|
||||||
message: allPassed
|
message: allPassed
|
||||||
? 'All verification checks passed'
|
? 'All verification checks passed'
|
||||||
: `Verification failed: ${results.find((r) => !r.passed)?.check || 'Unknown'}`,
|
: `Verification failed: ${results.find((r) => !r.passed)?.check || 'Unknown'}`,
|
||||||
projectPath: this.projectPath,
|
projectPath: this.projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
|
|
||||||
return allPassed;
|
return allPassed;
|
||||||
}
|
}
|
||||||
@@ -824,18 +761,14 @@ export class AutoModeServiceFacade {
|
|||||||
await execGitCommand(['commit', '-m', commitMessage], workDir);
|
await execGitCommand(['commit', '-m', commitMessage], workDir);
|
||||||
const hash = await execGitCommand(['rev-parse', 'HEAD'], workDir);
|
const hash = await execGitCommand(['rev-parse', 'HEAD'], workDir);
|
||||||
|
|
||||||
const runningEntryForCommit = this.concurrencyManager.getRunningFeature(featureId);
|
|
||||||
if (runningEntryForCommit?.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature?.title,
|
featureName: feature?.title,
|
||||||
branchName: feature?.branchName ?? null,
|
branchName: feature?.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: `Changes committed: ${hash.trim().substring(0, 8)}`,
|
message: `Changes committed: ${hash.trim().substring(0, 8)}`,
|
||||||
projectPath: this.projectPath,
|
projectPath: this.projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
|
|
||||||
return hash.trim();
|
return hash.trim();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -918,7 +851,7 @@ export class AutoModeServiceFacade {
|
|||||||
if (feature) {
|
if (feature) {
|
||||||
title = feature.title;
|
title = feature.title;
|
||||||
description = feature.description;
|
description = feature.description;
|
||||||
branchName = feature.branchName ?? undefined;
|
branchName = feature.branchName;
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
// Silently ignore
|
// Silently ignore
|
||||||
@@ -1148,33 +1081,12 @@ export class AutoModeServiceFacade {
|
|||||||
// ===========================================================================
|
// ===========================================================================
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Save execution state for recovery.
|
* Save execution state for recovery
|
||||||
*
|
|
||||||
* Uses the active auto-loop config for each worktree so that the persisted
|
|
||||||
* state reflects the real branch and maxConcurrency values rather than the
|
|
||||||
* hard-coded fallbacks (null / DEFAULT_MAX_CONCURRENCY).
|
|
||||||
*/
|
*/
|
||||||
private async saveExecutionState(): Promise<void> {
|
private async saveExecutionState(): Promise<void> {
|
||||||
const projectWorktrees = this.autoLoopCoordinator
|
|
||||||
.getActiveWorktrees()
|
|
||||||
.filter((w) => w.projectPath === this.projectPath);
|
|
||||||
|
|
||||||
if (projectWorktrees.length === 0) {
|
|
||||||
// No active auto loops — save with defaults as a best-effort fallback.
|
|
||||||
return this.saveExecutionStateForProject(null, DEFAULT_MAX_CONCURRENCY);
|
return this.saveExecutionStateForProject(null, DEFAULT_MAX_CONCURRENCY);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save state for every active worktree using its real config values.
|
|
||||||
for (const { branchName } of projectWorktrees) {
|
|
||||||
const config = this.autoLoopCoordinator.getAutoLoopConfigForProject(
|
|
||||||
this.projectPath,
|
|
||||||
branchName
|
|
||||||
);
|
|
||||||
const maxConcurrency = config?.maxConcurrency ?? DEFAULT_MAX_CONCURRENCY;
|
|
||||||
await this.saveExecutionStateForProject(branchName, maxConcurrency);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Save execution state for a specific worktree
|
* Save execution state for a specific worktree
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -159,7 +159,7 @@ export class GlobalAutoModeService {
|
|||||||
if (feature) {
|
if (feature) {
|
||||||
title = feature.title;
|
title = feature.title;
|
||||||
description = feature.description;
|
description = feature.description;
|
||||||
branchName = feature.branchName ?? undefined;
|
branchName = feature.branchName;
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
// Silently ignore
|
// Silently ignore
|
||||||
|
|||||||
@@ -1,426 +0,0 @@
|
|||||||
/**
|
|
||||||
* branch-sync-service - Sync a local base branch with its remote tracking branch
|
|
||||||
*
|
|
||||||
* Provides logic to detect remote tracking branches, check whether a branch
|
|
||||||
* is checked out in any worktree, and fast-forward a local branch to match
|
|
||||||
* its remote counterpart. Extracted from the worktree create route so
|
|
||||||
* the git logic is decoupled from HTTP request/response handling.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { createLogger, getErrorMessage } from '@automaker/utils';
|
|
||||||
import { execGitCommand } from '../lib/git.js';
|
|
||||||
|
|
||||||
const logger = createLogger('BranchSyncService');
|
|
||||||
|
|
||||||
/** Timeout for git fetch operations (30 seconds) */
|
|
||||||
const FETCH_TIMEOUT_MS = 30_000;
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Types
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result of attempting to sync a base branch with its remote.
|
|
||||||
*/
|
|
||||||
export interface BaseBranchSyncResult {
|
|
||||||
/** Whether the sync was attempted */
|
|
||||||
attempted: boolean;
|
|
||||||
/** Whether the sync succeeded */
|
|
||||||
synced: boolean;
|
|
||||||
/** Whether the ref was resolved (but not synced, e.g. remote ref, tag, or commit hash) */
|
|
||||||
resolved?: boolean;
|
|
||||||
/** The remote that was synced from (e.g. 'origin') */
|
|
||||||
remote?: string;
|
|
||||||
/** The commit hash the base branch points to after sync */
|
|
||||||
commitHash?: string;
|
|
||||||
/** Human-readable message about the sync result */
|
|
||||||
message?: string;
|
|
||||||
/** Whether the branch had diverged (local commits ahead of remote) */
|
|
||||||
diverged?: boolean;
|
|
||||||
/** Whether the user can proceed with a stale local copy */
|
|
||||||
canProceedWithStale?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Helpers
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Detect the remote tracking branch for a given local branch.
|
|
||||||
*
|
|
||||||
* @param projectPath - Path to the git repository
|
|
||||||
* @param branchName - Local branch name to check (e.g. 'main')
|
|
||||||
* @returns Object with remote name and remote branch, or null if no tracking branch
|
|
||||||
*/
|
|
||||||
export async function getTrackingBranch(
|
|
||||||
projectPath: string,
|
|
||||||
branchName: string
|
|
||||||
): Promise<{ remote: string; remoteBranch: string } | null> {
|
|
||||||
try {
|
|
||||||
// git rev-parse --abbrev-ref <branch>@{upstream} returns e.g. "origin/main"
|
|
||||||
const upstream = await execGitCommand(
|
|
||||||
['rev-parse', '--abbrev-ref', `${branchName}@{upstream}`],
|
|
||||||
projectPath
|
|
||||||
);
|
|
||||||
const trimmed = upstream.trim();
|
|
||||||
if (!trimmed) return null;
|
|
||||||
|
|
||||||
// First, attempt to determine the remote name explicitly via git config
|
|
||||||
// so that remotes whose names contain slashes are handled correctly.
|
|
||||||
let remote: string | null = null;
|
|
||||||
try {
|
|
||||||
const configRemote = await execGitCommand(
|
|
||||||
['config', '--get', `branch.${branchName}.remote`],
|
|
||||||
projectPath
|
|
||||||
);
|
|
||||||
const configRemoteTrimmed = configRemote.trim();
|
|
||||||
if (configRemoteTrimmed) {
|
|
||||||
remote = configRemoteTrimmed;
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// git config lookup failed — will fall back to string splitting below
|
|
||||||
}
|
|
||||||
|
|
||||||
if (remote) {
|
|
||||||
// Strip the known remote prefix (plus the separating '/') to get the remote branch.
|
|
||||||
// The upstream string is expected to be "<remote>/<remoteBranch>".
|
|
||||||
const prefix = `${remote}/`;
|
|
||||||
if (trimmed.startsWith(prefix)) {
|
|
||||||
return {
|
|
||||||
remote,
|
|
||||||
remoteBranch: trimmed.substring(prefix.length),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
// Upstream doesn't start with the expected prefix — fall through to split
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fall back: split on the FIRST slash, which favors the common case of
|
|
||||||
// single-name remotes with slash-containing branch names (e.g.
|
|
||||||
// "origin/feature/foo" → remote="origin", remoteBranch="feature/foo").
|
|
||||||
// Remotes with slashes in their names are uncommon and are already handled
|
|
||||||
// by the git-config lookup above; this fallback only runs when that lookup
|
|
||||||
// fails, so optimizing for single-name remotes is the safer default.
|
|
||||||
const slashIndex = trimmed.indexOf('/');
|
|
||||||
if (slashIndex > 0) {
|
|
||||||
return {
|
|
||||||
remote: trimmed.substring(0, slashIndex),
|
|
||||||
remoteBranch: trimmed.substring(slashIndex + 1),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
} catch {
|
|
||||||
// No upstream tracking branch configured
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check whether a branch is checked out in ANY worktree (main or linked).
|
|
||||||
* Uses `git worktree list --porcelain` to enumerate all worktrees and
|
|
||||||
* checks if any of them has the given branch as their HEAD.
|
|
||||||
*
|
|
||||||
* Returns the absolute path of the worktree where the branch is checked out,
|
|
||||||
* or null if the branch is not checked out anywhere. Callers can use the
|
|
||||||
* returned path to run commands (e.g. `git merge`) inside the correct worktree.
|
|
||||||
*
|
|
||||||
* This prevents using `git update-ref` on a branch that is checked out in
|
|
||||||
* a linked worktree, which would desync that worktree's HEAD.
|
|
||||||
*/
|
|
||||||
export async function isBranchCheckedOut(
|
|
||||||
projectPath: string,
|
|
||||||
branchName: string
|
|
||||||
): Promise<string | null> {
|
|
||||||
try {
|
|
||||||
const stdout = await execGitCommand(['worktree', 'list', '--porcelain'], projectPath);
|
|
||||||
const lines = stdout.split('\n');
|
|
||||||
let currentWorktreePath: string | null = null;
|
|
||||||
let currentBranch: string | null = null;
|
|
||||||
|
|
||||||
for (const line of lines) {
|
|
||||||
if (line.startsWith('worktree ')) {
|
|
||||||
currentWorktreePath = line.slice(9);
|
|
||||||
} else if (line.startsWith('branch ')) {
|
|
||||||
currentBranch = line.slice(7).replace('refs/heads/', '');
|
|
||||||
} else if (line === '') {
|
|
||||||
// End of a worktree entry — check for match, then reset for the next
|
|
||||||
if (currentBranch === branchName && currentWorktreePath) {
|
|
||||||
return currentWorktreePath;
|
|
||||||
}
|
|
||||||
currentWorktreePath = null;
|
|
||||||
currentBranch = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check the last entry (if output doesn't end with a blank line)
|
|
||||||
if (currentBranch === branchName && currentWorktreePath) {
|
|
||||||
return currentWorktreePath;
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Build a BaseBranchSyncResult for cases where we proceed with a stale local copy.
|
|
||||||
* Extracts the repeated pattern of getting the short commit hash with a fallback.
|
|
||||||
*/
|
|
||||||
export async function buildStaleResult(
|
|
||||||
projectPath: string,
|
|
||||||
branchName: string,
|
|
||||||
remote: string | undefined,
|
|
||||||
message: string,
|
|
||||||
extra?: Partial<BaseBranchSyncResult>
|
|
||||||
): Promise<BaseBranchSyncResult> {
|
|
||||||
let commitHash: string | undefined;
|
|
||||||
try {
|
|
||||||
const hash = await execGitCommand(['rev-parse', '--short', branchName], projectPath);
|
|
||||||
commitHash = hash.trim();
|
|
||||||
} catch {
|
|
||||||
/* ignore — commit hash is non-critical */
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
attempted: true,
|
|
||||||
synced: false,
|
|
||||||
remote,
|
|
||||||
commitHash,
|
|
||||||
message,
|
|
||||||
canProceedWithStale: true,
|
|
||||||
...extra,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Main Sync Function
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sync a local base branch with its remote tracking branch using fast-forward only.
|
|
||||||
*
|
|
||||||
* This function:
|
|
||||||
* 1. Detects the remote tracking branch for the given local branch
|
|
||||||
* 2. Fetches latest from that remote (unless skipFetch is true)
|
|
||||||
* 3. Attempts a fast-forward-only update of the local branch
|
|
||||||
* 4. If the branch has diverged, reports the divergence and allows proceeding with stale copy
|
|
||||||
* 5. If no remote tracking branch exists, skips silently
|
|
||||||
*
|
|
||||||
* @param projectPath - Path to the git repository
|
|
||||||
* @param branchName - The local branch name to sync (e.g. 'main')
|
|
||||||
* @param skipFetch - When true, skip the internal git fetch (caller has already fetched)
|
|
||||||
* @returns Sync result with status information
|
|
||||||
*/
|
|
||||||
export async function syncBaseBranch(
|
|
||||||
projectPath: string,
|
|
||||||
branchName: string,
|
|
||||||
skipFetch = false
|
|
||||||
): Promise<BaseBranchSyncResult> {
|
|
||||||
// Check if the branch exists as a local branch (under refs/heads/).
|
|
||||||
// This correctly handles branch names containing slashes (e.g. "feature/abc",
|
|
||||||
// "fix/issue-123") which are valid local branch names, not remote refs.
|
|
||||||
let existsLocally = false;
|
|
||||||
try {
|
|
||||||
await execGitCommand(['rev-parse', '--verify', `refs/heads/${branchName}`], projectPath);
|
|
||||||
existsLocally = true;
|
|
||||||
} catch {
|
|
||||||
existsLocally = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!existsLocally) {
|
|
||||||
// Not a local branch — check if it's a valid ref (remote ref, tag, or commit hash).
|
|
||||||
// No synchronization is performed here; we only resolve the ref to a commit hash.
|
|
||||||
try {
|
|
||||||
const commitHash = await execGitCommand(['rev-parse', '--short', branchName], projectPath);
|
|
||||||
return {
|
|
||||||
attempted: false,
|
|
||||||
synced: false,
|
|
||||||
resolved: true,
|
|
||||||
commitHash: commitHash.trim(),
|
|
||||||
message: `Ref '${branchName}' resolved (not a local branch; no sync performed)`,
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return {
|
|
||||||
attempted: false,
|
|
||||||
synced: false,
|
|
||||||
message: `Ref '${branchName}' not found`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Detect remote tracking branch
|
|
||||||
const tracking = await getTrackingBranch(projectPath, branchName);
|
|
||||||
if (!tracking) {
|
|
||||||
// No remote tracking branch — skip silently
|
|
||||||
logger.info(`Branch '${branchName}' has no remote tracking branch, skipping sync`);
|
|
||||||
try {
|
|
||||||
const commitHash = await execGitCommand(['rev-parse', '--short', branchName], projectPath);
|
|
||||||
return {
|
|
||||||
attempted: false,
|
|
||||||
synced: false,
|
|
||||||
commitHash: commitHash.trim(),
|
|
||||||
message: `Branch '${branchName}' has no remote tracking branch`,
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return {
|
|
||||||
attempted: false,
|
|
||||||
synced: false,
|
|
||||||
message: `Branch '${branchName}' has no remote tracking branch`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`Syncing base branch '${branchName}' from ${tracking.remote}/${tracking.remoteBranch}`
|
|
||||||
);
|
|
||||||
|
|
||||||
// Fetch the specific remote unless the caller has already performed a fetch
|
|
||||||
// (e.g. via `git fetch --all`) and passed skipFetch=true to avoid redundant work.
|
|
||||||
if (!skipFetch) {
|
|
||||||
try {
|
|
||||||
const fetchController = new AbortController();
|
|
||||||
const fetchTimer = setTimeout(() => fetchController.abort(), FETCH_TIMEOUT_MS);
|
|
||||||
try {
|
|
||||||
await execGitCommand(
|
|
||||||
['fetch', tracking.remote, tracking.remoteBranch, '--quiet'],
|
|
||||||
projectPath,
|
|
||||||
undefined,
|
|
||||||
fetchController
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
clearTimeout(fetchTimer);
|
|
||||||
}
|
|
||||||
} catch (fetchErr) {
|
|
||||||
// Fetch failed — network error, auth error, etc.
|
|
||||||
// Allow proceeding with stale local copy
|
|
||||||
const errMsg = getErrorMessage(fetchErr);
|
|
||||||
logger.warn(`Failed to fetch ${tracking.remote}/${tracking.remoteBranch}: ${errMsg}`);
|
|
||||||
return buildStaleResult(
|
|
||||||
projectPath,
|
|
||||||
branchName,
|
|
||||||
tracking.remote,
|
|
||||||
`Failed to fetch from remote: ${errMsg}. Proceeding with local copy.`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.info(`Skipping fetch for '${branchName}' (caller already fetched from remotes)`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if the local branch is behind, ahead, or diverged from the remote
|
|
||||||
const remoteRef = `${tracking.remote}/${tracking.remoteBranch}`;
|
|
||||||
try {
|
|
||||||
// Count commits ahead and behind
|
|
||||||
const revListOutput = await execGitCommand(
|
|
||||||
['rev-list', '--left-right', '--count', `${branchName}...${remoteRef}`],
|
|
||||||
projectPath
|
|
||||||
);
|
|
||||||
const parts = revListOutput.trim().split(/\s+/);
|
|
||||||
const ahead = parseInt(parts[0], 10) || 0;
|
|
||||||
const behind = parseInt(parts[1], 10) || 0;
|
|
||||||
|
|
||||||
if (ahead === 0 && behind === 0) {
|
|
||||||
// Already up to date
|
|
||||||
const commitHash = await execGitCommand(['rev-parse', '--short', branchName], projectPath);
|
|
||||||
logger.info(`Branch '${branchName}' is already up to date with ${remoteRef}`);
|
|
||||||
return {
|
|
||||||
attempted: true,
|
|
||||||
synced: true,
|
|
||||||
remote: tracking.remote,
|
|
||||||
commitHash: commitHash.trim(),
|
|
||||||
message: `Branch '${branchName}' is already up to date`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ahead > 0 && behind > 0) {
|
|
||||||
// Branch has diverged — cannot fast-forward
|
|
||||||
logger.warn(
|
|
||||||
`Branch '${branchName}' has diverged from ${remoteRef} (${ahead} ahead, ${behind} behind)`
|
|
||||||
);
|
|
||||||
return buildStaleResult(
|
|
||||||
projectPath,
|
|
||||||
branchName,
|
|
||||||
tracking.remote,
|
|
||||||
`Branch '${branchName}' has diverged from ${remoteRef} (${ahead} commit(s) ahead, ${behind} behind). Using local copy to avoid overwriting local commits.`,
|
|
||||||
{ diverged: true }
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ahead > 0 && behind === 0) {
|
|
||||||
// Local is ahead — nothing to pull, already has everything from remote plus more
|
|
||||||
const commitHash = await execGitCommand(['rev-parse', '--short', branchName], projectPath);
|
|
||||||
logger.info(`Branch '${branchName}' is ${ahead} commit(s) ahead of ${remoteRef}`);
|
|
||||||
return {
|
|
||||||
attempted: true,
|
|
||||||
synced: true,
|
|
||||||
remote: tracking.remote,
|
|
||||||
commitHash: commitHash.trim(),
|
|
||||||
message: `Branch '${branchName}' is ${ahead} commit(s) ahead of remote`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// behind > 0 && ahead === 0 — can fast-forward
|
|
||||||
logger.info(
|
|
||||||
`Branch '${branchName}' is ${behind} commit(s) behind ${remoteRef}, fast-forwarding`
|
|
||||||
);
|
|
||||||
|
|
||||||
// Determine whether the branch is currently checked out (returns the
|
|
||||||
// worktree path where it is checked out, or null if not checked out)
|
|
||||||
const worktreePath = await isBranchCheckedOut(projectPath, branchName);
|
|
||||||
|
|
||||||
if (worktreePath) {
|
|
||||||
// Branch is checked out in a worktree — use git merge --ff-only
|
|
||||||
// Run the merge inside the worktree that has the branch checked out
|
|
||||||
try {
|
|
||||||
await execGitCommand(['merge', '--ff-only', remoteRef], worktreePath);
|
|
||||||
} catch (mergeErr) {
|
|
||||||
const errMsg = getErrorMessage(mergeErr);
|
|
||||||
logger.warn(`Fast-forward merge failed for '${branchName}': ${errMsg}`);
|
|
||||||
return buildStaleResult(
|
|
||||||
projectPath,
|
|
||||||
branchName,
|
|
||||||
tracking.remote,
|
|
||||||
`Fast-forward merge failed: ${errMsg}. Proceeding with local copy.`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Branch is NOT checked out — use git update-ref to fast-forward without checkout
|
|
||||||
// This is safe because we already verified the branch is strictly behind (ahead === 0)
|
|
||||||
try {
|
|
||||||
const remoteCommit = await execGitCommand(['rev-parse', remoteRef], projectPath);
|
|
||||||
await execGitCommand(
|
|
||||||
['update-ref', `refs/heads/${branchName}`, remoteCommit.trim()],
|
|
||||||
projectPath
|
|
||||||
);
|
|
||||||
} catch (updateErr) {
|
|
||||||
const errMsg = getErrorMessage(updateErr);
|
|
||||||
logger.warn(`update-ref failed for '${branchName}': ${errMsg}`);
|
|
||||||
return buildStaleResult(
|
|
||||||
projectPath,
|
|
||||||
branchName,
|
|
||||||
tracking.remote,
|
|
||||||
`Failed to fast-forward branch: ${errMsg}. Proceeding with local copy.`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Successfully fast-forwarded
|
|
||||||
const commitHash = await execGitCommand(['rev-parse', '--short', branchName], projectPath);
|
|
||||||
logger.info(`Successfully synced '${branchName}' to ${commitHash.trim()} from ${remoteRef}`);
|
|
||||||
return {
|
|
||||||
attempted: true,
|
|
||||||
synced: true,
|
|
||||||
remote: tracking.remote,
|
|
||||||
commitHash: commitHash.trim(),
|
|
||||||
message: `Fast-forwarded '${branchName}' by ${behind} commit(s) from ${remoteRef}`,
|
|
||||||
};
|
|
||||||
} catch (err) {
|
|
||||||
// Unexpected error during rev-list or merge — proceed with stale
|
|
||||||
const errMsg = getErrorMessage(err);
|
|
||||||
logger.warn(`Unexpected error syncing '${branchName}': ${errMsg}`);
|
|
||||||
return buildStaleResult(
|
|
||||||
projectPath,
|
|
||||||
branchName,
|
|
||||||
tracking.remote,
|
|
||||||
`Sync failed: ${errMsg}. Proceeding with local copy.`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -19,10 +19,6 @@ const logger = createLogger('DevServerService');
|
|||||||
// Maximum scrollback buffer size (characters) - matches TerminalService pattern
|
// Maximum scrollback buffer size (characters) - matches TerminalService pattern
|
||||||
const MAX_SCROLLBACK_SIZE = 50000; // ~50KB per dev server
|
const MAX_SCROLLBACK_SIZE = 50000; // ~50KB per dev server
|
||||||
|
|
||||||
// Timeout (ms) before falling back to the allocated port if URL detection hasn't succeeded.
|
|
||||||
// This handles cases where the dev server output format is not recognized by any pattern.
|
|
||||||
const URL_DETECTION_TIMEOUT_MS = 30_000;
|
|
||||||
|
|
||||||
// URL patterns for detecting full URLs from dev server output.
|
// URL patterns for detecting full URLs from dev server output.
|
||||||
// Defined once at module level to avoid reallocation on every call to detectUrlFromOutput.
|
// Defined once at module level to avoid reallocation on every call to detectUrlFromOutput.
|
||||||
// Ordered from most specific (framework-specific) to least specific.
|
// Ordered from most specific (framework-specific) to least specific.
|
||||||
@@ -92,8 +88,6 @@ const OUTPUT_BATCH_SIZE = 4096; // Smaller batches for lower latency
|
|||||||
|
|
||||||
export interface DevServerInfo {
|
export interface DevServerInfo {
|
||||||
worktreePath: string;
|
worktreePath: string;
|
||||||
/** The port originally reserved by findAvailablePort() – never mutated after startDevServer sets it */
|
|
||||||
allocatedPort: number;
|
|
||||||
port: number;
|
port: number;
|
||||||
url: string;
|
url: string;
|
||||||
process: ChildProcess | null;
|
process: ChildProcess | null;
|
||||||
@@ -108,8 +102,6 @@ export interface DevServerInfo {
|
|||||||
stopping: boolean;
|
stopping: boolean;
|
||||||
// Flag to indicate if URL has been detected from output
|
// Flag to indicate if URL has been detected from output
|
||||||
urlDetected: boolean;
|
urlDetected: boolean;
|
||||||
// Timer for URL detection timeout fallback
|
|
||||||
urlDetectionTimeout: NodeJS.Timeout | null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Port allocation starts at 3001 to avoid conflicts with common dev ports
|
// Port allocation starts at 3001 to avoid conflicts with common dev ports
|
||||||
@@ -132,32 +124,6 @@ class DevServerService {
|
|||||||
this.emitter = emitter;
|
this.emitter = emitter;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Prune a stale server entry whose process has exited without cleanup.
|
|
||||||
* Clears any pending timers, removes the port from allocatedPorts, deletes
|
|
||||||
* the entry from runningServers, and emits the "dev-server:stopped" event
|
|
||||||
* so all callers consistently notify the frontend when pruning entries.
|
|
||||||
*
|
|
||||||
* @param worktreePath - The key used in runningServers
|
|
||||||
* @param server - The DevServerInfo entry to prune
|
|
||||||
*/
|
|
||||||
private pruneStaleServer(worktreePath: string, server: DevServerInfo): void {
|
|
||||||
if (server.flushTimeout) clearTimeout(server.flushTimeout);
|
|
||||||
if (server.urlDetectionTimeout) clearTimeout(server.urlDetectionTimeout);
|
|
||||||
// Use allocatedPort (immutable) to free the reserved slot; server.port may have
|
|
||||||
// been mutated by detectUrlFromOutput to reflect the actual detected port.
|
|
||||||
this.allocatedPorts.delete(server.allocatedPort);
|
|
||||||
this.runningServers.delete(worktreePath);
|
|
||||||
if (this.emitter) {
|
|
||||||
this.emitter.emit('dev-server:stopped', {
|
|
||||||
worktreePath,
|
|
||||||
port: server.port, // Report the externally-visible (detected) port
|
|
||||||
exitCode: server.process?.exitCode ?? null,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Append data to scrollback buffer with size limit enforcement
|
* Append data to scrollback buffer with size limit enforcement
|
||||||
* Evicts oldest data when buffer exceeds MAX_SCROLLBACK_SIZE
|
* Evicts oldest data when buffer exceeds MAX_SCROLLBACK_SIZE
|
||||||
@@ -287,12 +253,6 @@ class DevServerService {
|
|||||||
server.url = detectedUrl;
|
server.url = detectedUrl;
|
||||||
server.urlDetected = true;
|
server.urlDetected = true;
|
||||||
|
|
||||||
// Clear the URL detection timeout since we found the URL
|
|
||||||
if (server.urlDetectionTimeout) {
|
|
||||||
clearTimeout(server.urlDetectionTimeout);
|
|
||||||
server.urlDetectionTimeout = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the port to match the detected URL's actual port
|
// Update the port to match the detected URL's actual port
|
||||||
const detectedPort = this.extractPortFromUrl(detectedUrl);
|
const detectedPort = this.extractPortFromUrl(detectedUrl);
|
||||||
if (detectedPort && detectedPort !== server.port) {
|
if (detectedPort && detectedPort !== server.port) {
|
||||||
@@ -331,12 +291,6 @@ class DevServerService {
|
|||||||
server.url = detectedUrl;
|
server.url = detectedUrl;
|
||||||
server.urlDetected = true;
|
server.urlDetected = true;
|
||||||
|
|
||||||
// Clear the URL detection timeout since we found the port
|
|
||||||
if (server.urlDetectionTimeout) {
|
|
||||||
clearTimeout(server.urlDetectionTimeout);
|
|
||||||
server.urlDetectionTimeout = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (detectedPort !== server.port) {
|
if (detectedPort !== server.port) {
|
||||||
logger.info(
|
logger.info(
|
||||||
`Port mismatch: allocated ${server.port}, detected ${detectedPort} from ${description}`
|
`Port mismatch: allocated ${server.port}, detected ${detectedPort} from ${description}`
|
||||||
@@ -706,7 +660,6 @@ class DevServerService {
|
|||||||
const hostname = process.env.HOSTNAME || 'localhost';
|
const hostname = process.env.HOSTNAME || 'localhost';
|
||||||
const serverInfo: DevServerInfo = {
|
const serverInfo: DevServerInfo = {
|
||||||
worktreePath,
|
worktreePath,
|
||||||
allocatedPort: port, // Immutable: records which port we reserved; never changed after this point
|
|
||||||
port,
|
port,
|
||||||
url: `http://${hostname}:${port}`, // Initial URL, may be updated by detectUrlFromOutput
|
url: `http://${hostname}:${port}`, // Initial URL, may be updated by detectUrlFromOutput
|
||||||
process: devProcess,
|
process: devProcess,
|
||||||
@@ -716,7 +669,6 @@ class DevServerService {
|
|||||||
flushTimeout: null,
|
flushTimeout: null,
|
||||||
stopping: false,
|
stopping: false,
|
||||||
urlDetected: false, // Will be set to true when actual URL is detected from output
|
urlDetected: false, // Will be set to true when actual URL is detected from output
|
||||||
urlDetectionTimeout: null, // Will be set after server starts successfully
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Capture stdout with buffer management and event emission
|
// Capture stdout with buffer management and event emission
|
||||||
@@ -740,24 +692,18 @@ class DevServerService {
|
|||||||
serverInfo.flushTimeout = null;
|
serverInfo.flushTimeout = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clear URL detection timeout to prevent stale fallback emission
|
|
||||||
if (serverInfo.urlDetectionTimeout) {
|
|
||||||
clearTimeout(serverInfo.urlDetectionTimeout);
|
|
||||||
serverInfo.urlDetectionTimeout = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Emit stopped event (only if not already stopping - prevents duplicate events)
|
// Emit stopped event (only if not already stopping - prevents duplicate events)
|
||||||
if (this.emitter && !serverInfo.stopping) {
|
if (this.emitter && !serverInfo.stopping) {
|
||||||
this.emitter.emit('dev-server:stopped', {
|
this.emitter.emit('dev-server:stopped', {
|
||||||
worktreePath,
|
worktreePath,
|
||||||
port: serverInfo.port, // Use the detected port (may differ from allocated port if detectUrlFromOutput updated it)
|
port,
|
||||||
exitCode,
|
exitCode,
|
||||||
error: errorMessage,
|
error: errorMessage,
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
this.allocatedPorts.delete(serverInfo.allocatedPort);
|
this.allocatedPorts.delete(port);
|
||||||
this.runningServers.delete(worktreePath);
|
this.runningServers.delete(worktreePath);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -803,43 +749,6 @@ class DevServerService {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set up URL detection timeout fallback.
|
|
||||||
// If URL detection hasn't succeeded after URL_DETECTION_TIMEOUT_MS, check if
|
|
||||||
// the allocated port is actually in use (server probably started successfully)
|
|
||||||
// and emit a url-detected event with the allocated port as fallback.
|
|
||||||
// Also re-scan the scrollback buffer in case the URL was printed before
|
|
||||||
// our patterns could match (e.g., it was split across multiple data chunks).
|
|
||||||
serverInfo.urlDetectionTimeout = setTimeout(() => {
|
|
||||||
serverInfo.urlDetectionTimeout = null;
|
|
||||||
|
|
||||||
// Only run fallback if server is still running and URL wasn't detected
|
|
||||||
if (serverInfo.stopping || serverInfo.urlDetected || !this.runningServers.has(worktreePath)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Re-scan the entire scrollback buffer for URL patterns
|
|
||||||
// This catches cases where the URL was split across multiple output chunks
|
|
||||||
logger.info(`URL detection timeout for ${worktreePath}, re-scanning scrollback buffer`);
|
|
||||||
this.detectUrlFromOutput(serverInfo, serverInfo.scrollbackBuffer);
|
|
||||||
|
|
||||||
// If still not detected after full rescan, use the allocated port as fallback
|
|
||||||
if (!serverInfo.urlDetected) {
|
|
||||||
logger.info(`URL detection fallback: using allocated port ${port} for ${worktreePath}`);
|
|
||||||
const fallbackUrl = `http://${hostname}:${port}`;
|
|
||||||
serverInfo.url = fallbackUrl;
|
|
||||||
serverInfo.urlDetected = true;
|
|
||||||
|
|
||||||
if (this.emitter) {
|
|
||||||
this.emitter.emit('dev-server:url-detected', {
|
|
||||||
worktreePath,
|
|
||||||
url: fallbackUrl,
|
|
||||||
port,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}, URL_DETECTION_TIMEOUT_MS);
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
success: true,
|
success: true,
|
||||||
result: {
|
result: {
|
||||||
@@ -885,12 +794,6 @@ class DevServerService {
|
|||||||
server.flushTimeout = null;
|
server.flushTimeout = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clean up URL detection timeout
|
|
||||||
if (server.urlDetectionTimeout) {
|
|
||||||
clearTimeout(server.urlDetectionTimeout);
|
|
||||||
server.urlDetectionTimeout = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clear any pending output buffer
|
// Clear any pending output buffer
|
||||||
server.outputBuffer = '';
|
server.outputBuffer = '';
|
||||||
|
|
||||||
@@ -909,10 +812,8 @@ class DevServerService {
|
|||||||
server.process.kill('SIGTERM');
|
server.process.kill('SIGTERM');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Free the originally-reserved port slot (allocatedPort is immutable and always
|
// Free the port
|
||||||
// matches what was added to allocatedPorts in startDevServer; server.port may
|
this.allocatedPorts.delete(server.port);
|
||||||
// have been updated by detectUrlFromOutput to the actual detected port).
|
|
||||||
this.allocatedPorts.delete(server.allocatedPort);
|
|
||||||
this.runningServers.delete(worktreePath);
|
this.runningServers.delete(worktreePath);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -926,7 +827,6 @@ class DevServerService {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* List all running dev servers
|
* List all running dev servers
|
||||||
* Also verifies that each server's process is still alive, removing stale entries
|
|
||||||
*/
|
*/
|
||||||
listDevServers(): {
|
listDevServers(): {
|
||||||
success: boolean;
|
success: boolean;
|
||||||
@@ -936,37 +836,14 @@ class DevServerService {
|
|||||||
port: number;
|
port: number;
|
||||||
url: string;
|
url: string;
|
||||||
urlDetected: boolean;
|
urlDetected: boolean;
|
||||||
startedAt: string;
|
|
||||||
}>;
|
}>;
|
||||||
};
|
};
|
||||||
} {
|
} {
|
||||||
// Prune any servers whose process has died without us being notified
|
|
||||||
// This handles edge cases where the process exited but the 'exit' event was missed
|
|
||||||
const stalePaths: string[] = [];
|
|
||||||
for (const [worktreePath, server] of this.runningServers) {
|
|
||||||
// Check if exitCode is a number (not null/undefined) - indicates process has exited
|
|
||||||
if (server.process && typeof server.process.exitCode === 'number') {
|
|
||||||
logger.info(
|
|
||||||
`Pruning stale server entry for ${worktreePath} (process exited with code ${server.process.exitCode})`
|
|
||||||
);
|
|
||||||
stalePaths.push(worktreePath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (const stalePath of stalePaths) {
|
|
||||||
const server = this.runningServers.get(stalePath);
|
|
||||||
if (server) {
|
|
||||||
// Delegate to the shared helper so timers, ports, and the stopped event
|
|
||||||
// are all handled consistently with isRunning and getServerInfo.
|
|
||||||
this.pruneStaleServer(stalePath, server);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const servers = Array.from(this.runningServers.values()).map((s) => ({
|
const servers = Array.from(this.runningServers.values()).map((s) => ({
|
||||||
worktreePath: s.worktreePath,
|
worktreePath: s.worktreePath,
|
||||||
port: s.port,
|
port: s.port,
|
||||||
url: s.url,
|
url: s.url,
|
||||||
urlDetected: s.urlDetected,
|
urlDetected: s.urlDetected,
|
||||||
startedAt: s.startedAt.toISOString(),
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -976,33 +853,17 @@ class DevServerService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if a worktree has a running dev server.
|
* Check if a worktree has a running dev server
|
||||||
* Also prunes stale entries where the process has exited.
|
|
||||||
*/
|
*/
|
||||||
isRunning(worktreePath: string): boolean {
|
isRunning(worktreePath: string): boolean {
|
||||||
const server = this.runningServers.get(worktreePath);
|
return this.runningServers.has(worktreePath);
|
||||||
if (!server) return false;
|
|
||||||
// Prune stale entry if the process has exited
|
|
||||||
if (server.process && typeof server.process.exitCode === 'number') {
|
|
||||||
this.pruneStaleServer(worktreePath, server);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get info for a specific worktree's dev server.
|
* Get info for a specific worktree's dev server
|
||||||
* Also prunes stale entries where the process has exited.
|
|
||||||
*/
|
*/
|
||||||
getServerInfo(worktreePath: string): DevServerInfo | undefined {
|
getServerInfo(worktreePath: string): DevServerInfo | undefined {
|
||||||
const server = this.runningServers.get(worktreePath);
|
return this.runningServers.get(worktreePath);
|
||||||
if (!server) return undefined;
|
|
||||||
// Prune stale entry if the process has exited
|
|
||||||
if (server.process && typeof server.process.exitCode === 'number') {
|
|
||||||
this.pruneStaleServer(worktreePath, server);
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return server;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1030,15 +891,6 @@ class DevServerService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Prune stale entry if the process has been killed or has exited
|
|
||||||
if (server.process && (server.process.killed || server.process.exitCode != null)) {
|
|
||||||
this.pruneStaleServer(worktreePath, server);
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `No dev server running for worktree: ${worktreePath}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
success: true,
|
success: true,
|
||||||
result: {
|
result: {
|
||||||
|
|||||||
@@ -60,13 +60,10 @@ interface AutoModeEventPayload {
|
|||||||
featureId?: string;
|
featureId?: string;
|
||||||
featureName?: string;
|
featureName?: string;
|
||||||
passes?: boolean;
|
passes?: boolean;
|
||||||
executionMode?: 'auto' | 'manual';
|
|
||||||
message?: string;
|
message?: string;
|
||||||
error?: string;
|
error?: string;
|
||||||
errorType?: string;
|
errorType?: string;
|
||||||
projectPath?: string;
|
projectPath?: string;
|
||||||
/** Status field present when type === 'feature_status_changed' */
|
|
||||||
status?: string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -78,40 +75,6 @@ interface FeatureCreatedPayload {
|
|||||||
projectPath: string;
|
projectPath: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Feature status changed event payload structure
|
|
||||||
*/
|
|
||||||
interface FeatureStatusChangedPayload {
|
|
||||||
featureId: string;
|
|
||||||
projectPath: string;
|
|
||||||
status: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Type guard to safely narrow AutoModeEventPayload to FeatureStatusChangedPayload
|
|
||||||
*/
|
|
||||||
function isFeatureStatusChangedPayload(
|
|
||||||
payload: AutoModeEventPayload
|
|
||||||
): payload is AutoModeEventPayload & FeatureStatusChangedPayload {
|
|
||||||
return (
|
|
||||||
typeof payload.featureId === 'string' &&
|
|
||||||
typeof payload.projectPath === 'string' &&
|
|
||||||
typeof payload.status === 'string'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Feature completed event payload structure
|
|
||||||
*/
|
|
||||||
interface FeatureCompletedPayload {
|
|
||||||
featureId: string;
|
|
||||||
featureName?: string;
|
|
||||||
projectPath: string;
|
|
||||||
passes?: boolean;
|
|
||||||
message?: string;
|
|
||||||
executionMode?: 'auto' | 'manual';
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Event Hook Service
|
* Event Hook Service
|
||||||
*
|
*
|
||||||
@@ -119,30 +82,12 @@ interface FeatureCompletedPayload {
|
|||||||
* Also stores events to history for debugging and replay.
|
* Also stores events to history for debugging and replay.
|
||||||
*/
|
*/
|
||||||
export class EventHookService {
|
export class EventHookService {
|
||||||
/** Feature status that indicates agent work is done and awaiting human review (tests skipped) */
|
|
||||||
private static readonly STATUS_WAITING_APPROVAL = 'waiting_approval';
|
|
||||||
/** Feature status that indicates agent work passed automated verification */
|
|
||||||
private static readonly STATUS_VERIFIED = 'verified';
|
|
||||||
|
|
||||||
private emitter: EventEmitter | null = null;
|
private emitter: EventEmitter | null = null;
|
||||||
private settingsService: SettingsService | null = null;
|
private settingsService: SettingsService | null = null;
|
||||||
private eventHistoryService: EventHistoryService | null = null;
|
private eventHistoryService: EventHistoryService | null = null;
|
||||||
private featureLoader: FeatureLoader | null = null;
|
private featureLoader: FeatureLoader | null = null;
|
||||||
private unsubscribe: (() => void) | null = null;
|
private unsubscribe: (() => void) | null = null;
|
||||||
|
|
||||||
/**
|
|
||||||
* Track feature IDs that have already had hooks fired via auto_mode_feature_complete
|
|
||||||
* to prevent double-firing when feature_status_changed also fires for the same feature.
|
|
||||||
* Entries are automatically cleaned up after 30 seconds.
|
|
||||||
*/
|
|
||||||
private recentlyHandledFeatures = new Set<string>();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Timer IDs for pending cleanup of recentlyHandledFeatures entries,
|
|
||||||
* keyed by featureId. Stored so they can be cancelled in destroy().
|
|
||||||
*/
|
|
||||||
private recentlyHandledTimers = new Map<string, ReturnType<typeof setTimeout>>();
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Initialize the service with event emitter, settings service, event history service, and feature loader
|
* Initialize the service with event emitter, settings service, event history service, and feature loader
|
||||||
*/
|
*/
|
||||||
@@ -163,8 +108,6 @@ export class EventHookService {
|
|||||||
this.handleAutoModeEvent(payload as AutoModeEventPayload);
|
this.handleAutoModeEvent(payload as AutoModeEventPayload);
|
||||||
} else if (type === 'feature:created') {
|
} else if (type === 'feature:created') {
|
||||||
this.handleFeatureCreatedEvent(payload as FeatureCreatedPayload);
|
this.handleFeatureCreatedEvent(payload as FeatureCreatedPayload);
|
||||||
} else if (type === 'feature:completed') {
|
|
||||||
this.handleFeatureCompletedEvent(payload as FeatureCompletedPayload);
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -179,12 +122,6 @@ export class EventHookService {
|
|||||||
this.unsubscribe();
|
this.unsubscribe();
|
||||||
this.unsubscribe = null;
|
this.unsubscribe = null;
|
||||||
}
|
}
|
||||||
// Cancel all pending cleanup timers to avoid cross-session mutations
|
|
||||||
for (const timerId of this.recentlyHandledTimers.values()) {
|
|
||||||
clearTimeout(timerId);
|
|
||||||
}
|
|
||||||
this.recentlyHandledTimers.clear();
|
|
||||||
this.recentlyHandledFeatures.clear();
|
|
||||||
this.emitter = null;
|
this.emitter = null;
|
||||||
this.settingsService = null;
|
this.settingsService = null;
|
||||||
this.eventHistoryService = null;
|
this.eventHistoryService = null;
|
||||||
@@ -202,31 +139,15 @@ export class EventHookService {
|
|||||||
|
|
||||||
switch (payload.type) {
|
switch (payload.type) {
|
||||||
case 'auto_mode_feature_complete':
|
case 'auto_mode_feature_complete':
|
||||||
// Only map explicit auto-mode completion events.
|
|
||||||
// Manual feature completions are emitted as feature:completed.
|
|
||||||
if (payload.executionMode !== 'auto') return;
|
|
||||||
trigger = payload.passes ? 'feature_success' : 'feature_error';
|
trigger = payload.passes ? 'feature_success' : 'feature_error';
|
||||||
// Track this feature so feature_status_changed doesn't double-fire hooks
|
|
||||||
if (payload.featureId) {
|
|
||||||
this.markFeatureHandled(payload.featureId);
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
case 'auto_mode_error':
|
case 'auto_mode_error':
|
||||||
// Feature-level error (has featureId) vs auto-mode level error
|
// Feature-level error (has featureId) vs auto-mode level error
|
||||||
trigger = payload.featureId ? 'feature_error' : 'auto_mode_error';
|
trigger = payload.featureId ? 'feature_error' : 'auto_mode_error';
|
||||||
// Track this feature so feature_status_changed doesn't double-fire hooks
|
|
||||||
if (payload.featureId) {
|
|
||||||
this.markFeatureHandled(payload.featureId);
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
case 'auto_mode_idle':
|
case 'auto_mode_idle':
|
||||||
trigger = 'auto_mode_complete';
|
trigger = 'auto_mode_complete';
|
||||||
break;
|
break;
|
||||||
case 'feature_status_changed':
|
|
||||||
if (isFeatureStatusChangedPayload(payload)) {
|
|
||||||
this.handleFeatureStatusChanged(payload);
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
default:
|
default:
|
||||||
// Other event types don't trigger hooks
|
// Other event types don't trigger hooks
|
||||||
return;
|
return;
|
||||||
@@ -249,15 +170,13 @@ export class EventHookService {
|
|||||||
|
|
||||||
// Build context for variable substitution
|
// Build context for variable substitution
|
||||||
// Use loaded featureName (from feature.title) or fall back to payload.featureName
|
// Use loaded featureName (from feature.title) or fall back to payload.featureName
|
||||||
// Only populate error/errorType for error triggers - don't leak success messages into error fields
|
|
||||||
const isErrorTrigger = trigger === 'feature_error' || trigger === 'auto_mode_error';
|
|
||||||
const context: HookContext = {
|
const context: HookContext = {
|
||||||
featureId: payload.featureId,
|
featureId: payload.featureId,
|
||||||
featureName: featureName || payload.featureName,
|
featureName: featureName || payload.featureName,
|
||||||
projectPath: payload.projectPath,
|
projectPath: payload.projectPath,
|
||||||
projectName: payload.projectPath ? this.extractProjectName(payload.projectPath) : undefined,
|
projectName: payload.projectPath ? this.extractProjectName(payload.projectPath) : undefined,
|
||||||
error: isErrorTrigger ? payload.error || payload.message : undefined,
|
error: payload.error || payload.message,
|
||||||
errorType: isErrorTrigger ? payload.errorType : undefined,
|
errorType: payload.errorType,
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
eventType: trigger,
|
eventType: trigger,
|
||||||
};
|
};
|
||||||
@@ -266,46 +185,6 @@ export class EventHookService {
|
|||||||
await this.executeHooksForTrigger(trigger, context, { passes: payload.passes });
|
await this.executeHooksForTrigger(trigger, context, { passes: payload.passes });
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle feature:completed events and trigger matching hooks
|
|
||||||
*/
|
|
||||||
private async handleFeatureCompletedEvent(payload: FeatureCompletedPayload): Promise<void> {
|
|
||||||
if (!payload.featureId || !payload.projectPath) return;
|
|
||||||
|
|
||||||
// Mark as handled to prevent duplicate firing if feature_status_changed also fires
|
|
||||||
this.markFeatureHandled(payload.featureId);
|
|
||||||
|
|
||||||
const passes = payload.passes ?? true;
|
|
||||||
const trigger: EventHookTrigger = passes ? 'feature_success' : 'feature_error';
|
|
||||||
|
|
||||||
// Load feature name if we have featureId but no featureName
|
|
||||||
let featureName: string | undefined = undefined;
|
|
||||||
if (payload.projectPath && this.featureLoader) {
|
|
||||||
try {
|
|
||||||
const feature = await this.featureLoader.get(payload.projectPath, payload.featureId);
|
|
||||||
if (feature?.title) {
|
|
||||||
featureName = feature.title;
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`Failed to load feature ${payload.featureId} for event hook:`, error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const isErrorTrigger = trigger === 'feature_error';
|
|
||||||
const context: HookContext = {
|
|
||||||
featureId: payload.featureId,
|
|
||||||
featureName: featureName || payload.featureName,
|
|
||||||
projectPath: payload.projectPath,
|
|
||||||
projectName: this.extractProjectName(payload.projectPath),
|
|
||||||
error: isErrorTrigger ? payload.message : undefined,
|
|
||||||
errorType: undefined,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
eventType: trigger,
|
|
||||||
};
|
|
||||||
|
|
||||||
await this.executeHooksForTrigger(trigger, context, { passes });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle feature:created events and trigger matching hooks
|
* Handle feature:created events and trigger matching hooks
|
||||||
*/
|
*/
|
||||||
@@ -322,74 +201,6 @@ export class EventHookService {
|
|||||||
await this.executeHooksForTrigger('feature_created', context);
|
await this.executeHooksForTrigger('feature_created', context);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle feature_status_changed events for non-auto-mode feature completion.
|
|
||||||
*
|
|
||||||
* Auto-mode features already emit auto_mode_feature_complete which triggers hooks.
|
|
||||||
* This handler catches manual (non-auto-mode) feature completions by detecting
|
|
||||||
* status transitions to completion states (verified, waiting_approval).
|
|
||||||
*/
|
|
||||||
private async handleFeatureStatusChanged(payload: FeatureStatusChangedPayload): Promise<void> {
|
|
||||||
// Skip if this feature was already handled via auto_mode_feature_complete
|
|
||||||
if (this.recentlyHandledFeatures.has(payload.featureId)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let trigger: EventHookTrigger | null = null;
|
|
||||||
|
|
||||||
if (
|
|
||||||
payload.status === EventHookService.STATUS_VERIFIED ||
|
|
||||||
payload.status === EventHookService.STATUS_WAITING_APPROVAL
|
|
||||||
) {
|
|
||||||
trigger = 'feature_success';
|
|
||||||
} else {
|
|
||||||
// Only completion statuses trigger hooks from status changes
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Load feature name
|
|
||||||
let featureName: string | undefined = undefined;
|
|
||||||
if (this.featureLoader) {
|
|
||||||
try {
|
|
||||||
const feature = await this.featureLoader.get(payload.projectPath, payload.featureId);
|
|
||||||
if (feature?.title) {
|
|
||||||
featureName = feature.title;
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`Failed to load feature ${payload.featureId} for status change hook:`, error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const context: HookContext = {
|
|
||||||
featureId: payload.featureId,
|
|
||||||
featureName,
|
|
||||||
projectPath: payload.projectPath,
|
|
||||||
projectName: this.extractProjectName(payload.projectPath),
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
eventType: trigger,
|
|
||||||
};
|
|
||||||
|
|
||||||
await this.executeHooksForTrigger(trigger, context, { passes: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Mark a feature as recently handled to prevent double-firing hooks.
|
|
||||||
* Entries are cleaned up after 30 seconds.
|
|
||||||
*/
|
|
||||||
private markFeatureHandled(featureId: string): void {
|
|
||||||
// Cancel any existing timer for this feature before setting a new one
|
|
||||||
const existing = this.recentlyHandledTimers.get(featureId);
|
|
||||||
if (existing !== undefined) {
|
|
||||||
clearTimeout(existing);
|
|
||||||
}
|
|
||||||
this.recentlyHandledFeatures.add(featureId);
|
|
||||||
const timerId = setTimeout(() => {
|
|
||||||
this.recentlyHandledFeatures.delete(featureId);
|
|
||||||
this.recentlyHandledTimers.delete(featureId);
|
|
||||||
}, 30000);
|
|
||||||
this.recentlyHandledTimers.set(featureId, timerId);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Execute all enabled hooks matching the given trigger and store event to history
|
* Execute all enabled hooks matching the given trigger and store event to history
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ import * as secureFs from '../lib/secure-fs.js';
|
|||||||
import {
|
import {
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getUseClaudeCodeSystemPromptSetting,
|
|
||||||
filterClaudeMdFromContext,
|
filterClaudeMdFromContext,
|
||||||
} from '../lib/settings-helpers.js';
|
} from '../lib/settings-helpers.js';
|
||||||
import { validateWorkingDirectory } from '../lib/sdk-options.js';
|
import { validateWorkingDirectory } from '../lib/sdk-options.js';
|
||||||
@@ -60,12 +59,6 @@ import type {
|
|||||||
|
|
||||||
const logger = createLogger('ExecutionService');
|
const logger = createLogger('ExecutionService');
|
||||||
|
|
||||||
/** Marker written by agent-executor for each tool invocation. */
|
|
||||||
const TOOL_USE_MARKER = '🔧 Tool:';
|
|
||||||
|
|
||||||
/** Minimum trimmed output length to consider agent work meaningful. */
|
|
||||||
const MIN_MEANINGFUL_OUTPUT_LENGTH = 200;
|
|
||||||
|
|
||||||
export class ExecutionService {
|
export class ExecutionService {
|
||||||
constructor(
|
constructor(
|
||||||
private eventBus: TypedEventBus,
|
private eventBus: TypedEventBus,
|
||||||
@@ -248,11 +241,6 @@ ${feature.spec}
|
|||||||
this.settingsService,
|
this.settingsService,
|
||||||
'[ExecutionService]'
|
'[ExecutionService]'
|
||||||
);
|
);
|
||||||
const useClaudeCodeSystemPrompt = await getUseClaudeCodeSystemPromptSetting(
|
|
||||||
projectPath,
|
|
||||||
this.settingsService,
|
|
||||||
'[ExecutionService]'
|
|
||||||
);
|
|
||||||
const prompts = await getPromptCustomization(this.settingsService, '[ExecutionService]');
|
const prompts = await getPromptCustomization(this.settingsService, '[ExecutionService]');
|
||||||
let prompt: string;
|
let prompt: string;
|
||||||
const contextResult = await this.loadContextFilesFn({
|
const contextResult = await this.loadContextFilesFn({
|
||||||
@@ -301,9 +289,7 @@ ${feature.spec}
|
|||||||
requirePlanApproval: feature.requirePlanApproval,
|
requirePlanApproval: feature.requirePlanApproval,
|
||||||
systemPrompt: combinedSystemPrompt || undefined,
|
systemPrompt: combinedSystemPrompt || undefined,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
thinkingLevel: feature.thinkingLevel,
|
thinkingLevel: feature.thinkingLevel,
|
||||||
reasoningEffort: feature.reasoningEffort,
|
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -367,9 +353,7 @@ Please continue from where you left off and complete all remaining tasks. Use th
|
|||||||
requirePlanApproval: false,
|
requirePlanApproval: false,
|
||||||
systemPrompt: combinedSystemPrompt || undefined,
|
systemPrompt: combinedSystemPrompt || undefined,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
thinkingLevel: feature.thinkingLevel,
|
thinkingLevel: feature.thinkingLevel,
|
||||||
reasoningEffort: feature.reasoningEffort,
|
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -404,7 +388,6 @@ Please continue from where you left off and complete all remaining tasks. Use th
|
|||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
abortController,
|
abortController,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
testAttempts: 0,
|
testAttempts: 0,
|
||||||
maxTestAttempts: 5,
|
maxTestAttempts: 5,
|
||||||
});
|
});
|
||||||
@@ -415,41 +398,7 @@ Please continue from where you left off and complete all remaining tasks. Use th
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read agent output before determining final status.
|
const finalStatus = feature.skipTests ? 'waiting_approval' : 'verified';
|
||||||
// CLI-based providers (Cursor, Codex, etc.) may exit quickly without doing
|
|
||||||
// meaningful work. Check output to avoid prematurely marking as 'verified'.
|
|
||||||
const outputPath = path.join(getFeatureDir(projectPath, featureId), 'agent-output.md');
|
|
||||||
let agentOutput = '';
|
|
||||||
try {
|
|
||||||
agentOutput = (await secureFs.readFile(outputPath, 'utf-8')) as string;
|
|
||||||
} catch {
|
|
||||||
/* */
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine if the agent did meaningful work by checking for tool usage
|
|
||||||
// indicators in the output. The agent executor writes "🔧 Tool:" markers
|
|
||||||
// each time a tool is invoked. No tool usage suggests the CLI exited
|
|
||||||
// without performing implementation work.
|
|
||||||
const hasToolUsage = agentOutput.includes(TOOL_USE_MARKER);
|
|
||||||
const isOutputTooShort = agentOutput.trim().length < MIN_MEANINGFUL_OUTPUT_LENGTH;
|
|
||||||
const agentDidWork = hasToolUsage && !isOutputTooShort;
|
|
||||||
|
|
||||||
let finalStatus: 'verified' | 'waiting_approval';
|
|
||||||
if (feature.skipTests) {
|
|
||||||
finalStatus = 'waiting_approval';
|
|
||||||
} else if (!agentDidWork) {
|
|
||||||
// Agent didn't produce meaningful output (e.g., CLI exited quickly).
|
|
||||||
// Route to waiting_approval so the user can review and re-run.
|
|
||||||
finalStatus = 'waiting_approval';
|
|
||||||
logger.warn(
|
|
||||||
`[executeFeature] Feature ${featureId}: agent produced insufficient output ` +
|
|
||||||
`(${agentOutput.trim().length}/${MIN_MEANINGFUL_OUTPUT_LENGTH} chars, toolUsage=${hasToolUsage}). ` +
|
|
||||||
`Setting status to waiting_approval instead of verified.`
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
finalStatus = 'verified';
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
||||||
this.recordSuccessFn();
|
this.recordSuccessFn();
|
||||||
|
|
||||||
@@ -461,6 +410,13 @@ Please continue from where you left off and complete all remaining tasks. Use th
|
|||||||
const hasIncompleteTasks = totalTasks > 0 && completedTasks < totalTasks;
|
const hasIncompleteTasks = totalTasks > 0 && completedTasks < totalTasks;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
const outputPath = path.join(getFeatureDir(projectPath, featureId), 'agent-output.md');
|
||||||
|
let agentOutput = '';
|
||||||
|
try {
|
||||||
|
agentOutput = (await secureFs.readFile(outputPath, 'utf-8')) as string;
|
||||||
|
} catch {
|
||||||
|
/* */
|
||||||
|
}
|
||||||
if (agentOutput) {
|
if (agentOutput) {
|
||||||
const summary = extractSummary(agentOutput);
|
const summary = extractSummary(agentOutput);
|
||||||
if (summary) await this.saveFeatureSummaryFn(projectPath, featureId, summary);
|
if (summary) await this.saveFeatureSummaryFn(projectPath, featureId, summary);
|
||||||
@@ -485,34 +441,28 @@ Please continue from where you left off and complete all remaining tasks. Use th
|
|||||||
if (hasIncompleteTasks)
|
if (hasIncompleteTasks)
|
||||||
completionMessage += ` (${completedTasks}/${totalTasks} tasks completed)`;
|
completionMessage += ` (${completedTasks}/${totalTasks} tasks completed)`;
|
||||||
|
|
||||||
if (isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: completionMessage,
|
message: completionMessage,
|
||||||
projectPath,
|
projectPath,
|
||||||
model: tempRunningFeature.model,
|
model: tempRunningFeature.model,
|
||||||
provider: tempRunningFeature.provider,
|
provider: tempRunningFeature.provider,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorInfo = classifyError(error);
|
const errorInfo = classifyError(error);
|
||||||
if (errorInfo.isAbort) {
|
if (errorInfo.isAbort) {
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, 'interrupted');
|
await this.updateFeatureStatusFn(projectPath, featureId, 'interrupted');
|
||||||
if (isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature?.title,
|
featureName: feature?.title,
|
||||||
branchName: feature?.branchName ?? null,
|
branchName: feature?.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: false,
|
passes: false,
|
||||||
message: 'Feature stopped by user',
|
message: 'Feature stopped by user',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
logger.error(`Feature ${featureId} failed:`, error);
|
logger.error(`Feature ${featureId} failed:`, error);
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, 'backlog');
|
await this.updateFeatureStatusFn(projectPath, featureId, 'backlog');
|
||||||
@@ -537,22 +487,6 @@ Please continue from where you left off and complete all remaining tasks. Use th
|
|||||||
async stopFeature(featureId: string): Promise<boolean> {
|
async stopFeature(featureId: string): Promise<boolean> {
|
||||||
const running = this.concurrencyManager.getRunningFeature(featureId);
|
const running = this.concurrencyManager.getRunningFeature(featureId);
|
||||||
if (!running) return false;
|
if (!running) return false;
|
||||||
const { projectPath } = running;
|
|
||||||
|
|
||||||
// Immediately update feature status to 'interrupted' so the UI reflects
|
|
||||||
// the stop right away. CLI-based providers can take seconds to terminate
|
|
||||||
// their subprocess after the abort signal fires, leaving the feature stuck
|
|
||||||
// in 'in_progress' on the Kanban board until the executeFeature catch block
|
|
||||||
// eventually runs. By persisting and emitting the status change here, the
|
|
||||||
// board updates immediately regardless of how long the subprocess takes to stop.
|
|
||||||
try {
|
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, 'interrupted');
|
|
||||||
} catch (err) {
|
|
||||||
// Non-fatal: the abort still proceeds and executeFeature's catch block
|
|
||||||
// will attempt the same update once the subprocess terminates.
|
|
||||||
logger.warn(`stopFeature: failed to immediately update status for ${featureId}:`, err);
|
|
||||||
}
|
|
||||||
|
|
||||||
running.abortController.abort();
|
running.abortController.abort();
|
||||||
this.releaseRunningFeature(featureId, { force: true });
|
this.releaseRunningFeature(featureId, { force: true });
|
||||||
return true;
|
return true;
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
* allowing the service to delegate to other services without circular dependencies.
|
* allowing the service to delegate to other services without circular dependencies.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Feature, PlanningMode, ThinkingLevel, ReasoningEffort } from '@automaker/types';
|
import type { Feature, PlanningMode, ThinkingLevel } from '@automaker/types';
|
||||||
import type { loadContextFiles } from '@automaker/utils';
|
import type { loadContextFiles } from '@automaker/utils';
|
||||||
import type { PipelineContext } from './pipeline-orchestrator.js';
|
import type { PipelineContext } from './pipeline-orchestrator.js';
|
||||||
|
|
||||||
@@ -31,9 +31,7 @@ export type RunAgentFn = (
|
|||||||
previousContent?: string;
|
previousContent?: string;
|
||||||
systemPrompt?: string;
|
systemPrompt?: string;
|
||||||
autoLoadClaudeMd?: boolean;
|
autoLoadClaudeMd?: boolean;
|
||||||
useClaudeCodeSystemPrompt?: boolean;
|
|
||||||
thinkingLevel?: ThinkingLevel;
|
thinkingLevel?: ThinkingLevel;
|
||||||
reasoningEffort?: ReasoningEffort;
|
|
||||||
branchName?: string | null;
|
branchName?: string | null;
|
||||||
}
|
}
|
||||||
) => Promise<void>;
|
) => Promise<void>;
|
||||||
|
|||||||
@@ -1,103 +0,0 @@
|
|||||||
/**
|
|
||||||
* GitHub PR Comment Service
|
|
||||||
*
|
|
||||||
* Domain logic for resolving/unresolving PR review threads via the
|
|
||||||
* GitHub GraphQL API. Extracted from the route handler so the route
|
|
||||||
* only deals with request/response plumbing.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { spawn } from 'child_process';
|
|
||||||
import { execEnv } from '../lib/exec-utils.js';
|
|
||||||
|
|
||||||
/** Timeout for GitHub GraphQL API requests in milliseconds */
|
|
||||||
const GITHUB_API_TIMEOUT_MS = 30000;
|
|
||||||
|
|
||||||
interface GraphQLMutationResponse {
|
|
||||||
data?: {
|
|
||||||
resolveReviewThread?: {
|
|
||||||
thread?: { isResolved: boolean; id: string } | null;
|
|
||||||
} | null;
|
|
||||||
unresolveReviewThread?: {
|
|
||||||
thread?: { isResolved: boolean; id: string } | null;
|
|
||||||
} | null;
|
|
||||||
};
|
|
||||||
errors?: Array<{ message: string }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute a GraphQL mutation to resolve or unresolve a review thread.
|
|
||||||
*/
|
|
||||||
export async function executeReviewThreadMutation(
|
|
||||||
projectPath: string,
|
|
||||||
threadId: string,
|
|
||||||
resolve: boolean
|
|
||||||
): Promise<{ isResolved: boolean }> {
|
|
||||||
const mutationName = resolve ? 'resolveReviewThread' : 'unresolveReviewThread';
|
|
||||||
|
|
||||||
const mutation = `
|
|
||||||
mutation ${resolve ? 'ResolveThread' : 'UnresolveThread'}($threadId: ID!) {
|
|
||||||
${mutationName}(input: { threadId: $threadId }) {
|
|
||||||
thread {
|
|
||||||
id
|
|
||||||
isResolved
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}`;
|
|
||||||
|
|
||||||
const variables = { threadId };
|
|
||||||
const requestBody = JSON.stringify({ query: mutation, variables });
|
|
||||||
|
|
||||||
// Declare timeoutId before registering the error handler to avoid TDZ confusion
|
|
||||||
let timeoutId: NodeJS.Timeout | undefined;
|
|
||||||
|
|
||||||
const response = await new Promise<GraphQLMutationResponse>((res, rej) => {
|
|
||||||
const gh = spawn('gh', ['api', 'graphql', '--input', '-'], {
|
|
||||||
cwd: projectPath,
|
|
||||||
env: execEnv,
|
|
||||||
});
|
|
||||||
|
|
||||||
gh.on('error', (err) => {
|
|
||||||
clearTimeout(timeoutId);
|
|
||||||
rej(err);
|
|
||||||
});
|
|
||||||
|
|
||||||
timeoutId = setTimeout(() => {
|
|
||||||
gh.kill();
|
|
||||||
rej(new Error('GitHub GraphQL API request timed out'));
|
|
||||||
}, GITHUB_API_TIMEOUT_MS);
|
|
||||||
|
|
||||||
let stdout = '';
|
|
||||||
let stderr = '';
|
|
||||||
gh.stdout.on('data', (data: Buffer) => (stdout += data.toString()));
|
|
||||||
gh.stderr.on('data', (data: Buffer) => (stderr += data.toString()));
|
|
||||||
|
|
||||||
gh.on('close', (code) => {
|
|
||||||
clearTimeout(timeoutId);
|
|
||||||
if (code !== 0) {
|
|
||||||
return rej(new Error(`gh process exited with code ${code}: ${stderr}`));
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
res(JSON.parse(stdout));
|
|
||||||
} catch (e) {
|
|
||||||
rej(e);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
gh.stdin.write(requestBody);
|
|
||||||
gh.stdin.end();
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.errors && response.errors.length > 0) {
|
|
||||||
throw new Error(response.errors[0].message);
|
|
||||||
}
|
|
||||||
|
|
||||||
const threadData = resolve
|
|
||||||
? response.data?.resolveReviewThread?.thread
|
|
||||||
: response.data?.unresolveReviewThread?.thread;
|
|
||||||
|
|
||||||
if (!threadData) {
|
|
||||||
throw new Error('No thread data returned from GitHub API');
|
|
||||||
}
|
|
||||||
|
|
||||||
return { isResolved: threadData.isResolved };
|
|
||||||
}
|
|
||||||
@@ -16,7 +16,6 @@ import * as secureFs from '../lib/secure-fs.js';
|
|||||||
import {
|
import {
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getUseClaudeCodeSystemPromptSetting,
|
|
||||||
filterClaudeMdFromContext,
|
filterClaudeMdFromContext,
|
||||||
} from '../lib/settings-helpers.js';
|
} from '../lib/settings-helpers.js';
|
||||||
import { validateWorkingDirectory } from '../lib/sdk-options.js';
|
import { validateWorkingDirectory } from '../lib/sdk-options.js';
|
||||||
@@ -71,16 +70,8 @@ export class PipelineOrchestrator {
|
|||||||
) {}
|
) {}
|
||||||
|
|
||||||
async executePipeline(ctx: PipelineContext): Promise<void> {
|
async executePipeline(ctx: PipelineContext): Promise<void> {
|
||||||
const {
|
const { projectPath, featureId, feature, steps, workDir, abortController, autoLoadClaudeMd } =
|
||||||
projectPath,
|
ctx;
|
||||||
featureId,
|
|
||||||
feature,
|
|
||||||
steps,
|
|
||||||
workDir,
|
|
||||||
abortController,
|
|
||||||
autoLoadClaudeMd,
|
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
} = ctx;
|
|
||||||
const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');
|
const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');
|
||||||
const contextResult = await this.loadContextFilesFn({
|
const contextResult = await this.loadContextFilesFn({
|
||||||
projectPath,
|
projectPath,
|
||||||
@@ -130,9 +121,7 @@ export class PipelineOrchestrator {
|
|||||||
previousContent: previousContext,
|
previousContent: previousContext,
|
||||||
systemPrompt: contextFilesPrompt || undefined,
|
systemPrompt: contextFilesPrompt || undefined,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
thinkingLevel: feature.thinkingLevel,
|
thinkingLevel: feature.thinkingLevel,
|
||||||
reasoningEffort: feature.reasoningEffort,
|
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
try {
|
try {
|
||||||
@@ -237,18 +226,14 @@ export class PipelineOrchestrator {
|
|||||||
logger.warn(`Step ${pipelineInfo.stepId} no longer exists, completing feature`);
|
logger.warn(`Step ${pipelineInfo.stepId} no longer exists, completing feature`);
|
||||||
const finalStatus = feature.skipTests ? 'waiting_approval' : 'verified';
|
const finalStatus = feature.skipTests ? 'waiting_approval' : 'verified';
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
||||||
const runningEntryForStep = this.concurrencyManager.getRunningFeature(featureId);
|
|
||||||
if (runningEntryForStep?.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: 'Pipeline step no longer exists',
|
message: 'Pipeline step no longer exists',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -287,18 +272,14 @@ export class PipelineOrchestrator {
|
|||||||
);
|
);
|
||||||
if (!pipelineService.isPipelineStatus(nextStatus)) {
|
if (!pipelineService.isPipelineStatus(nextStatus)) {
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, nextStatus);
|
await this.updateFeatureStatusFn(projectPath, featureId, nextStatus);
|
||||||
const runningEntryForExcluded = this.concurrencyManager.getRunningFeature(featureId);
|
|
||||||
if (runningEntryForExcluded?.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: 'Pipeline completed (remaining steps excluded)',
|
message: 'Pipeline completed (remaining steps excluded)',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const nextStepId = pipelineService.getStepIdFromStatus(nextStatus);
|
const nextStepId = pipelineService.getStepIdFromStatus(nextStatus);
|
||||||
@@ -313,18 +294,14 @@ export class PipelineOrchestrator {
|
|||||||
if (stepsToExecute.length === 0) {
|
if (stepsToExecute.length === 0) {
|
||||||
const finalStatus = feature.skipTests ? 'waiting_approval' : 'verified';
|
const finalStatus = feature.skipTests ? 'waiting_approval' : 'verified';
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
||||||
const runningEntryForAllExcluded = this.concurrencyManager.getRunningFeature(featureId);
|
|
||||||
if (runningEntryForAllExcluded?.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: 'Pipeline completed (all steps excluded)',
|
message: 'Pipeline completed (all steps excluded)',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -368,11 +345,6 @@ export class PipelineOrchestrator {
|
|||||||
this.settingsService,
|
this.settingsService,
|
||||||
'[AutoMode]'
|
'[AutoMode]'
|
||||||
);
|
);
|
||||||
const useClaudeCodeSystemPrompt = await getUseClaudeCodeSystemPromptSetting(
|
|
||||||
projectPath,
|
|
||||||
this.settingsService,
|
|
||||||
'[AutoMode]'
|
|
||||||
);
|
|
||||||
const context: PipelineContext = {
|
const context: PipelineContext = {
|
||||||
projectPath,
|
projectPath,
|
||||||
featureId,
|
featureId,
|
||||||
@@ -383,7 +355,6 @@ export class PipelineOrchestrator {
|
|||||||
branchName: branchName ?? null,
|
branchName: branchName ?? null,
|
||||||
abortController,
|
abortController,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
useClaudeCodeSystemPrompt,
|
|
||||||
testAttempts: 0,
|
testAttempts: 0,
|
||||||
maxTestAttempts: 5,
|
maxTestAttempts: 5,
|
||||||
};
|
};
|
||||||
@@ -399,31 +370,25 @@ export class PipelineOrchestrator {
|
|||||||
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
await this.updateFeatureStatusFn(projectPath, featureId, finalStatus);
|
||||||
}
|
}
|
||||||
logger.info(`Pipeline resume completed for feature ${featureId}`);
|
logger.info(`Pipeline resume completed for feature ${featureId}`);
|
||||||
if (runningEntry.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: 'Pipeline resumed successfully',
|
message: 'Pipeline resumed successfully',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorInfo = classifyError(error);
|
const errorInfo = classifyError(error);
|
||||||
if (errorInfo.isAbort) {
|
if (errorInfo.isAbort) {
|
||||||
if (runningEntry.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName: feature.branchName ?? null,
|
branchName: feature.branchName ?? null,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: false,
|
passes: false,
|
||||||
message: 'Pipeline stopped by user',
|
message: 'Pipeline stopped by user',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
logger.error(`Pipeline resume failed for ${featureId}:`, error);
|
logger.error(`Pipeline resume failed for ${featureId}:`, error);
|
||||||
await this.updateFeatureStatusFn(projectPath, featureId, 'backlog');
|
await this.updateFeatureStatusFn(projectPath, featureId, 'backlog');
|
||||||
@@ -484,14 +449,7 @@ export class PipelineOrchestrator {
|
|||||||
projectPath,
|
projectPath,
|
||||||
undefined,
|
undefined,
|
||||||
undefined,
|
undefined,
|
||||||
{
|
{ projectPath, planningMode: 'skip', requirePlanApproval: false }
|
||||||
projectPath,
|
|
||||||
planningMode: 'skip',
|
|
||||||
requirePlanApproval: false,
|
|
||||||
useClaudeCodeSystemPrompt: context.useClaudeCodeSystemPrompt,
|
|
||||||
autoLoadClaudeMd: context.autoLoadClaudeMd,
|
|
||||||
reasoningEffort: context.feature.reasoningEffort,
|
|
||||||
}
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -579,18 +537,14 @@ export class PipelineOrchestrator {
|
|||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`Auto-merge successful for feature ${featureId}`);
|
logger.info(`Auto-merge successful for feature ${featureId}`);
|
||||||
const runningEntryForMerge = this.concurrencyManager.getRunningFeature(featureId);
|
|
||||||
if (runningEntryForMerge?.isAutoMode) {
|
|
||||||
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
this.eventBus.emitAutoModeEvent('auto_mode_feature_complete', {
|
||||||
featureId,
|
featureId,
|
||||||
featureName: feature.title,
|
featureName: feature.title,
|
||||||
branchName,
|
branchName,
|
||||||
executionMode: 'auto',
|
|
||||||
passes: true,
|
passes: true,
|
||||||
message: 'Pipeline completed and merged',
|
message: 'Pipeline completed and merged',
|
||||||
projectPath,
|
projectPath,
|
||||||
});
|
});
|
||||||
}
|
|
||||||
return { success: true };
|
return { success: true };
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Merge failed for ${featureId}:`, error);
|
logger.error(`Merge failed for ${featureId}:`, error);
|
||||||
@@ -626,7 +580,7 @@ export class PipelineOrchestrator {
|
|||||||
}
|
}
|
||||||
// Only capture assertion details when they appear in failure context
|
// Only capture assertion details when they appear in failure context
|
||||||
// or match explicit assertion error / expect patterns
|
// or match explicit assertion error / expect patterns
|
||||||
if (trimmed.includes('AssertionError')) {
|
if (trimmed.includes('AssertionError') || trimmed.includes('AssertionError')) {
|
||||||
failedTests.push(trimmed);
|
failedTests.push(trimmed);
|
||||||
} else if (
|
} else if (
|
||||||
inFailureContext &&
|
inFailureContext &&
|
||||||
|
|||||||
@@ -14,7 +14,6 @@ export interface PipelineContext {
|
|||||||
branchName: string | null;
|
branchName: string | null;
|
||||||
abortController: AbortController;
|
abortController: AbortController;
|
||||||
autoLoadClaudeMd: boolean;
|
autoLoadClaudeMd: boolean;
|
||||||
useClaudeCodeSystemPrompt?: boolean;
|
|
||||||
testAttempts: number;
|
testAttempts: number;
|
||||||
maxTestAttempts: number;
|
maxTestAttempts: number;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,431 +0,0 @@
|
|||||||
/**
|
|
||||||
* PR Review Comments Service
|
|
||||||
*
|
|
||||||
* Domain logic for fetching PR review comments, enriching them with
|
|
||||||
* resolved-thread status, and sorting. Extracted from the route handler
|
|
||||||
* so the route only deals with request/response plumbing.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { spawn, execFile } from 'child_process';
|
|
||||||
import { promisify } from 'util';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { execEnv, logError } from '../lib/exec-utils.js';
|
|
||||||
|
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
|
|
||||||
// ── Public types (re-exported for callers) ──
|
|
||||||
|
|
||||||
export interface PRReviewComment {
|
|
||||||
id: string;
|
|
||||||
author: string;
|
|
||||||
avatarUrl?: string;
|
|
||||||
body: string;
|
|
||||||
path?: string;
|
|
||||||
line?: number;
|
|
||||||
createdAt: string;
|
|
||||||
updatedAt?: string;
|
|
||||||
isReviewComment: boolean;
|
|
||||||
/** Whether this is an outdated review comment (code has changed since) */
|
|
||||||
isOutdated?: boolean;
|
|
||||||
/** Whether the review thread containing this comment has been resolved */
|
|
||||||
isResolved?: boolean;
|
|
||||||
/** The GraphQL node ID of the review thread (used for resolve/unresolve mutations) */
|
|
||||||
threadId?: string;
|
|
||||||
/** The diff hunk context for the comment */
|
|
||||||
diffHunk?: string;
|
|
||||||
/** The side of the diff (LEFT or RIGHT) */
|
|
||||||
side?: string;
|
|
||||||
/** The commit ID the comment was made on */
|
|
||||||
commitId?: string;
|
|
||||||
/** Whether the comment author is a bot/app account */
|
|
||||||
isBot?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ListPRReviewCommentsResult {
|
|
||||||
success: boolean;
|
|
||||||
comments?: PRReviewComment[];
|
|
||||||
totalCount?: number;
|
|
||||||
error?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── Internal types ──
|
|
||||||
|
|
||||||
/** Timeout for GitHub GraphQL API requests in milliseconds */
|
|
||||||
const GITHUB_API_TIMEOUT_MS = 30000;
|
|
||||||
|
|
||||||
/** Maximum number of pagination pages to prevent infinite loops */
|
|
||||||
const MAX_PAGINATION_PAGES = 20;
|
|
||||||
|
|
||||||
interface GraphQLReviewThreadComment {
|
|
||||||
databaseId: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface GraphQLReviewThread {
|
|
||||||
id: string;
|
|
||||||
isResolved: boolean;
|
|
||||||
comments: {
|
|
||||||
pageInfo?: {
|
|
||||||
hasNextPage: boolean;
|
|
||||||
endCursor?: string | null;
|
|
||||||
};
|
|
||||||
nodes: GraphQLReviewThreadComment[];
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
interface GraphQLResponse {
|
|
||||||
data?: {
|
|
||||||
repository?: {
|
|
||||||
pullRequest?: {
|
|
||||||
reviewThreads?: {
|
|
||||||
nodes: GraphQLReviewThread[];
|
|
||||||
pageInfo?: {
|
|
||||||
hasNextPage: boolean;
|
|
||||||
endCursor?: string | null;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
} | null;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
errors?: Array<{ message: string }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ReviewThreadInfo {
|
|
||||||
isResolved: boolean;
|
|
||||||
threadId: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── Logger ──
|
|
||||||
|
|
||||||
const logger = createLogger('PRReviewCommentsService');
|
|
||||||
|
|
||||||
// ── Service functions ──
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute a GraphQL query via the `gh` CLI and return the parsed response.
|
|
||||||
*/
|
|
||||||
async function executeGraphQL(projectPath: string, requestBody: string): Promise<GraphQLResponse> {
|
|
||||||
let timeoutId: NodeJS.Timeout | undefined;
|
|
||||||
|
|
||||||
const response = await new Promise<GraphQLResponse>((resolve, reject) => {
|
|
||||||
const gh = spawn('gh', ['api', 'graphql', '--input', '-'], {
|
|
||||||
cwd: projectPath,
|
|
||||||
env: execEnv,
|
|
||||||
});
|
|
||||||
|
|
||||||
gh.on('error', (err) => {
|
|
||||||
clearTimeout(timeoutId);
|
|
||||||
reject(err);
|
|
||||||
});
|
|
||||||
|
|
||||||
timeoutId = setTimeout(() => {
|
|
||||||
gh.kill();
|
|
||||||
reject(new Error('GitHub GraphQL API request timed out'));
|
|
||||||
}, GITHUB_API_TIMEOUT_MS);
|
|
||||||
|
|
||||||
let stdout = '';
|
|
||||||
let stderr = '';
|
|
||||||
gh.stdout.on('data', (data: Buffer) => (stdout += data.toString()));
|
|
||||||
gh.stderr.on('data', (data: Buffer) => (stderr += data.toString()));
|
|
||||||
|
|
||||||
gh.on('close', (code) => {
|
|
||||||
clearTimeout(timeoutId);
|
|
||||||
if (code !== 0) {
|
|
||||||
return reject(new Error(`gh process exited with code ${code}: ${stderr}`));
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
resolve(JSON.parse(stdout));
|
|
||||||
} catch (e) {
|
|
||||||
reject(e);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
gh.stdin.on('error', () => {
|
|
||||||
// Ignore stdin errors (e.g. when the child process is killed)
|
|
||||||
});
|
|
||||||
gh.stdin.write(requestBody);
|
|
||||||
gh.stdin.end();
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.errors && response.errors.length > 0) {
|
|
||||||
throw new Error(response.errors[0].message);
|
|
||||||
}
|
|
||||||
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch review thread resolved status and thread IDs using GitHub GraphQL API.
|
|
||||||
* Uses cursor-based pagination to handle PRs with more than 100 review threads.
|
|
||||||
* Returns a map of comment ID (string) -> { isResolved, threadId }.
|
|
||||||
*/
|
|
||||||
export async function fetchReviewThreadResolvedStatus(
|
|
||||||
projectPath: string,
|
|
||||||
owner: string,
|
|
||||||
repo: string,
|
|
||||||
prNumber: number
|
|
||||||
): Promise<Map<string, ReviewThreadInfo>> {
|
|
||||||
const resolvedMap = new Map<string, ReviewThreadInfo>();
|
|
||||||
|
|
||||||
const query = `
|
|
||||||
query GetPRReviewThreads(
|
|
||||||
$owner: String!
|
|
||||||
$repo: String!
|
|
||||||
$prNumber: Int!
|
|
||||||
$cursor: String
|
|
||||||
) {
|
|
||||||
repository(owner: $owner, name: $repo) {
|
|
||||||
pullRequest(number: $prNumber) {
|
|
||||||
reviewThreads(first: 100, after: $cursor) {
|
|
||||||
pageInfo {
|
|
||||||
hasNextPage
|
|
||||||
endCursor
|
|
||||||
}
|
|
||||||
nodes {
|
|
||||||
id
|
|
||||||
isResolved
|
|
||||||
comments(first: 100) {
|
|
||||||
pageInfo {
|
|
||||||
hasNextPage
|
|
||||||
endCursor
|
|
||||||
}
|
|
||||||
nodes {
|
|
||||||
databaseId
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}`;
|
|
||||||
|
|
||||||
try {
|
|
||||||
let cursor: string | null = null;
|
|
||||||
let pageCount = 0;
|
|
||||||
|
|
||||||
do {
|
|
||||||
const variables = { owner, repo, prNumber, cursor };
|
|
||||||
const requestBody = JSON.stringify({ query, variables });
|
|
||||||
const response = await executeGraphQL(projectPath, requestBody);
|
|
||||||
|
|
||||||
const reviewThreads = response.data?.repository?.pullRequest?.reviewThreads;
|
|
||||||
const threads = reviewThreads?.nodes ?? [];
|
|
||||||
|
|
||||||
for (const thread of threads) {
|
|
||||||
if (thread.comments.pageInfo?.hasNextPage) {
|
|
||||||
logger.debug(
|
|
||||||
`Review thread ${thread.id} in PR #${prNumber} has >100 comments — ` +
|
|
||||||
'some comments may be missing resolved status'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const info: ReviewThreadInfo = { isResolved: thread.isResolved, threadId: thread.id };
|
|
||||||
for (const comment of thread.comments.nodes) {
|
|
||||||
resolvedMap.set(String(comment.databaseId), info);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const pageInfo = reviewThreads?.pageInfo;
|
|
||||||
if (pageInfo?.hasNextPage && pageInfo.endCursor) {
|
|
||||||
cursor = pageInfo.endCursor;
|
|
||||||
pageCount++;
|
|
||||||
logger.debug(
|
|
||||||
`Fetching next page of review threads for PR #${prNumber} (page ${pageCount + 1})`
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
cursor = null;
|
|
||||||
}
|
|
||||||
} while (cursor && pageCount < MAX_PAGINATION_PAGES);
|
|
||||||
|
|
||||||
if (pageCount >= MAX_PAGINATION_PAGES) {
|
|
||||||
logger.warn(
|
|
||||||
`PR #${prNumber} in ${owner}/${repo} has more than ${MAX_PAGINATION_PAGES * 100} review threads — ` +
|
|
||||||
'pagination limit reached. Some comments may be missing resolved status.'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// Log but don't fail — resolved status is best-effort
|
|
||||||
logError(error, 'Failed to fetch PR review thread resolved status');
|
|
||||||
}
|
|
||||||
|
|
||||||
return resolvedMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch all comments for a PR (regular, inline review, and review body comments)
|
|
||||||
*/
|
|
||||||
export async function fetchPRReviewComments(
|
|
||||||
projectPath: string,
|
|
||||||
owner: string,
|
|
||||||
repo: string,
|
|
||||||
prNumber: number
|
|
||||||
): Promise<PRReviewComment[]> {
|
|
||||||
const allComments: PRReviewComment[] = [];
|
|
||||||
|
|
||||||
// Fetch review thread resolved status in parallel with comment fetching
|
|
||||||
const resolvedStatusPromise = fetchReviewThreadResolvedStatus(projectPath, owner, repo, prNumber);
|
|
||||||
|
|
||||||
// 1. Fetch regular PR comments (issue-level comments)
|
|
||||||
// Uses the REST API issues endpoint instead of `gh pr view --json comments`
|
|
||||||
// because the latter uses GraphQL internally where bot/app authors can return
|
|
||||||
// null, causing bot comments to be silently dropped or display as "unknown".
|
|
||||||
try {
|
|
||||||
const issueCommentsEndpoint = `repos/${owner}/${repo}/issues/${prNumber}/comments`;
|
|
||||||
const { stdout: commentsOutput } = await execFileAsync(
|
|
||||||
'gh',
|
|
||||||
['api', issueCommentsEndpoint, '--paginate'],
|
|
||||||
{
|
|
||||||
cwd: projectPath,
|
|
||||||
env: execEnv,
|
|
||||||
maxBuffer: 1024 * 1024 * 10, // 10MB buffer for large PRs
|
|
||||||
timeout: GITHUB_API_TIMEOUT_MS,
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
const commentsData = JSON.parse(commentsOutput);
|
|
||||||
const regularComments = (Array.isArray(commentsData) ? commentsData : []).map(
|
|
||||||
(c: {
|
|
||||||
id: number;
|
|
||||||
user: { login: string; avatar_url?: string; type?: string } | null;
|
|
||||||
body: string;
|
|
||||||
created_at: string;
|
|
||||||
updated_at?: string;
|
|
||||||
performed_via_github_app?: { slug: string } | null;
|
|
||||||
}) => ({
|
|
||||||
id: String(c.id),
|
|
||||||
author: c.user?.login || c.performed_via_github_app?.slug || 'unknown',
|
|
||||||
avatarUrl: c.user?.avatar_url,
|
|
||||||
body: c.body,
|
|
||||||
createdAt: c.created_at,
|
|
||||||
updatedAt: c.updated_at,
|
|
||||||
isReviewComment: false,
|
|
||||||
isOutdated: false,
|
|
||||||
isBot: c.user?.type === 'Bot' || !!c.performed_via_github_app,
|
|
||||||
// Regular PR comments are not part of review threads, so not resolvable
|
|
||||||
isResolved: false,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
allComments.push(...regularComments);
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Failed to fetch regular PR comments');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Fetch inline review comments (code-level comments with file/line info)
|
|
||||||
try {
|
|
||||||
const reviewsEndpoint = `repos/${owner}/${repo}/pulls/${prNumber}/comments`;
|
|
||||||
const { stdout: reviewsOutput } = await execFileAsync(
|
|
||||||
'gh',
|
|
||||||
['api', reviewsEndpoint, '--paginate'],
|
|
||||||
{
|
|
||||||
cwd: projectPath,
|
|
||||||
env: execEnv,
|
|
||||||
maxBuffer: 1024 * 1024 * 10, // 10MB buffer for large PRs
|
|
||||||
timeout: GITHUB_API_TIMEOUT_MS,
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
const reviewsData = JSON.parse(reviewsOutput);
|
|
||||||
const reviewComments = (Array.isArray(reviewsData) ? reviewsData : []).map(
|
|
||||||
(c: {
|
|
||||||
id: number;
|
|
||||||
user: { login: string; avatar_url?: string; type?: string } | null;
|
|
||||||
body: string;
|
|
||||||
path: string;
|
|
||||||
line?: number;
|
|
||||||
original_line?: number;
|
|
||||||
created_at: string;
|
|
||||||
updated_at?: string;
|
|
||||||
diff_hunk?: string;
|
|
||||||
side?: string;
|
|
||||||
commit_id?: string;
|
|
||||||
position?: number | null;
|
|
||||||
performed_via_github_app?: { slug: string } | null;
|
|
||||||
}) => ({
|
|
||||||
id: String(c.id),
|
|
||||||
author: c.user?.login || c.performed_via_github_app?.slug || 'unknown',
|
|
||||||
avatarUrl: c.user?.avatar_url,
|
|
||||||
body: c.body,
|
|
||||||
path: c.path,
|
|
||||||
line: c.line ?? c.original_line,
|
|
||||||
createdAt: c.created_at,
|
|
||||||
updatedAt: c.updated_at,
|
|
||||||
isReviewComment: true,
|
|
||||||
// A review comment is "outdated" if position is null (code has changed)
|
|
||||||
isOutdated: c.position === null,
|
|
||||||
// isResolved will be filled in below from GraphQL data
|
|
||||||
isResolved: false,
|
|
||||||
isBot: c.user?.type === 'Bot' || !!c.performed_via_github_app,
|
|
||||||
diffHunk: c.diff_hunk,
|
|
||||||
side: c.side,
|
|
||||||
commitId: c.commit_id,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
allComments.push(...reviewComments);
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Failed to fetch inline review comments');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. Fetch review body comments (summary text submitted with each review)
|
|
||||||
// These are the top-level comments written when submitting a review
|
|
||||||
// (Approve, Request Changes, Comment). They are separate from inline code comments
|
|
||||||
// and issue-level comments. Only include reviews that have a non-empty body.
|
|
||||||
try {
|
|
||||||
const reviewsEndpoint = `repos/${owner}/${repo}/pulls/${prNumber}/reviews`;
|
|
||||||
const { stdout: reviewBodiesOutput } = await execFileAsync(
|
|
||||||
'gh',
|
|
||||||
['api', reviewsEndpoint, '--paginate'],
|
|
||||||
{
|
|
||||||
cwd: projectPath,
|
|
||||||
env: execEnv,
|
|
||||||
maxBuffer: 1024 * 1024 * 10, // 10MB buffer for large PRs
|
|
||||||
timeout: GITHUB_API_TIMEOUT_MS,
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
const reviewBodiesData = JSON.parse(reviewBodiesOutput);
|
|
||||||
const reviewBodyComments = (Array.isArray(reviewBodiesData) ? reviewBodiesData : [])
|
|
||||||
.filter(
|
|
||||||
(r: { body?: string; state?: string }) =>
|
|
||||||
r.body && r.body.trim().length > 0 && r.state !== 'PENDING'
|
|
||||||
)
|
|
||||||
.map(
|
|
||||||
(r: {
|
|
||||||
id: number;
|
|
||||||
user: { login: string; avatar_url?: string; type?: string } | null;
|
|
||||||
body: string;
|
|
||||||
state: string;
|
|
||||||
submitted_at: string;
|
|
||||||
performed_via_github_app?: { slug: string } | null;
|
|
||||||
}) => ({
|
|
||||||
id: `review-${r.id}`,
|
|
||||||
author: r.user?.login || r.performed_via_github_app?.slug || 'unknown',
|
|
||||||
avatarUrl: r.user?.avatar_url,
|
|
||||||
body: r.body,
|
|
||||||
createdAt: r.submitted_at,
|
|
||||||
isReviewComment: false,
|
|
||||||
isOutdated: false,
|
|
||||||
isResolved: false,
|
|
||||||
isBot: r.user?.type === 'Bot' || !!r.performed_via_github_app,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
allComments.push(...reviewBodyComments);
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Failed to fetch review body comments');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Wait for resolved status and apply to inline review comments
|
|
||||||
const resolvedMap = await resolvedStatusPromise;
|
|
||||||
for (const comment of allComments) {
|
|
||||||
if (comment.isReviewComment && resolvedMap.has(comment.id)) {
|
|
||||||
const info = resolvedMap.get(comment.id)!;
|
|
||||||
comment.isResolved = info.isResolved;
|
|
||||||
comment.threadId = info.threadId;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort by createdAt descending (newest first)
|
|
||||||
allComments.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
|
|
||||||
|
|
||||||
return allComments;
|
|
||||||
}
|
|
||||||
@@ -1,258 +0,0 @@
|
|||||||
/**
|
|
||||||
* PushService - Push git operations without HTTP
|
|
||||||
*
|
|
||||||
* Encapsulates the full git push workflow including:
|
|
||||||
* - Branch name and detached HEAD detection
|
|
||||||
* - Safe array-based command execution (no shell interpolation)
|
|
||||||
* - Divergent branch detection and auto-resolution via pull-then-retry
|
|
||||||
* - Structured result reporting
|
|
||||||
*
|
|
||||||
* Mirrors the pull-service.ts pattern for consistency.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { createLogger, getErrorMessage } from '@automaker/utils';
|
|
||||||
import { execGitCommand } from '@automaker/git-utils';
|
|
||||||
import { getCurrentBranch } from '../lib/git.js';
|
|
||||||
import { performPull } from './pull-service.js';
|
|
||||||
|
|
||||||
const logger = createLogger('PushService');
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Types
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
export interface PushOptions {
|
|
||||||
/** Remote name to push to (defaults to 'origin') */
|
|
||||||
remote?: string;
|
|
||||||
/** Force push */
|
|
||||||
force?: boolean;
|
|
||||||
/** When true and push is rejected due to divergence, pull then retry push */
|
|
||||||
autoResolve?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface PushResult {
|
|
||||||
success: boolean;
|
|
||||||
error?: string;
|
|
||||||
branch?: string;
|
|
||||||
pushed?: boolean;
|
|
||||||
/** Whether the push was initially rejected because the branches diverged */
|
|
||||||
diverged?: boolean;
|
|
||||||
/** Whether divergence was automatically resolved via pull-then-retry */
|
|
||||||
autoResolved?: boolean;
|
|
||||||
/** Whether the auto-resolve pull resulted in merge conflicts */
|
|
||||||
hasConflicts?: boolean;
|
|
||||||
/** Files with merge conflicts (only when hasConflicts is true) */
|
|
||||||
conflictFiles?: string[];
|
|
||||||
message?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Helper Functions
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Detect whether push error output indicates a diverged/non-fast-forward rejection.
|
|
||||||
*/
|
|
||||||
function isDivergenceError(errorOutput: string): boolean {
|
|
||||||
const lower = errorOutput.toLowerCase();
|
|
||||||
// Require specific divergence indicators rather than just 'rejected' alone,
|
|
||||||
// which could match pre-receive hook rejections or protected branch errors.
|
|
||||||
const hasNonFastForward = lower.includes('non-fast-forward');
|
|
||||||
const hasFetchFirst = lower.includes('fetch first');
|
|
||||||
const hasFailedToPush = lower.includes('failed to push some refs');
|
|
||||||
const hasRejected = lower.includes('rejected');
|
|
||||||
return hasNonFastForward || hasFetchFirst || (hasRejected && hasFailedToPush);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Main Service Function
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Perform a git push on the given worktree.
|
|
||||||
*
|
|
||||||
* The workflow:
|
|
||||||
* 1. Get current branch name (detect detached HEAD)
|
|
||||||
* 2. Attempt `git push <remote> <branch>` with safe array args
|
|
||||||
* 3. If push fails with divergence and autoResolve is true:
|
|
||||||
* a. Pull from the same remote (with stash support)
|
|
||||||
* b. If pull succeeds without conflicts, retry push
|
|
||||||
* 4. If push fails with "no upstream" error, retry with --set-upstream
|
|
||||||
* 5. Return structured result
|
|
||||||
*
|
|
||||||
* @param worktreePath - Path to the git worktree
|
|
||||||
* @param options - Push options (remote, force, autoResolve)
|
|
||||||
* @returns PushResult with detailed status information
|
|
||||||
*/
|
|
||||||
export async function performPush(
|
|
||||||
worktreePath: string,
|
|
||||||
options?: PushOptions
|
|
||||||
): Promise<PushResult> {
|
|
||||||
const targetRemote = options?.remote || 'origin';
|
|
||||||
const force = options?.force ?? false;
|
|
||||||
const autoResolve = options?.autoResolve ?? false;
|
|
||||||
|
|
||||||
// 1. Get current branch name
|
|
||||||
let branchName: string;
|
|
||||||
try {
|
|
||||||
branchName = await getCurrentBranch(worktreePath);
|
|
||||||
} catch (err) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Failed to get current branch: ${getErrorMessage(err)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Check for detached HEAD state
|
|
||||||
if (branchName === 'HEAD') {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: 'Cannot push in detached HEAD state. Please checkout a branch first.',
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. Build push args (no -u flag; upstream is set in the fallback path only when needed)
|
|
||||||
const pushArgs = ['push', targetRemote, branchName];
|
|
||||||
if (force) {
|
|
||||||
pushArgs.push('--force');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 4. Attempt push
|
|
||||||
try {
|
|
||||||
await execGitCommand(pushArgs, worktreePath);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: true,
|
|
||||||
message: `Successfully pushed ${branchName} to ${targetRemote}`,
|
|
||||||
};
|
|
||||||
} catch (pushError: unknown) {
|
|
||||||
const err = pushError as { stderr?: string; stdout?: string; message?: string };
|
|
||||||
const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`;
|
|
||||||
|
|
||||||
// 5. Check if the error is a divergence rejection
|
|
||||||
if (isDivergenceError(errorOutput)) {
|
|
||||||
if (!autoResolve) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
diverged: true,
|
|
||||||
error: `Push rejected: remote has changes not present locally. Use sync or pull first, or enable auto-resolve.`,
|
|
||||||
message: `Push to ${targetRemote} was rejected because the remote branch has diverged.`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// 6. Auto-resolve: pull then retry push
|
|
||||||
logger.info('Push rejected due to divergence, attempting auto-resolve via pull', {
|
|
||||||
worktreePath,
|
|
||||||
remote: targetRemote,
|
|
||||||
branch: branchName,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
const pullResult = await performPull(worktreePath, {
|
|
||||||
remote: targetRemote,
|
|
||||||
stashIfNeeded: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!pullResult.success) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
diverged: true,
|
|
||||||
autoResolved: false,
|
|
||||||
error: `Auto-resolve failed during pull: ${pullResult.error}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pullResult.hasConflicts) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
diverged: true,
|
|
||||||
autoResolved: false,
|
|
||||||
hasConflicts: true,
|
|
||||||
conflictFiles: pullResult.conflictFiles,
|
|
||||||
error:
|
|
||||||
'Auto-resolve pull resulted in merge conflicts. Resolve conflicts and push again.',
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// 7. Retry push after successful pull
|
|
||||||
try {
|
|
||||||
await execGitCommand(pushArgs, worktreePath);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: true,
|
|
||||||
diverged: true,
|
|
||||||
autoResolved: true,
|
|
||||||
message: `Push succeeded after auto-resolving divergence (pulled from ${targetRemote} first).`,
|
|
||||||
};
|
|
||||||
} catch (retryError: unknown) {
|
|
||||||
const retryErr = retryError as { stderr?: string; message?: string };
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
diverged: true,
|
|
||||||
autoResolved: false,
|
|
||||||
error: `Push failed after auto-resolve pull: ${retryErr.stderr || retryErr.message || 'Unknown error'}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} catch (pullError) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
diverged: true,
|
|
||||||
autoResolved: false,
|
|
||||||
error: `Auto-resolve pull failed: ${getErrorMessage(pullError)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 6b. Non-divergence error (e.g. no upstream configured) - retry with --set-upstream
|
|
||||||
const isNoUpstreamError =
|
|
||||||
errorOutput.toLowerCase().includes('no upstream') ||
|
|
||||||
errorOutput.toLowerCase().includes('has no upstream branch') ||
|
|
||||||
errorOutput.toLowerCase().includes('set-upstream');
|
|
||||||
if (isNoUpstreamError) {
|
|
||||||
try {
|
|
||||||
const setUpstreamArgs = ['push', '--set-upstream', targetRemote, branchName];
|
|
||||||
if (force) {
|
|
||||||
setUpstreamArgs.push('--force');
|
|
||||||
}
|
|
||||||
await execGitCommand(setUpstreamArgs, worktreePath);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: true,
|
|
||||||
message: `Successfully pushed ${branchName} to ${targetRemote} (set upstream)`,
|
|
||||||
};
|
|
||||||
} catch (upstreamError: unknown) {
|
|
||||||
const upstreamErr = upstreamError as { stderr?: string; message?: string };
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
error: upstreamErr.stderr || upstreamErr.message || getErrorMessage(pushError),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 6c. Other push error - return as-is
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: branchName,
|
|
||||||
pushed: false,
|
|
||||||
error: err.stderr || err.message || getErrorMessage(pushError),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -31,7 +31,6 @@ import type {
|
|||||||
WorktreeInfo,
|
WorktreeInfo,
|
||||||
PhaseModelConfig,
|
PhaseModelConfig,
|
||||||
PhaseModelEntry,
|
PhaseModelEntry,
|
||||||
FeatureTemplate,
|
|
||||||
ClaudeApiProfile,
|
ClaudeApiProfile,
|
||||||
ClaudeCompatibleProvider,
|
ClaudeCompatibleProvider,
|
||||||
ProviderModel,
|
ProviderModel,
|
||||||
@@ -41,7 +40,6 @@ import {
|
|||||||
DEFAULT_CREDENTIALS,
|
DEFAULT_CREDENTIALS,
|
||||||
DEFAULT_PROJECT_SETTINGS,
|
DEFAULT_PROJECT_SETTINGS,
|
||||||
DEFAULT_PHASE_MODELS,
|
DEFAULT_PHASE_MODELS,
|
||||||
DEFAULT_FEATURE_TEMPLATES,
|
|
||||||
SETTINGS_VERSION,
|
SETTINGS_VERSION,
|
||||||
CREDENTIALS_VERSION,
|
CREDENTIALS_VERSION,
|
||||||
PROJECT_SETTINGS_VERSION,
|
PROJECT_SETTINGS_VERSION,
|
||||||
@@ -141,11 +139,6 @@ export class SettingsService {
|
|||||||
// Migrate model IDs to canonical format
|
// Migrate model IDs to canonical format
|
||||||
const migratedModelSettings = this.migrateModelSettings(settings);
|
const migratedModelSettings = this.migrateModelSettings(settings);
|
||||||
|
|
||||||
// Merge built-in feature templates: ensure all built-in templates exist in user settings.
|
|
||||||
// User customizations (enabled/disabled state, order overrides) are preserved.
|
|
||||||
// New built-in templates added in code updates are injected for existing users.
|
|
||||||
const mergedFeatureTemplates = this.mergeBuiltInTemplates(settings.featureTemplates);
|
|
||||||
|
|
||||||
// Apply any missing defaults (for backwards compatibility)
|
// Apply any missing defaults (for backwards compatibility)
|
||||||
let result: GlobalSettings = {
|
let result: GlobalSettings = {
|
||||||
...DEFAULT_GLOBAL_SETTINGS,
|
...DEFAULT_GLOBAL_SETTINGS,
|
||||||
@@ -156,7 +149,6 @@ export class SettingsService {
|
|||||||
...settings.keyboardShortcuts,
|
...settings.keyboardShortcuts,
|
||||||
},
|
},
|
||||||
phaseModels: migratedPhaseModels,
|
phaseModels: migratedPhaseModels,
|
||||||
featureTemplates: mergedFeatureTemplates,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Version-based migrations
|
// Version-based migrations
|
||||||
@@ -258,32 +250,6 @@ export class SettingsService {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Merge built-in feature templates with user's stored templates.
|
|
||||||
*
|
|
||||||
* Ensures new built-in templates added in code updates are available to existing users
|
|
||||||
* without overwriting their customizations (e.g., enabled/disabled state, custom order).
|
|
||||||
* Built-in templates missing from stored settings are appended with their defaults.
|
|
||||||
*
|
|
||||||
* @param storedTemplates - Templates from user's settings file (may be undefined for new installs)
|
|
||||||
* @returns Merged template list with all built-in templates present
|
|
||||||
*/
|
|
||||||
private mergeBuiltInTemplates(storedTemplates: FeatureTemplate[] | undefined): FeatureTemplate[] {
|
|
||||||
if (!storedTemplates) {
|
|
||||||
return DEFAULT_FEATURE_TEMPLATES;
|
|
||||||
}
|
|
||||||
|
|
||||||
const storedIds = new Set(storedTemplates.map((t) => t.id));
|
|
||||||
const missingBuiltIns = DEFAULT_FEATURE_TEMPLATES.filter((t) => !storedIds.has(t.id));
|
|
||||||
|
|
||||||
if (missingBuiltIns.length === 0) {
|
|
||||||
return storedTemplates;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Append missing built-in templates after existing ones
|
|
||||||
return [...storedTemplates, ...missingBuiltIns];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Migrate legacy enhancementModel/validationModel fields to phaseModels structure
|
* Migrate legacy enhancementModel/validationModel fields to phaseModels structure
|
||||||
*
|
*
|
||||||
@@ -607,17 +573,6 @@ export class SettingsService {
|
|||||||
ignoreEmptyArrayOverwrite('claudeApiProfiles');
|
ignoreEmptyArrayOverwrite('claudeApiProfiles');
|
||||||
// Note: claudeCompatibleProviders intentionally NOT guarded - users should be able to delete all providers
|
// Note: claudeCompatibleProviders intentionally NOT guarded - users should be able to delete all providers
|
||||||
|
|
||||||
// Check for explicit permission to clear eventHooks (escape hatch for intentional clearing)
|
|
||||||
const allowEmptyEventHooks =
|
|
||||||
(sanitizedUpdates as Record<string, unknown>).__allowEmptyEventHooks === true;
|
|
||||||
// Remove the flag so it doesn't get persisted
|
|
||||||
delete (sanitizedUpdates as Record<string, unknown>).__allowEmptyEventHooks;
|
|
||||||
|
|
||||||
// Only guard eventHooks if explicit permission wasn't granted
|
|
||||||
if (!allowEmptyEventHooks) {
|
|
||||||
ignoreEmptyArrayOverwrite('eventHooks');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Empty object overwrite guard
|
// Empty object overwrite guard
|
||||||
const ignoreEmptyObjectOverwrite = <K extends keyof GlobalSettings>(key: K): void => {
|
const ignoreEmptyObjectOverwrite = <K extends keyof GlobalSettings>(key: K): void => {
|
||||||
const nextVal = sanitizedUpdates[key] as unknown;
|
const nextVal = sanitizedUpdates[key] as unknown;
|
||||||
|
|||||||
@@ -1,209 +0,0 @@
|
|||||||
/**
|
|
||||||
* SyncService - Pull then push in a single operation
|
|
||||||
*
|
|
||||||
* Composes performPull() and performPush() to synchronize a branch
|
|
||||||
* with its remote. Always uses stashIfNeeded for the pull step.
|
|
||||||
* If push fails with divergence after pull, retries once.
|
|
||||||
*
|
|
||||||
* Follows the same pattern as pull-service.ts and push-service.ts.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { createLogger, getErrorMessage } from '@automaker/utils';
|
|
||||||
import { performPull } from './pull-service.js';
|
|
||||||
import { performPush } from './push-service.js';
|
|
||||||
import type { PullResult } from './pull-service.js';
|
|
||||||
import type { PushResult } from './push-service.js';
|
|
||||||
|
|
||||||
const logger = createLogger('SyncService');
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Types
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
export interface SyncOptions {
|
|
||||||
/** Remote name (defaults to 'origin') */
|
|
||||||
remote?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SyncResult {
|
|
||||||
success: boolean;
|
|
||||||
error?: string;
|
|
||||||
branch?: string;
|
|
||||||
/** Whether the pull step was performed */
|
|
||||||
pulled?: boolean;
|
|
||||||
/** Whether the push step was performed */
|
|
||||||
pushed?: boolean;
|
|
||||||
/** Pull resulted in conflicts */
|
|
||||||
hasConflicts?: boolean;
|
|
||||||
/** Files with merge conflicts */
|
|
||||||
conflictFiles?: string[];
|
|
||||||
/** Source of conflicts ('pull' | 'stash') */
|
|
||||||
conflictSource?: 'pull' | 'stash';
|
|
||||||
/** Whether the pull was a fast-forward */
|
|
||||||
isFastForward?: boolean;
|
|
||||||
/** Whether the pull resulted in a merge commit */
|
|
||||||
isMerge?: boolean;
|
|
||||||
/** Whether push divergence was auto-resolved */
|
|
||||||
autoResolved?: boolean;
|
|
||||||
message?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Main Service Function
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Perform a sync operation (pull then push) on the given worktree.
|
|
||||||
*
|
|
||||||
* The workflow:
|
|
||||||
* 1. Pull from remote with stashIfNeeded: true
|
|
||||||
* 2. If pull has conflicts, stop and return conflict info
|
|
||||||
* 3. Push to remote
|
|
||||||
* 4. If push fails with divergence after pull, retry once
|
|
||||||
*
|
|
||||||
* @param worktreePath - Path to the git worktree
|
|
||||||
* @param options - Sync options (remote)
|
|
||||||
* @returns SyncResult with detailed status information
|
|
||||||
*/
|
|
||||||
export async function performSync(
|
|
||||||
worktreePath: string,
|
|
||||||
options?: SyncOptions
|
|
||||||
): Promise<SyncResult> {
|
|
||||||
const targetRemote = options?.remote || 'origin';
|
|
||||||
|
|
||||||
// 1. Pull from remote
|
|
||||||
logger.info('Sync: starting pull', { worktreePath, remote: targetRemote });
|
|
||||||
|
|
||||||
let pullResult: PullResult;
|
|
||||||
try {
|
|
||||||
pullResult = await performPull(worktreePath, {
|
|
||||||
remote: targetRemote,
|
|
||||||
stashIfNeeded: true,
|
|
||||||
});
|
|
||||||
} catch (pullError) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Sync pull failed: ${getErrorMessage(pullError)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!pullResult.success) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: pullResult.branch,
|
|
||||||
pulled: false,
|
|
||||||
pushed: false,
|
|
||||||
error: `Sync pull failed: ${pullResult.error}`,
|
|
||||||
hasConflicts: pullResult.hasConflicts,
|
|
||||||
conflictFiles: pullResult.conflictFiles,
|
|
||||||
conflictSource: pullResult.conflictSource,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. If pull had conflicts, stop and return conflict info
|
|
||||||
if (pullResult.hasConflicts) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: pullResult.branch,
|
|
||||||
pulled: true,
|
|
||||||
pushed: false,
|
|
||||||
hasConflicts: true,
|
|
||||||
conflictFiles: pullResult.conflictFiles,
|
|
||||||
conflictSource: pullResult.conflictSource,
|
|
||||||
isFastForward: pullResult.isFastForward,
|
|
||||||
isMerge: pullResult.isMerge,
|
|
||||||
error: 'Sync stopped: pull resulted in merge conflicts. Resolve conflicts and try again.',
|
|
||||||
message: pullResult.message,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. Push to remote
|
|
||||||
logger.info('Sync: pull succeeded, starting push', { worktreePath, remote: targetRemote });
|
|
||||||
|
|
||||||
let pushResult: PushResult;
|
|
||||||
try {
|
|
||||||
pushResult = await performPush(worktreePath, {
|
|
||||||
remote: targetRemote,
|
|
||||||
});
|
|
||||||
} catch (pushError) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: pullResult.branch,
|
|
||||||
pulled: true,
|
|
||||||
pushed: false,
|
|
||||||
isFastForward: pullResult.isFastForward,
|
|
||||||
isMerge: pullResult.isMerge,
|
|
||||||
error: `Sync push failed: ${getErrorMessage(pushError)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!pushResult.success) {
|
|
||||||
// 4. If push diverged after pull, retry once with autoResolve
|
|
||||||
if (pushResult.diverged) {
|
|
||||||
logger.info('Sync: push diverged after pull, retrying with autoResolve', {
|
|
||||||
worktreePath,
|
|
||||||
remote: targetRemote,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
const retryResult = await performPush(worktreePath, {
|
|
||||||
remote: targetRemote,
|
|
||||||
autoResolve: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (retryResult.success) {
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
branch: retryResult.branch,
|
|
||||||
pulled: true,
|
|
||||||
pushed: true,
|
|
||||||
autoResolved: true,
|
|
||||||
isFastForward: pullResult.isFastForward,
|
|
||||||
isMerge: pullResult.isMerge,
|
|
||||||
message: 'Sync completed (push required auto-resolve).',
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: retryResult.branch,
|
|
||||||
pulled: true,
|
|
||||||
pushed: false,
|
|
||||||
hasConflicts: retryResult.hasConflicts,
|
|
||||||
conflictFiles: retryResult.conflictFiles,
|
|
||||||
error: retryResult.error,
|
|
||||||
};
|
|
||||||
} catch (retryError) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: pullResult.branch,
|
|
||||||
pulled: true,
|
|
||||||
pushed: false,
|
|
||||||
error: `Sync push retry failed: ${getErrorMessage(retryError)}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
branch: pushResult.branch,
|
|
||||||
pulled: true,
|
|
||||||
pushed: false,
|
|
||||||
isFastForward: pullResult.isFastForward,
|
|
||||||
isMerge: pullResult.isMerge,
|
|
||||||
error: `Sync push failed: ${pushResult.error}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
branch: pushResult.branch,
|
|
||||||
pulled: pullResult.pulled ?? true,
|
|
||||||
pushed: true,
|
|
||||||
isFastForward: pullResult.isFastForward,
|
|
||||||
isMerge: pullResult.isMerge,
|
|
||||||
message: pullResult.pulled
|
|
||||||
? 'Sync completed: pulled latest changes and pushed.'
|
|
||||||
: 'Sync completed: already up to date, pushed local commits.',
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -8,60 +8,9 @@
|
|||||||
|
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import fs from 'fs/promises';
|
import fs from 'fs/promises';
|
||||||
import { execGitCommand } from '@automaker/git-utils';
|
|
||||||
import type { EventEmitter } from '../lib/events.js';
|
import type { EventEmitter } from '../lib/events.js';
|
||||||
import type { SettingsService } from './settings-service.js';
|
import type { SettingsService } from './settings-service.js';
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the list of remote names that have a branch matching the given branch name.
|
|
||||||
*
|
|
||||||
* Uses `git for-each-ref` to check cached remote refs, returning the names of
|
|
||||||
* any remotes that already have a branch with the same name as `currentBranch`.
|
|
||||||
* Returns an empty array when `hasAnyRemotes` is false or when no matching
|
|
||||||
* remote refs are found.
|
|
||||||
*
|
|
||||||
* This helps the UI distinguish between "branch exists on the tracking remote"
|
|
||||||
* vs "branch was pushed to a different remote".
|
|
||||||
*
|
|
||||||
* @param worktreePath - Path to the git worktree
|
|
||||||
* @param currentBranch - Branch name to search for on remotes
|
|
||||||
* @param hasAnyRemotes - Whether the repository has any remotes configured
|
|
||||||
* @returns Array of remote names (e.g. ["origin", "upstream"]) that contain the branch
|
|
||||||
*/
|
|
||||||
export async function getRemotesWithBranch(
|
|
||||||
worktreePath: string,
|
|
||||||
currentBranch: string,
|
|
||||||
hasAnyRemotes: boolean
|
|
||||||
): Promise<string[]> {
|
|
||||||
if (!hasAnyRemotes) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const remoteRefsOutput = await execGitCommand(
|
|
||||||
['for-each-ref', '--format=%(refname:short)', `refs/remotes/*/${currentBranch}`],
|
|
||||||
worktreePath
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!remoteRefsOutput.trim()) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
return remoteRefsOutput
|
|
||||||
.trim()
|
|
||||||
.split('\n')
|
|
||||||
.map((ref) => {
|
|
||||||
// Extract remote name from "remote/branch" format
|
|
||||||
const slashIdx = ref.indexOf('/');
|
|
||||||
return slashIdx !== -1 ? ref.slice(0, slashIdx) : ref;
|
|
||||||
})
|
|
||||||
.filter((name) => name.length > 0);
|
|
||||||
} catch {
|
|
||||||
// Ignore errors - return empty array
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Error thrown when one or more file copy operations fail during
|
* Error thrown when one or more file copy operations fail during
|
||||||
* `copyConfiguredFiles`. The caller can inspect `failures` for details.
|
* `copyConfiguredFiles`. The caller can inspect `failures` for details.
|
||||||
|
|||||||
@@ -23,7 +23,6 @@ export type {
|
|||||||
PhaseModelConfig,
|
PhaseModelConfig,
|
||||||
PhaseModelKey,
|
PhaseModelKey,
|
||||||
PhaseModelEntry,
|
PhaseModelEntry,
|
||||||
FeatureTemplate,
|
|
||||||
// Claude-compatible provider types
|
// Claude-compatible provider types
|
||||||
ApiKeySource,
|
ApiKeySource,
|
||||||
ClaudeCompatibleProviderType,
|
ClaudeCompatibleProviderType,
|
||||||
@@ -42,7 +41,6 @@ export {
|
|||||||
DEFAULT_CREDENTIALS,
|
DEFAULT_CREDENTIALS,
|
||||||
DEFAULT_PROJECT_SETTINGS,
|
DEFAULT_PROJECT_SETTINGS,
|
||||||
DEFAULT_PHASE_MODELS,
|
DEFAULT_PHASE_MODELS,
|
||||||
DEFAULT_FEATURE_TEMPLATES,
|
|
||||||
SETTINGS_VERSION,
|
SETTINGS_VERSION,
|
||||||
CREDENTIALS_VERSION,
|
CREDENTIALS_VERSION,
|
||||||
PROJECT_SETTINGS_VERSION,
|
PROJECT_SETTINGS_VERSION,
|
||||||
|
|||||||
@@ -168,7 +168,7 @@ describe('enhancement-prompts.ts', () => {
|
|||||||
const prompt = buildUserPrompt('improve', testText);
|
const prompt = buildUserPrompt('improve', testText);
|
||||||
expect(prompt).toContain('Example 1:');
|
expect(prompt).toContain('Example 1:');
|
||||||
expect(prompt).toContain(testText);
|
expect(prompt).toContain(testText);
|
||||||
expect(prompt).toContain('Please enhance the following task description:');
|
expect(prompt).toContain('Now, please enhance the following task description:');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should build prompt without examples when includeExamples is false', () => {
|
it('should build prompt without examples when includeExamples is false', () => {
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
import { normalizeThinkingLevelForModel } from '@automaker/types';
|
|
||||||
|
|
||||||
describe('normalizeThinkingLevelForModel', () => {
|
|
||||||
it('preserves explicitly selected none for Opus models', () => {
|
|
||||||
expect(normalizeThinkingLevelForModel('claude-opus', 'none')).toBe('none');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('falls back to none when Opus receives an unsupported manual thinking level', () => {
|
|
||||||
expect(normalizeThinkingLevelForModel('claude-opus', 'medium')).toBe('none');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('keeps adaptive for Opus when adaptive is selected', () => {
|
|
||||||
expect(normalizeThinkingLevelForModel('claude-opus', 'adaptive')).toBe('adaptive');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('preserves supported manual levels for non-Opus models', () => {
|
|
||||||
expect(normalizeThinkingLevelForModel('claude-sonnet', 'high')).toBe('high');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -198,7 +198,7 @@ describe('claude-provider.ts', () => {
|
|||||||
expect(typeof callArgs.prompt).not.toBe('string');
|
expect(typeof callArgs.prompt).not.toBe('string');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use maxTurns default of 1000', async () => {
|
it('should use maxTurns default of 100', async () => {
|
||||||
vi.mocked(sdk.query).mockReturnValue(
|
vi.mocked(sdk.query).mockReturnValue(
|
||||||
(async function* () {
|
(async function* () {
|
||||||
yield { type: 'text', text: 'test' };
|
yield { type: 'text', text: 'test' };
|
||||||
@@ -216,7 +216,7 @@ describe('claude-provider.ts', () => {
|
|||||||
expect(sdk.query).toHaveBeenCalledWith({
|
expect(sdk.query).toHaveBeenCalledWith({
|
||||||
prompt: 'Test',
|
prompt: 'Test',
|
||||||
options: expect.objectContaining({
|
options: expect.objectContaining({
|
||||||
maxTurns: 1000,
|
maxTurns: 100,
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -170,30 +170,6 @@ describe('codex-provider.ts', () => {
|
|||||||
expect(call.args).toContain('--json');
|
expect(call.args).toContain('--json');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('uses exec resume when sdkSessionId is provided', async () => {
|
|
||||||
vi.mocked(spawnJSONLProcess).mockReturnValue((async function* () {})());
|
|
||||||
|
|
||||||
await collectAsyncGenerator(
|
|
||||||
provider.executeQuery({
|
|
||||||
prompt: 'Continue',
|
|
||||||
model: 'gpt-5.2',
|
|
||||||
cwd: '/tmp',
|
|
||||||
sdkSessionId: 'codex-session-123',
|
|
||||||
outputFormat: { type: 'json_schema', schema: { type: 'object', properties: {} } },
|
|
||||||
codexSettings: { additionalDirs: ['/extra/dir'] },
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
|
||||||
expect(call.args[0]).toBe('exec');
|
|
||||||
expect(call.args[1]).toBe('resume');
|
|
||||||
expect(call.args).toContain('codex-session-123');
|
|
||||||
expect(call.args).toContain('--json');
|
|
||||||
// Resume queries must not include --output-schema or --add-dir
|
|
||||||
expect(call.args).not.toContain('--output-schema');
|
|
||||||
expect(call.args).not.toContain('--add-dir');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('overrides approval policy when MCP auto-approval is enabled', async () => {
|
it('overrides approval policy when MCP auto-approval is enabled', async () => {
|
||||||
// Note: With full-permissions always on (--dangerously-bypass-approvals-and-sandbox),
|
// Note: With full-permissions always on (--dangerously-bypass-approvals-and-sandbox),
|
||||||
// approval policy is bypassed, not configured via --config
|
// approval policy is bypassed, not configured via --config
|
||||||
@@ -344,10 +320,8 @@ describe('codex-provider.ts', () => {
|
|||||||
);
|
);
|
||||||
|
|
||||||
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
||||||
// High reasoning effort should have 3x the CLI base timeout (120000ms)
|
// High reasoning effort should have 3x the default timeout (90000ms)
|
||||||
// CODEX_CLI_TIMEOUT_MS = 120000, multiplier for 'high' = 3.0 → 360000ms
|
expect(call.timeout).toBe(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.high);
|
||||||
const CODEX_CLI_TIMEOUT_MS = 120000;
|
|
||||||
expect(call.timeout).toBe(CODEX_CLI_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.high);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('passes extended timeout for xhigh reasoning effort', async () => {
|
it('passes extended timeout for xhigh reasoning effort', async () => {
|
||||||
@@ -383,10 +357,8 @@ describe('codex-provider.ts', () => {
|
|||||||
);
|
);
|
||||||
|
|
||||||
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
||||||
// No reasoning effort should use the CLI base timeout (2 minutes)
|
// No reasoning effort should use the default timeout
|
||||||
// CODEX_CLI_TIMEOUT_MS = 120000ms, no multiplier applied
|
expect(call.timeout).toBe(DEFAULT_TIMEOUT_MS);
|
||||||
const CODEX_CLI_TIMEOUT_MS = 120000;
|
|
||||||
expect(call.timeout).toBe(CODEX_CLI_TIMEOUT_MS);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,35 +1,17 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||||
import { CopilotProvider, CopilotErrorCode } from '@/providers/copilot-provider.js';
|
import { CopilotProvider, CopilotErrorCode } from '@/providers/copilot-provider.js';
|
||||||
import { collectAsyncGenerator } from '../../utils/helpers.js';
|
|
||||||
import { CopilotClient } from '@github/copilot-sdk';
|
|
||||||
|
|
||||||
const createSessionMock = vi.fn();
|
|
||||||
const resumeSessionMock = vi.fn();
|
|
||||||
|
|
||||||
function createMockSession(sessionId = 'test-session') {
|
|
||||||
let eventHandler: ((event: any) => void) | null = null;
|
|
||||||
return {
|
|
||||||
sessionId,
|
|
||||||
send: vi.fn().mockImplementation(async () => {
|
|
||||||
if (eventHandler) {
|
|
||||||
eventHandler({ type: 'assistant.message', data: { content: 'hello' } });
|
|
||||||
eventHandler({ type: 'session.idle' });
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
destroy: vi.fn().mockResolvedValue(undefined),
|
|
||||||
on: vi.fn().mockImplementation((handler: (event: any) => void) => {
|
|
||||||
eventHandler = handler;
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Mock the Copilot SDK
|
// Mock the Copilot SDK
|
||||||
vi.mock('@github/copilot-sdk', () => ({
|
vi.mock('@github/copilot-sdk', () => ({
|
||||||
CopilotClient: vi.fn().mockImplementation(() => ({
|
CopilotClient: vi.fn().mockImplementation(() => ({
|
||||||
start: vi.fn().mockResolvedValue(undefined),
|
start: vi.fn().mockResolvedValue(undefined),
|
||||||
stop: vi.fn().mockResolvedValue(undefined),
|
stop: vi.fn().mockResolvedValue(undefined),
|
||||||
createSession: createSessionMock,
|
createSession: vi.fn().mockResolvedValue({
|
||||||
resumeSession: resumeSessionMock,
|
sessionId: 'test-session',
|
||||||
|
send: vi.fn().mockResolvedValue(undefined),
|
||||||
|
destroy: vi.fn().mockResolvedValue(undefined),
|
||||||
|
on: vi.fn(),
|
||||||
|
}),
|
||||||
})),
|
})),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -67,16 +49,6 @@ describe('copilot-provider.ts', () => {
|
|||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
vi.mocked(CopilotClient).mockImplementation(function () {
|
|
||||||
return {
|
|
||||||
start: vi.fn().mockResolvedValue(undefined),
|
|
||||||
stop: vi.fn().mockResolvedValue(undefined),
|
|
||||||
createSession: createSessionMock,
|
|
||||||
resumeSession: resumeSessionMock,
|
|
||||||
} as any;
|
|
||||||
});
|
|
||||||
createSessionMock.mockResolvedValue(createMockSession());
|
|
||||||
resumeSessionMock.mockResolvedValue(createMockSession('resumed-session'));
|
|
||||||
|
|
||||||
// Mock fs.existsSync for CLI path validation
|
// Mock fs.existsSync for CLI path validation
|
||||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||||
@@ -397,45 +369,6 @@ describe('copilot-provider.ts', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use error code in fallback when session.error message is empty', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'session.error',
|
|
||||||
data: { message: '', code: 'RATE_LIMIT_EXCEEDED' },
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = provider.normalizeEvent(event);
|
|
||||||
expect(result).not.toBeNull();
|
|
||||||
expect(result!.type).toBe('error');
|
|
||||||
expect(result!.error).toContain('RATE_LIMIT_EXCEEDED');
|
|
||||||
expect(result!.error).not.toBe('Unknown error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return generic "Copilot agent error" fallback when both message and code are empty', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'session.error',
|
|
||||||
data: { message: '', code: '' },
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = provider.normalizeEvent(event);
|
|
||||||
expect(result).not.toBeNull();
|
|
||||||
expect(result!.type).toBe('error');
|
|
||||||
expect(result!.error).toBe('Copilot agent error');
|
|
||||||
// Must NOT be the old opaque 'Unknown error'
|
|
||||||
expect(result!.error).not.toBe('Unknown error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return generic "Copilot agent error" fallback when data has no code field', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'session.error',
|
|
||||||
data: { message: '' },
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = provider.normalizeEvent(event);
|
|
||||||
expect(result).not.toBeNull();
|
|
||||||
expect(result!.type).toBe('error');
|
|
||||||
expect(result!.error).toBe('Copilot agent error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return null for unknown event types', () => {
|
it('should return null for unknown event types', () => {
|
||||||
const event = { type: 'unknown.event' };
|
const event = { type: 'unknown.event' };
|
||||||
|
|
||||||
@@ -581,45 +514,4 @@ describe('copilot-provider.ts', () => {
|
|||||||
expect(todoInput.todos[0].status).toBe('completed');
|
expect(todoInput.todos[0].status).toBe('completed');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('executeQuery resume behavior', () => {
|
|
||||||
it('uses resumeSession when sdkSessionId is provided', async () => {
|
|
||||||
const results = await collectAsyncGenerator(
|
|
||||||
provider.executeQuery({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: 'claude-sonnet-4.6',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
sdkSessionId: 'session-123',
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(resumeSessionMock).toHaveBeenCalledWith(
|
|
||||||
'session-123',
|
|
||||||
expect.objectContaining({ model: 'claude-sonnet-4.6', streaming: true })
|
|
||||||
);
|
|
||||||
expect(createSessionMock).not.toHaveBeenCalled();
|
|
||||||
expect(results.some((msg) => msg.session_id === 'resumed-session')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('falls back to createSession when resumeSession fails', async () => {
|
|
||||||
resumeSessionMock.mockRejectedValueOnce(new Error('session not found'));
|
|
||||||
createSessionMock.mockResolvedValueOnce(createMockSession('fresh-session'));
|
|
||||||
|
|
||||||
const results = await collectAsyncGenerator(
|
|
||||||
provider.executeQuery({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: 'claude-sonnet-4.6',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
sdkSessionId: 'stale-session',
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(resumeSessionMock).toHaveBeenCalledWith(
|
|
||||||
'stale-session',
|
|
||||||
expect.objectContaining({ model: 'claude-sonnet-4.6', streaming: true })
|
|
||||||
);
|
|
||||||
expect(createSessionMock).toHaveBeenCalledTimes(1);
|
|
||||||
expect(results.some((msg) => msg.session_id === 'fresh-session')).toBe(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,157 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach } from 'vitest';
|
|
||||||
import { CursorProvider } from '@/providers/cursor-provider.js';
|
|
||||||
|
|
||||||
describe('cursor-provider.ts', () => {
|
|
||||||
describe('buildCliArgs', () => {
|
|
||||||
it('adds --resume when sdkSessionId is provided', () => {
|
|
||||||
const provider = Object.create(CursorProvider.prototype) as CursorProvider & {
|
|
||||||
cliPath?: string;
|
|
||||||
};
|
|
||||||
provider.cliPath = '/usr/local/bin/cursor-agent';
|
|
||||||
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Continue the task',
|
|
||||||
model: 'gpt-5',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
sdkSessionId: 'cursor-session-123',
|
|
||||||
});
|
|
||||||
|
|
||||||
const resumeIndex = args.indexOf('--resume');
|
|
||||||
expect(resumeIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[resumeIndex + 1]).toBe('cursor-session-123');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not add --resume when sdkSessionId is omitted', () => {
|
|
||||||
const provider = Object.create(CursorProvider.prototype) as CursorProvider & {
|
|
||||||
cliPath?: string;
|
|
||||||
};
|
|
||||||
provider.cliPath = '/usr/local/bin/cursor-agent';
|
|
||||||
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Start a new task',
|
|
||||||
model: 'gpt-5',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(args).not.toContain('--resume');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('normalizeEvent - result error handling', () => {
|
|
||||||
let provider: CursorProvider;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
provider = Object.create(CursorProvider.prototype) as CursorProvider;
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns error message from resultEvent.error when is_error=true', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
is_error: true,
|
|
||||||
error: 'Rate limit exceeded',
|
|
||||||
result: '',
|
|
||||||
subtype: 'error',
|
|
||||||
duration_ms: 3000,
|
|
||||||
session_id: 'sess-123',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event);
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg!.type).toBe('error');
|
|
||||||
expect(msg!.error).toBe('Rate limit exceeded');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('falls back to resultEvent.result when error field is empty and is_error=true', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
is_error: true,
|
|
||||||
error: '',
|
|
||||||
result: 'Process terminated unexpectedly',
|
|
||||||
subtype: 'error',
|
|
||||||
duration_ms: 5000,
|
|
||||||
session_id: 'sess-456',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event);
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg!.type).toBe('error');
|
|
||||||
expect(msg!.error).toBe('Process terminated unexpectedly');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('builds diagnostic fallback when both error and result are empty and is_error=true', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
is_error: true,
|
|
||||||
error: '',
|
|
||||||
result: '',
|
|
||||||
subtype: 'error',
|
|
||||||
duration_ms: 5000,
|
|
||||||
session_id: 'sess-789',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event);
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg!.type).toBe('error');
|
|
||||||
// Should contain diagnostic info rather than 'Unknown error'
|
|
||||||
expect(msg!.error).toContain('5000ms');
|
|
||||||
expect(msg!.error).toContain('sess-789');
|
|
||||||
expect(msg!.error).not.toBe('Unknown error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('preserves session_id in error message', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
is_error: true,
|
|
||||||
error: 'Timeout occurred',
|
|
||||||
result: '',
|
|
||||||
subtype: 'error',
|
|
||||||
duration_ms: 30000,
|
|
||||||
session_id: 'my-session-id',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event);
|
|
||||||
|
|
||||||
expect(msg!.session_id).toBe('my-session-id');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses "none" when session_id is missing from diagnostic fallback', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
is_error: true,
|
|
||||||
error: '',
|
|
||||||
result: '',
|
|
||||||
subtype: 'error',
|
|
||||||
duration_ms: 5000,
|
|
||||||
// session_id intentionally omitted
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event);
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg!.type).toBe('error');
|
|
||||||
expect(msg!.error).toContain('none');
|
|
||||||
expect(msg!.error).not.toContain('undefined');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns success result when is_error=false', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
is_error: false,
|
|
||||||
error: '',
|
|
||||||
result: 'Completed successfully',
|
|
||||||
subtype: 'success',
|
|
||||||
duration_ms: 2000,
|
|
||||||
session_id: 'sess-ok',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event);
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg!.type).toBe('result');
|
|
||||||
expect(msg!.subtype).toBe('success');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,256 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach } from 'vitest';
|
|
||||||
import { GeminiProvider } from '@/providers/gemini-provider.js';
|
|
||||||
import type { ProviderMessage } from '@automaker/types';
|
|
||||||
|
|
||||||
describe('gemini-provider.ts', () => {
|
|
||||||
let provider: GeminiProvider;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
provider = new GeminiProvider();
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('buildCliArgs', () => {
|
|
||||||
it('should include --prompt with empty string to force headless mode', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello from Gemini',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const promptIndex = args.indexOf('--prompt');
|
|
||||||
expect(promptIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[promptIndex + 1]).toBe('');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should include --resume when sdkSessionId is provided', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
sdkSessionId: 'gemini-session-123',
|
|
||||||
});
|
|
||||||
|
|
||||||
const resumeIndex = args.indexOf('--resume');
|
|
||||||
expect(resumeIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[resumeIndex + 1]).toBe('gemini-session-123');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should not include --resume when sdkSessionId is missing', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(args).not.toContain('--resume');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should include --sandbox false for faster execution', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const sandboxIndex = args.indexOf('--sandbox');
|
|
||||||
expect(sandboxIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[sandboxIndex + 1]).toBe('false');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should include --approval-mode yolo for non-interactive use', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const approvalIndex = args.indexOf('--approval-mode');
|
|
||||||
expect(approvalIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[approvalIndex + 1]).toBe('yolo');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should include --output-format stream-json', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const formatIndex = args.indexOf('--output-format');
|
|
||||||
expect(formatIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[formatIndex + 1]).toBe('stream-json');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should include --include-directories with cwd', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/my-project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const dirIndex = args.indexOf('--include-directories');
|
|
||||||
expect(dirIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[dirIndex + 1]).toBe('/tmp/my-project');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should add gemini- prefix to bare model names', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: '2.5-flash',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const modelIndex = args.indexOf('--model');
|
|
||||||
expect(modelIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[modelIndex + 1]).toBe('gemini-2.5-flash');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should not double-prefix model names that already have gemini-', () => {
|
|
||||||
const args = provider.buildCliArgs({
|
|
||||||
prompt: 'Hello',
|
|
||||||
model: 'gemini-2.5-pro',
|
|
||||||
cwd: '/tmp/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
const modelIndex = args.indexOf('--model');
|
|
||||||
expect(modelIndex).toBeGreaterThan(-1);
|
|
||||||
expect(args[modelIndex + 1]).toBe('gemini-2.5-pro');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('normalizeEvent - error handling', () => {
|
|
||||||
it('returns error from result event when status=error and error field is set', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
status: 'error',
|
|
||||||
error: 'Model overloaded',
|
|
||||||
session_id: 'sess-gemini-1',
|
|
||||||
stats: { duration_ms: 4000, total_tokens: 0 },
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('error');
|
|
||||||
expect(msg.error).toBe('Model overloaded');
|
|
||||||
expect(msg.session_id).toBe('sess-gemini-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('builds diagnostic fallback when result event has status=error but empty error field', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
status: 'error',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-gemini-2',
|
|
||||||
stats: { duration_ms: 7500, total_tokens: 0 },
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('error');
|
|
||||||
// Diagnostic info should be present instead of 'Unknown error'
|
|
||||||
expect(msg.error).toContain('7500ms');
|
|
||||||
expect(msg.error).toContain('sess-gemini-2');
|
|
||||||
expect(msg.error).not.toBe('Unknown error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('builds fallback with "unknown" duration when stats are missing', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
status: 'error',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-gemini-nostats',
|
|
||||||
// no stats field
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('error');
|
|
||||||
expect(msg.error).toContain('unknown');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns error from standalone error event with error field set', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'error',
|
|
||||||
error: 'API key invalid',
|
|
||||||
session_id: 'sess-gemini-3',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('error');
|
|
||||||
expect(msg.error).toBe('API key invalid');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('builds diagnostic fallback when standalone error event has empty error field', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'error',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-gemini-empty',
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('error');
|
|
||||||
// Should include session_id, not just 'Unknown error'
|
|
||||||
expect(msg.error).toContain('sess-gemini-empty');
|
|
||||||
expect(msg.error).not.toBe('Unknown error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('builds fallback mentioning "none" when session_id is missing from error event', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'error',
|
|
||||||
error: '',
|
|
||||||
// no session_id
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('error');
|
|
||||||
expect(msg.error).toContain('none');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses consistent "Gemini agent failed" label for both result and error event fallbacks', () => {
|
|
||||||
const resultEvent = {
|
|
||||||
type: 'result',
|
|
||||||
status: 'error',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-r',
|
|
||||||
stats: { duration_ms: 1000 },
|
|
||||||
};
|
|
||||||
const errorEvent = {
|
|
||||||
type: 'error',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-e',
|
|
||||||
};
|
|
||||||
|
|
||||||
const resultMsg = provider.normalizeEvent(resultEvent) as ProviderMessage;
|
|
||||||
const errorMsg = provider.normalizeEvent(errorEvent) as ProviderMessage;
|
|
||||||
|
|
||||||
// Both fallback messages should use the same "Gemini agent failed" prefix
|
|
||||||
expect(resultMsg.error).toContain('Gemini agent failed');
|
|
||||||
expect(errorMsg.error).toContain('Gemini agent failed');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns success result when result event has status=success', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'result',
|
|
||||||
status: 'success',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-gemini-ok',
|
|
||||||
stats: { duration_ms: 1200, total_tokens: 500 },
|
|
||||||
};
|
|
||||||
|
|
||||||
const msg = provider.normalizeEvent(event) as ProviderMessage;
|
|
||||||
|
|
||||||
expect(msg).not.toBeNull();
|
|
||||||
expect(msg.type).toBe('result');
|
|
||||||
expect(msg.subtype).toBe('success');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,218 +0,0 @@
|
|||||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
|
||||||
import type { BacklogPlanResult, ProviderMessage } from '@automaker/types';
|
|
||||||
|
|
||||||
const {
|
|
||||||
mockGetAll,
|
|
||||||
mockExecuteQuery,
|
|
||||||
mockSaveBacklogPlan,
|
|
||||||
mockSetRunningState,
|
|
||||||
mockSetRunningDetails,
|
|
||||||
mockGetPromptCustomization,
|
|
||||||
mockGetAutoLoadClaudeMdSetting,
|
|
||||||
mockGetUseClaudeCodeSystemPromptSetting,
|
|
||||||
} = vi.hoisted(() => ({
|
|
||||||
mockGetAll: vi.fn(),
|
|
||||||
mockExecuteQuery: vi.fn(),
|
|
||||||
mockSaveBacklogPlan: vi.fn(),
|
|
||||||
mockSetRunningState: vi.fn(),
|
|
||||||
mockSetRunningDetails: vi.fn(),
|
|
||||||
mockGetPromptCustomization: vi.fn(),
|
|
||||||
mockGetAutoLoadClaudeMdSetting: vi.fn(),
|
|
||||||
mockGetUseClaudeCodeSystemPromptSetting: vi.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
vi.mock('@/services/feature-loader.js', () => ({
|
|
||||||
FeatureLoader: class {
|
|
||||||
getAll = mockGetAll;
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
vi.mock('@/providers/provider-factory.js', () => ({
|
|
||||||
ProviderFactory: {
|
|
||||||
getProviderForModel: vi.fn(() => ({
|
|
||||||
executeQuery: mockExecuteQuery,
|
|
||||||
})),
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
vi.mock('@/routes/backlog-plan/common.js', () => ({
|
|
||||||
logger: {
|
|
||||||
debug: vi.fn(),
|
|
||||||
info: vi.fn(),
|
|
||||||
warn: vi.fn(),
|
|
||||||
error: vi.fn(),
|
|
||||||
},
|
|
||||||
setRunningState: mockSetRunningState,
|
|
||||||
setRunningDetails: mockSetRunningDetails,
|
|
||||||
getErrorMessage: (error: unknown) => (error instanceof Error ? error.message : String(error)),
|
|
||||||
saveBacklogPlan: mockSaveBacklogPlan,
|
|
||||||
}));
|
|
||||||
|
|
||||||
vi.mock('@/lib/settings-helpers.js', () => ({
|
|
||||||
getPromptCustomization: mockGetPromptCustomization,
|
|
||||||
getAutoLoadClaudeMdSetting: mockGetAutoLoadClaudeMdSetting,
|
|
||||||
getUseClaudeCodeSystemPromptSetting: mockGetUseClaudeCodeSystemPromptSetting,
|
|
||||||
getPhaseModelWithOverrides: vi.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
import { generateBacklogPlan } from '@/routes/backlog-plan/generate-plan.js';
|
|
||||||
|
|
||||||
function createMockEvents() {
|
|
||||||
return {
|
|
||||||
emit: vi.fn(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('generateBacklogPlan', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
vi.clearAllMocks();
|
|
||||||
|
|
||||||
mockGetAll.mockResolvedValue([]);
|
|
||||||
mockGetPromptCustomization.mockResolvedValue({
|
|
||||||
backlogPlan: {
|
|
||||||
systemPrompt: 'System instructions',
|
|
||||||
userPromptTemplate:
|
|
||||||
'Current features:\n{{currentFeatures}}\n\nUser request:\n{{userRequest}}',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
mockGetAutoLoadClaudeMdSetting.mockResolvedValue(false);
|
|
||||||
mockGetUseClaudeCodeSystemPromptSetting.mockResolvedValue(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('salvages valid streamed JSON when Claude process exits with code 1', async () => {
|
|
||||||
const partialResult: BacklogPlanResult = {
|
|
||||||
changes: [
|
|
||||||
{
|
|
||||||
type: 'add',
|
|
||||||
feature: {
|
|
||||||
title: 'Add signup form',
|
|
||||||
description: 'Create signup UI and validation',
|
|
||||||
category: 'frontend',
|
|
||||||
},
|
|
||||||
reason: 'Required for user onboarding',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
summary: 'Adds signup feature to the backlog',
|
|
||||||
dependencyUpdates: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
const responseJson = JSON.stringify(partialResult);
|
|
||||||
|
|
||||||
async function* streamWithExitError(): AsyncGenerator<ProviderMessage> {
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
role: 'assistant',
|
|
||||||
content: [{ type: 'text', text: responseJson }],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
throw new Error('Claude Code process exited with code 1');
|
|
||||||
}
|
|
||||||
|
|
||||||
mockExecuteQuery.mockReturnValueOnce(streamWithExitError());
|
|
||||||
|
|
||||||
const events = createMockEvents();
|
|
||||||
const abortController = new AbortController();
|
|
||||||
|
|
||||||
const result = await generateBacklogPlan(
|
|
||||||
'/tmp/project',
|
|
||||||
'Please add a signup feature',
|
|
||||||
events as any,
|
|
||||||
abortController,
|
|
||||||
undefined,
|
|
||||||
'claude-opus'
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(mockExecuteQuery).toHaveBeenCalledTimes(1);
|
|
||||||
expect(result).toEqual(partialResult);
|
|
||||||
expect(mockSaveBacklogPlan).toHaveBeenCalledWith(
|
|
||||||
'/tmp/project',
|
|
||||||
expect.objectContaining({
|
|
||||||
prompt: 'Please add a signup feature',
|
|
||||||
model: 'claude-opus-4-6',
|
|
||||||
result: partialResult,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
expect(events.emit).toHaveBeenCalledWith('backlog-plan:event', {
|
|
||||||
type: 'backlog_plan_complete',
|
|
||||||
result: partialResult,
|
|
||||||
});
|
|
||||||
expect(mockSetRunningState).toHaveBeenCalledWith(false, null);
|
|
||||||
expect(mockSetRunningDetails).toHaveBeenCalledWith(null);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('prefers parseable provider result over longer non-JSON accumulated text on exit', async () => {
|
|
||||||
const recoveredResult: BacklogPlanResult = {
|
|
||||||
changes: [
|
|
||||||
{
|
|
||||||
type: 'add',
|
|
||||||
feature: {
|
|
||||||
title: 'Add reset password flow',
|
|
||||||
description: 'Implement reset password request and token validation UI',
|
|
||||||
category: 'frontend',
|
|
||||||
},
|
|
||||||
reason: 'Supports account recovery',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
summary: 'Adds password reset capability',
|
|
||||||
dependencyUpdates: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
const validProviderResult = JSON.stringify(recoveredResult);
|
|
||||||
const invalidAccumulatedText = `${validProviderResult}\n\nAdditional commentary that breaks raw JSON parsing.`;
|
|
||||||
|
|
||||||
async function* streamWithResultThenExit(): AsyncGenerator<ProviderMessage> {
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
role: 'assistant',
|
|
||||||
content: [{ type: 'text', text: invalidAccumulatedText }],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'success',
|
|
||||||
duration_ms: 10,
|
|
||||||
duration_api_ms: 10,
|
|
||||||
is_error: false,
|
|
||||||
num_turns: 1,
|
|
||||||
result: validProviderResult,
|
|
||||||
session_id: 'session-1',
|
|
||||||
total_cost_usd: 0,
|
|
||||||
usage: {
|
|
||||||
input_tokens: 10,
|
|
||||||
cache_creation_input_tokens: 0,
|
|
||||||
cache_read_input_tokens: 0,
|
|
||||||
output_tokens: 10,
|
|
||||||
server_tool_use: {
|
|
||||||
web_search_requests: 0,
|
|
||||||
},
|
|
||||||
service_tier: 'standard',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
throw new Error('Claude Code process exited with code 1');
|
|
||||||
}
|
|
||||||
|
|
||||||
mockExecuteQuery.mockReturnValueOnce(streamWithResultThenExit());
|
|
||||||
|
|
||||||
const events = createMockEvents();
|
|
||||||
const abortController = new AbortController();
|
|
||||||
|
|
||||||
const result = await generateBacklogPlan(
|
|
||||||
'/tmp/project',
|
|
||||||
'Add password reset support',
|
|
||||||
events as any,
|
|
||||||
abortController,
|
|
||||||
undefined,
|
|
||||||
'claude-opus'
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(result).toEqual(recoveredResult);
|
|
||||||
expect(mockSaveBacklogPlan).toHaveBeenCalledWith(
|
|
||||||
'/tmp/project',
|
|
||||||
expect.objectContaining({
|
|
||||||
result: recoveredResult,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -685,309 +685,6 @@ describe('AgentExecutor', () => {
|
|||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow('API rate limit exceeded');
|
await expect(executor.execute(options, callbacks)).rejects.toThrow('API rate limit exceeded');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw "Unknown error" when provider stream yields error with empty message', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'error',
|
|
||||||
error: '',
|
|
||||||
session_id: 'sess-123',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow('Unknown error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw with sanitized error when provider yields ANSI-decorated error', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'error',
|
|
||||||
// ANSI color codes + "Error: " prefix that should be stripped
|
|
||||||
error: '\x1b[31mError: Connection refused\x1b[0m',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Should strip ANSI codes and "Error: " prefix
|
|
||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow('Connection refused');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw when result subtype is error_max_turns', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
content: [{ type: 'text', text: 'Working on it...' }],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'error_max_turns',
|
|
||||||
session_id: 'sess-456',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow(
|
|
||||||
'Agent execution ended with: error_max_turns'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw when result subtype is error_during_execution', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'error_during_execution',
|
|
||||||
session_id: 'sess-789',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow(
|
|
||||||
'Agent execution ended with: error_during_execution'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw when result subtype is error_max_structured_output_retries', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'error_max_structured_output_retries',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow(
|
|
||||||
'Agent execution ended with: error_max_structured_output_retries'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw when result subtype is error_max_budget_usd', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'error_max_budget_usd',
|
|
||||||
session_id: 'sess-budget',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
await expect(executor.execute(options, callbacks)).rejects.toThrow(
|
|
||||||
'Agent execution ended with: error_max_budget_usd'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should NOT throw when result subtype is success', async () => {
|
|
||||||
const executor = new AgentExecutor(
|
|
||||||
mockEventBus,
|
|
||||||
mockFeatureStateManager,
|
|
||||||
mockPlanApprovalService,
|
|
||||||
mockSettingsService
|
|
||||||
);
|
|
||||||
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'mock',
|
|
||||||
executeQuery: vi.fn().mockImplementation(function* () {
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
content: [{ type: 'text', text: 'Done!' }],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'success',
|
|
||||||
session_id: 'sess-ok',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
} as unknown as BaseProvider;
|
|
||||||
|
|
||||||
const options: AgentExecutionOptions = {
|
|
||||||
workDir: '/test',
|
|
||||||
featureId: 'test-feature',
|
|
||||||
prompt: 'Test prompt',
|
|
||||||
projectPath: '/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
provider: mockProvider,
|
|
||||||
effectiveBareModel: 'claude-sonnet-4-6',
|
|
||||||
planningMode: 'skip',
|
|
||||||
};
|
|
||||||
|
|
||||||
const callbacks = {
|
|
||||||
waitForApproval: vi.fn().mockResolvedValue({ approved: true }),
|
|
||||||
saveFeatureSummary: vi.fn(),
|
|
||||||
updateFeatureSummary: vi.fn(),
|
|
||||||
buildTaskPrompt: vi.fn().mockReturnValue('task prompt'),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Should resolve without throwing
|
|
||||||
const result = await executor.execute(options, callbacks);
|
|
||||||
expect(result.aborted).toBe(false);
|
|
||||||
expect(result.responseText).toContain('Done!');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw error when authentication fails in response', async () => {
|
it('should throw error when authentication fails in response', async () => {
|
||||||
const executor = new AgentExecutor(
|
const executor = new AgentExecutor(
|
||||||
mockEventBus,
|
mockEventBus,
|
||||||
|
|||||||
@@ -1,192 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Contract tests verifying the tool marker format used by agent-executor
|
|
||||||
* (which writes agent output) and execution-service (which reads it to
|
|
||||||
* determine if the agent did meaningful work).
|
|
||||||
*
|
|
||||||
* The agent-executor writes: `\n🔧 Tool: ${block.name}\n`
|
|
||||||
* The execution-service checks: `agentOutput.includes('🔧 Tool:')`
|
|
||||||
*
|
|
||||||
* These tests ensure the marker format contract stays consistent and
|
|
||||||
* document the exact detection logic used for status determination.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// The exact marker prefix that execution-service searches for
|
|
||||||
const TOOL_MARKER = '🔧 Tool:';
|
|
||||||
|
|
||||||
// Minimum output length threshold for "meaningful work"
|
|
||||||
const MIN_OUTPUT_LENGTH = 200;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Simulates the agent-executor's tool_use output format.
|
|
||||||
* See: agent-executor.ts line ~293
|
|
||||||
*/
|
|
||||||
function formatToolUseBlock(toolName: string, input?: Record<string, unknown>): string {
|
|
||||||
let output = `\n${TOOL_MARKER} ${toolName}\n`;
|
|
||||||
if (input) output += `Input: ${JSON.stringify(input, null, 2)}\n`;
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Simulates the execution-service's output validation logic.
|
|
||||||
* See: execution-service.ts lines ~427-429
|
|
||||||
*/
|
|
||||||
function validateAgentOutput(
|
|
||||||
agentOutput: string,
|
|
||||||
skipTests: boolean
|
|
||||||
): 'verified' | 'waiting_approval' {
|
|
||||||
const hasToolUsage = agentOutput.includes(TOOL_MARKER);
|
|
||||||
const hasMinimalOutput = agentOutput.trim().length < MIN_OUTPUT_LENGTH;
|
|
||||||
const agentDidWork = hasToolUsage && !hasMinimalOutput;
|
|
||||||
|
|
||||||
if (skipTests) return 'waiting_approval';
|
|
||||||
if (!agentDidWork) return 'waiting_approval';
|
|
||||||
return 'verified';
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('Agent Output Validation - Contract Tests', () => {
|
|
||||||
describe('tool marker format contract', () => {
|
|
||||||
it('agent-executor tool format contains the expected marker', () => {
|
|
||||||
const toolOutput = formatToolUseBlock('Read', { file_path: '/src/index.ts' });
|
|
||||||
expect(toolOutput).toContain(TOOL_MARKER);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('agent-executor tool format includes tool name after marker', () => {
|
|
||||||
const toolOutput = formatToolUseBlock('Edit', {
|
|
||||||
file_path: '/src/app.ts',
|
|
||||||
old_string: 'foo',
|
|
||||||
new_string: 'bar',
|
|
||||||
});
|
|
||||||
expect(toolOutput).toContain('🔧 Tool: Edit');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('agent-executor tool format includes JSON input', () => {
|
|
||||||
const input = { file_path: '/src/index.ts' };
|
|
||||||
const toolOutput = formatToolUseBlock('Read', input);
|
|
||||||
expect(toolOutput).toContain('Input: ');
|
|
||||||
expect(toolOutput).toContain('"file_path": "/src/index.ts"');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('agent-executor tool format works without input', () => {
|
|
||||||
const toolOutput = formatToolUseBlock('Bash');
|
|
||||||
expect(toolOutput).toContain('🔧 Tool: Bash');
|
|
||||||
expect(toolOutput).not.toContain('Input:');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('marker includes colon and space to avoid false positives', () => {
|
|
||||||
// Ensure the marker is specific enough to avoid matching other emoji patterns
|
|
||||||
expect(TOOL_MARKER).toBe('🔧 Tool:');
|
|
||||||
expect(TOOL_MARKER).toContain(':');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('output validation logic', () => {
|
|
||||||
it('verified: tool usage + sufficient output', () => {
|
|
||||||
const output =
|
|
||||||
'Starting implementation of the new feature...\n' +
|
|
||||||
formatToolUseBlock('Read', { file_path: '/src/index.ts' }) +
|
|
||||||
'I can see the existing code. Let me make the needed changes.\n' +
|
|
||||||
formatToolUseBlock('Edit', { file_path: '/src/index.ts' }) +
|
|
||||||
'Changes complete. The implementation adds new validation logic and tests.';
|
|
||||||
expect(output.trim().length).toBeGreaterThanOrEqual(MIN_OUTPUT_LENGTH);
|
|
||||||
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('verified');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('waiting_approval: no tool markers regardless of length', () => {
|
|
||||||
const longOutput = 'I analyzed the codebase. '.repeat(50);
|
|
||||||
expect(longOutput.trim().length).toBeGreaterThan(MIN_OUTPUT_LENGTH);
|
|
||||||
|
|
||||||
expect(validateAgentOutput(longOutput, false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('waiting_approval: tool markers but insufficient length', () => {
|
|
||||||
const shortOutput = formatToolUseBlock('Read', { file_path: '/src/a.ts' });
|
|
||||||
expect(shortOutput.trim().length).toBeLessThan(MIN_OUTPUT_LENGTH);
|
|
||||||
|
|
||||||
expect(validateAgentOutput(shortOutput, false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('waiting_approval: empty output', () => {
|
|
||||||
expect(validateAgentOutput('', false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('waiting_approval: skipTests always overrides', () => {
|
|
||||||
const goodOutput =
|
|
||||||
'Starting...\n' +
|
|
||||||
formatToolUseBlock('Read', { file_path: '/src/index.ts' }) +
|
|
||||||
formatToolUseBlock('Edit', { file_path: '/src/index.ts' }) +
|
|
||||||
'Done implementing. '.repeat(15);
|
|
||||||
expect(goodOutput.trim().length).toBeGreaterThanOrEqual(MIN_OUTPUT_LENGTH);
|
|
||||||
|
|
||||||
expect(validateAgentOutput(goodOutput, true)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('boundary: exactly MIN_OUTPUT_LENGTH chars with tool is verified', () => {
|
|
||||||
const tool = formatToolUseBlock('Read');
|
|
||||||
const padding = 'x'.repeat(MIN_OUTPUT_LENGTH - tool.trim().length);
|
|
||||||
const output = tool + padding;
|
|
||||||
expect(output.trim().length).toBeGreaterThanOrEqual(MIN_OUTPUT_LENGTH);
|
|
||||||
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('verified');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('boundary: MIN_OUTPUT_LENGTH - 1 chars with tool is waiting_approval', () => {
|
|
||||||
const marker = `${TOOL_MARKER} Read\n`;
|
|
||||||
const padding = 'x'.repeat(MIN_OUTPUT_LENGTH - 1 - marker.length);
|
|
||||||
const output = marker + padding;
|
|
||||||
expect(output.trim().length).toBe(MIN_OUTPUT_LENGTH - 1);
|
|
||||||
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('realistic provider scenarios', () => {
|
|
||||||
it('Claude SDK agent with multiple tools → verified', () => {
|
|
||||||
let output = "I'll implement the feature.\n\n";
|
|
||||||
output += formatToolUseBlock('Read', { file_path: '/src/components/App.tsx' });
|
|
||||||
output += 'I see the component. Let me update it.\n\n';
|
|
||||||
output += formatToolUseBlock('Edit', {
|
|
||||||
file_path: '/src/components/App.tsx',
|
|
||||||
old_string: 'const App = () => {',
|
|
||||||
new_string: 'const App: React.FC = () => {',
|
|
||||||
});
|
|
||||||
output += 'Done. The component is now typed correctly.\n';
|
|
||||||
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('verified');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Cursor CLI quick exit (no tools) → waiting_approval', () => {
|
|
||||||
const output = 'Task received. Processing...\nResult: completed successfully.';
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Codex CLI with brief acknowledgment → waiting_approval', () => {
|
|
||||||
const output = 'Understood the task. Starting implementation.\nDone.';
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Agent that only reads but makes no edits (single Read tool, short output) → waiting_approval', () => {
|
|
||||||
const output = formatToolUseBlock('Read', { file_path: '/src/index.ts' }) + 'File read.';
|
|
||||||
expect(output.trim().length).toBeLessThan(MIN_OUTPUT_LENGTH);
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('waiting_approval');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Agent with extensive tool usage and explanation → verified', () => {
|
|
||||||
let output = 'Analyzing the codebase for the authentication feature.\n\n';
|
|
||||||
for (let i = 0; i < 5; i++) {
|
|
||||||
output += formatToolUseBlock('Read', { file_path: `/src/auth/handler${i}.ts` });
|
|
||||||
output += `Found handler ${i}. `;
|
|
||||||
}
|
|
||||||
output += formatToolUseBlock('Edit', {
|
|
||||||
file_path: '/src/auth/handler0.ts',
|
|
||||||
old_string: 'function login() {}',
|
|
||||||
new_string: 'async function login(creds: Credentials) { ... }',
|
|
||||||
});
|
|
||||||
output += 'Implementation complete with all authentication changes applied.\n';
|
|
||||||
|
|
||||||
expect(validateAgentOutput(output, false)).toBe('verified');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -188,125 +188,6 @@ describe('agent-service.ts', () => {
|
|||||||
expect(mockEvents.emit).toHaveBeenCalled();
|
expect(mockEvents.emit).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should emit tool_result events from provider stream', async () => {
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'gemini',
|
|
||||||
executeQuery: async function* () {
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
role: 'assistant',
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: 'tool_use',
|
|
||||||
name: 'Read',
|
|
||||||
tool_use_id: 'tool-1',
|
|
||||||
input: { file_path: 'README.md' },
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
role: 'assistant',
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: 'tool_result',
|
|
||||||
tool_use_id: 'tool-1',
|
|
||||||
content: 'File contents here',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'success',
|
|
||||||
};
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
|
||||||
|
|
||||||
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
|
|
||||||
content: 'Hello',
|
|
||||||
hasImages: false,
|
|
||||||
});
|
|
||||||
|
|
||||||
await service.sendMessage({
|
|
||||||
sessionId: 'session-1',
|
|
||||||
message: 'Hello',
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(mockEvents.emit).toHaveBeenCalledWith(
|
|
||||||
'agent:stream',
|
|
||||||
expect.objectContaining({
|
|
||||||
sessionId: 'session-1',
|
|
||||||
type: 'tool_result',
|
|
||||||
tool: {
|
|
||||||
name: 'Read',
|
|
||||||
input: {
|
|
||||||
toolUseId: 'tool-1',
|
|
||||||
content: 'File contents here',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should emit tool_result with unknown tool name for unregistered tool_use_id', async () => {
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'gemini',
|
|
||||||
executeQuery: async function* () {
|
|
||||||
// Yield tool_result WITHOUT a preceding tool_use (unregistered tool_use_id)
|
|
||||||
yield {
|
|
||||||
type: 'assistant',
|
|
||||||
message: {
|
|
||||||
role: 'assistant',
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: 'tool_result',
|
|
||||||
tool_use_id: 'unregistered-id',
|
|
||||||
content: 'Some result content',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'success',
|
|
||||||
};
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
|
||||||
|
|
||||||
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
|
|
||||||
content: 'Hello',
|
|
||||||
hasImages: false,
|
|
||||||
});
|
|
||||||
|
|
||||||
await service.sendMessage({
|
|
||||||
sessionId: 'session-1',
|
|
||||||
message: 'Hello',
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(mockEvents.emit).toHaveBeenCalledWith(
|
|
||||||
'agent:stream',
|
|
||||||
expect.objectContaining({
|
|
||||||
sessionId: 'session-1',
|
|
||||||
type: 'tool_result',
|
|
||||||
tool: {
|
|
||||||
name: 'unknown',
|
|
||||||
input: {
|
|
||||||
toolUseId: 'unregistered-id',
|
|
||||||
content: 'Some result content',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle images in message', async () => {
|
it('should handle images in message', async () => {
|
||||||
const mockProvider = {
|
const mockProvider = {
|
||||||
getName: () => 'claude',
|
getName: () => 'claude',
|
||||||
@@ -422,36 +303,6 @@ describe('agent-service.ts', () => {
|
|||||||
|
|
||||||
expect(fs.writeFile).toHaveBeenCalled();
|
expect(fs.writeFile).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should include context/history preparation for Gemini requests', async () => {
|
|
||||||
let capturedOptions: any;
|
|
||||||
const mockProvider = {
|
|
||||||
getName: () => 'gemini',
|
|
||||||
executeQuery: async function* (options: any) {
|
|
||||||
capturedOptions = options;
|
|
||||||
yield {
|
|
||||||
type: 'result',
|
|
||||||
subtype: 'success',
|
|
||||||
};
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
vi.mocked(ProviderFactory.getProviderForModelName).mockReturnValue('gemini');
|
|
||||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
|
||||||
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
|
|
||||||
content: 'Hello',
|
|
||||||
hasImages: false,
|
|
||||||
});
|
|
||||||
|
|
||||||
await service.sendMessage({
|
|
||||||
sessionId: 'session-1',
|
|
||||||
message: 'Hello',
|
|
||||||
model: 'gemini-2.5-flash',
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(contextLoader.loadContextFiles).toHaveBeenCalled();
|
|
||||||
expect(capturedOptions).toBeDefined();
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('stopExecution', () => {
|
describe('stopExecution', () => {
|
||||||
|
|||||||
@@ -1,835 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
||||||
import { EventHookService } from '../../../src/services/event-hook-service.js';
|
|
||||||
import type { EventEmitter, EventCallback, EventType } from '../../../src/lib/events.js';
|
|
||||||
import type { SettingsService } from '../../../src/services/settings-service.js';
|
|
||||||
import type { EventHistoryService } from '../../../src/services/event-history-service.js';
|
|
||||||
import type { FeatureLoader } from '../../../src/services/feature-loader.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a mock EventEmitter for testing
|
|
||||||
*/
|
|
||||||
function createMockEventEmitter(): EventEmitter & {
|
|
||||||
subscribers: Set<EventCallback>;
|
|
||||||
simulateEvent: (type: EventType, payload: unknown) => void;
|
|
||||||
} {
|
|
||||||
const subscribers = new Set<EventCallback>();
|
|
||||||
|
|
||||||
return {
|
|
||||||
subscribers,
|
|
||||||
emit(type: EventType, payload: unknown) {
|
|
||||||
for (const callback of subscribers) {
|
|
||||||
callback(type, payload);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
subscribe(callback: EventCallback) {
|
|
||||||
subscribers.add(callback);
|
|
||||||
return () => {
|
|
||||||
subscribers.delete(callback);
|
|
||||||
};
|
|
||||||
},
|
|
||||||
simulateEvent(type: EventType, payload: unknown) {
|
|
||||||
for (const callback of subscribers) {
|
|
||||||
callback(type, payload);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a mock SettingsService
|
|
||||||
*/
|
|
||||||
function createMockSettingsService(hooks: unknown[] = []): SettingsService {
|
|
||||||
return {
|
|
||||||
getGlobalSettings: vi.fn().mockResolvedValue({ eventHooks: hooks }),
|
|
||||||
} as unknown as SettingsService;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a mock EventHistoryService
|
|
||||||
*/
|
|
||||||
function createMockEventHistoryService() {
|
|
||||||
return {
|
|
||||||
storeEvent: vi.fn().mockResolvedValue({ id: 'test-event-id' }),
|
|
||||||
} as unknown as EventHistoryService;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a mock FeatureLoader
|
|
||||||
*/
|
|
||||||
function createMockFeatureLoader(features: Record<string, { title: string }> = {}) {
|
|
||||||
return {
|
|
||||||
get: vi.fn().mockImplementation((_projectPath: string, featureId: string) => {
|
|
||||||
return Promise.resolve(features[featureId] || null);
|
|
||||||
}),
|
|
||||||
} as unknown as FeatureLoader;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('EventHookService', () => {
  // Fresh service + doubles for every test; the emitter double records
  // subscribers so tests can assert subscribe/unsubscribe behaviour.
  let service: EventHookService;
  let mockEmitter: ReturnType<typeof createMockEventEmitter>;
  let mockSettingsService: ReturnType<typeof createMockSettingsService>;
  let mockEventHistoryService: ReturnType<typeof createMockEventHistoryService>;
  let mockFeatureLoader: ReturnType<typeof createMockFeatureLoader>;

  beforeEach(() => {
    service = new EventHookService();
    mockEmitter = createMockEventEmitter();
    mockSettingsService = createMockSettingsService();
    mockEventHistoryService = createMockEventHistoryService();
    mockFeatureLoader = createMockFeatureLoader();
  });

  afterEach(() => {
    // Tear down the emitter subscription so tests cannot leak into each other.
    service.destroy();
  });
|
|
||||||
|
|
||||||
describe('initialize', () => {
  it('should subscribe to the event emitter', () => {
    service.initialize(mockEmitter, mockSettingsService, mockEventHistoryService);
    expect(mockEmitter.subscribers.size).toBe(1);
  });

  it('should log initialization', () => {
    // NOTE(review): despite the title this only asserts the subscription;
    // there is no log spy here — confirm whether a log assertion was intended.
    service.initialize(mockEmitter, mockSettingsService);
    expect(mockEmitter.subscribers.size).toBe(1);
  });
});
|
|
||||||
|
|
||||||
describe('destroy', () => {
  it('should unsubscribe from the event emitter', () => {
    service.initialize(mockEmitter, mockSettingsService);
    expect(mockEmitter.subscribers.size).toBe(1);

    // destroy() must remove the subscription registered by initialize().
    service.destroy();
    expect(mockEmitter.subscribers.size).toBe(0);
  });
});
|
|
||||||
|
|
||||||
// Mapping of raw 'auto-mode:event' payloads with type
// auto_mode_feature_complete into hook triggers and stored events.
describe('event mapping - auto_mode_feature_complete', () => {
  it('should map to feature_success when passes is true', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      featureName: 'Test Feature',
      passes: true,
      message: 'Feature completed in 30s',
      projectPath: '/test/project',
    });

    // Allow async processing
    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_success');
    expect(storeCall.passes).toBe(true);
  });

  it('should map to feature_error when passes is false', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      featureName: 'Test Feature',
      passes: false,
      message: 'Feature stopped by user',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_error');
    expect(storeCall.passes).toBe(false);
  });

  it('should NOT populate error field for successful feature completion', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      featureName: 'Test Feature',
      passes: true,
      message: 'Feature completed in 30s - auto-verified',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_success');
    // Critical: error should NOT contain the success message
    expect(storeCall.error).toBeUndefined();
    expect(storeCall.errorType).toBeUndefined();
  });

  it('should populate error field for failed feature completion', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      featureName: 'Test Feature',
      passes: false,
      message: 'Feature stopped by user',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_error');
    // Error field should be populated for error triggers
    expect(storeCall.error).toBe('Feature stopped by user');
  });

  it('should ignore feature complete events without explicit auto execution mode', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    // No executionMode in the payload: the service should treat this as a
    // non-auto completion and not fire any hooks.
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      featureId: 'feat-1',
      featureName: 'Manual Feature',
      passes: true,
      message: 'Manually verified',
      projectPath: '/test/project',
    });

    await new Promise((resolve) => setTimeout(resolve, 50));
    expect(mockEventHistoryService.storeEvent).not.toHaveBeenCalled();
  });
});
|
|
||||||
|
|
||||||
// Manual completions arrive on a dedicated 'feature:completed' channel and
// should still map to the feature_success trigger.
describe('event mapping - feature:completed', () => {
  it('should map manual completion to feature_success', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('feature:completed', {
      featureId: 'feat-1',
      featureName: 'Manual Feature',
      projectPath: '/test/project',
      passes: true,
      executionMode: 'manual',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_success');
    expect(storeCall.passes).toBe(true);
  });
});
|
|
||||||
|
|
||||||
// auto_mode_error payloads map to a feature-scoped trigger when a featureId
// is present, otherwise to the run-level auto_mode_error trigger.
describe('event mapping - auto_mode_error', () => {
  it('should map to feature_error when featureId is present', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_error',
      featureId: 'feat-1',
      error: 'Network timeout',
      errorType: 'network',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_error');
    expect(storeCall.error).toBe('Network timeout');
    expect(storeCall.errorType).toBe('network');
  });

  it('should map to auto_mode_error when featureId is not present', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_error',
      error: 'System error',
      errorType: 'system',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('auto_mode_error');
    expect(storeCall.error).toBe('System error');
    expect(storeCall.errorType).toBe('system');
  });
});
|
|
||||||
|
|
||||||
// auto_mode_idle (queue drained) maps to the auto_mode_complete trigger.
describe('event mapping - auto_mode_idle', () => {
  it('should map to auto_mode_complete', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_idle',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('auto_mode_complete');
  });
});
|
|
||||||
|
|
||||||
describe('event mapping - feature:created', () => {
  it('should trigger feature_created hook', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('feature:created', {
      featureId: 'feat-1',
      featureName: 'New Feature',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    // The stored event should carry both the trigger and the originating id.
    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_created');
    expect(storeCall.featureId).toBe('feat-1');
  });
});
|
|
||||||
|
|
||||||
// Events the service has no mapping for must be dropped silently; these
// tests use a fixed sleep since there is nothing positive to wait for.
describe('event mapping - unhandled events', () => {
  it('should ignore auto-mode events with unrecognized types', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_progress',
      featureId: 'feat-1',
      content: 'Working...',
      projectPath: '/test/project',
    });

    // Give it time to process
    await new Promise((resolve) => setTimeout(resolve, 50));

    expect(mockEventHistoryService.storeEvent).not.toHaveBeenCalled();
  });

  it('should ignore events without a type', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      featureId: 'feat-1',
      projectPath: '/test/project',
    });

    await new Promise((resolve) => setTimeout(resolve, 50));

    expect(mockEventHistoryService.storeEvent).not.toHaveBeenCalled();
  });
});
|
|
||||||
|
|
||||||
describe('hook execution', () => {
|
|
||||||
it('should execute matching enabled hooks for feature_success', async () => {
|
|
||||||
const hooks = [
|
|
||||||
{
|
|
||||||
id: 'hook-1',
|
|
||||||
enabled: true,
|
|
||||||
trigger: 'feature_success',
|
|
||||||
name: 'Success Hook',
|
|
||||||
action: {
|
|
||||||
type: 'shell',
|
|
||||||
command: 'echo "success"',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'hook-2',
|
|
||||||
enabled: true,
|
|
||||||
trigger: 'feature_error',
|
|
||||||
name: 'Error Hook',
|
|
||||||
action: {
|
|
||||||
type: 'shell',
|
|
||||||
command: 'echo "error"',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
mockSettingsService = createMockSettingsService(hooks);
|
|
||||||
service.initialize(
|
|
||||||
mockEmitter,
|
|
||||||
mockSettingsService,
|
|
||||||
mockEventHistoryService,
|
|
||||||
mockFeatureLoader
|
|
||||||
);
|
|
||||||
|
|
||||||
mockEmitter.simulateEvent('auto-mode:event', {
|
|
||||||
type: 'auto_mode_feature_complete',
|
|
||||||
executionMode: 'auto',
|
|
||||||
featureId: 'feat-1',
|
|
||||||
featureName: 'Test Feature',
|
|
||||||
passes: true,
|
|
||||||
message: 'Feature completed in 30s',
|
|
||||||
projectPath: '/test/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
await vi.waitFor(() => {
|
|
||||||
expect(mockSettingsService.getGlobalSettings).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
// The error hook should NOT have been triggered for a success event
|
|
||||||
const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
|
|
||||||
.calls[0][0];
|
|
||||||
expect(storeCall.trigger).toBe('feature_success');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should NOT execute error hooks when feature completes successfully', async () => {
|
|
||||||
// This is the key regression test for the bug:
|
|
||||||
// "Error event hook fired when a feature completes successfully"
|
|
||||||
const hooks = [
|
|
||||||
{
|
|
||||||
id: 'hook-error',
|
|
||||||
enabled: true,
|
|
||||||
trigger: 'feature_error',
|
|
||||||
name: 'Error Notification',
|
|
||||||
action: {
|
|
||||||
type: 'shell',
|
|
||||||
command: 'echo "ERROR FIRED"',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
mockSettingsService = createMockSettingsService(hooks);
|
|
||||||
service.initialize(
|
|
||||||
mockEmitter,
|
|
||||||
mockSettingsService,
|
|
||||||
mockEventHistoryService,
|
|
||||||
mockFeatureLoader
|
|
||||||
);
|
|
||||||
|
|
||||||
mockEmitter.simulateEvent('auto-mode:event', {
|
|
||||||
type: 'auto_mode_feature_complete',
|
|
||||||
executionMode: 'auto',
|
|
||||||
featureId: 'feat-1',
|
|
||||||
featureName: 'Test Feature',
|
|
||||||
passes: true,
|
|
||||||
message: 'Feature completed in 30s - auto-verified',
|
|
||||||
projectPath: '/test/project',
|
|
||||||
});
|
|
||||||
|
|
||||||
await vi.waitFor(() => {
|
|
||||||
expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
// Verify the trigger was feature_success, not feature_error
|
|
||||||
const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
|
|
||||||
.calls[0][0];
|
|
||||||
expect(storeCall.trigger).toBe('feature_success');
|
|
||||||
// And no error information should be present
|
|
||||||
expect(storeCall.error).toBeUndefined();
|
|
||||||
expect(storeCall.errorType).toBeUndefined();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// Resolution order for featureName: prefer the title loaded via the
// FeatureLoader; fall back to the name carried in the event payload.
describe('feature name loading', () => {
  it('should load feature name from feature loader when not in payload', async () => {
    mockFeatureLoader = createMockFeatureLoader({
      'feat-1': { title: 'Loaded Feature Title' },
    });

    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      passes: true,
      message: 'Done',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.featureName).toBe('Loaded Feature Title');
  });

  it('should fall back to payload featureName when loader fails', async () => {
    mockFeatureLoader = createMockFeatureLoader({}); // Empty - no features found

    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      featureName: 'Fallback Name',
      passes: true,
      message: 'Done',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.featureName).toBe('Fallback Name');
  });
});
|
|
||||||
|
|
||||||
// Manual (non-auto-mode) completions surface as feature_status_changed;
// 'verified' and 'waiting_approval' count as success, and a per-feature
// dedup prevents double-firing after an auto-mode completion or error.
describe('event mapping - feature_status_changed (non-auto-mode completion)', () => {
  it('should trigger feature_success when status changes to verified', async () => {
    mockFeatureLoader = createMockFeatureLoader({
      'feat-1': { title: 'Manual Feature' },
    });

    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'feature_status_changed',
      featureId: 'feat-1',
      projectPath: '/test/project',
      status: 'verified',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_success');
    expect(storeCall.featureName).toBe('Manual Feature');
    expect(storeCall.passes).toBe(true);
  });

  it('should trigger feature_success when status changes to waiting_approval', async () => {
    mockFeatureLoader = createMockFeatureLoader({
      'feat-1': { title: 'Manual Feature' },
    });

    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'feature_status_changed',
      featureId: 'feat-1',
      projectPath: '/test/project',
      status: 'waiting_approval',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_success');
    expect(storeCall.passes).toBe(true);
    expect(storeCall.featureName).toBe('Manual Feature');
  });

  it('should NOT trigger hooks for non-completion status changes', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'feature_status_changed',
      featureId: 'feat-1',
      projectPath: '/test/project',
      status: 'in_progress',
    });

    // Give it time to process
    await new Promise((resolve) => setTimeout(resolve, 50));

    expect(mockEventHistoryService.storeEvent).not.toHaveBeenCalled();
  });

  it('should NOT double-fire hooks when auto_mode_feature_complete already fired', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    // First: auto_mode_feature_complete fires (auto-mode path)
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      featureName: 'Auto Feature',
      passes: true,
      message: 'Feature completed',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalledTimes(1);
    });

    // Then: feature_status_changed fires for the same feature
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'feature_status_changed',
      featureId: 'feat-1',
      projectPath: '/test/project',
      status: 'verified',
    });

    // Give it time to process
    await new Promise((resolve) => setTimeout(resolve, 50));

    // Should still only have been called once (from auto_mode_feature_complete)
    expect(mockEventHistoryService.storeEvent).toHaveBeenCalledTimes(1);
  });

  it('should NOT double-fire hooks when auto_mode_error already fired for feature', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    // First: auto_mode_error fires for a feature
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_error',
      featureId: 'feat-1',
      error: 'Something failed',
      errorType: 'execution',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalledTimes(1);
    });

    // Then: feature_status_changed fires for the same feature (e.g., reset to backlog)
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'feature_status_changed',
      featureId: 'feat-1',
      projectPath: '/test/project',
      status: 'verified', // unlikely after error, but tests the dedup
    });

    // Give it time to process
    await new Promise((resolve) => setTimeout(resolve, 50));

    // Should still only have been called once
    expect(mockEventHistoryService.storeEvent).toHaveBeenCalledTimes(1);
  });

  it('should fire hooks for different features independently', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    // Auto-mode completion for feat-1
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      passes: true,
      message: 'Done',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalledTimes(1);
    });

    // Manual completion for feat-2 (different feature)
    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'feature_status_changed',
      featureId: 'feat-2',
      projectPath: '/test/project',
      status: 'verified',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalledTimes(2);
    });

    // feat-2 should have triggered feature_success
    const secondCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[1][0];
    expect(secondCall.trigger).toBe('feature_success');
    expect(secondCall.featureId).toBe('feat-2');
  });
});
|
|
||||||
|
|
||||||
// Error context resolution: payload.error wins; payload.message is the
// fallback for error triggers that carry no explicit error field.
describe('error context for error events', () => {
  it('should use payload.error when available for error triggers', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_error',
      featureId: 'feat-1',
      error: 'Authentication failed',
      errorType: 'auth',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.error).toBe('Authentication failed');
    expect(storeCall.errorType).toBe('auth');
  });

  it('should fall back to payload.message for error field in error triggers', async () => {
    service.initialize(
      mockEmitter,
      mockSettingsService,
      mockEventHistoryService,
      mockFeatureLoader
    );

    mockEmitter.simulateEvent('auto-mode:event', {
      type: 'auto_mode_feature_complete',
      executionMode: 'auto',
      featureId: 'feat-1',
      passes: false,
      message: 'Feature stopped by user',
      projectPath: '/test/project',
    });

    await vi.waitFor(() => {
      expect(mockEventHistoryService.storeEvent).toHaveBeenCalled();
    });

    const storeCall = (mockEventHistoryService.storeEvent as ReturnType<typeof vi.fn>).mock
      .calls[0][0];
    expect(storeCall.trigger).toBe('feature_error');
    expect(storeCall.error).toBe('Feature stopped by user');
  });
});
});
|
|
||||||
@@ -34,7 +34,6 @@ import { getFeatureDir } from '@automaker/platform';
|
|||||||
import {
|
import {
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getUseClaudeCodeSystemPromptSetting,
|
|
||||||
filterClaudeMdFromContext,
|
filterClaudeMdFromContext,
|
||||||
} from '../../../src/lib/settings-helpers.js';
|
} from '../../../src/lib/settings-helpers.js';
|
||||||
import { extractSummary } from '../../../src/services/spec-parser.js';
|
import { extractSummary } from '../../../src/services/spec-parser.js';
|
||||||
@@ -68,7 +67,6 @@ vi.mock('../../../src/lib/settings-helpers.js', () => ({
|
|||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
getAutoLoadClaudeMdSetting: vi.fn().mockResolvedValue(true),
|
getAutoLoadClaudeMdSetting: vi.fn().mockResolvedValue(true),
|
||||||
getUseClaudeCodeSystemPromptSetting: vi.fn().mockResolvedValue(true),
|
|
||||||
filterClaudeMdFromContext: vi.fn().mockReturnValue('context prompt'),
|
filterClaudeMdFromContext: vi.fn().mockReturnValue('context prompt'),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -177,10 +175,7 @@ describe('execution-service.ts', () => {
|
|||||||
} as unknown as TypedEventBus;
|
} as unknown as TypedEventBus;
|
||||||
|
|
||||||
mockConcurrencyManager = {
|
mockConcurrencyManager = {
|
||||||
acquire: vi.fn().mockImplementation(({ featureId, isAutoMode }) => ({
|
acquire: vi.fn().mockImplementation(({ featureId }) => createRunningFeature(featureId)),
|
||||||
...createRunningFeature(featureId),
|
|
||||||
isAutoMode: isAutoMode ?? false,
|
|
||||||
})),
|
|
||||||
release: vi.fn(),
|
release: vi.fn(),
|
||||||
getRunningFeature: vi.fn(),
|
getRunningFeature: vi.fn(),
|
||||||
isRunning: vi.fn(),
|
isRunning: vi.fn(),
|
||||||
@@ -211,14 +206,7 @@ describe('execution-service.ts', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Default mocks for secureFs
|
// Default mocks for secureFs
|
||||||
// Include tool usage markers to simulate meaningful agent output.
|
vi.mocked(secureFs.readFile).mockResolvedValue('Agent output content');
|
||||||
// The execution service checks for '🔧 Tool:' markers and minimum
|
|
||||||
// output length to determine if the agent did real work.
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(
|
|
||||||
'Starting implementation...\n\n🔧 Tool: Read\nInput: {"file_path": "/src/index.ts"}\n\n' +
|
|
||||||
'🔧 Tool: Edit\nInput: {"file_path": "/src/index.ts", "old_string": "foo", "new_string": "bar"}\n\n' +
|
|
||||||
'Implementation complete. Updated the code as requested.'
|
|
||||||
);
|
|
||||||
vi.mocked(secureFs.access).mockResolvedValue(undefined);
|
vi.mocked(secureFs.access).mockResolvedValue(undefined);
|
||||||
|
|
||||||
// Re-setup platform mocks
|
// Re-setup platform mocks
|
||||||
@@ -239,7 +227,6 @@ describe('execution-service.ts', () => {
|
|||||||
},
|
},
|
||||||
} as Awaited<ReturnType<typeof getPromptCustomization>>);
|
} as Awaited<ReturnType<typeof getPromptCustomization>>);
|
||||||
vi.mocked(getAutoLoadClaudeMdSetting).mockResolvedValue(true);
|
vi.mocked(getAutoLoadClaudeMdSetting).mockResolvedValue(true);
|
||||||
vi.mocked(getUseClaudeCodeSystemPromptSetting).mockResolvedValue(true);
|
|
||||||
vi.mocked(filterClaudeMdFromContext).mockReturnValue('context prompt');
|
vi.mocked(filterClaudeMdFromContext).mockReturnValue('context prompt');
|
||||||
|
|
||||||
// Re-setup spec-parser mock
|
// Re-setup spec-parser mock
|
||||||
@@ -563,8 +550,8 @@ describe('execution-service.ts', () => {
|
|||||||
expect(mockRunAgentFn).not.toHaveBeenCalled();
|
expect(mockRunAgentFn).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('emits feature_complete event on success when isAutoMode is true', async () => {
|
it('emits feature_complete event on success', async () => {
|
||||||
await service.executeFeature('/test/project', 'feature-1', false, true);
|
await service.executeFeature('/test/project', 'feature-1');
|
||||||
|
|
||||||
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
||||||
'auto_mode_feature_complete',
|
'auto_mode_feature_complete',
|
||||||
@@ -574,15 +561,6 @@ describe('execution-service.ts', () => {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('does not emit feature_complete event on success when isAutoMode is false', async () => {
|
|
||||||
await service.executeFeature('/test/project', 'feature-1', false, false);
|
|
||||||
|
|
||||||
const completeCalls = vi
|
|
||||||
.mocked(mockEventBus.emitAutoModeEvent)
|
|
||||||
.mock.calls.filter((call) => call[0] === 'auto_mode_feature_complete');
|
|
||||||
expect(completeCalls.length).toBe(0);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('executeFeature - approved plan handling', () => {
|
describe('executeFeature - approved plan handling', () => {
|
||||||
@@ -1132,7 +1110,7 @@ describe('execution-service.ts', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('handles abort signal without error event (emits feature_complete when isAutoMode=true)', async () => {
|
it('handles abort signal without error event', async () => {
|
||||||
const abortError = new Error('abort');
|
const abortError = new Error('abort');
|
||||||
abortError.name = 'AbortError';
|
abortError.name = 'AbortError';
|
||||||
mockRunAgentFn = vi.fn().mockRejectedValue(abortError);
|
mockRunAgentFn = vi.fn().mockRejectedValue(abortError);
|
||||||
@@ -1158,7 +1136,7 @@ describe('execution-service.ts', () => {
|
|||||||
mockLoadContextFilesFn
|
mockLoadContextFilesFn
|
||||||
);
|
);
|
||||||
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1', false, true);
|
await svc.executeFeature('/test/project', 'feature-1');
|
||||||
|
|
||||||
// Should emit feature_complete with stopped by user
|
// Should emit feature_complete with stopped by user
|
||||||
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
||||||
@@ -1177,47 +1155,6 @@ describe('execution-service.ts', () => {
|
|||||||
expect(errorCalls.length).toBe(0);
|
expect(errorCalls.length).toBe(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('handles abort signal without emitting feature_complete when isAutoMode=false', async () => {
|
|
||||||
const abortError = new Error('abort');
|
|
||||||
abortError.name = 'AbortError';
|
|
||||||
mockRunAgentFn = vi.fn().mockRejectedValue(abortError);
|
|
||||||
|
|
||||||
const svc = new ExecutionService(
|
|
||||||
mockEventBus,
|
|
||||||
mockConcurrencyManager,
|
|
||||||
mockWorktreeResolver,
|
|
||||||
mockSettingsService,
|
|
||||||
mockRunAgentFn,
|
|
||||||
mockExecutePipelineFn,
|
|
||||||
mockUpdateFeatureStatusFn,
|
|
||||||
mockLoadFeatureFn,
|
|
||||||
mockGetPlanningPromptPrefixFn,
|
|
||||||
mockSaveFeatureSummaryFn,
|
|
||||||
mockRecordLearningsFn,
|
|
||||||
mockContextExistsFn,
|
|
||||||
mockResumeFeatureFn,
|
|
||||||
mockTrackFailureFn,
|
|
||||||
mockSignalPauseFn,
|
|
||||||
mockRecordSuccessFn,
|
|
||||||
mockSaveExecutionStateFn,
|
|
||||||
mockLoadContextFilesFn
|
|
||||||
);
|
|
||||||
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1', false, false);
|
|
||||||
|
|
||||||
// Should NOT emit feature_complete when isAutoMode is false
|
|
||||||
const completeCalls = vi
|
|
||||||
.mocked(mockEventBus.emitAutoModeEvent)
|
|
||||||
.mock.calls.filter((call) => call[0] === 'auto_mode_feature_complete');
|
|
||||||
expect(completeCalls.length).toBe(0);
|
|
||||||
|
|
||||||
// Should NOT emit error event (abort is not an error)
|
|
||||||
const errorCalls = vi
|
|
||||||
.mocked(mockEventBus.emitAutoModeEvent)
|
|
||||||
.mock.calls.filter((call) => call[0] === 'auto_mode_error');
|
|
||||||
expect(errorCalls.length).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('releases running feature even on error', async () => {
|
it('releases running feature even on error', async () => {
|
||||||
const testError = new Error('Test error');
|
const testError = new Error('Test error');
|
||||||
mockRunAgentFn = vi.fn().mockRejectedValue(testError);
|
mockRunAgentFn = vi.fn().mockRejectedValue(testError);
|
||||||
@@ -1276,34 +1213,6 @@ describe('execution-service.ts', () => {
|
|||||||
|
|
||||||
expect(mockConcurrencyManager.release).toHaveBeenCalledWith('feature-1', { force: true });
|
expect(mockConcurrencyManager.release).toHaveBeenCalledWith('feature-1', { force: true });
|
||||||
});
|
});
|
||||||
|
|
||||||
it('immediately updates feature status to interrupted before subprocess terminates', async () => {
|
|
||||||
const runningFeature = createRunningFeature('feature-1');
|
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue(runningFeature);
|
|
||||||
|
|
||||||
await service.stopFeature('feature-1');
|
|
||||||
|
|
||||||
// Should update to 'interrupted' immediately so the UI reflects the stop
|
|
||||||
// without waiting for the CLI subprocess to fully terminate
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'interrupted'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('still aborts and releases even if status update fails', async () => {
|
|
||||||
const runningFeature = createRunningFeature('feature-1');
|
|
||||||
const abortSpy = vi.spyOn(runningFeature.abortController, 'abort');
|
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue(runningFeature);
|
|
||||||
vi.mocked(mockUpdateFeatureStatusFn).mockRejectedValueOnce(new Error('disk error'));
|
|
||||||
|
|
||||||
const result = await service.stopFeature('feature-1');
|
|
||||||
|
|
||||||
expect(result).toBe(true);
|
|
||||||
expect(abortSpy).toHaveBeenCalled();
|
|
||||||
expect(mockConcurrencyManager.release).toHaveBeenCalledWith('feature-1', { force: true });
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('worktree resolution', () => {
|
describe('worktree resolution', () => {
|
||||||
@@ -1430,8 +1339,8 @@ describe('execution-service.ts', () => {
|
|||||||
it('handles missing agent output gracefully', async () => {
|
it('handles missing agent output gracefully', async () => {
|
||||||
vi.mocked(secureFs.readFile).mockRejectedValue(new Error('ENOENT'));
|
vi.mocked(secureFs.readFile).mockRejectedValue(new Error('ENOENT'));
|
||||||
|
|
||||||
// Should not throw (isAutoMode=true so event is emitted)
|
// Should not throw
|
||||||
await service.executeFeature('/test/project', 'feature-1', false, true);
|
await service.executeFeature('/test/project', 'feature-1');
|
||||||
|
|
||||||
// Feature should still complete successfully
|
// Feature should still complete successfully
|
||||||
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
||||||
@@ -1440,439 +1349,4 @@ describe('execution-service.ts', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('executeFeature - agent output validation', () => {
|
|
||||||
// Helper to generate realistic agent output with tool markers
|
|
||||||
const makeAgentOutput = (toolCount: number, extraText = ''): string => {
|
|
||||||
let output = 'Starting implementation...\n\n';
|
|
||||||
for (let i = 0; i < toolCount; i++) {
|
|
||||||
output += `🔧 Tool: Edit\nInput: {"file_path": "/src/file${i}.ts", "old_string": "old${i}", "new_string": "new${i}"}\n\n`;
|
|
||||||
}
|
|
||||||
output += `Implementation complete. ${extraText}`;
|
|
||||||
return output;
|
|
||||||
};
|
|
||||||
|
|
||||||
const createServiceWithMocks = () => {
|
|
||||||
return new ExecutionService(
|
|
||||||
mockEventBus,
|
|
||||||
mockConcurrencyManager,
|
|
||||||
mockWorktreeResolver,
|
|
||||||
mockSettingsService,
|
|
||||||
mockRunAgentFn,
|
|
||||||
mockExecutePipelineFn,
|
|
||||||
mockUpdateFeatureStatusFn,
|
|
||||||
mockLoadFeatureFn,
|
|
||||||
mockGetPlanningPromptPrefixFn,
|
|
||||||
mockSaveFeatureSummaryFn,
|
|
||||||
mockRecordLearningsFn,
|
|
||||||
mockContextExistsFn,
|
|
||||||
mockResumeFeatureFn,
|
|
||||||
mockTrackFailureFn,
|
|
||||||
mockSignalPauseFn,
|
|
||||||
mockRecordSuccessFn,
|
|
||||||
mockSaveExecutionStateFn,
|
|
||||||
mockLoadContextFilesFn
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
it('sets verified when agent output has tool usage and sufficient length', async () => {
|
|
||||||
const output = makeAgentOutput(3, 'Updated authentication module with new login flow.');
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(output);
|
|
||||||
|
|
||||||
await service.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'verified'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets waiting_approval when agent output is empty', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue('');
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets waiting_approval when agent output has no tool usage markers', async () => {
|
|
||||||
// Long output but no tool markers - agent printed text but didn't use tools
|
|
||||||
const longOutputNoTools = 'I analyzed the codebase and found several issues. '.repeat(20);
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(longOutputNoTools);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets waiting_approval when agent output has tool markers but is too short', async () => {
|
|
||||||
// Has a tool marker but total output is under 200 chars
|
|
||||||
const shortWithTool = '🔧 Tool: Read\nInput: {"file_path": "/src/index.ts"}\nDone.';
|
|
||||||
expect(shortWithTool.trim().length).toBeLessThan(200);
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(shortWithTool);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets waiting_approval when agent output file is missing (ENOENT)', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockRejectedValue(new Error('ENOENT'));
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets waiting_approval when agent output is only whitespace', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(' \n\n\t \n ');
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets verified when output is exactly at the 200 char threshold with tool usage', async () => {
|
|
||||||
// Create output that's exactly 200 chars trimmed with tool markers
|
|
||||||
const toolMarker = '🔧 Tool: Edit\nInput: {"file_path": "/src/index.ts"}\n';
|
|
||||||
const padding = 'x'.repeat(200 - toolMarker.length);
|
|
||||||
const output = toolMarker + padding;
|
|
||||||
expect(output.trim().length).toBeGreaterThanOrEqual(200);
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(output);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'verified'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets waiting_approval when output is 199 chars with tool usage (below threshold)', async () => {
|
|
||||||
const toolMarker = '🔧 Tool: Read\n';
|
|
||||||
const padding = 'x'.repeat(199 - toolMarker.length);
|
|
||||||
const output = toolMarker + padding;
|
|
||||||
expect(output.trim().length).toBe(199);
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(output);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('skipTests always takes priority over output validation', async () => {
|
|
||||||
// Meaningful output with tool usage - would normally be 'verified'
|
|
||||||
const output = makeAgentOutput(5, 'All changes applied successfully.');
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(output);
|
|
||||||
|
|
||||||
mockLoadFeatureFn = vi.fn().mockResolvedValue({ ...testFeature, skipTests: true });
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// skipTests=true always means waiting_approval regardless of output quality
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('skipTests with empty output still results in waiting_approval', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue('');
|
|
||||||
|
|
||||||
mockLoadFeatureFn = vi.fn().mockResolvedValue({ ...testFeature, skipTests: true });
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('still records success even when output validation fails', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue('');
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// recordSuccess should still be called - the agent ran without errors
|
|
||||||
expect(mockRecordSuccessFn).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('still extracts summary when output has content but no tool markers', async () => {
|
|
||||||
const outputNoTools = 'A '.repeat(150); // > 200 chars but no tool markers
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(outputNoTools);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// Summary extraction still runs even though status is waiting_approval
|
|
||||||
expect(extractSummary).toHaveBeenCalledWith(outputNoTools);
|
|
||||||
expect(mockSaveFeatureSummaryFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'Test summary'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('emits feature_complete with passes=true even when output validation routes to waiting_approval', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue('');
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1', false, true);
|
|
||||||
|
|
||||||
// The agent ran without error - it's still a "pass" from the execution perspective
|
|
||||||
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
|
||||||
'auto_mode_feature_complete',
|
|
||||||
expect.objectContaining({ passes: true })
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles realistic Cursor CLI output that exits quickly', async () => {
|
|
||||||
// Simulates a Cursor CLI that prints a brief message and exits
|
|
||||||
const cursorQuickExit = 'Task received. Processing...\nResult: completed successfully.';
|
|
||||||
expect(cursorQuickExit.includes('🔧 Tool:')).toBe(false);
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(cursorQuickExit);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// No tool usage = waiting_approval
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles realistic Claude SDK output with multiple tool uses', async () => {
|
|
||||||
// Simulates a Claude SDK agent that does real work
|
|
||||||
const claudeOutput =
|
|
||||||
"I'll implement the requested feature.\n\n" +
|
|
||||||
'🔧 Tool: Read\nInput: {"file_path": "/src/components/App.tsx"}\n\n' +
|
|
||||||
'I can see the existing component structure. Let me modify it.\n\n' +
|
|
||||||
'🔧 Tool: Edit\nInput: {"file_path": "/src/components/App.tsx", "old_string": "const App = () => {", "new_string": "const App: React.FC = () => {"}\n\n' +
|
|
||||||
'🔧 Tool: Write\nInput: {"file_path": "/src/components/NewFeature.tsx"}\n\n' +
|
|
||||||
"I've created the new component and updated the existing one. The feature is now implemented with proper TypeScript types.";
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(claudeOutput);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// Real work = verified
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'verified'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('reads agent output from the correct path with utf-8 encoding', async () => {
|
|
||||||
const output = makeAgentOutput(2, 'Done with changes.');
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(output);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// Verify readFile was called with the correct path derived from getFeatureDir
|
|
||||||
expect(secureFs.readFile).toHaveBeenCalledWith(
|
|
||||||
'/test/project/.automaker/features/feature-1/agent-output.md',
|
|
||||||
'utf-8'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('completion message includes auto-verified when status is verified', async () => {
|
|
||||||
const output = makeAgentOutput(3, 'All changes applied.');
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(output);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1', false, true);
|
|
||||||
|
|
||||||
expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith(
|
|
||||||
'auto_mode_feature_complete',
|
|
||||||
expect.objectContaining({
|
|
||||||
message: expect.stringContaining('auto-verified'),
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('completion message does NOT include auto-verified when status is waiting_approval', async () => {
|
|
||||||
// Empty output → waiting_approval
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue('');
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1', false, true);
|
|
||||||
|
|
||||||
const completeCall = vi
|
|
||||||
.mocked(mockEventBus.emitAutoModeEvent)
|
|
||||||
.mock.calls.find((call) => call[0] === 'auto_mode_feature_complete');
|
|
||||||
expect(completeCall).toBeDefined();
|
|
||||||
expect((completeCall![1] as { message: string }).message).not.toContain('auto-verified');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses same agentOutput for both status determination and summary extraction', async () => {
|
|
||||||
// Specific output that is long enough with tool markers (verified path)
|
|
||||||
// AND has content for summary extraction
|
|
||||||
const specificOutput =
|
|
||||||
'🔧 Tool: Read\nReading file...\n🔧 Tool: Edit\nEditing file...\n' +
|
|
||||||
'The implementation is complete. Here is a detailed description of what was done. '.repeat(
|
|
||||||
3
|
|
||||||
);
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(specificOutput);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// Status should be verified (has tools + long enough)
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'verified'
|
|
||||||
);
|
|
||||||
// extractSummary should receive the exact same output
|
|
||||||
expect(extractSummary).toHaveBeenCalledWith(specificOutput);
|
|
||||||
// recordLearnings should also receive the same output
|
|
||||||
expect(mockRecordLearningsFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
testFeature,
|
|
||||||
specificOutput
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not call recordMemoryUsage when output is empty and memoryFiles is empty', async () => {
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue('');
|
|
||||||
const { recordMemoryUsage } = await import('@automaker/utils');
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// With empty output and empty memoryFiles, recordMemoryUsage should not be called
|
|
||||||
expect(recordMemoryUsage).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles output with special unicode characters correctly', async () => {
|
|
||||||
// Output with various unicode but includes tool markers
|
|
||||||
const unicodeOutput =
|
|
||||||
'🔧 Tool: Read\n' +
|
|
||||||
'🔧 Tool: Edit\n' +
|
|
||||||
'Añadiendo función de búsqueda con caracteres especiales: ñ, ü, ö, é, 日本語テスト. ' +
|
|
||||||
'Die Änderungen wurden erfolgreich implementiert. '.repeat(3);
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(unicodeOutput);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// Should still detect tool markers and sufficient length
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'verified'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('treats output with only newlines and spaces around tool marker as insufficient', async () => {
|
|
||||||
// Has tool marker but surrounded by whitespace, total trimmed < 200
|
|
||||||
const sparseOutput = '\n\n 🔧 Tool: Read \n\n';
|
|
||||||
expect(sparseOutput.trim().length).toBeLessThan(200);
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(sparseOutput);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('detects tool marker substring correctly (partial match like "🔧 Tools:" does not count)', async () => {
|
|
||||||
// Output with a similar but not exact marker - "🔧 Tools:" instead of "🔧 Tool:"
|
|
||||||
const wrongMarker = '🔧 Tools: Read\n🔧 Tools: Edit\n' + 'Implementation done. '.repeat(20);
|
|
||||||
expect(wrongMarker.includes('🔧 Tool:')).toBe(false);
|
|
||||||
|
|
||||||
vi.mocked(secureFs.readFile).mockResolvedValue(wrongMarker);
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// "🔧 Tools:" is not the same as "🔧 Tool:" - should be waiting_approval
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'waiting_approval'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('pipeline merge_conflict status short-circuits before output validation', async () => {
|
|
||||||
// Set up pipeline that results in merge_conflict
|
|
||||||
vi.mocked(pipelineService.getPipelineConfig).mockResolvedValue({
|
|
||||||
version: 1,
|
|
||||||
steps: [{ id: 'step-1', name: 'Step 1', order: 1, instructions: 'Do step 1' }] as any,
|
|
||||||
});
|
|
||||||
|
|
||||||
// After pipeline, loadFeature returns merge_conflict status
|
|
||||||
let loadCallCount = 0;
|
|
||||||
mockLoadFeatureFn = vi.fn().mockImplementation(() => {
|
|
||||||
loadCallCount++;
|
|
||||||
if (loadCallCount === 1) return testFeature; // initial load
|
|
||||||
// All subsequent loads (task check + pipeline refresh) return merge_conflict
|
|
||||||
return { ...testFeature, status: 'merge_conflict' };
|
|
||||||
});
|
|
||||||
|
|
||||||
const svc = createServiceWithMocks();
|
|
||||||
await svc.executeFeature('/test/project', 'feature-1');
|
|
||||||
|
|
||||||
// Should NOT have called updateFeatureStatusFn with 'verified' or 'waiting_approval'
|
|
||||||
// because pipeline merge_conflict short-circuits the method
|
|
||||||
const statusCalls = vi
|
|
||||||
.mocked(mockUpdateFeatureStatusFn)
|
|
||||||
.mock.calls.filter((call) => call[2] === 'verified' || call[2] === 'waiting_approval');
|
|
||||||
// The only non-in_progress status call should be absent since merge_conflict returns early
|
|
||||||
expect(statusCalls.length).toBe(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -57,7 +57,6 @@ vi.mock('../../../src/lib/settings-helpers.js', () => ({
|
|||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
getAutoLoadClaudeMdSetting: vi.fn().mockResolvedValue(true),
|
getAutoLoadClaudeMdSetting: vi.fn().mockResolvedValue(true),
|
||||||
getUseClaudeCodeSystemPromptSetting: vi.fn().mockResolvedValue(true),
|
|
||||||
filterClaudeMdFromContext: vi.fn().mockReturnValue('context prompt'),
|
filterClaudeMdFromContext: vi.fn().mockReturnValue('context prompt'),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -171,16 +170,14 @@ describe('PipelineOrchestrator', () => {
|
|||||||
} as unknown as WorktreeResolver;
|
} as unknown as WorktreeResolver;
|
||||||
|
|
||||||
mockConcurrencyManager = {
|
mockConcurrencyManager = {
|
||||||
acquire: vi.fn().mockImplementation(({ featureId, isAutoMode }) => ({
|
acquire: vi.fn().mockReturnValue({
|
||||||
featureId,
|
featureId: 'feature-1',
|
||||||
projectPath: '/test/project',
|
projectPath: '/test/project',
|
||||||
abortController: new AbortController(),
|
abortController: new AbortController(),
|
||||||
branchName: null,
|
branchName: null,
|
||||||
worktreePath: null,
|
worktreePath: null,
|
||||||
isAutoMode: isAutoMode ?? false,
|
}),
|
||||||
})),
|
|
||||||
release: vi.fn(),
|
release: vi.fn(),
|
||||||
getRunningFeature: vi.fn().mockReturnValue(undefined),
|
|
||||||
} as unknown as ConcurrencyManager;
|
} as unknown as ConcurrencyManager;
|
||||||
|
|
||||||
mockSettingsService = null;
|
mockSettingsService = null;
|
||||||
@@ -544,18 +541,8 @@ describe('PipelineOrchestrator', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should emit auto_mode_feature_complete on success when isAutoMode is true', async () => {
|
it('should emit auto_mode_feature_complete on success', async () => {
|
||||||
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue({
|
|
||||||
featureId: 'feature-1',
|
|
||||||
projectPath: '/test/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
branchName: null,
|
|
||||||
worktreePath: null,
|
|
||||||
isAutoMode: true,
|
|
||||||
startTime: Date.now(),
|
|
||||||
leaseCount: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
await orchestrator.attemptMerge(context);
|
await orchestrator.attemptMerge(context);
|
||||||
@@ -566,19 +553,6 @@ describe('PipelineOrchestrator', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not emit auto_mode_feature_complete on success when isAutoMode is false', async () => {
|
|
||||||
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue(undefined);
|
|
||||||
|
|
||||||
const context = createMergeContext();
|
|
||||||
await orchestrator.attemptMerge(context);
|
|
||||||
|
|
||||||
const completeCalls = vi
|
|
||||||
.mocked(mockEventBus.emitAutoModeEvent)
|
|
||||||
.mock.calls.filter((call) => call[0] === 'auto_mode_feature_complete');
|
|
||||||
expect(completeCalls.length).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return needsAgentResolution true on conflict', async () => {
|
it('should return needsAgentResolution true on conflict', async () => {
|
||||||
vi.mocked(performMerge).mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({
|
||||||
success: false,
|
success: false,
|
||||||
@@ -649,24 +623,13 @@ describe('PipelineOrchestrator', () => {
|
|||||||
expect(mockExecuteFeatureFn).toHaveBeenCalled();
|
expect(mockExecuteFeatureFn).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should complete feature when step no longer exists and emit event when isAutoMode=true', async () => {
|
it('should complete feature when step no longer exists', async () => {
|
||||||
const invalidPipelineInfo: PipelineStatusInfo = {
|
const invalidPipelineInfo: PipelineStatusInfo = {
|
||||||
...validPipelineInfo,
|
...validPipelineInfo,
|
||||||
stepIndex: -1,
|
stepIndex: -1,
|
||||||
step: null,
|
step: null,
|
||||||
};
|
};
|
||||||
|
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue({
|
|
||||||
featureId: 'feature-1',
|
|
||||||
projectPath: '/test/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
branchName: null,
|
|
||||||
worktreePath: null,
|
|
||||||
isAutoMode: true,
|
|
||||||
startTime: Date.now(),
|
|
||||||
leaseCount: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
await orchestrator.resumePipeline('/test/project', testFeature, true, invalidPipelineInfo);
|
await orchestrator.resumePipeline('/test/project', testFeature, true, invalidPipelineInfo);
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
||||||
@@ -679,28 +642,6 @@ describe('PipelineOrchestrator', () => {
|
|||||||
expect.objectContaining({ message: expect.stringContaining('no longer exists') })
|
expect.objectContaining({ message: expect.stringContaining('no longer exists') })
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not emit feature_complete when step no longer exists and isAutoMode=false', async () => {
|
|
||||||
const invalidPipelineInfo: PipelineStatusInfo = {
|
|
||||||
...validPipelineInfo,
|
|
||||||
stepIndex: -1,
|
|
||||||
step: null,
|
|
||||||
};
|
|
||||||
|
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue(undefined);
|
|
||||||
|
|
||||||
await orchestrator.resumePipeline('/test/project', testFeature, true, invalidPipelineInfo);
|
|
||||||
|
|
||||||
expect(mockUpdateFeatureStatusFn).toHaveBeenCalledWith(
|
|
||||||
'/test/project',
|
|
||||||
'feature-1',
|
|
||||||
'verified'
|
|
||||||
);
|
|
||||||
const completeCalls = vi
|
|
||||||
.mocked(mockEventBus.emitAutoModeEvent)
|
|
||||||
.mock.calls.filter((call) => call[0] === 'auto_mode_feature_complete');
|
|
||||||
expect(completeCalls.length).toBe(0);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('resumeFromStep', () => {
|
describe('resumeFromStep', () => {
|
||||||
@@ -725,7 +666,7 @@ describe('PipelineOrchestrator', () => {
|
|||||||
expect(mockRunAgentFn).toHaveBeenCalled();
|
expect(mockRunAgentFn).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should complete feature when all remaining steps excluded and emit event when isAutoMode=true', async () => {
|
it('should complete feature when all remaining steps excluded', async () => {
|
||||||
const featureWithAllExcluded: Feature = {
|
const featureWithAllExcluded: Feature = {
|
||||||
...testFeature,
|
...testFeature,
|
||||||
excludedPipelineSteps: ['step-1', 'step-2'],
|
excludedPipelineSteps: ['step-1', 'step-2'],
|
||||||
@@ -733,16 +674,6 @@ describe('PipelineOrchestrator', () => {
|
|||||||
|
|
||||||
vi.mocked(pipelineService.getNextStatus).mockReturnValue('verified');
|
vi.mocked(pipelineService.getNextStatus).mockReturnValue('verified');
|
||||||
vi.mocked(pipelineService.isPipelineStatus).mockReturnValue(false);
|
vi.mocked(pipelineService.isPipelineStatus).mockReturnValue(false);
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue({
|
|
||||||
featureId: 'feature-1',
|
|
||||||
projectPath: '/test/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
branchName: null,
|
|
||||||
worktreePath: null,
|
|
||||||
isAutoMode: true,
|
|
||||||
startTime: Date.now(),
|
|
||||||
leaseCount: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
await orchestrator.resumeFromStep(
|
await orchestrator.resumeFromStep(
|
||||||
'/test/project',
|
'/test/project',
|
||||||
@@ -1102,7 +1033,7 @@ describe('PipelineOrchestrator', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('handles all steps excluded during resume and emits event when isAutoMode=true', async () => {
|
it('handles all steps excluded during resume', async () => {
|
||||||
const featureWithAllExcluded: Feature = {
|
const featureWithAllExcluded: Feature = {
|
||||||
...testFeature,
|
...testFeature,
|
||||||
excludedPipelineSteps: ['step-1', 'step-2'],
|
excludedPipelineSteps: ['step-1', 'step-2'],
|
||||||
@@ -1110,16 +1041,6 @@ describe('PipelineOrchestrator', () => {
|
|||||||
|
|
||||||
vi.mocked(pipelineService.getNextStatus).mockReturnValue('verified');
|
vi.mocked(pipelineService.getNextStatus).mockReturnValue('verified');
|
||||||
vi.mocked(pipelineService.isPipelineStatus).mockReturnValue(false);
|
vi.mocked(pipelineService.isPipelineStatus).mockReturnValue(false);
|
||||||
vi.mocked(mockConcurrencyManager.getRunningFeature).mockReturnValue({
|
|
||||||
featureId: 'feature-1',
|
|
||||||
projectPath: '/test/project',
|
|
||||||
abortController: new AbortController(),
|
|
||||||
branchName: null,
|
|
||||||
worktreePath: null,
|
|
||||||
isAutoMode: true,
|
|
||||||
startTime: Date.now(),
|
|
||||||
leaseCount: 1,
|
|
||||||
});
|
|
||||||
|
|
||||||
await orchestrator.resumeFromStep(
|
await orchestrator.resumeFromStep(
|
||||||
'/test/project',
|
'/test/project',
|
||||||
|
|||||||
@@ -740,11 +740,8 @@ describe('settings-service.ts', () => {
|
|||||||
// Legacy fields should be migrated to phaseModels with canonical IDs
|
// Legacy fields should be migrated to phaseModels with canonical IDs
|
||||||
expect(settings.phaseModels.enhancementModel).toEqual({ model: 'claude-haiku' });
|
expect(settings.phaseModels.enhancementModel).toEqual({ model: 'claude-haiku' });
|
||||||
expect(settings.phaseModels.validationModel).toEqual({ model: 'claude-opus' });
|
expect(settings.phaseModels.validationModel).toEqual({ model: 'claude-opus' });
|
||||||
// Other fields should use defaults (canonical IDs) - specGenerationModel includes thinkingLevel from DEFAULT_PHASE_MODELS
|
// Other fields should use defaults (canonical IDs)
|
||||||
expect(settings.phaseModels.specGenerationModel).toEqual({
|
expect(settings.phaseModels.specGenerationModel).toEqual({ model: 'claude-opus' });
|
||||||
model: 'claude-opus',
|
|
||||||
thinkingLevel: 'adaptive',
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use default phase models when none are configured', async () => {
|
it('should use default phase models when none are configured', async () => {
|
||||||
@@ -758,13 +755,10 @@ describe('settings-service.ts', () => {
|
|||||||
|
|
||||||
const settings = await settingsService.getGlobalSettings();
|
const settings = await settingsService.getGlobalSettings();
|
||||||
|
|
||||||
// Should use DEFAULT_PHASE_MODELS (with canonical IDs) - specGenerationModel includes thinkingLevel from DEFAULT_PHASE_MODELS
|
// Should use DEFAULT_PHASE_MODELS (with canonical IDs)
|
||||||
expect(settings.phaseModels.enhancementModel).toEqual({ model: 'claude-sonnet' });
|
expect(settings.phaseModels.enhancementModel).toEqual({ model: 'claude-sonnet' });
|
||||||
expect(settings.phaseModels.fileDescriptionModel).toEqual({ model: 'claude-haiku' });
|
expect(settings.phaseModels.fileDescriptionModel).toEqual({ model: 'claude-haiku' });
|
||||||
expect(settings.phaseModels.specGenerationModel).toEqual({
|
expect(settings.phaseModels.specGenerationModel).toEqual({ model: 'claude-opus' });
|
||||||
model: 'claude-opus',
|
|
||||||
thinkingLevel: 'adaptive',
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should deep merge phaseModels on update', async () => {
|
it('should deep merge phaseModels on update', async () => {
|
||||||
|
|||||||
@@ -117,8 +117,6 @@ const eslintConfig = defineConfig([
|
|||||||
Electron: 'readonly',
|
Electron: 'readonly',
|
||||||
// Console
|
// Console
|
||||||
console: 'readonly',
|
console: 'readonly',
|
||||||
// Structured clone (modern browser/Node API)
|
|
||||||
structuredClone: 'readonly',
|
|
||||||
// Vite defines
|
// Vite defines
|
||||||
__APP_VERSION__: 'readonly',
|
__APP_VERSION__: 'readonly',
|
||||||
__APP_BUILD_HASH__: 'readonly',
|
__APP_BUILD_HASH__: 'readonly',
|
||||||
|
|||||||
@@ -1,30 +1,9 @@
|
|||||||
# Map for conditional WebSocket upgrade header
|
|
||||||
map $http_upgrade $connection_upgrade {
|
|
||||||
default upgrade;
|
|
||||||
'' close;
|
|
||||||
}
|
|
||||||
|
|
||||||
server {
|
server {
|
||||||
listen 80;
|
listen 80;
|
||||||
server_name localhost;
|
server_name localhost;
|
||||||
root /usr/share/nginx/html;
|
root /usr/share/nginx/html;
|
||||||
index index.html;
|
index index.html;
|
||||||
|
|
||||||
# Proxy API and WebSocket requests to the backend server container
|
|
||||||
# Handles both HTTP API calls and WebSocket upgrades (/api/events, /api/terminal/ws)
|
|
||||||
location /api/ {
|
|
||||||
proxy_pass http://server:3008;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection $connection_upgrade;
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_buffering off;
|
|
||||||
proxy_read_timeout 3600s;
|
|
||||||
}
|
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
try_files $uri $uri/ /index.html;
|
try_files $uri $uri/ /index.html;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@automaker/ui",
|
"name": "@automaker/ui",
|
||||||
"version": "0.15.0",
|
"version": "0.13.0",
|
||||||
"description": "An autonomous AI development studio that helps you build software faster using AI-powered agents",
|
"description": "An autonomous AI development studio that helps you build software faster using AI-powered agents",
|
||||||
"homepage": "https://github.com/AutoMaker-Org/automaker",
|
"homepage": "https://github.com/AutoMaker-Org/automaker",
|
||||||
"repository": {
|
"repository": {
|
||||||
@@ -56,7 +56,6 @@
|
|||||||
"@codemirror/lang-xml": "6.1.0",
|
"@codemirror/lang-xml": "6.1.0",
|
||||||
"@codemirror/language": "^6.12.1",
|
"@codemirror/language": "^6.12.1",
|
||||||
"@codemirror/legacy-modes": "^6.5.2",
|
"@codemirror/legacy-modes": "^6.5.2",
|
||||||
"@codemirror/merge": "^6.12.0",
|
|
||||||
"@codemirror/search": "^6.6.0",
|
"@codemirror/search": "^6.6.0",
|
||||||
"@codemirror/state": "^6.5.4",
|
"@codemirror/state": "^6.5.4",
|
||||||
"@codemirror/theme-one-dark": "6.1.3",
|
"@codemirror/theme-one-dark": "6.1.3",
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { defineConfig, devices } from '@playwright/test';
|
import { defineConfig, devices } from '@playwright/test';
|
||||||
|
|
||||||
const port = process.env.TEST_PORT || 3107;
|
const port = process.env.TEST_PORT || 3007;
|
||||||
const serverPort = process.env.TEST_SERVER_PORT || 3108;
|
const serverPort = process.env.TEST_SERVER_PORT || 3008;
|
||||||
const reuseServer = process.env.TEST_REUSE_SERVER === 'true';
|
const reuseServer = process.env.TEST_REUSE_SERVER === 'true';
|
||||||
const useExternalBackend = !!process.env.VITE_SERVER_URL;
|
const useExternalBackend = !!process.env.VITE_SERVER_URL;
|
||||||
// Always use mock agent for tests (disables rate limiting, uses mock Claude responses)
|
// Always use mock agent for tests (disables rate limiting, uses mock Claude responses)
|
||||||
@@ -19,7 +19,6 @@ export default defineConfig({
|
|||||||
baseURL: `http://localhost:${port}`,
|
baseURL: `http://localhost:${port}`,
|
||||||
trace: 'on-failure',
|
trace: 'on-failure',
|
||||||
screenshot: 'only-on-failure',
|
screenshot: 'only-on-failure',
|
||||||
serviceWorkers: 'block',
|
|
||||||
},
|
},
|
||||||
// Global setup - authenticate before each test
|
// Global setup - authenticate before each test
|
||||||
globalSetup: require.resolve('./tests/global-setup.ts'),
|
globalSetup: require.resolve('./tests/global-setup.ts'),
|
||||||
@@ -70,10 +69,6 @@ export default defineConfig({
|
|||||||
timeout: 120000,
|
timeout: 120000,
|
||||||
env: {
|
env: {
|
||||||
...process.env,
|
...process.env,
|
||||||
// Must set AUTOMAKER_WEB_PORT to match the port Playwright waits for
|
|
||||||
AUTOMAKER_WEB_PORT: String(port),
|
|
||||||
// Must set AUTOMAKER_SERVER_PORT so Vite proxy forwards to the correct backend port
|
|
||||||
AUTOMAKER_SERVER_PORT: String(serverPort),
|
|
||||||
VITE_SKIP_SETUP: 'true',
|
VITE_SKIP_SETUP: 'true',
|
||||||
// Always skip electron plugin during tests - prevents duplicate server spawning
|
// Always skip electron plugin during tests - prevents duplicate server spawning
|
||||||
VITE_SKIP_ELECTRON: 'true',
|
VITE_SKIP_ELECTRON: 'true',
|
||||||
|
|||||||
@@ -8,11 +8,10 @@ import { promisify } from 'util';
|
|||||||
|
|
||||||
const execAsync = promisify(exec);
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
const SERVER_PORT = process.env.TEST_SERVER_PORT || 3108;
|
const SERVER_PORT = process.env.TEST_SERVER_PORT || 3008;
|
||||||
const UI_PORT = process.env.TEST_PORT || 3107;
|
const UI_PORT = process.env.TEST_PORT || 3007;
|
||||||
const USE_EXTERNAL_SERVER = !!process.env.VITE_SERVER_URL;
|
const USE_EXTERNAL_SERVER = !!process.env.VITE_SERVER_URL;
|
||||||
console.log(`[KillTestServers] SERVER_PORT ${SERVER_PORT}`);
|
|
||||||
console.log(`[KillTestServers] UI_PORT ${UI_PORT}`);
|
|
||||||
async function killProcessOnPort(port) {
|
async function killProcessOnPort(port) {
|
||||||
try {
|
try {
|
||||||
const hasLsof = await execAsync('command -v lsof').then(
|
const hasLsof = await execAsync('command -v lsof').then(
|
||||||
|
|||||||
@@ -21,7 +21,6 @@ import {
|
|||||||
Maximize2,
|
Maximize2,
|
||||||
Check,
|
Check,
|
||||||
Undo2,
|
Undo2,
|
||||||
RefreshCw,
|
|
||||||
} from 'lucide-react';
|
} from 'lucide-react';
|
||||||
import {
|
import {
|
||||||
Dialog,
|
Dialog,
|
||||||
@@ -37,7 +36,8 @@ import { Switch } from '@/components/ui/switch';
|
|||||||
import { Label } from '@/components/ui/label';
|
import { Label } from '@/components/ui/label';
|
||||||
import { Spinner } from '@/components/ui/spinner';
|
import { Spinner } from '@/components/ui/spinner';
|
||||||
import { Markdown } from '@/components/ui/markdown';
|
import { Markdown } from '@/components/ui/markdown';
|
||||||
import { cn, modelSupportsThinking, generateUUID } from '@/lib/utils';
|
import { ScrollArea } from '@/components/ui/scroll-area';
|
||||||
|
import { cn, modelSupportsThinking } from '@/lib/utils';
|
||||||
import { useAppStore } from '@/store/app-store';
|
import { useAppStore } from '@/store/app-store';
|
||||||
import { useGitHubPRReviewComments } from '@/hooks/queries';
|
import { useGitHubPRReviewComments } from '@/hooks/queries';
|
||||||
import { useCreateFeature, useResolveReviewThread } from '@/hooks/mutations';
|
import { useCreateFeature, useResolveReviewThread } from '@/hooks/mutations';
|
||||||
@@ -45,9 +45,8 @@ import { toast } from 'sonner';
|
|||||||
import type { PRReviewComment } from '@/lib/electron';
|
import type { PRReviewComment } from '@/lib/electron';
|
||||||
import type { Feature } from '@/store/app-store';
|
import type { Feature } from '@/store/app-store';
|
||||||
import type { PhaseModelEntry } from '@automaker/types';
|
import type { PhaseModelEntry } from '@automaker/types';
|
||||||
import { supportsReasoningEffort, normalizeThinkingLevelForModel } from '@automaker/types';
|
import { supportsReasoningEffort, isAdaptiveThinkingModel } from '@automaker/types';
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
import { PhaseModelSelector } from '@/components/views/settings-view/model-defaults/phase-model-selector';
|
||||||
import { PhaseModelSelector } from '@/components/views/settings-view/model-defaults';
|
|
||||||
|
|
||||||
// ============================================
|
// ============================================
|
||||||
// Types
|
// Types
|
||||||
@@ -76,7 +75,7 @@ interface PRCommentResolutionDialogProps {
|
|||||||
|
|
||||||
/** Generate a feature ID */
|
/** Generate a feature ID */
|
||||||
function generateFeatureId(): string {
|
function generateFeatureId(): string {
|
||||||
return generateUUID();
|
return `feature-${Date.now()}-${Math.random().toString(36).slice(2)}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Format a date string for display */
|
/** Format a date string for display */
|
||||||
@@ -248,22 +247,39 @@ function CommentRow({
|
|||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
'flex items-start gap-3 p-3 rounded-lg border border-border transition-colors',
|
'flex items-start gap-3 p-3 rounded-lg border border-border transition-colors cursor-pointer',
|
||||||
needsExpansion ? 'cursor-pointer' : 'cursor-default',
|
|
||||||
isSelected ? 'bg-accent/50 border-primary/30' : 'hover:bg-accent/30'
|
isSelected ? 'bg-accent/50 border-primary/30' : 'hover:bg-accent/30'
|
||||||
)}
|
)}
|
||||||
onClick={needsExpansion ? () => setIsExpanded((prev) => !prev) : undefined}
|
onClick={onToggle}
|
||||||
>
|
>
|
||||||
<Checkbox
|
<Checkbox
|
||||||
checked={isSelected}
|
checked={isSelected}
|
||||||
onCheckedChange={() => onToggle()}
|
onCheckedChange={() => onToggle()}
|
||||||
className="mt-0.5 shrink-0"
|
className="mt-0.5"
|
||||||
onClick={(e) => e.stopPropagation()}
|
onClick={(e) => e.stopPropagation()}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<div className="flex-1 min-w-0">
|
<div className="flex-1 min-w-0">
|
||||||
{/* Header: disclosure triangle + author + file location + tags */}
|
{/* Header: disclosure triangle + author + file location + tags */}
|
||||||
<div className="flex items-start gap-1.5 flex-wrap mb-1">
|
<div className="flex items-start gap-1.5 flex-wrap mb-1">
|
||||||
|
{/* Disclosure triangle - always shown, toggles expand/collapse */}
|
||||||
|
{needsExpansion ? (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={handleExpandToggle}
|
||||||
|
className="mt-0.5 shrink-0 text-muted-foreground hover:text-foreground transition-colors"
|
||||||
|
title={isExpanded ? 'Collapse comment' : 'Expand comment'}
|
||||||
|
>
|
||||||
|
{isExpanded ? (
|
||||||
|
<ChevronDown className="h-3.5 w-3.5" />
|
||||||
|
) : (
|
||||||
|
<ChevronDown className="h-3.5 w-3.5 -rotate-90" />
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
) : (
|
||||||
|
<span className="mt-0.5 shrink-0 w-3.5 h-3.5" />
|
||||||
|
)}
|
||||||
|
|
||||||
<div className="flex items-center gap-2 flex-wrap flex-1 min-w-0">
|
<div className="flex items-center gap-2 flex-wrap flex-1 min-w-0">
|
||||||
<div className="flex items-center gap-1.5">
|
<div className="flex items-center gap-1.5">
|
||||||
{comment.avatarUrl ? (
|
{comment.avatarUrl ? (
|
||||||
@@ -287,12 +303,6 @@ function CommentRow({
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{comment.isBot && (
|
|
||||||
<span className="px-1.5 py-0.5 text-[10px] font-medium rounded bg-purple-500/10 text-purple-500">
|
|
||||||
Bot
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{comment.isOutdated && (
|
{comment.isOutdated && (
|
||||||
<span className="px-1.5 py-0.5 text-[10px] font-medium rounded bg-yellow-500/10 text-yellow-500">
|
<span className="px-1.5 py-0.5 text-[10px] font-medium rounded bg-yellow-500/10 text-yellow-500">
|
||||||
Outdated
|
Outdated
|
||||||
@@ -336,47 +346,27 @@ function CommentRow({
|
|||||||
</button>
|
</button>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<div className="ml-auto shrink-0 flex items-center gap-1">
|
|
||||||
{/* Disclosure triangle - toggles expand/collapse */}
|
|
||||||
{needsExpansion ? (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={handleExpandToggle}
|
|
||||||
className="text-muted-foreground hover:text-foreground transition-colors p-0.5 rounded hover:bg-muted"
|
|
||||||
title={isExpanded ? 'Collapse comment' : 'Expand comment'}
|
|
||||||
>
|
|
||||||
{isExpanded ? (
|
|
||||||
<ChevronDown className="h-3.5 w-3.5" />
|
|
||||||
) : (
|
|
||||||
<ChevronDown className="h-3.5 w-3.5 -rotate-90" />
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
) : (
|
|
||||||
<span className="w-4 h-4" />
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Expand detail button */}
|
{/* Expand detail button */}
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={handleExpandDetail}
|
onClick={handleExpandDetail}
|
||||||
className="text-muted-foreground hover:text-foreground transition-colors p-0.5 rounded hover:bg-muted"
|
className="ml-auto shrink-0 text-muted-foreground hover:text-foreground transition-colors p-0.5 rounded hover:bg-muted"
|
||||||
title="View full comment details"
|
title="View full comment details"
|
||||||
>
|
>
|
||||||
<Maximize2 className="h-3.5 w-3.5" />
|
<Maximize2 className="h-3.5 w-3.5" />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Comment body - collapsible, rendered as markdown */}
|
{/* Comment body - collapsible, rendered as markdown */}
|
||||||
{isExpanded ? (
|
{isExpanded ? (
|
||||||
<div onClick={(e) => e.stopPropagation()}>
|
<div className="pl-5" onClick={(e) => e.stopPropagation()}>
|
||||||
<Markdown className="text-sm [&_p]:text-muted-foreground [&_li]:text-muted-foreground">
|
<Markdown className="text-sm [&_p]:text-muted-foreground [&_li]:text-muted-foreground">
|
||||||
{comment.body}
|
{comment.body}
|
||||||
</Markdown>
|
</Markdown>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<div className="line-clamp-2">
|
<div className="pl-5 line-clamp-2">
|
||||||
<Markdown className="text-sm [&_p]:text-muted-foreground [&_li]:text-muted-foreground [&_p]:my-0 [&_ul]:my-0 [&_ol]:my-0 [&_h1]:text-sm [&_h2]:text-sm [&_h3]:text-sm [&_h4]:text-sm [&_h1]:my-0 [&_h2]:my-0 [&_h3]:my-0 [&_h4]:my-0 [&_pre]:my-0 [&_blockquote]:my-0">
|
<Markdown className="text-sm [&_p]:text-muted-foreground [&_li]:text-muted-foreground [&_p]:my-0 [&_ul]:my-0 [&_ol]:my-0 [&_h1]:text-sm [&_h2]:text-sm [&_h3]:text-sm [&_h4]:text-sm [&_h1]:my-0 [&_h2]:my-0 [&_h3]:my-0 [&_h4]:my-0 [&_pre]:my-0 [&_blockquote]:my-0">
|
||||||
{comment.body}
|
{comment.body}
|
||||||
</Markdown>
|
</Markdown>
|
||||||
@@ -384,7 +374,7 @@ function CommentRow({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Date row */}
|
{/* Date row */}
|
||||||
<div className="flex items-center mt-1">
|
<div className="flex items-center mt-1 pl-5">
|
||||||
<div className="flex flex-col">
|
<div className="flex flex-col">
|
||||||
<div className="text-xs text-muted-foreground">{formatDate(comment.createdAt)}</div>
|
<div className="text-xs text-muted-foreground">{formatDate(comment.createdAt)}</div>
|
||||||
<div className="text-xs text-muted-foreground/70">{formatTime(comment.createdAt)}</div>
|
<div className="text-xs text-muted-foreground/70">{formatTime(comment.createdAt)}</div>
|
||||||
@@ -423,7 +413,7 @@ function CommentDetailDialog({ comment, open, onOpenChange }: CommentDetailDialo
|
|||||||
</DialogDescription>
|
</DialogDescription>
|
||||||
</DialogHeader>
|
</DialogHeader>
|
||||||
|
|
||||||
<div className="flex-1 min-h-0 overflow-y-auto -mx-6 px-6">
|
<ScrollArea className="flex-1 min-h-0 h-full -mx-6 px-6">
|
||||||
<div className="space-y-4 pb-2">
|
<div className="space-y-4 pb-2">
|
||||||
{/* Author & metadata section */}
|
{/* Author & metadata section */}
|
||||||
<div className="flex items-center gap-3 flex-wrap">
|
<div className="flex items-center gap-3 flex-wrap">
|
||||||
@@ -449,11 +439,6 @@ function CommentDetailDialog({ comment, open, onOpenChange }: CommentDetailDialo
|
|||||||
|
|
||||||
{/* Badges */}
|
{/* Badges */}
|
||||||
<div className="flex items-center gap-1.5 ml-auto">
|
<div className="flex items-center gap-1.5 ml-auto">
|
||||||
{comment.isBot && (
|
|
||||||
<span className="px-2 py-0.5 text-xs font-medium rounded bg-purple-500/10 text-purple-500">
|
|
||||||
Bot
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
{comment.isOutdated && (
|
{comment.isOutdated && (
|
||||||
<span className="px-2 py-0.5 text-xs font-medium rounded bg-yellow-500/10 text-yellow-500">
|
<span className="px-2 py-0.5 text-xs font-medium rounded bg-yellow-500/10 text-yellow-500">
|
||||||
Outdated
|
Outdated
|
||||||
@@ -510,7 +495,7 @@ function CommentDetailDialog({ comment, open, onOpenChange }: CommentDetailDialo
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</ScrollArea>
|
||||||
|
|
||||||
<DialogFooter className="mt-4">
|
<DialogFooter className="mt-4">
|
||||||
<Button variant="outline" onClick={() => onOpenChange(false)}>
|
<Button variant="outline" onClick={() => onOpenChange(false)}>
|
||||||
@@ -580,42 +565,46 @@ export function PRCommentResolutionDialog({
|
|||||||
>([]);
|
>([]);
|
||||||
const [detailComment, setDetailComment] = useState<PRReviewComment | null>(null);
|
const [detailComment, setDetailComment] = useState<PRReviewComment | null>(null);
|
||||||
|
|
||||||
// Per-thread resolving state - tracks which threads are currently being resolved/unresolved
|
|
||||||
const [resolvingThreads, setResolvingThreads] = useState<Set<string>>(new Set());
|
|
||||||
|
|
||||||
// Model selection state
|
// Model selection state
|
||||||
const [modelEntry, setModelEntry] = useState<PhaseModelEntry>({ model: 'claude-sonnet' });
|
const [modelEntry, setModelEntry] = useState<PhaseModelEntry>({ model: 'claude-sonnet' });
|
||||||
|
|
||||||
// Track previous open state to detect when dialog opens
|
// Track previous open state to detect when dialog opens
|
||||||
const wasOpenRef = useRef(false);
|
const wasOpenRef = useRef(false);
|
||||||
|
|
||||||
const handleModelChange = useCallback((entry: PhaseModelEntry) => {
|
// Sync model defaults only when dialog opens (transitions from closed to open)
|
||||||
const modelId = typeof entry.model === 'string' ? entry.model : '';
|
|
||||||
const normalizedThinkingLevel = normalizeThinkingLevelForModel(modelId, entry.thinkingLevel);
|
|
||||||
|
|
||||||
setModelEntry({ ...entry, thinkingLevel: normalizedThinkingLevel });
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
// Fetch PR review comments
|
|
||||||
const {
|
|
||||||
data,
|
|
||||||
isLoading: loading,
|
|
||||||
isFetching: refreshing,
|
|
||||||
error,
|
|
||||||
refetch,
|
|
||||||
} = useGitHubPRReviewComments(currentProject?.path, open ? pr.number : undefined);
|
|
||||||
|
|
||||||
// Sync model defaults and refresh comments when dialog opens (transitions from closed to open)
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const justOpened = open && !wasOpenRef.current;
|
const justOpened = open && !wasOpenRef.current;
|
||||||
wasOpenRef.current = open;
|
wasOpenRef.current = open;
|
||||||
|
|
||||||
if (justOpened) {
|
if (justOpened) {
|
||||||
setModelEntry(effectiveDefaultFeatureModel);
|
setModelEntry(effectiveDefaultFeatureModel);
|
||||||
// Force refresh PR comments from GitHub when dialog opens
|
|
||||||
refetch();
|
|
||||||
}
|
}
|
||||||
}, [open, effectiveDefaultFeatureModel, refetch]);
|
}, [open, effectiveDefaultFeatureModel]);
|
||||||
|
|
||||||
|
const handleModelChange = useCallback((entry: PhaseModelEntry) => {
|
||||||
|
// Normalize thinking level when switching between adaptive and non-adaptive models
|
||||||
|
const isNewModelAdaptive =
|
||||||
|
typeof entry.model === 'string' && isAdaptiveThinkingModel(entry.model);
|
||||||
|
const currentLevel = entry.thinkingLevel || 'none';
|
||||||
|
|
||||||
|
if (isNewModelAdaptive && currentLevel !== 'none' && currentLevel !== 'adaptive') {
|
||||||
|
// Switching TO an adaptive model with a manual level -> auto-switch to 'adaptive'
|
||||||
|
setModelEntry({ ...entry, thinkingLevel: 'adaptive' });
|
||||||
|
} else if (!isNewModelAdaptive && currentLevel === 'adaptive') {
|
||||||
|
// Switching FROM an adaptive model with adaptive -> auto-switch to 'high'
|
||||||
|
setModelEntry({ ...entry, thinkingLevel: 'high' });
|
||||||
|
} else {
|
||||||
|
setModelEntry(entry);
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Fetch PR review comments
|
||||||
|
const {
|
||||||
|
data,
|
||||||
|
isLoading: loading,
|
||||||
|
error,
|
||||||
|
refetch,
|
||||||
|
} = useGitHubPRReviewComments(currentProject?.path, open ? pr.number : undefined);
|
||||||
|
|
||||||
const allComments = useMemo(() => {
|
const allComments = useMemo(() => {
|
||||||
const raw = data?.comments ?? [];
|
const raw = data?.comments ?? [];
|
||||||
@@ -646,8 +635,8 @@ export function PRCommentResolutionDialog({
|
|||||||
const resolveThread = useResolveReviewThread(currentProject?.path ?? '', pr.number);
|
const resolveThread = useResolveReviewThread(currentProject?.path ?? '', pr.number);
|
||||||
|
|
||||||
// Derived state
|
// Derived state
|
||||||
const allSelected = comments.length > 0 && comments.every((c) => selectedIds.has(c.id));
|
const allSelected = comments.length > 0 && selectedIds.size === comments.length;
|
||||||
const someSelected = selectedIds.size > 0 && !allSelected;
|
const someSelected = selectedIds.size > 0 && selectedIds.size < comments.length;
|
||||||
const noneSelected = selectedIds.size === 0;
|
const noneSelected = selectedIds.size === 0;
|
||||||
|
|
||||||
// ============================================
|
// ============================================
|
||||||
@@ -669,24 +658,7 @@ export function PRCommentResolutionDialog({
|
|||||||
const handleResolveComment = useCallback(
|
const handleResolveComment = useCallback(
|
||||||
(comment: PRReviewComment, resolve: boolean) => {
|
(comment: PRReviewComment, resolve: boolean) => {
|
||||||
if (!comment.threadId) return;
|
if (!comment.threadId) return;
|
||||||
const threadId = comment.threadId;
|
resolveThread.mutate({ threadId: comment.threadId, resolve });
|
||||||
setResolvingThreads((prev) => {
|
|
||||||
const next = new Set(prev);
|
|
||||||
next.add(threadId);
|
|
||||||
return next;
|
|
||||||
});
|
|
||||||
resolveThread.mutate(
|
|
||||||
{ threadId, resolve },
|
|
||||||
{
|
|
||||||
onSettled: () => {
|
|
||||||
setResolvingThreads((prev) => {
|
|
||||||
const next = new Set(prev);
|
|
||||||
next.delete(threadId);
|
|
||||||
return next;
|
|
||||||
});
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
},
|
},
|
||||||
[resolveThread]
|
[resolveThread]
|
||||||
);
|
);
|
||||||
@@ -731,7 +703,7 @@ export function PRCommentResolutionDialog({
|
|||||||
const selectedComments = comments.filter((c) => selectedIds.has(c.id));
|
const selectedComments = comments.filter((c) => selectedIds.has(c.id));
|
||||||
|
|
||||||
// Resolve model settings from the current model entry
|
// Resolve model settings from the current model entry
|
||||||
const selectedModel = resolveModelString(modelEntry.model);
|
const selectedModel = modelEntry.model;
|
||||||
const normalizedThinking = modelSupportsThinking(selectedModel)
|
const normalizedThinking = modelSupportsThinking(selectedModel)
|
||||||
? modelEntry.thinkingLevel || 'none'
|
? modelEntry.thinkingLevel || 'none'
|
||||||
: 'none';
|
: 'none';
|
||||||
@@ -838,7 +810,6 @@ export function PRCommentResolutionDialog({
|
|||||||
setShowResolved(false);
|
setShowResolved(false);
|
||||||
setCreationErrors([]);
|
setCreationErrors([]);
|
||||||
setDetailComment(null);
|
setDetailComment(null);
|
||||||
setResolvingThreads(new Set());
|
|
||||||
setModelEntry(effectiveDefaultFeatureModel);
|
setModelEntry(effectiveDefaultFeatureModel);
|
||||||
}
|
}
|
||||||
onOpenChange(newOpen);
|
onOpenChange(newOpen);
|
||||||
@@ -854,22 +825,10 @@ export function PRCommentResolutionDialog({
|
|||||||
<Dialog open={open} onOpenChange={handleOpenChange}>
|
<Dialog open={open} onOpenChange={handleOpenChange}>
|
||||||
<DialogContent className="max-w-3xl max-h-[80vh] flex flex-col">
|
<DialogContent className="max-w-3xl max-h-[80vh] flex flex-col">
|
||||||
<DialogHeader>
|
<DialogHeader>
|
||||||
<div className="flex items-center justify-between pr-10">
|
|
||||||
<DialogTitle className="flex items-center gap-2">
|
<DialogTitle className="flex items-center gap-2">
|
||||||
<MessageSquare className="h-5 w-5 text-blue-500" />
|
<MessageSquare className="h-5 w-5 text-blue-500" />
|
||||||
Manage PR Review Comments
|
Manage PR Review Comments
|
||||||
</DialogTitle>
|
</DialogTitle>
|
||||||
<Button
|
|
||||||
variant="ghost"
|
|
||||||
size="sm"
|
|
||||||
className="h-7 w-7 p-0 shrink-0"
|
|
||||||
onClick={() => refetch()}
|
|
||||||
disabled={refreshing}
|
|
||||||
title="Refresh comments"
|
|
||||||
>
|
|
||||||
<RefreshCw className={cn('h-4 w-4', refreshing && 'animate-spin')} />
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
<DialogDescription>
|
<DialogDescription>
|
||||||
Select comments from PR #{pr.number} to create feature tasks that address them.
|
Select comments from PR #{pr.number} to create feature tasks that address them.
|
||||||
</DialogDescription>
|
</DialogDescription>
|
||||||
@@ -904,7 +863,7 @@ export function PRCommentResolutionDialog({
|
|||||||
{!loading && !error && allComments.length > 0 && (
|
{!loading && !error && allComments.length > 0 && (
|
||||||
<>
|
<>
|
||||||
{/* Controls Bar */}
|
{/* Controls Bar */}
|
||||||
<div className="flex flex-wrap items-center justify-between gap-2 px-1">
|
<div className="flex items-center justify-between gap-4 px-1">
|
||||||
{/* Select All - only interactive when there are visible comments */}
|
{/* Select All - only interactive when there are visible comments */}
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<Checkbox
|
<Checkbox
|
||||||
@@ -925,7 +884,7 @@ export function PRCommentResolutionDialog({
|
|||||||
</Label>
|
</Label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="flex flex-wrap items-center gap-2">
|
<div className="flex items-center gap-3">
|
||||||
{/* Show/Hide Resolved Filter Toggle - always visible */}
|
{/* Show/Hide Resolved Filter Toggle - always visible */}
|
||||||
<Button
|
<Button
|
||||||
variant="ghost"
|
variant="ghost"
|
||||||
@@ -980,7 +939,7 @@ export function PRCommentResolutionDialog({
|
|||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
{/* Mode Toggle */}
|
{/* Mode Toggle */}
|
||||||
<div className="flex items-center gap-2 shrink-0">
|
<div className="flex items-center gap-2">
|
||||||
<Label
|
<Label
|
||||||
className={cn(
|
className={cn(
|
||||||
'text-xs cursor-pointer',
|
'text-xs cursor-pointer',
|
||||||
@@ -1049,9 +1008,7 @@ export function PRCommentResolutionDialog({
|
|||||||
onToggle={() => handleToggleComment(comment.id)}
|
onToggle={() => handleToggleComment(comment.id)}
|
||||||
onExpandDetail={() => setDetailComment(comment)}
|
onExpandDetail={() => setDetailComment(comment)}
|
||||||
onResolve={handleResolveComment}
|
onResolve={handleResolveComment}
|
||||||
isResolvingThread={
|
isResolvingThread={resolveThread.isPending}
|
||||||
!!comment.threadId && resolvingThreads.has(comment.threadId)
|
|
||||||
}
|
|
||||||
/>
|
/>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useState, useCallback, useEffect, startTransition } from 'react';
|
import { useState, useCallback, useEffect } from 'react';
|
||||||
import { Plus, Bug, FolderOpen, BookOpen } from 'lucide-react';
|
import { Plus, Bug, FolderOpen, BookOpen } from 'lucide-react';
|
||||||
import { useNavigate, useLocation } from '@tanstack/react-router';
|
import { useNavigate, useLocation } from '@tanstack/react-router';
|
||||||
import { cn, isMac } from '@/lib/utils';
|
import { cn, isMac } from '@/lib/utils';
|
||||||
@@ -40,12 +40,14 @@ export function ProjectSwitcher() {
|
|||||||
const location = useLocation();
|
const location = useLocation();
|
||||||
const { hideWiki } = SIDEBAR_FEATURE_FLAGS;
|
const { hideWiki } = SIDEBAR_FEATURE_FLAGS;
|
||||||
const isWikiActive = location.pathname === '/wiki';
|
const isWikiActive = location.pathname === '/wiki';
|
||||||
const projects = useAppStore((s) => s.projects);
|
const {
|
||||||
const currentProject = useAppStore((s) => s.currentProject);
|
projects,
|
||||||
const setCurrentProject = useAppStore((s) => s.setCurrentProject);
|
currentProject,
|
||||||
const upsertAndSetCurrentProject = useAppStore((s) => s.upsertAndSetCurrentProject);
|
setCurrentProject,
|
||||||
const specCreatingForProject = useAppStore((s) => s.specCreatingForProject);
|
upsertAndSetCurrentProject,
|
||||||
const setSpecCreatingForProject = useAppStore((s) => s.setSpecCreatingForProject);
|
specCreatingForProject,
|
||||||
|
setSpecCreatingForProject,
|
||||||
|
} = useAppStore();
|
||||||
const [contextMenuProject, setContextMenuProject] = useState<Project | null>(null);
|
const [contextMenuProject, setContextMenuProject] = useState<Project | null>(null);
|
||||||
const [contextMenuPosition, setContextMenuPosition] = useState<{ x: number; y: number } | null>(
|
const [contextMenuPosition, setContextMenuPosition] = useState<{ x: number; y: number } | null>(
|
||||||
null
|
null
|
||||||
@@ -102,10 +104,6 @@ export function ProjectSwitcher() {
|
|||||||
|
|
||||||
const handleProjectClick = useCallback(
|
const handleProjectClick = useCallback(
|
||||||
async (project: Project) => {
|
async (project: Project) => {
|
||||||
if (project.id === currentProject?.id) {
|
|
||||||
navigate({ to: '/board' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
try {
|
try {
|
||||||
// Ensure .automaker directory structure exists before switching
|
// Ensure .automaker directory structure exists before switching
|
||||||
await initializeProject(project.path);
|
await initializeProject(project.path);
|
||||||
@@ -114,19 +112,11 @@ export function ProjectSwitcher() {
|
|||||||
// Continue with switch even if initialization fails -
|
// Continue with switch even if initialization fails -
|
||||||
// the project may already be initialized
|
// the project may already be initialized
|
||||||
}
|
}
|
||||||
// Wrap in startTransition to let React batch the project switch and
|
|
||||||
// navigation into a single low-priority update. Without this, the two
|
|
||||||
// synchronous calls fire separate renders where currentProject points
|
|
||||||
// to the new project but per-project state (worktrees, features) is
|
|
||||||
// still stale, causing a cascade of effects and store mutations that
|
|
||||||
// can trigger React error #185 (maximum update depth exceeded).
|
|
||||||
startTransition(() => {
|
|
||||||
setCurrentProject(project);
|
setCurrentProject(project);
|
||||||
// Navigate to board view when switching projects
|
// Navigate to board view when switching projects
|
||||||
navigate({ to: '/board' });
|
navigate({ to: '/board' });
|
||||||
});
|
|
||||||
},
|
},
|
||||||
[currentProject?.id, setCurrentProject, navigate]
|
[setCurrentProject, navigate]
|
||||||
);
|
);
|
||||||
|
|
||||||
const handleNewProject = () => {
|
const handleNewProject = () => {
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useCallback, startTransition } from 'react';
|
import { useCallback } from 'react';
|
||||||
import {
|
import {
|
||||||
Folder,
|
Folder,
|
||||||
ChevronDown,
|
ChevronDown,
|
||||||
@@ -78,22 +78,21 @@ export function ProjectSelectorWithOptions({
|
|||||||
setShowDeleteProjectDialog,
|
setShowDeleteProjectDialog,
|
||||||
setShowRemoveFromAutomakerDialog,
|
setShowRemoveFromAutomakerDialog,
|
||||||
}: ProjectSelectorWithOptionsProps) {
|
}: ProjectSelectorWithOptionsProps) {
|
||||||
const projects = useAppStore((s) => s.projects);
|
const {
|
||||||
const currentProject = useAppStore((s) => s.currentProject);
|
projects,
|
||||||
const projectHistory = useAppStore((s) => s.projectHistory);
|
currentProject,
|
||||||
const setCurrentProject = useAppStore((s) => s.setCurrentProject);
|
projectHistory,
|
||||||
const reorderProjects = useAppStore((s) => s.reorderProjects);
|
setCurrentProject,
|
||||||
const cyclePrevProject = useAppStore((s) => s.cyclePrevProject);
|
reorderProjects,
|
||||||
const cycleNextProject = useAppStore((s) => s.cycleNextProject);
|
cyclePrevProject,
|
||||||
const clearProjectHistory = useAppStore((s) => s.clearProjectHistory);
|
cycleNextProject,
|
||||||
|
clearProjectHistory,
|
||||||
|
} = useAppStore();
|
||||||
|
|
||||||
const shortcuts = useKeyboardShortcutsConfig();
|
const shortcuts = useKeyboardShortcutsConfig();
|
||||||
// Wrap setCurrentProject to ensure .automaker is initialized before switching
|
// Wrap setCurrentProject to ensure .automaker is initialized before switching
|
||||||
const setCurrentProjectWithInit = useCallback(
|
const setCurrentProjectWithInit = useCallback(
|
||||||
async (p: Project) => {
|
async (p: Project) => {
|
||||||
if (p.id === currentProject?.id) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
try {
|
try {
|
||||||
// Ensure .automaker directory structure exists before switching
|
// Ensure .automaker directory structure exists before switching
|
||||||
await initializeProject(p.path);
|
await initializeProject(p.path);
|
||||||
@@ -102,12 +101,9 @@ export function ProjectSelectorWithOptions({
|
|||||||
// Continue with switch even if initialization fails -
|
// Continue with switch even if initialization fails -
|
||||||
// the project may already be initialized
|
// the project may already be initialized
|
||||||
}
|
}
|
||||||
// Defer project switch update to avoid synchronous render cascades.
|
|
||||||
startTransition(() => {
|
|
||||||
setCurrentProject(p);
|
setCurrentProject(p);
|
||||||
});
|
|
||||||
},
|
},
|
||||||
[currentProject?.id, setCurrentProject]
|
[setCurrentProject]
|
||||||
);
|
);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useState, useCallback, startTransition } from 'react';
|
import { useState, useCallback } from 'react';
|
||||||
import { useNavigate } from '@tanstack/react-router';
|
import { useNavigate } from '@tanstack/react-router';
|
||||||
import { ChevronsUpDown, Folder, Plus, FolderOpen, LogOut } from 'lucide-react';
|
import { ChevronsUpDown, Folder, Plus, FolderOpen, LogOut } from 'lucide-react';
|
||||||
import * as LucideIcons from 'lucide-react';
|
import * as LucideIcons from 'lucide-react';
|
||||||
@@ -6,7 +6,6 @@ import type { LucideIcon } from 'lucide-react';
|
|||||||
import { cn, isMac } from '@/lib/utils';
|
import { cn, isMac } from '@/lib/utils';
|
||||||
import { formatShortcut } from '@/store/app-store';
|
import { formatShortcut } from '@/store/app-store';
|
||||||
import { isElectron, type Project } from '@/lib/electron';
|
import { isElectron, type Project } from '@/lib/electron';
|
||||||
import { initializeProject } from '@/lib/project-init';
|
|
||||||
import { MACOS_ELECTRON_TOP_PADDING_CLASS } from '../constants';
|
import { MACOS_ELECTRON_TOP_PADDING_CLASS } from '../constants';
|
||||||
import { getAuthenticatedImageUrl } from '@/lib/api-fetch';
|
import { getAuthenticatedImageUrl } from '@/lib/api-fetch';
|
||||||
import { useAppStore } from '@/store/app-store';
|
import { useAppStore } from '@/store/app-store';
|
||||||
@@ -37,8 +36,7 @@ export function SidebarHeader({
|
|||||||
setShowRemoveFromAutomakerDialog,
|
setShowRemoveFromAutomakerDialog,
|
||||||
}: SidebarHeaderProps) {
|
}: SidebarHeaderProps) {
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
const projects = useAppStore((s) => s.projects);
|
const { projects, setCurrentProject } = useAppStore();
|
||||||
const setCurrentProject = useAppStore((s) => s.setCurrentProject);
|
|
||||||
const [dropdownOpen, setDropdownOpen] = useState(false);
|
const [dropdownOpen, setDropdownOpen] = useState(false);
|
||||||
|
|
||||||
const handleLogoClick = useCallback(() => {
|
const handleLogoClick = useCallback(() => {
|
||||||
@@ -46,29 +44,12 @@ export function SidebarHeader({
|
|||||||
}, [navigate]);
|
}, [navigate]);
|
||||||
|
|
||||||
const handleProjectSelect = useCallback(
|
const handleProjectSelect = useCallback(
|
||||||
async (project: Project) => {
|
(project: Project) => {
|
||||||
if (project.id === currentProject?.id) {
|
|
||||||
setDropdownOpen(false);
|
|
||||||
navigate({ to: '/board' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
// Ensure .automaker directory structure exists before switching
|
|
||||||
await initializeProject(project.path);
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to initialize project during switch:', error);
|
|
||||||
// Continue with switch even if initialization fails -
|
|
||||||
// the project may already be initialized
|
|
||||||
}
|
|
||||||
|
|
||||||
// Batch project switch + navigation to prevent multi-render cascades.
|
|
||||||
startTransition(() => {
|
|
||||||
setCurrentProject(project);
|
setCurrentProject(project);
|
||||||
setDropdownOpen(false);
|
setDropdownOpen(false);
|
||||||
navigate({ to: '/board' });
|
navigate({ to: '/board' });
|
||||||
});
|
|
||||||
},
|
},
|
||||||
[currentProject?.id, setCurrentProject, navigate]
|
[setCurrentProject, navigate]
|
||||||
);
|
);
|
||||||
|
|
||||||
const getIconComponent = (project: Project): LucideIcon => {
|
const getIconComponent = (project: Project): LucideIcon => {
|
||||||
|
|||||||
@@ -108,9 +108,7 @@ export function useProjectPicker({
|
|||||||
setIsProjectPickerOpen(false);
|
setIsProjectPickerOpen(false);
|
||||||
} else if (event.key === 'Enter') {
|
} else if (event.key === 'Enter') {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
selectHighlightedProject().catch(() => {
|
selectHighlightedProject();
|
||||||
/* Error already logged upstream */
|
|
||||||
});
|
|
||||||
} else if (event.key === 'ArrowDown') {
|
} else if (event.key === 'ArrowDown') {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
setSelectedProjectIndex((prev) => (prev < filteredProjects.length - 1 ? prev + 1 : prev));
|
setSelectedProjectIndex((prev) => (prev < filteredProjects.length - 1 ? prev + 1 : prev));
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useState, useCallback, useEffect, startTransition } from 'react';
|
import { useState, useCallback, useEffect } from 'react';
|
||||||
import { createLogger } from '@automaker/utils/logger';
|
import { createLogger } from '@automaker/utils/logger';
|
||||||
import { useNavigate, useLocation } from '@tanstack/react-router';
|
import { useNavigate, useLocation } from '@tanstack/react-router';
|
||||||
import { PanelLeftClose, ChevronDown } from 'lucide-react';
|
import { PanelLeftClose, ChevronDown } from 'lucide-react';
|
||||||
@@ -281,27 +281,6 @@ export function Sidebar() {
|
|||||||
// Register keyboard shortcuts
|
// Register keyboard shortcuts
|
||||||
useKeyboardShortcuts(navigationShortcuts);
|
useKeyboardShortcuts(navigationShortcuts);
|
||||||
|
|
||||||
const switchProjectSafely = useCallback(
|
|
||||||
async (targetProject: Project) => {
|
|
||||||
// Ensure .automaker directory structure exists before switching
|
|
||||||
const initResult = await initializeProject(targetProject.path);
|
|
||||||
if (!initResult.success) {
|
|
||||||
logger.error('Failed to initialize project during switch:', initResult.error);
|
|
||||||
toast.warning(
|
|
||||||
`Could not fully initialize project: ${initResult.error ?? 'Unknown error'}. Some features may not work correctly.`
|
|
||||||
);
|
|
||||||
// Continue with switch despite init failure — project may already be partially initialized
|
|
||||||
}
|
|
||||||
|
|
||||||
// Batch project switch + navigation to prevent multi-render cascades.
|
|
||||||
startTransition(() => {
|
|
||||||
setCurrentProject(targetProject);
|
|
||||||
navigate({ to: '/board' });
|
|
||||||
});
|
|
||||||
},
|
|
||||||
[setCurrentProject, navigate]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Keyboard shortcuts for project switching (1-9, 0)
|
// Keyboard shortcuts for project switching (1-9, 0)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const handleKeyDown = (event: KeyboardEvent) => {
|
const handleKeyDown = (event: KeyboardEvent) => {
|
||||||
@@ -326,14 +305,15 @@ export function Sidebar() {
|
|||||||
if (projectIndex !== null && projectIndex < projects.length) {
|
if (projectIndex !== null && projectIndex < projects.length) {
|
||||||
const targetProject = projects[projectIndex];
|
const targetProject = projects[projectIndex];
|
||||||
if (targetProject && targetProject.id !== currentProject?.id) {
|
if (targetProject && targetProject.id !== currentProject?.id) {
|
||||||
void switchProjectSafely(targetProject);
|
setCurrentProject(targetProject);
|
||||||
|
navigate({ to: '/board' });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
window.addEventListener('keydown', handleKeyDown);
|
window.addEventListener('keydown', handleKeyDown);
|
||||||
return () => window.removeEventListener('keydown', handleKeyDown);
|
return () => window.removeEventListener('keydown', handleKeyDown);
|
||||||
}, [projects, currentProject, switchProjectSafely]);
|
}, [projects, currentProject, setCurrentProject, navigate]);
|
||||||
|
|
||||||
const isActiveRoute = (id: string) => {
|
const isActiveRoute = (id: string) => {
|
||||||
const routePath = id === 'welcome' ? '/' : `/${id}`;
|
const routePath = id === 'welcome' ? '/' : `/${id}`;
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ import {
|
|||||||
ArchiveRestore,
|
ArchiveRestore,
|
||||||
} from 'lucide-react';
|
} from 'lucide-react';
|
||||||
import { Spinner } from '@/components/ui/spinner';
|
import { Spinner } from '@/components/ui/spinner';
|
||||||
import { cn, pathsEqual } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import type { SessionListItem } from '@/types/electron';
|
import type { SessionListItem } from '@/types/electron';
|
||||||
import { useKeyboardShortcutsConfig } from '@/hooks/use-keyboard-shortcuts';
|
import { useKeyboardShortcutsConfig } from '@/hooks/use-keyboard-shortcuts';
|
||||||
import { getElectronAPI } from '@/lib/electron';
|
import { getElectronAPI } from '@/lib/electron';
|
||||||
@@ -93,7 +93,6 @@ interface SessionManagerProps {
|
|||||||
currentSessionId: string | null;
|
currentSessionId: string | null;
|
||||||
onSelectSession: (sessionId: string | null) => void;
|
onSelectSession: (sessionId: string | null) => void;
|
||||||
projectPath: string;
|
projectPath: string;
|
||||||
workingDirectory?: string; // Current worktree path for scoping sessions
|
|
||||||
isCurrentSessionThinking?: boolean;
|
isCurrentSessionThinking?: boolean;
|
||||||
onQuickCreateRef?: React.MutableRefObject<(() => Promise<void>) | null>;
|
onQuickCreateRef?: React.MutableRefObject<(() => Promise<void>) | null>;
|
||||||
}
|
}
|
||||||
@@ -102,7 +101,6 @@ export function SessionManager({
|
|||||||
currentSessionId,
|
currentSessionId,
|
||||||
onSelectSession,
|
onSelectSession,
|
||||||
projectPath,
|
projectPath,
|
||||||
workingDirectory,
|
|
||||||
isCurrentSessionThinking = false,
|
isCurrentSessionThinking = false,
|
||||||
onQuickCreateRef,
|
onQuickCreateRef,
|
||||||
}: SessionManagerProps) {
|
}: SessionManagerProps) {
|
||||||
@@ -155,7 +153,6 @@ export function SessionManager({
|
|||||||
if (result.data) {
|
if (result.data) {
|
||||||
await checkRunningSessions(result.data);
|
await checkRunningSessions(result.data);
|
||||||
}
|
}
|
||||||
return result;
|
|
||||||
}, [queryClient, refetchSessions, checkRunningSessions]);
|
}, [queryClient, refetchSessions, checkRunningSessions]);
|
||||||
|
|
||||||
// Check running state on initial load (runs only once when sessions first load)
|
// Check running state on initial load (runs only once when sessions first load)
|
||||||
@@ -180,9 +177,6 @@ export function SessionManager({
|
|||||||
return () => clearInterval(interval);
|
return () => clearInterval(interval);
|
||||||
}, [sessions, runningSessions.size, isCurrentSessionThinking, checkRunningSessions]);
|
}, [sessions, runningSessions.size, isCurrentSessionThinking, checkRunningSessions]);
|
||||||
|
|
||||||
// Effective working directory for session creation (worktree path or project path)
|
|
||||||
const effectiveWorkingDirectory = workingDirectory || projectPath;
|
|
||||||
|
|
||||||
// Create new session with random name
|
// Create new session with random name
|
||||||
const handleCreateSession = async () => {
|
const handleCreateSession = async () => {
|
||||||
const api = getElectronAPI();
|
const api = getElectronAPI();
|
||||||
@@ -190,7 +184,7 @@ export function SessionManager({
|
|||||||
|
|
||||||
const sessionName = newSessionName.trim() || generateRandomSessionName();
|
const sessionName = newSessionName.trim() || generateRandomSessionName();
|
||||||
|
|
||||||
const result = await api.sessions.create(sessionName, projectPath, effectiveWorkingDirectory);
|
const result = await api.sessions.create(sessionName, projectPath, projectPath);
|
||||||
|
|
||||||
if (result.success && result.session?.id) {
|
if (result.success && result.session?.id) {
|
||||||
setNewSessionName('');
|
setNewSessionName('');
|
||||||
@@ -201,19 +195,19 @@ export function SessionManager({
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Create new session directly with a random name (one-click)
|
// Create new session directly with a random name (one-click)
|
||||||
const handleQuickCreateSession = useCallback(async () => {
|
const handleQuickCreateSession = async () => {
|
||||||
const api = getElectronAPI();
|
const api = getElectronAPI();
|
||||||
if (!api?.sessions) return;
|
if (!api?.sessions) return;
|
||||||
|
|
||||||
const sessionName = generateRandomSessionName();
|
const sessionName = generateRandomSessionName();
|
||||||
|
|
||||||
const result = await api.sessions.create(sessionName, projectPath, effectiveWorkingDirectory);
|
const result = await api.sessions.create(sessionName, projectPath, projectPath);
|
||||||
|
|
||||||
if (result.success && result.session?.id) {
|
if (result.success && result.session?.id) {
|
||||||
await invalidateSessions();
|
await invalidateSessions();
|
||||||
onSelectSession(result.session.id);
|
onSelectSession(result.session.id);
|
||||||
}
|
}
|
||||||
}, [effectiveWorkingDirectory, projectPath, invalidateSessions, onSelectSession]);
|
};
|
||||||
|
|
||||||
// Expose the quick create function via ref for keyboard shortcuts
|
// Expose the quick create function via ref for keyboard shortcuts
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -225,7 +219,7 @@ export function SessionManager({
|
|||||||
onQuickCreateRef.current = null;
|
onQuickCreateRef.current = null;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}, [onQuickCreateRef, handleQuickCreateSession]);
|
}, [onQuickCreateRef, projectPath]);
|
||||||
|
|
||||||
// Rename session
|
// Rename session
|
||||||
const handleRenameSession = async (sessionId: string) => {
|
const handleRenameSession = async (sessionId: string) => {
|
||||||
@@ -298,20 +292,12 @@ export function SessionManager({
|
|||||||
|
|
||||||
const result = await api.sessions.delete(sessionId);
|
const result = await api.sessions.delete(sessionId);
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
const refetchResult = await invalidateSessions();
|
await invalidateSessions();
|
||||||
if (currentSessionId === sessionId) {
|
if (currentSessionId === sessionId) {
|
||||||
// Switch to another session using fresh data, excluding the deleted session
|
// Switch to another session or create a new one
|
||||||
// Filter to sessions within the same worktree to avoid jumping to a different worktree
|
const activeSessionsList = sessions.filter((s) => !s.isArchived);
|
||||||
const freshSessions = refetchResult?.data ?? [];
|
|
||||||
const activeSessionsList = freshSessions.filter((s) => {
|
|
||||||
if (s.isArchived || s.id === sessionId) return false;
|
|
||||||
const sessionDir = s.workingDirectory || s.projectPath;
|
|
||||||
return pathsEqual(sessionDir, effectiveWorkingDirectory);
|
|
||||||
});
|
|
||||||
if (activeSessionsList.length > 0) {
|
if (activeSessionsList.length > 0) {
|
||||||
onSelectSession(activeSessionsList[0].id);
|
onSelectSession(activeSessionsList[0].id);
|
||||||
} else {
|
|
||||||
onSelectSession(null);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -332,16 +318,8 @@ export function SessionManager({
|
|||||||
setIsDeleteAllArchivedDialogOpen(false);
|
setIsDeleteAllArchivedDialogOpen(false);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Filter sessions by current working directory (worktree scoping)
|
const activeSessions = sessions.filter((s) => !s.isArchived);
|
||||||
const scopedSessions = sessions.filter((s) => {
|
const archivedSessions = sessions.filter((s) => s.isArchived);
|
||||||
const sessionDir = s.workingDirectory || s.projectPath;
|
|
||||||
// Match sessions whose workingDirectory matches the current effective directory
|
|
||||||
// Use pathsEqual for cross-platform path normalization (trailing slashes, separators)
|
|
||||||
return pathsEqual(sessionDir, effectiveWorkingDirectory);
|
|
||||||
});
|
|
||||||
|
|
||||||
const activeSessions = scopedSessions.filter((s) => !s.isArchived);
|
|
||||||
const archivedSessions = scopedSessions.filter((s) => s.isArchived);
|
|
||||||
const displayedSessions = activeTab === 'active' ? activeSessions : archivedSessions;
|
const displayedSessions = activeTab === 'active' ? activeSessions : archivedSessions;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
|||||||
@@ -29,13 +29,8 @@ export interface UseModelOverrideResult {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Normalize PhaseModelEntry or string to PhaseModelEntry
|
* Normalize PhaseModelEntry or string to PhaseModelEntry
|
||||||
* Handles undefined/null gracefully (e.g., when phaseModels from server settings
|
|
||||||
* is missing a recently-added phase key)
|
|
||||||
*/
|
*/
|
||||||
function normalizeEntry(entry: PhaseModelEntry | string | undefined | null): PhaseModelEntry {
|
function normalizeEntry(entry: PhaseModelEntry | string): PhaseModelEntry {
|
||||||
if (!entry) {
|
|
||||||
return { model: 'claude-sonnet' as ModelId };
|
|
||||||
}
|
|
||||||
if (typeof entry === 'string') {
|
if (typeof entry === 'string') {
|
||||||
return { model: entry as ModelId };
|
return { model: entry as ModelId };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,205 +0,0 @@
|
|||||||
import { Component, type ReactNode, type ErrorInfo } from 'react';
|
|
||||||
import { createLogger } from '@automaker/utils/logger';
|
|
||||||
|
|
||||||
const logger = createLogger('AppErrorBoundary');
|
|
||||||
|
|
||||||
interface Props {
|
|
||||||
children: ReactNode;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface State {
|
|
||||||
hasError: boolean;
|
|
||||||
error: Error | null;
|
|
||||||
isCrashLoop: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Key used to track recent crash timestamps for crash loop detection */
|
|
||||||
const CRASH_TIMESTAMPS_KEY = 'automaker-crash-timestamps';
|
|
||||||
/** Number of crashes within the time window that constitutes a crash loop */
|
|
||||||
const CRASH_LOOP_THRESHOLD = 3;
|
|
||||||
/** Time window in ms for crash loop detection (30 seconds) */
|
|
||||||
const CRASH_LOOP_WINDOW_MS = 30_000;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Root-level error boundary for the entire application.
|
|
||||||
*
|
|
||||||
* Catches uncaught React errors that would otherwise show TanStack Router's
|
|
||||||
* default "Something went wrong!" screen with a raw error message.
|
|
||||||
*
|
|
||||||
* Provides a user-friendly error screen with a reload button to recover.
|
|
||||||
* This is especially important for transient errors during initial app load
|
|
||||||
* (e.g., race conditions during auth/hydration on fresh browser sessions).
|
|
||||||
*
|
|
||||||
* Includes crash loop detection: if the app crashes 3+ times within 30 seconds,
|
|
||||||
* the UI cache is automatically cleared to break loops caused by stale cached
|
|
||||||
* worktree paths or other corrupt persisted state.
|
|
||||||
*/
|
|
||||||
export class AppErrorBoundary extends Component<Props, State> {
|
|
||||||
constructor(props: Props) {
|
|
||||||
super(props);
|
|
||||||
this.state = { hasError: false, error: null, isCrashLoop: false };
|
|
||||||
}
|
|
||||||
|
|
||||||
static getDerivedStateFromError(error: Error): Partial<State> {
|
|
||||||
return { hasError: true, error };
|
|
||||||
}
|
|
||||||
|
|
||||||
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
|
|
||||||
logger.error('Uncaught application error:', {
|
|
||||||
error: error.message,
|
|
||||||
stack: error.stack,
|
|
||||||
componentStack: errorInfo.componentStack,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Track crash timestamps to detect crash loops.
|
|
||||||
// If the app crashes multiple times in quick succession, it's likely due to
|
|
||||||
// stale cached data (e.g., worktree paths that no longer exist on disk).
|
|
||||||
try {
|
|
||||||
const now = Date.now();
|
|
||||||
const raw = sessionStorage.getItem(CRASH_TIMESTAMPS_KEY);
|
|
||||||
const timestamps: number[] = raw ? JSON.parse(raw) : [];
|
|
||||||
timestamps.push(now);
|
|
||||||
// Keep only timestamps within the detection window
|
|
||||||
const recent = timestamps.filter((t) => now - t < CRASH_LOOP_WINDOW_MS);
|
|
||||||
sessionStorage.setItem(CRASH_TIMESTAMPS_KEY, JSON.stringify(recent));
|
|
||||||
|
|
||||||
if (recent.length >= CRASH_LOOP_THRESHOLD) {
|
|
||||||
logger.error(
|
|
||||||
`Crash loop detected (${recent.length} crashes in ${CRASH_LOOP_WINDOW_MS}ms) — clearing UI cache`
|
|
||||||
);
|
|
||||||
// Auto-clear the UI cache to break the loop
|
|
||||||
localStorage.removeItem('automaker-ui-cache');
|
|
||||||
sessionStorage.removeItem(CRASH_TIMESTAMPS_KEY);
|
|
||||||
this.setState({ isCrashLoop: true });
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// Storage may be unavailable — ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
handleReload = () => {
|
|
||||||
window.location.reload();
|
|
||||||
};
|
|
||||||
|
|
||||||
handleClearCacheAndReload = () => {
|
|
||||||
// Clear the UI cache store that persists worktree selections and other UI state.
|
|
||||||
// This breaks crash loops caused by stale worktree paths that no longer exist on disk.
|
|
||||||
try {
|
|
||||||
localStorage.removeItem('automaker-ui-cache');
|
|
||||||
} catch {
|
|
||||||
// localStorage may be unavailable in some contexts
|
|
||||||
}
|
|
||||||
window.location.reload();
|
|
||||||
};
|
|
||||||
|
|
||||||
render() {
|
|
||||||
if (this.state.hasError) {
|
|
||||||
return (
|
|
||||||
<div
|
|
||||||
className="flex h-screen w-full flex-col items-center justify-center gap-6 bg-background p-6 text-foreground"
|
|
||||||
data-testid="app-error-boundary"
|
|
||||||
>
|
|
||||||
{/* Logo matching the app shell in index.html */}
|
|
||||||
<svg
|
|
||||||
className="h-14 w-14 opacity-90"
|
|
||||||
viewBox="0 0 256 256"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
aria-hidden="true"
|
|
||||||
>
|
|
||||||
<rect
|
|
||||||
className="fill-foreground/[0.08]"
|
|
||||||
x="16"
|
|
||||||
y="16"
|
|
||||||
width="224"
|
|
||||||
height="224"
|
|
||||||
rx="56"
|
|
||||||
/>
|
|
||||||
<g
|
|
||||||
className="stroke-foreground/70"
|
|
||||||
fill="none"
|
|
||||||
strokeWidth="20"
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
>
|
|
||||||
<path d="M92 92 L52 128 L92 164" />
|
|
||||||
<path d="M144 72 L116 184" />
|
|
||||||
<path d="M164 92 L204 128 L164 164" />
|
|
||||||
</g>
|
|
||||||
</svg>
|
|
||||||
|
|
||||||
<div className="text-center space-y-2">
|
|
||||||
<h1 className="text-xl font-semibold">Something went wrong</h1>
|
|
||||||
<p className="text-sm text-muted-foreground max-w-md">
|
|
||||||
{this.state.isCrashLoop
|
|
||||||
? 'The application crashed repeatedly, likely due to stale cached data. The cache has been cleared automatically. Reload to continue.'
|
|
||||||
: 'The application encountered an unexpected error. This is usually temporary and can be resolved by reloading the page.'}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex items-center gap-3">
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={this.handleReload}
|
|
||||||
className="inline-flex items-center gap-2 rounded-md border border-border bg-background px-4 py-2 text-sm font-medium text-foreground shadow-sm transition-colors hover:bg-muted focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
className="h-4 w-4"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth="2"
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
aria-hidden="true"
|
|
||||||
>
|
|
||||||
<path d="M21 12a9 9 0 0 0-9-9 9.75 9.75 0 0 0-6.74 2.74L3 8" />
|
|
||||||
<path d="M3 3v5h5" />
|
|
||||||
<path d="M3 12a9 9 0 0 0 9 9 9.75 9.75 0 0 0 6.74-2.74L21 16" />
|
|
||||||
<path d="M16 21h5v-5" />
|
|
||||||
</svg>
|
|
||||||
Reload Page
|
|
||||||
</button>
|
|
||||||
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={this.handleClearCacheAndReload}
|
|
||||||
className="inline-flex items-center gap-2 rounded-md border border-border bg-background px-4 py-2 text-sm font-medium text-muted-foreground shadow-sm transition-colors hover:bg-muted hover:text-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
className="h-4 w-4"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
strokeWidth="2"
|
|
||||||
strokeLinecap="round"
|
|
||||||
strokeLinejoin="round"
|
|
||||||
aria-hidden="true"
|
|
||||||
>
|
|
||||||
<path d="M3 6h18" />
|
|
||||||
<path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6" />
|
|
||||||
<path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2" />
|
|
||||||
</svg>
|
|
||||||
Clear Cache & Reload
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Collapsible technical details for debugging */}
|
|
||||||
{this.state.error && (
|
|
||||||
<details className="text-xs text-muted-foreground max-w-lg w-full">
|
|
||||||
<summary className="cursor-pointer hover:text-foreground text-center">
|
|
||||||
Technical details
|
|
||||||
</summary>
|
|
||||||
<pre className="mt-2 p-3 bg-muted/50 rounded-md text-left overflow-auto max-h-32 border border-border">
|
|
||||||
{this.state.error.stack || this.state.error.message}
|
|
||||||
</pre>
|
|
||||||
</details>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.props.children;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,220 +0,0 @@
|
|||||||
/**
|
|
||||||
* CodeMirror-based unified diff viewer.
|
|
||||||
*
|
|
||||||
* Uses @codemirror/merge's `unifiedMergeView` extension to display a
|
|
||||||
* syntax-highlighted inline diff between the original and modified file content.
|
|
||||||
* The viewer is read-only and collapses unchanged regions.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { useMemo, useRef, useEffect } from 'react';
|
|
||||||
import { EditorView } from '@codemirror/view';
|
|
||||||
import { EditorState, type Extension } from '@codemirror/state';
|
|
||||||
import { HighlightStyle, syntaxHighlighting } from '@codemirror/language';
|
|
||||||
import { tags as t } from '@lezer/highlight';
|
|
||||||
import { unifiedMergeView } from '@codemirror/merge';
|
|
||||||
import { getLanguageExtension } from '@/lib/codemirror-languages';
|
|
||||||
import { reconstructFilesFromDiff } from '@/lib/diff-utils';
|
|
||||||
import { cn } from '@/lib/utils';
|
|
||||||
|
|
||||||
// Reuse the same syntax highlighting from the code editor
|
|
||||||
const syntaxColors = HighlightStyle.define([
|
|
||||||
{ tag: t.keyword, color: 'var(--chart-4, oklch(0.7 0.15 280))' },
|
|
||||||
{ tag: t.string, color: 'var(--chart-1, oklch(0.646 0.222 41.116))' },
|
|
||||||
{ tag: t.number, color: 'var(--chart-3, oklch(0.7 0.15 150))' },
|
|
||||||
{ tag: t.bool, color: 'var(--chart-4, oklch(0.7 0.15 280))' },
|
|
||||||
{ tag: t.null, color: 'var(--chart-4, oklch(0.7 0.15 280))' },
|
|
||||||
{ tag: t.comment, color: 'var(--muted-foreground)', fontStyle: 'italic' },
|
|
||||||
{ tag: t.propertyName, color: 'var(--chart-2, oklch(0.6 0.118 184.704))' },
|
|
||||||
{ tag: t.variableName, color: 'var(--chart-2, oklch(0.6 0.118 184.704))' },
|
|
||||||
{ tag: t.function(t.variableName), color: 'var(--primary)' },
|
|
||||||
{ tag: t.typeName, color: 'var(--chart-5, oklch(0.65 0.2 30))' },
|
|
||||||
{ tag: t.className, color: 'var(--chart-5, oklch(0.65 0.2 30))' },
|
|
||||||
{ tag: t.definition(t.variableName), color: 'var(--chart-2, oklch(0.6 0.118 184.704))' },
|
|
||||||
{ tag: t.operator, color: 'var(--muted-foreground)' },
|
|
||||||
{ tag: t.bracket, color: 'var(--muted-foreground)' },
|
|
||||||
{ tag: t.punctuation, color: 'var(--muted-foreground)' },
|
|
||||||
{ tag: t.attributeName, color: 'var(--chart-5, oklch(0.65 0.2 30))' },
|
|
||||||
{ tag: t.attributeValue, color: 'var(--chart-1, oklch(0.646 0.222 41.116))' },
|
|
||||||
{ tag: t.tagName, color: 'var(--chart-4, oklch(0.7 0.15 280))' },
|
|
||||||
{ tag: t.heading, color: 'var(--foreground)', fontWeight: 'bold' },
|
|
||||||
{ tag: t.emphasis, fontStyle: 'italic' },
|
|
||||||
{ tag: t.strong, fontWeight: 'bold' },
|
|
||||||
{ tag: t.link, color: 'var(--primary)', textDecoration: 'underline' },
|
|
||||||
{ tag: t.content, color: 'var(--foreground)' },
|
|
||||||
{ tag: t.regexp, color: 'var(--chart-1, oklch(0.646 0.222 41.116))' },
|
|
||||||
{ tag: t.meta, color: 'var(--muted-foreground)' },
|
|
||||||
]);
|
|
||||||
|
|
||||||
const diffViewTheme = EditorView.theme(
|
|
||||||
{
|
|
||||||
'&': {
|
|
||||||
fontSize: '12px',
|
|
||||||
fontFamily:
|
|
||||||
'var(--font-mono, ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace)',
|
|
||||||
backgroundColor: 'var(--background)',
|
|
||||||
color: 'var(--foreground)',
|
|
||||||
},
|
|
||||||
'.cm-scroller': {
|
|
||||||
overflow: 'auto',
|
|
||||||
fontFamily:
|
|
||||||
'var(--font-mono, ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace)',
|
|
||||||
},
|
|
||||||
'.cm-content': {
|
|
||||||
padding: '0',
|
|
||||||
minHeight: 'auto',
|
|
||||||
},
|
|
||||||
'.cm-line': {
|
|
||||||
padding: '0 0.5rem',
|
|
||||||
},
|
|
||||||
'&.cm-focused': {
|
|
||||||
outline: 'none',
|
|
||||||
},
|
|
||||||
'.cm-gutters': {
|
|
||||||
backgroundColor: 'transparent',
|
|
||||||
color: 'var(--muted-foreground)',
|
|
||||||
border: 'none',
|
|
||||||
borderRight: '1px solid var(--border)',
|
|
||||||
paddingRight: '0.25rem',
|
|
||||||
},
|
|
||||||
'.cm-lineNumbers .cm-gutterElement': {
|
|
||||||
minWidth: '3rem',
|
|
||||||
textAlign: 'right',
|
|
||||||
paddingRight: '0.5rem',
|
|
||||||
fontSize: '11px',
|
|
||||||
},
|
|
||||||
|
|
||||||
// --- GitHub-style diff colors (dark mode) ---
|
|
||||||
|
|
||||||
// Added/changed lines: green background
|
|
||||||
'&.cm-merge-b .cm-changedLine': {
|
|
||||||
backgroundColor: 'rgba(46, 160, 67, 0.15)',
|
|
||||||
},
|
|
||||||
// Highlighted text within added/changed lines: stronger green
|
|
||||||
'&.cm-merge-b .cm-changedText': {
|
|
||||||
background: 'rgba(46, 160, 67, 0.4)',
|
|
||||||
},
|
|
||||||
|
|
||||||
// Deleted chunk container: red background
|
|
||||||
'.cm-deletedChunk': {
|
|
||||||
backgroundColor: 'rgba(248, 81, 73, 0.1)',
|
|
||||||
paddingLeft: '6px',
|
|
||||||
},
|
|
||||||
// Individual deleted lines within the chunk
|
|
||||||
'.cm-deletedChunk .cm-deletedLine': {
|
|
||||||
backgroundColor: 'rgba(248, 81, 73, 0.15)',
|
|
||||||
},
|
|
||||||
// Highlighted text within deleted lines: stronger red
|
|
||||||
'.cm-deletedChunk .cm-deletedText': {
|
|
||||||
background: 'rgba(248, 81, 73, 0.4)',
|
|
||||||
},
|
|
||||||
// Remove strikethrough from deleted text (GitHub doesn't use it)
|
|
||||||
'.cm-insertedLine, .cm-deletedLine, .cm-deletedLine del': {
|
|
||||||
textDecoration: 'none',
|
|
||||||
},
|
|
||||||
|
|
||||||
// Gutter markers for changed lines (green bar)
|
|
||||||
'&.cm-merge-b .cm-changedLineGutter': {
|
|
||||||
background: '#3fb950',
|
|
||||||
},
|
|
||||||
// Gutter markers for deleted lines (red bar)
|
|
||||||
'.cm-deletedLineGutter': {
|
|
||||||
background: '#f85149',
|
|
||||||
},
|
|
||||||
|
|
||||||
// Collapse button styling
|
|
||||||
'.cm-collapsedLines': {
|
|
||||||
color: 'var(--muted-foreground)',
|
|
||||||
backgroundColor: 'var(--muted)',
|
|
||||||
borderTop: '1px solid var(--border)',
|
|
||||||
borderBottom: '1px solid var(--border)',
|
|
||||||
cursor: 'pointer',
|
|
||||||
padding: '2px 8px',
|
|
||||||
fontSize: '11px',
|
|
||||||
},
|
|
||||||
|
|
||||||
// Selection styling
|
|
||||||
'&.cm-focused .cm-selectionBackground, .cm-selectionBackground, .cm-content ::selection': {
|
|
||||||
backgroundColor: 'oklch(0.55 0.25 265 / 0.3)',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{ dark: true }
|
|
||||||
);
|
|
||||||
|
|
||||||
interface CodeMirrorDiffViewProps {
|
|
||||||
/** The unified diff text for a single file */
|
|
||||||
fileDiff: string;
|
|
||||||
/** File path for language detection */
|
|
||||||
filePath: string;
|
|
||||||
/** Max height of the diff view (CSS value) */
|
|
||||||
maxHeight?: string;
|
|
||||||
className?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function CodeMirrorDiffView({
|
|
||||||
fileDiff,
|
|
||||||
filePath,
|
|
||||||
maxHeight = '400px',
|
|
||||||
className,
|
|
||||||
}: CodeMirrorDiffViewProps) {
|
|
||||||
const containerRef = useRef<HTMLDivElement>(null);
|
|
||||||
const viewRef = useRef<EditorView | null>(null);
|
|
||||||
|
|
||||||
const { oldContent, newContent } = useMemo(() => reconstructFilesFromDiff(fileDiff), [fileDiff]);
|
|
||||||
|
|
||||||
const extensions = useMemo(() => {
|
|
||||||
const exts: Extension[] = [
|
|
||||||
EditorView.darkTheme.of(true),
|
|
||||||
diffViewTheme,
|
|
||||||
syntaxHighlighting(syntaxColors),
|
|
||||||
EditorView.editable.of(false),
|
|
||||||
EditorState.readOnly.of(true),
|
|
||||||
EditorView.lineWrapping,
|
|
||||||
unifiedMergeView({
|
|
||||||
original: oldContent,
|
|
||||||
highlightChanges: true,
|
|
||||||
gutter: true,
|
|
||||||
syntaxHighlightDeletions: true,
|
|
||||||
mergeControls: false,
|
|
||||||
collapseUnchanged: { margin: 3, minSize: 4 },
|
|
||||||
}),
|
|
||||||
];
|
|
||||||
|
|
||||||
const langExt = getLanguageExtension(filePath);
|
|
||||||
if (langExt) {
|
|
||||||
exts.push(langExt);
|
|
||||||
}
|
|
||||||
|
|
||||||
return exts;
|
|
||||||
}, [oldContent, filePath]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (!containerRef.current) return;
|
|
||||||
|
|
||||||
// Clean up previous view
|
|
||||||
if (viewRef.current) {
|
|
||||||
viewRef.current.destroy();
|
|
||||||
viewRef.current = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const state = EditorState.create({
|
|
||||||
doc: newContent,
|
|
||||||
extensions,
|
|
||||||
});
|
|
||||||
|
|
||||||
const view = new EditorView({
|
|
||||||
state,
|
|
||||||
parent: containerRef.current,
|
|
||||||
});
|
|
||||||
|
|
||||||
viewRef.current = view;
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
view.destroy();
|
|
||||||
viewRef.current = null;
|
|
||||||
};
|
|
||||||
}, [newContent, extensions]);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div ref={containerRef} className={cn('overflow-auto', className)} style={{ maxHeight }} />
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -17,13 +17,10 @@ import {
|
|||||||
} from 'lucide-react';
|
} from 'lucide-react';
|
||||||
import { Spinner } from '@/components/ui/spinner';
|
import { Spinner } from '@/components/ui/spinner';
|
||||||
import { TruncatedFilePath } from '@/components/ui/truncated-file-path';
|
import { TruncatedFilePath } from '@/components/ui/truncated-file-path';
|
||||||
import { CodeMirrorDiffView } from '@/components/ui/codemirror-diff-view';
|
|
||||||
import { Button } from './button';
|
import { Button } from './button';
|
||||||
import { useWorktreeDiffs, useGitDiffs } from '@/hooks/queries';
|
import { useWorktreeDiffs, useGitDiffs } from '@/hooks/queries';
|
||||||
import { getElectronAPI } from '@/lib/electron';
|
import { getElectronAPI } from '@/lib/electron';
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import { parseDiff, splitDiffByFile } from '@/lib/diff-utils';
|
|
||||||
import type { ParsedFileDiff } from '@/lib/diff-utils';
|
|
||||||
import type { FileStatus, MergeStateInfo } from '@/types/electron';
|
import type { FileStatus, MergeStateInfo } from '@/types/electron';
|
||||||
|
|
||||||
interface GitDiffPanelProps {
|
interface GitDiffPanelProps {
|
||||||
@@ -40,6 +37,23 @@ interface GitDiffPanelProps {
|
|||||||
worktreePath?: string;
|
worktreePath?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ParsedDiffHunk {
|
||||||
|
header: string;
|
||||||
|
lines: {
|
||||||
|
type: 'context' | 'addition' | 'deletion' | 'header';
|
||||||
|
content: string;
|
||||||
|
lineNumber?: { old?: number; new?: number };
|
||||||
|
}[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ParsedFileDiff {
|
||||||
|
filePath: string;
|
||||||
|
hunks: ParsedDiffHunk[];
|
||||||
|
isNew?: boolean;
|
||||||
|
isDeleted?: boolean;
|
||||||
|
isRenamed?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
const getFileIcon = (status: string) => {
|
const getFileIcon = (status: string) => {
|
||||||
switch (status) {
|
switch (status) {
|
||||||
case 'A':
|
case 'A':
|
||||||
@@ -115,6 +129,174 @@ function getStagingState(file: FileStatus): 'staged' | 'unstaged' | 'partial' {
|
|||||||
return 'unstaged';
|
return 'unstaged';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse unified diff format into structured data
|
||||||
|
*/
|
||||||
|
function parseDiff(diffText: string): ParsedFileDiff[] {
|
||||||
|
if (!diffText) return [];
|
||||||
|
|
||||||
|
const files: ParsedFileDiff[] = [];
|
||||||
|
const lines = diffText.split('\n');
|
||||||
|
let currentFile: ParsedFileDiff | null = null;
|
||||||
|
let currentHunk: ParsedDiffHunk | null = null;
|
||||||
|
let oldLineNum = 0;
|
||||||
|
let newLineNum = 0;
|
||||||
|
|
||||||
|
for (let i = 0; i < lines.length; i++) {
|
||||||
|
const line = lines[i];
|
||||||
|
|
||||||
|
// New file diff
|
||||||
|
if (line.startsWith('diff --git')) {
|
||||||
|
if (currentFile) {
|
||||||
|
if (currentHunk) {
|
||||||
|
currentFile.hunks.push(currentHunk);
|
||||||
|
}
|
||||||
|
files.push(currentFile);
|
||||||
|
}
|
||||||
|
// Extract file path from diff header
|
||||||
|
const match = line.match(/diff --git a\/(.*?) b\/(.*)/);
|
||||||
|
currentFile = {
|
||||||
|
filePath: match ? match[2] : 'unknown',
|
||||||
|
hunks: [],
|
||||||
|
};
|
||||||
|
currentHunk = null;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// New file indicator
|
||||||
|
if (line.startsWith('new file mode')) {
|
||||||
|
if (currentFile) currentFile.isNew = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deleted file indicator
|
||||||
|
if (line.startsWith('deleted file mode')) {
|
||||||
|
if (currentFile) currentFile.isDeleted = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Renamed file indicator
|
||||||
|
if (line.startsWith('rename from') || line.startsWith('rename to')) {
|
||||||
|
if (currentFile) currentFile.isRenamed = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip index, ---/+++ lines
|
||||||
|
if (line.startsWith('index ') || line.startsWith('--- ') || line.startsWith('+++ ')) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hunk header
|
||||||
|
if (line.startsWith('@@')) {
|
||||||
|
if (currentHunk && currentFile) {
|
||||||
|
currentFile.hunks.push(currentHunk);
|
||||||
|
}
|
||||||
|
// Parse line numbers from @@ -old,count +new,count @@
|
||||||
|
const hunkMatch = line.match(/@@ -(\d+)(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
|
||||||
|
oldLineNum = hunkMatch ? parseInt(hunkMatch[1], 10) : 1;
|
||||||
|
newLineNum = hunkMatch ? parseInt(hunkMatch[2], 10) : 1;
|
||||||
|
currentHunk = {
|
||||||
|
header: line,
|
||||||
|
lines: [{ type: 'header', content: line }],
|
||||||
|
};
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Diff content lines
|
||||||
|
if (currentHunk) {
|
||||||
|
if (line.startsWith('+')) {
|
||||||
|
currentHunk.lines.push({
|
||||||
|
type: 'addition',
|
||||||
|
content: line.substring(1),
|
||||||
|
lineNumber: { new: newLineNum },
|
||||||
|
});
|
||||||
|
newLineNum++;
|
||||||
|
} else if (line.startsWith('-')) {
|
||||||
|
currentHunk.lines.push({
|
||||||
|
type: 'deletion',
|
||||||
|
content: line.substring(1),
|
||||||
|
lineNumber: { old: oldLineNum },
|
||||||
|
});
|
||||||
|
oldLineNum++;
|
||||||
|
} else if (line.startsWith(' ') || line === '') {
|
||||||
|
currentHunk.lines.push({
|
||||||
|
type: 'context',
|
||||||
|
content: line.substring(1) || '',
|
||||||
|
lineNumber: { old: oldLineNum, new: newLineNum },
|
||||||
|
});
|
||||||
|
oldLineNum++;
|
||||||
|
newLineNum++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Don't forget the last file and hunk
|
||||||
|
if (currentFile) {
|
||||||
|
if (currentHunk) {
|
||||||
|
currentFile.hunks.push(currentHunk);
|
||||||
|
}
|
||||||
|
files.push(currentFile);
|
||||||
|
}
|
||||||
|
|
||||||
|
return files;
|
||||||
|
}
|
||||||
|
|
||||||
|
function DiffLine({
|
||||||
|
type,
|
||||||
|
content,
|
||||||
|
lineNumber,
|
||||||
|
}: {
|
||||||
|
type: 'context' | 'addition' | 'deletion' | 'header';
|
||||||
|
content: string;
|
||||||
|
lineNumber?: { old?: number; new?: number };
|
||||||
|
}) {
|
||||||
|
const bgClass = {
|
||||||
|
context: 'bg-transparent',
|
||||||
|
addition: 'bg-green-500/10',
|
||||||
|
deletion: 'bg-red-500/10',
|
||||||
|
header: 'bg-blue-500/10',
|
||||||
|
};
|
||||||
|
|
||||||
|
const textClass = {
|
||||||
|
context: 'text-foreground-secondary',
|
||||||
|
addition: 'text-green-400',
|
||||||
|
deletion: 'text-red-400',
|
||||||
|
header: 'text-blue-400',
|
||||||
|
};
|
||||||
|
|
||||||
|
const prefix = {
|
||||||
|
context: ' ',
|
||||||
|
addition: '+',
|
||||||
|
deletion: '-',
|
||||||
|
header: '',
|
||||||
|
};
|
||||||
|
|
||||||
|
if (type === 'header') {
|
||||||
|
return (
|
||||||
|
<div className={cn('px-2 py-1 font-mono text-xs', bgClass[type], textClass[type])}>
|
||||||
|
{content}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={cn('flex font-mono text-xs', bgClass[type])}>
|
||||||
|
<span className="w-12 flex-shrink-0 text-right pr-2 text-muted-foreground select-none border-r border-border-glass">
|
||||||
|
{lineNumber?.old ?? ''}
|
||||||
|
</span>
|
||||||
|
<span className="w-12 flex-shrink-0 text-right pr-2 text-muted-foreground select-none border-r border-border-glass">
|
||||||
|
{lineNumber?.new ?? ''}
|
||||||
|
</span>
|
||||||
|
<span className={cn('w-4 flex-shrink-0 text-center select-none', textClass[type])}>
|
||||||
|
{prefix[type]}
|
||||||
|
</span>
|
||||||
|
<span className={cn('flex-1 px-2 whitespace-pre-wrap break-all', textClass[type])}>
|
||||||
|
{content || '\u00A0'}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
function StagingBadge({ state }: { state: 'staged' | 'unstaged' | 'partial' }) {
|
function StagingBadge({ state }: { state: 'staged' | 'unstaged' | 'partial' }) {
|
||||||
if (state === 'staged') {
|
if (state === 'staged') {
|
||||||
return (
|
return (
|
||||||
@@ -219,7 +401,6 @@ function MergeStateBanner({ mergeState }: { mergeState: MergeStateInfo }) {
|
|||||||
|
|
||||||
function FileDiffSection({
|
function FileDiffSection({
|
||||||
fileDiff,
|
fileDiff,
|
||||||
rawDiff,
|
|
||||||
isExpanded,
|
isExpanded,
|
||||||
onToggle,
|
onToggle,
|
||||||
fileStatus,
|
fileStatus,
|
||||||
@@ -229,8 +410,6 @@ function FileDiffSection({
|
|||||||
isStagingFile,
|
isStagingFile,
|
||||||
}: {
|
}: {
|
||||||
fileDiff: ParsedFileDiff;
|
fileDiff: ParsedFileDiff;
|
||||||
/** Raw unified diff string for this file, used by CodeMirror merge view */
|
|
||||||
rawDiff?: string;
|
|
||||||
isExpanded: boolean;
|
isExpanded: boolean;
|
||||||
onToggle: () => void;
|
onToggle: () => void;
|
||||||
fileStatus?: FileStatus;
|
fileStatus?: FileStatus;
|
||||||
@@ -239,8 +418,14 @@ function FileDiffSection({
|
|||||||
onUnstage?: (filePath: string) => void;
|
onUnstage?: (filePath: string) => void;
|
||||||
isStagingFile?: boolean;
|
isStagingFile?: boolean;
|
||||||
}) {
|
}) {
|
||||||
const additions = fileDiff.additions;
|
const additions = fileDiff.hunks.reduce(
|
||||||
const deletions = fileDiff.deletions;
|
(acc, hunk) => acc + hunk.lines.filter((l) => l.type === 'addition').length,
|
||||||
|
0
|
||||||
|
);
|
||||||
|
const deletions = fileDiff.hunks.reduce(
|
||||||
|
(acc, hunk) => acc + hunk.lines.filter((l) => l.type === 'deletion').length,
|
||||||
|
0
|
||||||
|
);
|
||||||
|
|
||||||
const stagingState = fileStatus ? getStagingState(fileStatus) : undefined;
|
const stagingState = fileStatus ? getStagingState(fileStatus) : undefined;
|
||||||
|
|
||||||
@@ -336,9 +521,20 @@ function FileDiffSection({
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{isExpanded && rawDiff && (
|
{isExpanded && (
|
||||||
<div className="bg-background border-t border-border">
|
<div className="bg-background border-t border-border max-h-[400px] overflow-y-auto scrollbar-visible">
|
||||||
<CodeMirrorDiffView fileDiff={rawDiff} filePath={fileDiff.filePath} maxHeight="400px" />
|
{fileDiff.hunks.map((hunk, hunkIndex) => (
|
||||||
|
<div key={hunkIndex} className="border-b border-border-glass last:border-b-0">
|
||||||
|
{hunk.lines.map((line, lineIndex) => (
|
||||||
|
<DiffLine
|
||||||
|
key={lineIndex}
|
||||||
|
type={line.type}
|
||||||
|
content={line.content}
|
||||||
|
lineNumber={line.lineNumber}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
@@ -423,16 +619,6 @@ export function GitDiffPanel({
|
|||||||
return diffs;
|
return diffs;
|
||||||
}, [diffContent, mergeState, fileStatusMap]);
|
}, [diffContent, mergeState, fileStatusMap]);
|
||||||
|
|
||||||
// Build a map from file path to raw diff string for CodeMirror merge view
|
|
||||||
const fileDiffMap = useMemo(() => {
|
|
||||||
const map = new Map<string, string>();
|
|
||||||
const perFileDiffs = splitDiffByFile(diffContent);
|
|
||||||
for (const entry of perFileDiffs) {
|
|
||||||
map.set(entry.filePath, entry.diff);
|
|
||||||
}
|
|
||||||
return map;
|
|
||||||
}, [diffContent]);
|
|
||||||
|
|
||||||
const toggleFile = (filePath: string) => {
|
const toggleFile = (filePath: string) => {
|
||||||
setExpandedFiles((prev) => {
|
setExpandedFiles((prev) => {
|
||||||
const next = new Set(prev);
|
const next = new Set(prev);
|
||||||
@@ -636,9 +822,25 @@ export function GitDiffPanel({
|
|||||||
return { staged, partial, unstaged, total: files.length };
|
return { staged, partial, unstaged, total: files.length };
|
||||||
}, [enableStaging, files]);
|
}, [enableStaging, files]);
|
||||||
|
|
||||||
// Total stats (pre-computed by shared parseDiff)
|
// Total stats
|
||||||
const totalAdditions = parsedDiffs.reduce((acc, file) => acc + file.additions, 0);
|
const totalAdditions = parsedDiffs.reduce(
|
||||||
const totalDeletions = parsedDiffs.reduce((acc, file) => acc + file.deletions, 0);
|
(acc, file) =>
|
||||||
|
acc +
|
||||||
|
file.hunks.reduce(
|
||||||
|
(hAcc, hunk) => hAcc + hunk.lines.filter((l) => l.type === 'addition').length,
|
||||||
|
0
|
||||||
|
),
|
||||||
|
0
|
||||||
|
);
|
||||||
|
const totalDeletions = parsedDiffs.reduce(
|
||||||
|
(acc, file) =>
|
||||||
|
acc +
|
||||||
|
file.hunks.reduce(
|
||||||
|
(hAcc, hunk) => hAcc + hunk.lines.filter((l) => l.type === 'deletion').length,
|
||||||
|
0
|
||||||
|
),
|
||||||
|
0
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
@@ -851,7 +1053,6 @@ export function GitDiffPanel({
|
|||||||
<FileDiffSection
|
<FileDiffSection
|
||||||
key={fileDiff.filePath}
|
key={fileDiff.filePath}
|
||||||
fileDiff={fileDiff}
|
fileDiff={fileDiff}
|
||||||
rawDiff={fileDiffMap.get(fileDiff.filePath)}
|
|
||||||
isExpanded={expandedFiles.has(fileDiff.filePath)}
|
isExpanded={expandedFiles.has(fileDiff.filePath)}
|
||||||
onToggle={() => toggleFile(fileDiff.filePath)}
|
onToggle={() => toggleFile(fileDiff.filePath)}
|
||||||
fileStatus={fileStatusMap.get(fileDiff.filePath)}
|
fileStatus={fileStatusMap.get(fileDiff.filePath)}
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import { useState, useCallback, useRef, useEffect } from 'react';
|
import { useState, useCallback, useRef, useEffect } from 'react';
|
||||||
import { useAppStore } from '@/store/app-store';
|
import { useAppStore } from '@/store/app-store';
|
||||||
|
import type { PhaseModelEntry } from '@automaker/types';
|
||||||
import { useElectronAgent } from '@/hooks/use-electron-agent';
|
import { useElectronAgent } from '@/hooks/use-electron-agent';
|
||||||
import { SessionManager } from '@/components/session-manager';
|
import { SessionManager } from '@/components/session-manager';
|
||||||
|
|
||||||
@@ -19,13 +20,9 @@ import { AgentInputArea } from './agent-view/input-area';
|
|||||||
const LG_BREAKPOINT = 1024;
|
const LG_BREAKPOINT = 1024;
|
||||||
|
|
||||||
export function AgentView() {
|
export function AgentView() {
|
||||||
const { currentProject, getCurrentWorktree } = useAppStore();
|
const { currentProject } = useAppStore();
|
||||||
const [input, setInput] = useState('');
|
const [input, setInput] = useState('');
|
||||||
const [currentTool, setCurrentTool] = useState<string | null>(null);
|
const [currentTool, setCurrentTool] = useState<string | null>(null);
|
||||||
|
|
||||||
// Get the current worktree to scope sessions and agent working directory
|
|
||||||
const currentWorktree = currentProject ? getCurrentWorktree(currentProject.path) : null;
|
|
||||||
const effectiveWorkingDirectory = currentWorktree?.path || currentProject?.path;
|
|
||||||
// Initialize session manager state - starts as true to match SSR
|
// Initialize session manager state - starts as true to match SSR
|
||||||
// Then updates on mount based on actual screen size to prevent hydration mismatch
|
// Then updates on mount based on actual screen size to prevent hydration mismatch
|
||||||
const [showSessionManager, setShowSessionManager] = useState(true);
|
const [showSessionManager, setShowSessionManager] = useState(true);
|
||||||
@@ -45,6 +42,8 @@ export function AgentView() {
|
|||||||
return () => window.removeEventListener('resize', updateVisibility);
|
return () => window.removeEventListener('resize', updateVisibility);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
|
const [modelSelection, setModelSelection] = useState<PhaseModelEntry>({ model: 'claude-sonnet' });
|
||||||
|
|
||||||
// Input ref for auto-focus
|
// Input ref for auto-focus
|
||||||
const inputRef = useRef<HTMLTextAreaElement>(null);
|
const inputRef = useRef<HTMLTextAreaElement>(null);
|
||||||
|
|
||||||
@@ -53,12 +52,9 @@ export function AgentView() {
|
|||||||
// Guard to prevent concurrent invocations of handleCreateSessionFromEmptyState
|
// Guard to prevent concurrent invocations of handleCreateSessionFromEmptyState
|
||||||
const createSessionInFlightRef = useRef(false);
|
const createSessionInFlightRef = useRef(false);
|
||||||
|
|
||||||
// Session management hook - scoped to current worktree
|
// Session management hook
|
||||||
// Also handles model selection persistence per session
|
const { currentSessionId, handleSelectSession } = useAgentSession({
|
||||||
const { currentSessionId, handleSelectSession, modelSelection, setModelSelection } =
|
|
||||||
useAgentSession({
|
|
||||||
projectPath: currentProject?.path,
|
projectPath: currentProject?.path,
|
||||||
workingDirectory: effectiveWorkingDirectory,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Use the Electron agent hook (only if we have a session)
|
// Use the Electron agent hook (only if we have a session)
|
||||||
@@ -75,7 +71,7 @@ export function AgentView() {
|
|||||||
clearServerQueue,
|
clearServerQueue,
|
||||||
} = useElectronAgent({
|
} = useElectronAgent({
|
||||||
sessionId: currentSessionId || '',
|
sessionId: currentSessionId || '',
|
||||||
workingDirectory: effectiveWorkingDirectory,
|
workingDirectory: currentProject?.path,
|
||||||
model: modelSelection.model,
|
model: modelSelection.model,
|
||||||
thinkingLevel: modelSelection.thinkingLevel,
|
thinkingLevel: modelSelection.thinkingLevel,
|
||||||
onToolUse: (toolName) => {
|
onToolUse: (toolName) => {
|
||||||
@@ -233,7 +229,6 @@ export function AgentView() {
|
|||||||
currentSessionId={currentSessionId}
|
currentSessionId={currentSessionId}
|
||||||
onSelectSession={handleSelectSession}
|
onSelectSession={handleSelectSession}
|
||||||
projectPath={currentProject.path}
|
projectPath={currentProject.path}
|
||||||
workingDirectory={effectiveWorkingDirectory}
|
|
||||||
isCurrentSessionThinking={isProcessing}
|
isCurrentSessionThinking={isProcessing}
|
||||||
onQuickCreateRef={quickCreateSessionRef}
|
onQuickCreateRef={quickCreateSessionRef}
|
||||||
/>
|
/>
|
||||||
@@ -253,7 +248,6 @@ export function AgentView() {
|
|||||||
showSessionManager={showSessionManager}
|
showSessionManager={showSessionManager}
|
||||||
onToggleSessionManager={() => setShowSessionManager(!showSessionManager)}
|
onToggleSessionManager={() => setShowSessionManager(!showSessionManager)}
|
||||||
onClearChat={handleClearChat}
|
onClearChat={handleClearChat}
|
||||||
worktreeBranch={currentWorktree?.branch}
|
|
||||||
/>
|
/>
|
||||||
|
|
||||||
{/* Messages */}
|
{/* Messages */}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { Bot, PanelLeftClose, PanelLeft, Wrench, Trash2, GitBranch } from 'lucide-react';
|
import { Bot, PanelLeftClose, PanelLeft, Wrench, Trash2 } from 'lucide-react';
|
||||||
import { Button } from '@/components/ui/button';
|
import { Button } from '@/components/ui/button';
|
||||||
|
|
||||||
interface AgentHeaderProps {
|
interface AgentHeaderProps {
|
||||||
@@ -11,7 +11,6 @@ interface AgentHeaderProps {
|
|||||||
showSessionManager: boolean;
|
showSessionManager: boolean;
|
||||||
onToggleSessionManager: () => void;
|
onToggleSessionManager: () => void;
|
||||||
onClearChat: () => void;
|
onClearChat: () => void;
|
||||||
worktreeBranch?: string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function AgentHeader({
|
export function AgentHeader({
|
||||||
@@ -24,7 +23,6 @@ export function AgentHeader({
|
|||||||
showSessionManager,
|
showSessionManager,
|
||||||
onToggleSessionManager,
|
onToggleSessionManager,
|
||||||
onClearChat,
|
onClearChat,
|
||||||
worktreeBranch,
|
|
||||||
}: AgentHeaderProps) {
|
}: AgentHeaderProps) {
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center justify-between px-6 py-4 border-b border-border bg-card/50 backdrop-blur-sm">
|
<div className="flex items-center justify-between px-6 py-4 border-b border-border bg-card/50 backdrop-blur-sm">
|
||||||
@@ -34,18 +32,10 @@ export function AgentHeader({
|
|||||||
</div>
|
</div>
|
||||||
<div>
|
<div>
|
||||||
<h1 className="text-lg font-semibold text-foreground">AI Agent</h1>
|
<h1 className="text-lg font-semibold text-foreground">AI Agent</h1>
|
||||||
<div className="flex items-center gap-2 text-sm text-muted-foreground">
|
<p className="text-sm text-muted-foreground">
|
||||||
<span>
|
|
||||||
{projectName}
|
{projectName}
|
||||||
{currentSessionId && !isConnected && ' - Connecting...'}
|
{currentSessionId && !isConnected && ' - Connecting...'}
|
||||||
</span>
|
</p>
|
||||||
{worktreeBranch && (
|
|
||||||
<span className="inline-flex items-center gap-1 text-xs bg-muted/50 px-2 py-0.5 rounded-full border border-border">
|
|
||||||
<GitBranch className="w-3 h-3 shrink-0" />
|
|
||||||
<span className="max-w-[180px] truncate">{worktreeBranch}</span>
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|||||||
@@ -1,144 +1,64 @@
|
|||||||
import { useState, useCallback, useEffect, useRef } from 'react';
|
import { useState, useCallback, useEffect, useRef } from 'react';
|
||||||
import { createLogger } from '@automaker/utils/logger';
|
import { createLogger } from '@automaker/utils/logger';
|
||||||
import type { PhaseModelEntry } from '@automaker/types';
|
|
||||||
import { useAppStore } from '@/store/app-store';
|
import { useAppStore } from '@/store/app-store';
|
||||||
import { useShallow } from 'zustand/react/shallow';
|
|
||||||
|
|
||||||
const logger = createLogger('AgentSession');
|
const logger = createLogger('AgentSession');
|
||||||
|
|
||||||
// Default model selection when none is persisted
|
|
||||||
const DEFAULT_MODEL_SELECTION: PhaseModelEntry = { model: 'claude-sonnet' };
|
|
||||||
|
|
||||||
interface UseAgentSessionOptions {
|
interface UseAgentSessionOptions {
|
||||||
projectPath: string | undefined;
|
projectPath: string | undefined;
|
||||||
workingDirectory?: string; // Current worktree path for per-worktree session persistence
|
|
||||||
}
|
}
|
||||||
|
|
||||||
interface UseAgentSessionResult {
|
interface UseAgentSessionResult {
|
||||||
currentSessionId: string | null;
|
currentSessionId: string | null;
|
||||||
handleSelectSession: (sessionId: string | null) => void;
|
handleSelectSession: (sessionId: string | null) => void;
|
||||||
// Model selection persistence
|
|
||||||
modelSelection: PhaseModelEntry;
|
|
||||||
setModelSelection: (model: PhaseModelEntry) => void;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function useAgentSession({
|
export function useAgentSession({ projectPath }: UseAgentSessionOptions): UseAgentSessionResult {
|
||||||
projectPath,
|
const { setLastSelectedSession, getLastSelectedSession } = useAppStore();
|
||||||
workingDirectory,
|
|
||||||
}: UseAgentSessionOptions): UseAgentSessionResult {
|
|
||||||
const {
|
|
||||||
setLastSelectedSession,
|
|
||||||
getLastSelectedSession,
|
|
||||||
setAgentModelForSession,
|
|
||||||
getAgentModelForSession,
|
|
||||||
} = useAppStore(
|
|
||||||
useShallow((state) => ({
|
|
||||||
setLastSelectedSession: state.setLastSelectedSession,
|
|
||||||
getLastSelectedSession: state.getLastSelectedSession,
|
|
||||||
setAgentModelForSession: state.setAgentModelForSession,
|
|
||||||
getAgentModelForSession: state.getAgentModelForSession,
|
|
||||||
}))
|
|
||||||
);
|
|
||||||
const [currentSessionId, setCurrentSessionId] = useState<string | null>(null);
|
const [currentSessionId, setCurrentSessionId] = useState<string | null>(null);
|
||||||
const [modelSelection, setModelSelectionState] =
|
|
||||||
useState<PhaseModelEntry>(DEFAULT_MODEL_SELECTION);
|
|
||||||
|
|
||||||
// Track if initial session has been loaded
|
// Track if initial session has been loaded
|
||||||
const initialSessionLoadedRef = useRef(false);
|
const initialSessionLoadedRef = useRef(false);
|
||||||
|
|
||||||
// Use workingDirectory as the persistence key so sessions are scoped per worktree
|
|
||||||
const persistenceKey = workingDirectory || projectPath;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch persisted model for sessionId and update local state, or fall back to default.
|
|
||||||
*/
|
|
||||||
const restoreModelForSession = useCallback(
|
|
||||||
(sessionId: string) => {
|
|
||||||
const persistedModel = getAgentModelForSession(sessionId);
|
|
||||||
if (persistedModel) {
|
|
||||||
logger.debug('Restoring model selection for session:', sessionId, persistedModel);
|
|
||||||
setModelSelectionState(persistedModel);
|
|
||||||
} else {
|
|
||||||
setModelSelectionState(DEFAULT_MODEL_SELECTION);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[getAgentModelForSession]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Handle session selection with persistence
|
// Handle session selection with persistence
|
||||||
const handleSelectSession = useCallback(
|
const handleSelectSession = useCallback(
|
||||||
(sessionId: string | null) => {
|
(sessionId: string | null) => {
|
||||||
setCurrentSessionId(sessionId);
|
setCurrentSessionId(sessionId);
|
||||||
// Persist the selection for this worktree/project
|
// Persist the selection for this project
|
||||||
if (persistenceKey) {
|
if (projectPath) {
|
||||||
setLastSelectedSession(persistenceKey, sessionId);
|
setLastSelectedSession(projectPath, sessionId);
|
||||||
}
|
|
||||||
// Restore model selection for this session if available
|
|
||||||
if (sessionId) {
|
|
||||||
restoreModelForSession(sessionId);
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[persistenceKey, setLastSelectedSession, restoreModelForSession]
|
[projectPath, setLastSelectedSession]
|
||||||
);
|
);
|
||||||
|
|
||||||
// Wrapper for setModelSelection that also persists
|
// Restore last selected session when switching to Agent view or when project changes
|
||||||
const setModelSelection = useCallback(
|
|
||||||
(model: PhaseModelEntry) => {
|
|
||||||
setModelSelectionState(model);
|
|
||||||
// Persist model selection for current session.
|
|
||||||
// If currentSessionId is null (no active session), we only update local state
|
|
||||||
// and skip persistence — this is intentional because the model picker should be
|
|
||||||
// disabled (or hidden) in the UI whenever there is no active session, so this
|
|
||||||
// path is only reached if the UI allows selection before a session is established.
|
|
||||||
if (currentSessionId) {
|
|
||||||
setAgentModelForSession(currentSessionId, model);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[currentSessionId, setAgentModelForSession]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Track the previous persistence key to detect actual changes
|
|
||||||
const prevPersistenceKeyRef = useRef(persistenceKey);
|
|
||||||
|
|
||||||
// Restore last selected session when switching to Agent view or when worktree changes
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Detect if persistenceKey actually changed (worktree/project switch)
|
if (!projectPath) {
|
||||||
const persistenceKeyChanged = prevPersistenceKeyRef.current !== persistenceKey;
|
// No project, reset
|
||||||
|
|
||||||
if (persistenceKeyChanged) {
|
|
||||||
// Reset state when switching worktree/project
|
|
||||||
prevPersistenceKeyRef.current = persistenceKey;
|
|
||||||
initialSessionLoadedRef.current = false;
|
|
||||||
setCurrentSessionId(null);
|
setCurrentSessionId(null);
|
||||||
setModelSelectionState(DEFAULT_MODEL_SELECTION);
|
initialSessionLoadedRef.current = false;
|
||||||
|
|
||||||
if (!persistenceKey) {
|
|
||||||
// No project, nothing to restore
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!persistenceKey) {
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Only restore once per persistence key
|
// Only restore once per project
|
||||||
if (initialSessionLoadedRef.current) return;
|
if (initialSessionLoadedRef.current) return;
|
||||||
initialSessionLoadedRef.current = true;
|
initialSessionLoadedRef.current = true;
|
||||||
|
|
||||||
const lastSessionId = getLastSelectedSession(persistenceKey);
|
const lastSessionId = getLastSelectedSession(projectPath);
|
||||||
if (lastSessionId) {
|
if (lastSessionId) {
|
||||||
logger.debug('Restoring last selected session:', lastSessionId);
|
logger.info('Restoring last selected session:', lastSessionId);
|
||||||
setCurrentSessionId(lastSessionId);
|
setCurrentSessionId(lastSessionId);
|
||||||
// Also restore model selection for this session
|
|
||||||
restoreModelForSession(lastSessionId);
|
|
||||||
}
|
}
|
||||||
}, [persistenceKey, getLastSelectedSession, restoreModelForSession]);
|
}, [projectPath, getLastSelectedSession]);
|
||||||
|
|
||||||
|
// Reset initialSessionLoadedRef when project changes
|
||||||
|
useEffect(() => {
|
||||||
|
initialSessionLoadedRef.current = false;
|
||||||
|
}, [projectPath]);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
currentSessionId,
|
currentSessionId,
|
||||||
handleSelectSession,
|
handleSelectSession,
|
||||||
modelSelection,
|
|
||||||
setModelSelection,
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -27,21 +27,17 @@ class DialogAwarePointerSensor extends PointerSensor {
|
|||||||
},
|
},
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
import { useAppStore, Feature, type ModelAlias, type ThinkingLevel } from '@/store/app-store';
|
import { useAppStore, Feature } from '@/store/app-store';
|
||||||
import { getElectronAPI } from '@/lib/electron';
|
import { getElectronAPI } from '@/lib/electron';
|
||||||
import { getHttpApiClient } from '@/lib/http-api-client';
|
import { getHttpApiClient } from '@/lib/http-api-client';
|
||||||
import type {
|
import type { BacklogPlanResult, FeatureStatusWithPipeline } from '@automaker/types';
|
||||||
BacklogPlanResult,
|
|
||||||
FeatureStatusWithPipeline,
|
|
||||||
FeatureTemplate,
|
|
||||||
} from '@automaker/types';
|
|
||||||
import { pathsEqual } from '@/lib/utils';
|
import { pathsEqual } from '@/lib/utils';
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
|
import { BoardBackgroundModal } from '@/components/dialogs/board-background-modal';
|
||||||
import {
|
import {
|
||||||
BoardBackgroundModal,
|
|
||||||
PRCommentResolutionDialog,
|
PRCommentResolutionDialog,
|
||||||
type PRCommentResolutionPRInfo,
|
type PRCommentResolutionPRInfo,
|
||||||
} from '@/components/dialogs';
|
} from '@/components/dialogs/pr-comment-resolution-dialog';
|
||||||
import { useShallow } from 'zustand/react/shallow';
|
import { useShallow } from 'zustand/react/shallow';
|
||||||
import { useAutoMode } from '@/hooks/use-auto-mode';
|
import { useAutoMode } from '@/hooks/use-auto-mode';
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
import { resolveModelString } from '@automaker/model-resolver';
|
||||||
@@ -62,8 +58,6 @@ import {
|
|||||||
FollowUpDialog,
|
FollowUpDialog,
|
||||||
PlanApprovalDialog,
|
PlanApprovalDialog,
|
||||||
MergeRebaseDialog,
|
MergeRebaseDialog,
|
||||||
QuickAddDialog,
|
|
||||||
ChangePRNumberDialog,
|
|
||||||
} from './board-view/dialogs';
|
} from './board-view/dialogs';
|
||||||
import type { DependencyLinkType } from './board-view/dialogs';
|
import type { DependencyLinkType } from './board-view/dialogs';
|
||||||
import { PipelineSettingsDialog } from './board-view/dialogs/pipeline-settings-dialog';
|
import { PipelineSettingsDialog } from './board-view/dialogs/pipeline-settings-dialog';
|
||||||
@@ -81,7 +75,6 @@ import type {
|
|||||||
StashPopConflictInfo,
|
StashPopConflictInfo,
|
||||||
StashApplyConflictInfo,
|
StashApplyConflictInfo,
|
||||||
} from './board-view/worktree-panel/types';
|
} from './board-view/worktree-panel/types';
|
||||||
import { BoardErrorBoundary } from './board-view/board-error-boundary';
|
|
||||||
import { COLUMNS, getColumnsWithPipeline } from './board-view/constants';
|
import { COLUMNS, getColumnsWithPipeline } from './board-view/constants';
|
||||||
import {
|
import {
|
||||||
useBoardFeatures,
|
useBoardFeatures,
|
||||||
@@ -106,7 +99,6 @@ import { useQueryClient } from '@tanstack/react-query';
|
|||||||
import { queryKeys } from '@/lib/query-keys';
|
import { queryKeys } from '@/lib/query-keys';
|
||||||
import { useAutoModeQueryInvalidation } from '@/hooks/use-query-invalidation';
|
import { useAutoModeQueryInvalidation } from '@/hooks/use-query-invalidation';
|
||||||
import { useUpdateGlobalSettings } from '@/hooks/mutations/use-settings-mutations';
|
import { useUpdateGlobalSettings } from '@/hooks/mutations/use-settings-mutations';
|
||||||
import { forceSyncSettingsToServer } from '@/hooks/use-settings-sync';
|
|
||||||
|
|
||||||
// Stable empty array to avoid infinite loop in selector
|
// Stable empty array to avoid infinite loop in selector
|
||||||
const EMPTY_WORKTREES: ReturnType<ReturnType<typeof useAppStore.getState>['getWorktrees']> = [];
|
const EMPTY_WORKTREES: ReturnType<ReturnType<typeof useAppStore.getState>['getWorktrees']> = [];
|
||||||
@@ -122,7 +114,7 @@ export function BoardView() {
|
|||||||
pendingPlanApproval,
|
pendingPlanApproval,
|
||||||
setPendingPlanApproval,
|
setPendingPlanApproval,
|
||||||
updateFeature,
|
updateFeature,
|
||||||
batchUpdateFeatures,
|
getCurrentWorktree,
|
||||||
setCurrentWorktree,
|
setCurrentWorktree,
|
||||||
getWorktrees,
|
getWorktrees,
|
||||||
setWorktrees,
|
setWorktrees,
|
||||||
@@ -131,7 +123,6 @@ export function BoardView() {
|
|||||||
isPrimaryWorktreeBranch,
|
isPrimaryWorktreeBranch,
|
||||||
getPrimaryWorktreeBranch,
|
getPrimaryWorktreeBranch,
|
||||||
setPipelineConfig,
|
setPipelineConfig,
|
||||||
featureTemplates,
|
|
||||||
} = useAppStore(
|
} = useAppStore(
|
||||||
useShallow((state) => ({
|
useShallow((state) => ({
|
||||||
currentProject: state.currentProject,
|
currentProject: state.currentProject,
|
||||||
@@ -141,7 +132,7 @@ export function BoardView() {
|
|||||||
pendingPlanApproval: state.pendingPlanApproval,
|
pendingPlanApproval: state.pendingPlanApproval,
|
||||||
setPendingPlanApproval: state.setPendingPlanApproval,
|
setPendingPlanApproval: state.setPendingPlanApproval,
|
||||||
updateFeature: state.updateFeature,
|
updateFeature: state.updateFeature,
|
||||||
batchUpdateFeatures: state.batchUpdateFeatures,
|
getCurrentWorktree: state.getCurrentWorktree,
|
||||||
setCurrentWorktree: state.setCurrentWorktree,
|
setCurrentWorktree: state.setCurrentWorktree,
|
||||||
getWorktrees: state.getWorktrees,
|
getWorktrees: state.getWorktrees,
|
||||||
setWorktrees: state.setWorktrees,
|
setWorktrees: state.setWorktrees,
|
||||||
@@ -150,11 +141,8 @@ export function BoardView() {
|
|||||||
isPrimaryWorktreeBranch: state.isPrimaryWorktreeBranch,
|
isPrimaryWorktreeBranch: state.isPrimaryWorktreeBranch,
|
||||||
getPrimaryWorktreeBranch: state.getPrimaryWorktreeBranch,
|
getPrimaryWorktreeBranch: state.getPrimaryWorktreeBranch,
|
||||||
setPipelineConfig: state.setPipelineConfig,
|
setPipelineConfig: state.setPipelineConfig,
|
||||||
featureTemplates: state.featureTemplates,
|
|
||||||
}))
|
}))
|
||||||
);
|
);
|
||||||
// Also get keyboard shortcuts for the add feature shortcut
|
|
||||||
const keyboardShortcuts = useAppStore((state) => state.keyboardShortcuts);
|
|
||||||
// Fetch pipeline config via React Query
|
// Fetch pipeline config via React Query
|
||||||
const { data: pipelineConfig } = usePipelineConfig(currentProject?.path);
|
const { data: pipelineConfig } = usePipelineConfig(currentProject?.path);
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
@@ -176,7 +164,6 @@ export function BoardView() {
|
|||||||
} = useBoardFeatures({ currentProject });
|
} = useBoardFeatures({ currentProject });
|
||||||
const [editingFeature, setEditingFeature] = useState<Feature | null>(null);
|
const [editingFeature, setEditingFeature] = useState<Feature | null>(null);
|
||||||
const [showAddDialog, setShowAddDialog] = useState(false);
|
const [showAddDialog, setShowAddDialog] = useState(false);
|
||||||
const [showQuickAddDialog, setShowQuickAddDialog] = useState(false);
|
|
||||||
const [isMounted, setIsMounted] = useState(false);
|
const [isMounted, setIsMounted] = useState(false);
|
||||||
const [showOutputModal, setShowOutputModal] = useState(false);
|
const [showOutputModal, setShowOutputModal] = useState(false);
|
||||||
const [outputFeature, setOutputFeature] = useState<Feature | null>(null);
|
const [outputFeature, setOutputFeature] = useState<Feature | null>(null);
|
||||||
@@ -199,7 +186,6 @@ export function BoardView() {
|
|||||||
const [showDeleteWorktreeDialog, setShowDeleteWorktreeDialog] = useState(false);
|
const [showDeleteWorktreeDialog, setShowDeleteWorktreeDialog] = useState(false);
|
||||||
const [showCommitWorktreeDialog, setShowCommitWorktreeDialog] = useState(false);
|
const [showCommitWorktreeDialog, setShowCommitWorktreeDialog] = useState(false);
|
||||||
const [showCreatePRDialog, setShowCreatePRDialog] = useState(false);
|
const [showCreatePRDialog, setShowCreatePRDialog] = useState(false);
|
||||||
const [showChangePRNumberDialog, setShowChangePRNumberDialog] = useState(false);
|
|
||||||
const [showCreateBranchDialog, setShowCreateBranchDialog] = useState(false);
|
const [showCreateBranchDialog, setShowCreateBranchDialog] = useState(false);
|
||||||
const [showMergeRebaseDialog, setShowMergeRebaseDialog] = useState(false);
|
const [showMergeRebaseDialog, setShowMergeRebaseDialog] = useState(false);
|
||||||
const [showPRCommentDialog, setShowPRCommentDialog] = useState(false);
|
const [showPRCommentDialog, setShowPRCommentDialog] = useState(false);
|
||||||
@@ -425,122 +411,83 @@ export function BoardView() {
|
|||||||
currentProject,
|
currentProject,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Shared helper: batch-reset branch assignment and persist for each affected feature.
|
|
||||||
// Used when worktrees are deleted or branches are removed during merge.
|
|
||||||
const batchResetBranchFeatures = useCallback(
|
|
||||||
(branchName: string) => {
|
|
||||||
const affectedIds = hookFeatures.filter((f) => f.branchName === branchName).map((f) => f.id);
|
|
||||||
if (affectedIds.length === 0) return;
|
|
||||||
const updates: Partial<Feature> = { branchName: undefined };
|
|
||||||
batchUpdateFeatures(affectedIds, updates);
|
|
||||||
for (const id of affectedIds) {
|
|
||||||
persistFeatureUpdate(id, updates).catch((err: unknown) => {
|
|
||||||
console.error(
|
|
||||||
`[batchResetBranchFeatures] Failed to persist update for feature ${id}:`,
|
|
||||||
err
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[hookFeatures, batchUpdateFeatures, persistFeatureUpdate]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Memoize the removed worktrees handler to prevent infinite loops
|
// Memoize the removed worktrees handler to prevent infinite loops
|
||||||
const handleRemovedWorktrees = useCallback(
|
const handleRemovedWorktrees = useCallback(
|
||||||
(removedWorktrees: Array<{ path: string; branch: string }>) => {
|
(removedWorktrees: Array<{ path: string; branch: string }>) => {
|
||||||
for (const { branch } of removedWorktrees) {
|
// Reset features that were assigned to the removed worktrees (by branch)
|
||||||
batchResetBranchFeatures(branch);
|
hookFeatures.forEach((feature) => {
|
||||||
|
const matchesRemovedWorktree = removedWorktrees.some((removed) => {
|
||||||
|
// Match by branch name since worktreePath is no longer stored
|
||||||
|
return feature.branchName === removed.branch;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (matchesRemovedWorktree) {
|
||||||
|
// Reset the feature's branch assignment - update both local state and persist
|
||||||
|
const updates = { branchName: null as unknown as string | undefined };
|
||||||
|
updateFeature(feature.id, updates);
|
||||||
|
persistFeatureUpdate(feature.id, updates);
|
||||||
}
|
}
|
||||||
|
});
|
||||||
},
|
},
|
||||||
[batchResetBranchFeatures]
|
[hookFeatures, updateFeature, persistFeatureUpdate]
|
||||||
);
|
);
|
||||||
|
|
||||||
const currentProjectPath = currentProject?.path;
|
// Get current worktree info (path) for filtering features
|
||||||
|
// This needs to be before useBoardActions so we can pass currentWorktreeBranch
|
||||||
// Get current worktree info (path/branch) for filtering features.
|
const currentWorktreeInfo = currentProject ? getCurrentWorktree(currentProject.path) : null;
|
||||||
// Subscribe to the selected project's current worktree value directly so worktree
|
|
||||||
// switches trigger an immediate re-render and instant kanban/list re-filtering.
|
|
||||||
const currentWorktreeInfo = useAppStore(
|
|
||||||
useCallback(
|
|
||||||
(s) => (currentProjectPath ? (s.currentWorktreeByProject[currentProjectPath] ?? null) : null),
|
|
||||||
[currentProjectPath]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
const currentWorktreePath = currentWorktreeInfo?.path ?? null;
|
const currentWorktreePath = currentWorktreeInfo?.path ?? null;
|
||||||
|
|
||||||
// Select worktrees for the current project directly from the store.
|
// Track the previous worktree path to detect worktree switches
|
||||||
// Using a project-scoped selector prevents re-renders when OTHER projects'
|
const prevWorktreePathRef = useRef<string | null | undefined>(undefined);
|
||||||
// worktrees change (the old selector subscribed to the entire worktreesByProject
|
|
||||||
// object, causing unnecessary re-renders that cascaded into selectedWorktree →
|
// When the active worktree changes, invalidate feature queries to ensure
|
||||||
// useAutoMode → refreshStatus → setAutoModeRunning → store update → re-render loop
|
// feature cards (especially their todo lists / planSpec tasks) render fresh data.
|
||||||
// that could trigger React error #185 on initial project open).
|
// Without this, cards that unmount when filtered out and remount when the user
|
||||||
const worktrees = useAppStore(
|
// switches back may show stale or missing todo list data until the next polling cycle.
|
||||||
useCallback(
|
useEffect(() => {
|
||||||
(s) =>
|
// Skip the initial mount (prevWorktreePathRef starts as undefined)
|
||||||
currentProjectPath
|
if (prevWorktreePathRef.current === undefined) {
|
||||||
? (s.worktreesByProject[currentProjectPath] ?? EMPTY_WORKTREES)
|
prevWorktreePathRef.current = currentWorktreePath;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Only invalidate when the worktree actually changed
|
||||||
|
if (prevWorktreePathRef.current !== currentWorktreePath && currentProject?.path) {
|
||||||
|
queryClient.invalidateQueries({
|
||||||
|
queryKey: queryKeys.features.all(currentProject.path),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
prevWorktreePathRef.current = currentWorktreePath;
|
||||||
|
}, [currentWorktreePath, currentProject?.path, queryClient]);
|
||||||
|
|
||||||
|
const worktreesByProject = useAppStore((s) => s.worktreesByProject);
|
||||||
|
const worktrees = useMemo(
|
||||||
|
() =>
|
||||||
|
currentProject
|
||||||
|
? (worktreesByProject[currentProject.path] ?? EMPTY_WORKTREES)
|
||||||
: EMPTY_WORKTREES,
|
: EMPTY_WORKTREES,
|
||||||
[currentProjectPath]
|
[currentProject, worktreesByProject]
|
||||||
)
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Get the branch for the currently selected worktree
|
// Get the branch for the currently selected worktree
|
||||||
// Find the worktree that matches the current selection, or use main worktree
|
// Find the worktree that matches the current selection, or use main worktree
|
||||||
//
|
|
||||||
// IMPORTANT: Stabilize the returned object reference using a ref to prevent
|
|
||||||
// cascading re-renders during project switches. The spread `{ ...found, ... }`
|
|
||||||
// creates a new object every time, even when the underlying data is identical.
|
|
||||||
// Without stabilization, the new reference propagates to useAutoMode and other
|
|
||||||
// consumers, contributing to the re-render cascade that triggers React error #185.
|
|
||||||
const prevSelectedWorktreeRef = useRef<WorktreeInfo | undefined>(undefined);
|
|
||||||
const selectedWorktree = useMemo((): WorktreeInfo | undefined => {
|
const selectedWorktree = useMemo((): WorktreeInfo | undefined => {
|
||||||
let found;
|
let found;
|
||||||
let usedFallback = false;
|
|
||||||
if (currentWorktreePath === null) {
|
if (currentWorktreePath === null) {
|
||||||
// Primary worktree selected - find the main worktree
|
// Primary worktree selected - find the main worktree
|
||||||
found = worktrees.find((w) => w.isMain);
|
found = worktrees.find((w) => w.isMain);
|
||||||
} else {
|
} else {
|
||||||
// Specific worktree selected - find it by path
|
// Specific worktree selected - find it by path
|
||||||
found = worktrees.find((w) => !w.isMain && pathsEqual(w.path, currentWorktreePath));
|
found = worktrees.find((w) => !w.isMain && pathsEqual(w.path, currentWorktreePath));
|
||||||
// If the selected worktree no longer exists (e.g. just deleted),
|
|
||||||
// fall back to main to prevent rendering with undefined worktree.
|
|
||||||
// onDeleted will call setCurrentWorktree(…, null) to reset properly.
|
|
||||||
if (!found) {
|
|
||||||
found = worktrees.find((w) => w.isMain);
|
|
||||||
usedFallback = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!found) {
|
|
||||||
prevSelectedWorktreeRef.current = undefined;
|
|
||||||
return undefined;
|
|
||||||
}
|
}
|
||||||
|
if (!found) return undefined;
|
||||||
// Ensure all required WorktreeInfo fields are present
|
// Ensure all required WorktreeInfo fields are present
|
||||||
const result: WorktreeInfo = {
|
return {
|
||||||
...found,
|
...found,
|
||||||
isCurrent:
|
isCurrent:
|
||||||
found.isCurrent ??
|
found.isCurrent ??
|
||||||
(usedFallback
|
(currentWorktreePath !== null ? pathsEqual(found.path, currentWorktreePath) : found.isMain),
|
||||||
? found.isMain // treat main as current during the transient fallback render
|
|
||||||
: currentWorktreePath !== null
|
|
||||||
? pathsEqual(found.path, currentWorktreePath)
|
|
||||||
: found.isMain),
|
|
||||||
hasWorktree: found.hasWorktree ?? true,
|
hasWorktree: found.hasWorktree ?? true,
|
||||||
};
|
};
|
||||||
// Return the previous reference if the key fields haven't changed,
|
|
||||||
// preventing downstream hooks from seeing a "new" worktree on every render.
|
|
||||||
const prev = prevSelectedWorktreeRef.current;
|
|
||||||
if (
|
|
||||||
prev &&
|
|
||||||
prev.path === result.path &&
|
|
||||||
prev.branch === result.branch &&
|
|
||||||
prev.isMain === result.isMain &&
|
|
||||||
prev.isCurrent === result.isCurrent &&
|
|
||||||
prev.hasWorktree === result.hasWorktree
|
|
||||||
) {
|
|
||||||
return prev;
|
|
||||||
}
|
|
||||||
prevSelectedWorktreeRef.current = result;
|
|
||||||
return result;
|
|
||||||
}, [worktrees, currentWorktreePath]);
|
}, [worktrees, currentWorktreePath]);
|
||||||
|
|
||||||
// Auto mode hook - pass current worktree to get worktree-specific state
|
// Auto mode hook - pass current worktree to get worktree-specific state
|
||||||
@@ -655,15 +602,6 @@ export function BoardView() {
|
|||||||
);
|
);
|
||||||
}, [hookFeatures, worktrees]);
|
}, [hookFeatures, worktrees]);
|
||||||
|
|
||||||
// Recovery handler for BoardErrorBoundary: reset worktree selection to main
|
|
||||||
// so the board can re-render without the stale worktree state that caused the crash.
|
|
||||||
const handleBoardRecover = useCallback(() => {
|
|
||||||
if (!currentProject) return;
|
|
||||||
const mainWorktree = worktrees.find((w) => w.isMain);
|
|
||||||
const mainBranch = mainWorktree?.branch || 'main';
|
|
||||||
setCurrentWorktree(currentProject.path, null, mainBranch);
|
|
||||||
}, [currentProject, worktrees, setCurrentWorktree]);
|
|
||||||
|
|
||||||
// Helper function to add and select a worktree
|
// Helper function to add and select a worktree
|
||||||
const addAndSelectWorktree = useCallback(
|
const addAndSelectWorktree = useCallback(
|
||||||
(worktreeResult: { path: string; branch: string }) => {
|
(worktreeResult: { path: string; branch: string }) => {
|
||||||
@@ -1014,89 +952,6 @@ export function BoardView() {
|
|||||||
[handleAddFeature, handleStartImplementation]
|
[handleAddFeature, handleStartImplementation]
|
||||||
);
|
);
|
||||||
|
|
||||||
// Handler for Quick Add - creates a feature with minimal data using defaults
|
|
||||||
const handleQuickAdd = useCallback(
|
|
||||||
async (
|
|
||||||
description: string,
|
|
||||||
modelEntry: { model: string; thinkingLevel?: string; reasoningEffort?: string }
|
|
||||||
) => {
|
|
||||||
// Generate a title from the first line of the description
|
|
||||||
const title = description.split('\n')[0].substring(0, 100);
|
|
||||||
|
|
||||||
await handleAddFeature({
|
|
||||||
title,
|
|
||||||
description,
|
|
||||||
category: '',
|
|
||||||
images: [],
|
|
||||||
imagePaths: [],
|
|
||||||
skipTests: defaultSkipTests,
|
|
||||||
model: resolveModelString(modelEntry.model) as ModelAlias,
|
|
||||||
thinkingLevel: (modelEntry.thinkingLevel as ThinkingLevel) || 'none',
|
|
||||||
reasoningEffort: modelEntry.reasoningEffort,
|
|
||||||
branchName: addFeatureUseSelectedWorktreeBranch ? selectedWorktreeBranch : undefined,
|
|
||||||
priority: 2,
|
|
||||||
planningMode: useAppStore.getState().defaultPlanningMode ?? 'skip',
|
|
||||||
requirePlanApproval: useAppStore.getState().defaultRequirePlanApproval ?? false,
|
|
||||||
dependencies: [],
|
|
||||||
workMode: addFeatureUseSelectedWorktreeBranch ? 'custom' : 'current',
|
|
||||||
});
|
|
||||||
},
|
|
||||||
[
|
|
||||||
handleAddFeature,
|
|
||||||
defaultSkipTests,
|
|
||||||
addFeatureUseSelectedWorktreeBranch,
|
|
||||||
selectedWorktreeBranch,
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Handler for Quick Add & Start - creates and immediately starts a feature
|
|
||||||
const handleQuickAddAndStart = useCallback(
|
|
||||||
async (
|
|
||||||
description: string,
|
|
||||||
modelEntry: { model: string; thinkingLevel?: string; reasoningEffort?: string }
|
|
||||||
) => {
|
|
||||||
// Generate a title from the first line of the description
|
|
||||||
const title = description.split('\n')[0].substring(0, 100);
|
|
||||||
|
|
||||||
await handleAddAndStartFeature({
|
|
||||||
title,
|
|
||||||
description,
|
|
||||||
category: '',
|
|
||||||
images: [],
|
|
||||||
imagePaths: [],
|
|
||||||
skipTests: defaultSkipTests,
|
|
||||||
model: resolveModelString(modelEntry.model) as ModelAlias,
|
|
||||||
thinkingLevel: (modelEntry.thinkingLevel as ThinkingLevel) || 'none',
|
|
||||||
reasoningEffort: modelEntry.reasoningEffort,
|
|
||||||
branchName: addFeatureUseSelectedWorktreeBranch ? selectedWorktreeBranch : undefined,
|
|
||||||
priority: 2,
|
|
||||||
planningMode: useAppStore.getState().defaultPlanningMode ?? 'skip',
|
|
||||||
requirePlanApproval: useAppStore.getState().defaultRequirePlanApproval ?? false,
|
|
||||||
dependencies: [],
|
|
||||||
workMode: addFeatureUseSelectedWorktreeBranch ? 'custom' : 'current',
|
|
||||||
initialStatus: 'in_progress',
|
|
||||||
});
|
|
||||||
},
|
|
||||||
[
|
|
||||||
handleAddAndStartFeature,
|
|
||||||
defaultSkipTests,
|
|
||||||
addFeatureUseSelectedWorktreeBranch,
|
|
||||||
selectedWorktreeBranch,
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Handler for template selection - creates a feature from a template
|
|
||||||
const handleTemplateSelect = useCallback(
|
|
||||||
async (template: FeatureTemplate) => {
|
|
||||||
const modelEntry = template.model ||
|
|
||||||
useAppStore.getState().defaultFeatureModel || { model: 'claude-opus' };
|
|
||||||
|
|
||||||
// Start the template immediately (same behavior as clicking "Make")
|
|
||||||
await handleQuickAddAndStart(template.prompt, modelEntry);
|
|
||||||
},
|
|
||||||
[handleQuickAddAndStart]
|
|
||||||
);
|
|
||||||
|
|
||||||
// Handler for managing PR comments - opens the PR Comment Resolution dialog
|
// Handler for managing PR comments - opens the PR Comment Resolution dialog
|
||||||
const handleAddressPRComments = useCallback((worktree: WorktreeInfo, prInfo: PRInfo) => {
|
const handleAddressPRComments = useCallback((worktree: WorktreeInfo, prInfo: PRInfo) => {
|
||||||
setPRCommentDialogPRInfo({
|
setPRCommentDialogPRInfo({
|
||||||
@@ -1176,7 +1031,7 @@ export function BoardView() {
|
|||||||
images: [],
|
images: [],
|
||||||
imagePaths: [],
|
imagePaths: [],
|
||||||
skipTests: defaultSkipTests,
|
skipTests: defaultSkipTests,
|
||||||
model: resolveModelString('opus'),
|
model: 'opus' as const,
|
||||||
thinkingLevel: 'none' as const,
|
thinkingLevel: 'none' as const,
|
||||||
branchName: conflictInfo.targetBranch,
|
branchName: conflictInfo.targetBranch,
|
||||||
workMode: 'custom' as const, // Use the target branch where conflicts need to be resolved
|
workMode: 'custom' as const, // Use the target branch where conflicts need to be resolved
|
||||||
@@ -1666,10 +1521,6 @@ export function BoardView() {
|
|||||||
onViewModeChange={setViewMode}
|
onViewModeChange={setViewMode}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
{/* BoardErrorBoundary catches render errors during worktree switches (e.g. React
|
|
||||||
error #185 re-render cascades on mobile Safari PWA) and provides a recovery UI
|
|
||||||
that resets to main branch instead of crashing the entire page. */}
|
|
||||||
<BoardErrorBoundary onRecover={handleBoardRecover}>
|
|
||||||
{/* DndContext wraps both WorktreePanel and main content area to enable drag-to-worktree */}
|
{/* DndContext wraps both WorktreePanel and main content area to enable drag-to-worktree */}
|
||||||
<DndContext
|
<DndContext
|
||||||
sensors={sensors}
|
sensors={sensors}
|
||||||
@@ -1695,10 +1546,6 @@ export function BoardView() {
|
|||||||
setSelectedWorktreeForAction(worktree);
|
setSelectedWorktreeForAction(worktree);
|
||||||
setShowCreatePRDialog(true);
|
setShowCreatePRDialog(true);
|
||||||
}}
|
}}
|
||||||
onChangePRNumber={(worktree) => {
|
|
||||||
setSelectedWorktreeForAction(worktree);
|
|
||||||
setShowChangePRNumberDialog(true);
|
|
||||||
}}
|
|
||||||
onCreateBranch={(worktree) => {
|
onCreateBranch={(worktree) => {
|
||||||
setSelectedWorktreeForAction(worktree);
|
setSelectedWorktreeForAction(worktree);
|
||||||
setShowCreateBranchDialog(true);
|
setShowCreateBranchDialog(true);
|
||||||
@@ -1711,7 +1558,17 @@ export function BoardView() {
|
|||||||
onStashPopConflict={handleStashPopConflict}
|
onStashPopConflict={handleStashPopConflict}
|
||||||
onStashApplyConflict={handleStashApplyConflict}
|
onStashApplyConflict={handleStashApplyConflict}
|
||||||
onBranchDeletedDuringMerge={(branchName) => {
|
onBranchDeletedDuringMerge={(branchName) => {
|
||||||
batchResetBranchFeatures(branchName);
|
// Reset features that were assigned to the deleted branch (same logic as onDeleted in DeleteWorktreeDialog)
|
||||||
|
hookFeatures.forEach((feature) => {
|
||||||
|
if (feature.branchName === branchName) {
|
||||||
|
// Reset the feature's branch assignment - update both local state and persist
|
||||||
|
const updates = {
|
||||||
|
branchName: null as unknown as string | undefined,
|
||||||
|
};
|
||||||
|
updateFeature(feature.id, updates);
|
||||||
|
persistFeatureUpdate(feature.id, updates);
|
||||||
|
}
|
||||||
|
});
|
||||||
setWorktreeRefreshKey((k) => k + 1);
|
setWorktreeRefreshKey((k) => k + 1);
|
||||||
}}
|
}}
|
||||||
onRemovedWorktrees={handleRemovedWorktrees}
|
onRemovedWorktrees={handleRemovedWorktrees}
|
||||||
@@ -1757,9 +1614,6 @@ export function BoardView() {
|
|||||||
runningAutoTasks={runningAutoTasksAllWorktrees}
|
runningAutoTasks={runningAutoTasksAllWorktrees}
|
||||||
pipelineConfig={pipelineConfig}
|
pipelineConfig={pipelineConfig}
|
||||||
onAddFeature={() => setShowAddDialog(true)}
|
onAddFeature={() => setShowAddDialog(true)}
|
||||||
onQuickAdd={() => setShowQuickAddDialog(true)}
|
|
||||||
onTemplateSelect={handleTemplateSelect}
|
|
||||||
templates={featureTemplates}
|
|
||||||
isSelectionMode={isSelectionMode}
|
isSelectionMode={isSelectionMode}
|
||||||
selectedFeatureIds={selectedFeatureIds}
|
selectedFeatureIds={selectedFeatureIds}
|
||||||
onToggleFeatureSelection={toggleFeatureSelection}
|
onToggleFeatureSelection={toggleFeatureSelection}
|
||||||
@@ -1802,10 +1656,6 @@ export function BoardView() {
|
|||||||
runningAutoTasks={runningAutoTasksAllWorktrees}
|
runningAutoTasks={runningAutoTasksAllWorktrees}
|
||||||
onArchiveAllVerified={() => setShowArchiveAllVerifiedDialog(true)}
|
onArchiveAllVerified={() => setShowArchiveAllVerifiedDialog(true)}
|
||||||
onAddFeature={() => setShowAddDialog(true)}
|
onAddFeature={() => setShowAddDialog(true)}
|
||||||
onQuickAdd={() => setShowQuickAddDialog(true)}
|
|
||||||
onTemplateSelect={handleTemplateSelect}
|
|
||||||
templates={featureTemplates}
|
|
||||||
addFeatureShortcut={keyboardShortcuts.addFeature}
|
|
||||||
onShowCompletedModal={() => setShowCompletedModal(true)}
|
onShowCompletedModal={() => setShowCompletedModal(true)}
|
||||||
completedCount={completedFeatures.length}
|
completedCount={completedFeatures.length}
|
||||||
pipelineConfig={pipelineConfig ?? null}
|
pipelineConfig={pipelineConfig ?? null}
|
||||||
@@ -1822,7 +1672,6 @@ export function BoardView() {
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</DndContext>
|
</DndContext>
|
||||||
</BoardErrorBoundary>
|
|
||||||
|
|
||||||
{/* Selection Action Bar */}
|
{/* Selection Action Bar */}
|
||||||
{isSelectionMode && (
|
{isSelectionMode && (
|
||||||
@@ -1918,14 +1767,6 @@ export function BoardView() {
|
|||||||
forceCurrentBranchMode={!addFeatureUseSelectedWorktreeBranch}
|
forceCurrentBranchMode={!addFeatureUseSelectedWorktreeBranch}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
{/* Quick Add Dialog */}
|
|
||||||
<QuickAddDialog
|
|
||||||
open={showQuickAddDialog}
|
|
||||||
onOpenChange={setShowQuickAddDialog}
|
|
||||||
onAdd={handleQuickAdd}
|
|
||||||
onAddAndStart={handleQuickAddAndStart}
|
|
||||||
/>
|
|
||||||
|
|
||||||
{/* Dependency Link Dialog */}
|
{/* Dependency Link Dialog */}
|
||||||
<DependencyLinkDialog
|
<DependencyLinkDialog
|
||||||
open={Boolean(pendingDependencyLink)}
|
open={Boolean(pendingDependencyLink)}
|
||||||
@@ -2104,81 +1945,21 @@ export function BoardView() {
|
|||||||
}
|
}
|
||||||
defaultDeleteBranch={getDefaultDeleteBranch(currentProject.path)}
|
defaultDeleteBranch={getDefaultDeleteBranch(currentProject.path)}
|
||||||
onDeleted={(deletedWorktree, _deletedBranch) => {
|
onDeleted={(deletedWorktree, _deletedBranch) => {
|
||||||
// 1. Reset current worktree to main FIRST. This must happen
|
// Reset features that were assigned to the deleted worktree (by branch)
|
||||||
// BEFORE removing from the list to ensure downstream hooks
|
hookFeatures.forEach((feature) => {
|
||||||
// (useAutoMode, useBoardFeatures) see a valid worktree and
|
// Match by branch name since worktreePath is no longer stored
|
||||||
// never try to render the deleted worktree.
|
if (feature.branchName === deletedWorktree.branch) {
|
||||||
const mainBranch = worktrees.find((w) => w.isMain)?.branch || 'main';
|
// Reset the feature's branch assignment - update both local state and persist
|
||||||
setCurrentWorktree(currentProject.path, null, mainBranch);
|
const updates = {
|
||||||
|
branchName: null as unknown as string | undefined,
|
||||||
// 2. Immediately remove the deleted worktree from the store's
|
|
||||||
// worktree list so the UI never renders a stale tab/dropdown
|
|
||||||
// item that can be clicked and cause a crash.
|
|
||||||
const remainingWorktrees = worktrees.filter(
|
|
||||||
(w) => !pathsEqual(w.path, deletedWorktree.path)
|
|
||||||
);
|
|
||||||
setWorktrees(currentProject.path, remainingWorktrees);
|
|
||||||
|
|
||||||
// 3. Cancel any in-flight worktree queries, then optimistically
|
|
||||||
// update the React Query cache so the worktree disappears
|
|
||||||
// from the dropdown immediately. Cancelling first prevents a
|
|
||||||
// pending refetch from overwriting our optimistic update with
|
|
||||||
// stale server data.
|
|
||||||
const worktreeQueryKey = queryKeys.worktrees.all(currentProject.path);
|
|
||||||
void queryClient.cancelQueries({ queryKey: worktreeQueryKey });
|
|
||||||
queryClient.setQueryData(
|
|
||||||
worktreeQueryKey,
|
|
||||||
(
|
|
||||||
old:
|
|
||||||
| {
|
|
||||||
worktrees: WorktreeInfo[];
|
|
||||||
removedWorktrees: Array<{ path: string; branch: string }>;
|
|
||||||
}
|
|
||||||
| undefined
|
|
||||||
) => {
|
|
||||||
if (!old) return old;
|
|
||||||
return {
|
|
||||||
...old,
|
|
||||||
worktrees: old.worktrees.filter(
|
|
||||||
(w: WorktreeInfo) => !pathsEqual(w.path, deletedWorktree.path)
|
|
||||||
),
|
|
||||||
};
|
};
|
||||||
|
updateFeature(feature.id, updates);
|
||||||
|
persistFeatureUpdate(feature.id, updates);
|
||||||
}
|
}
|
||||||
);
|
|
||||||
|
|
||||||
// 4. Batch-reset features assigned to the deleted worktree in one
|
|
||||||
// store mutation to avoid N individual updateFeature calls that
|
|
||||||
// cascade into React error #185.
|
|
||||||
batchResetBranchFeatures(deletedWorktree.branch);
|
|
||||||
|
|
||||||
// 5. Schedule a deferred refetch to reconcile with the server.
|
|
||||||
// The server has already completed the deletion, so this
|
|
||||||
// refetch will return data without the deleted worktree.
|
|
||||||
// This protects against stale in-flight polling responses
|
|
||||||
// that may slip through the cancelQueries window and
|
|
||||||
// overwrite the optimistic update above.
|
|
||||||
const projectPathForRefetch = currentProject.path;
|
|
||||||
setTimeout(() => {
|
|
||||||
queryClient.invalidateQueries({
|
|
||||||
queryKey: queryKeys.worktrees.all(projectPathForRefetch),
|
|
||||||
});
|
});
|
||||||
}, 1500);
|
|
||||||
|
|
||||||
|
setWorktreeRefreshKey((k) => k + 1);
|
||||||
setSelectedWorktreeForAction(null);
|
setSelectedWorktreeForAction(null);
|
||||||
|
|
||||||
// 6. Force-sync settings immediately so the reset worktree
|
|
||||||
// selection is persisted before any potential page reload.
|
|
||||||
// Without this, the debounced sync (1s) may not complete
|
|
||||||
// in time and the stale worktree path survives in
|
|
||||||
// server settings, causing the deleted worktree to
|
|
||||||
// reappear on next load.
|
|
||||||
forceSyncSettingsToServer().then((ok) => {
|
|
||||||
if (!ok) {
|
|
||||||
logger.warn(
|
|
||||||
'forceSyncSettingsToServer failed after worktree deletion; stale path may reappear on reload'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
@@ -2237,18 +2018,6 @@ export function BoardView() {
|
|||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
{/* Change PR Number Dialog */}
|
|
||||||
<ChangePRNumberDialog
|
|
||||||
open={showChangePRNumberDialog}
|
|
||||||
onOpenChange={setShowChangePRNumberDialog}
|
|
||||||
worktree={selectedWorktreeForAction}
|
|
||||||
projectPath={currentProject?.path || null}
|
|
||||||
onChanged={() => {
|
|
||||||
setWorktreeRefreshKey((k) => k + 1);
|
|
||||||
setSelectedWorktreeForAction(null);
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
{/* Create Branch Dialog */}
|
{/* Create Branch Dialog */}
|
||||||
<CreateBranchDialog
|
<CreateBranchDialog
|
||||||
open={showCreateBranchDialog}
|
open={showCreateBranchDialog}
|
||||||
|
|||||||
@@ -1,74 +0,0 @@
|
|||||||
import { Component, type ReactNode, type ErrorInfo } from 'react';
|
|
||||||
import { createLogger } from '@automaker/utils/logger';
|
|
||||||
import { AlertCircle, RefreshCw } from 'lucide-react';
|
|
||||||
import { Button } from '@/components/ui/button';
|
|
||||||
|
|
||||||
const logger = createLogger('BoardErrorBoundary');
|
|
||||||
|
|
||||||
interface Props {
|
|
||||||
children: ReactNode;
|
|
||||||
/** Called when the user clicks "Recover" - should reset worktree to main */
|
|
||||||
onRecover?: () => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface State {
|
|
||||||
hasError: boolean;
|
|
||||||
error: Error | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Error boundary for the board's content area (WorktreePanel + KanbanBoard/ListView).
|
|
||||||
*
|
|
||||||
* Catches render errors caused by stale worktree state during worktree switches
|
|
||||||
* (e.g. re-render cascades that trigger React error #185 on mobile Safari PWA).
|
|
||||||
* Instead of crashing the entire page, this shows a recovery UI that resets
|
|
||||||
* the worktree selection to main and retries rendering.
|
|
||||||
*/
|
|
||||||
export class BoardErrorBoundary extends Component<Props, State> {
|
|
||||||
constructor(props: Props) {
|
|
||||||
super(props);
|
|
||||||
this.state = { hasError: false, error: null };
|
|
||||||
}
|
|
||||||
|
|
||||||
static getDerivedStateFromError(error: Error): State {
|
|
||||||
return { hasError: true, error };
|
|
||||||
}
|
|
||||||
|
|
||||||
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
|
|
||||||
logger.error('Board content crashed:', {
|
|
||||||
error: error.message,
|
|
||||||
stack: error.stack,
|
|
||||||
componentStack: errorInfo.componentStack,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
handleRecover = () => {
|
|
||||||
this.setState({ hasError: false, error: null });
|
|
||||||
this.props.onRecover?.();
|
|
||||||
};
|
|
||||||
|
|
||||||
render() {
|
|
||||||
if (this.state.hasError) {
|
|
||||||
return (
|
|
||||||
<div className="flex-1 flex flex-col items-center justify-center gap-4 p-6 text-center">
|
|
||||||
<div className="w-12 h-12 rounded-full bg-destructive/10 flex items-center justify-center">
|
|
||||||
<AlertCircle className="w-6 h-6 text-destructive" />
|
|
||||||
</div>
|
|
||||||
<div className="space-y-2">
|
|
||||||
<h3 className="text-lg font-semibold text-foreground">Board crashed</h3>
|
|
||||||
<p className="text-sm text-muted-foreground max-w-sm">
|
|
||||||
A rendering error occurred, possibly during a worktree switch. Click recover to reset
|
|
||||||
to the main branch and retry.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<Button variant="outline" size="sm" onClick={this.handleRecover} className="gap-2">
|
|
||||||
<RefreshCw className="w-4 h-4" />
|
|
||||||
Recover
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.props.children;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,188 +0,0 @@
|
|||||||
import { useState } from 'react';
|
|
||||||
import { Button } from '@/components/ui/button';
|
|
||||||
import {
|
|
||||||
DropdownMenu,
|
|
||||||
DropdownMenuContent,
|
|
||||||
DropdownMenuItem,
|
|
||||||
DropdownMenuTrigger,
|
|
||||||
} from '@/components/ui/dropdown-menu';
|
|
||||||
import { Plus, ChevronDown, Zap, FileText } from 'lucide-react';
|
|
||||||
import type { FeatureTemplate } from '@automaker/types';
|
|
||||||
import { cn } from '@/lib/utils';
|
|
||||||
|
|
||||||
interface AddFeatureButtonProps {
|
|
||||||
/** Handler for the primary "Add Feature" action (opens full dialog) */
|
|
||||||
onAddFeature: () => void;
|
|
||||||
/** Handler for Quick Add submission */
|
|
||||||
onQuickAdd: () => void;
|
|
||||||
/** Handler for template selection */
|
|
||||||
onTemplateSelect: (template: FeatureTemplate) => void;
|
|
||||||
/** Available templates (filtered to enabled ones) */
|
|
||||||
templates: FeatureTemplate[];
|
|
||||||
/** Whether to show as a small icon button or full button */
|
|
||||||
compact?: boolean;
|
|
||||||
/** Whether the button should take full width */
|
|
||||||
fullWidth?: boolean;
|
|
||||||
/** Additional className */
|
|
||||||
className?: string;
|
|
||||||
/** Test ID prefix */
|
|
||||||
testIdPrefix?: string;
|
|
||||||
/** Shortcut text to display (optional) */
|
|
||||||
shortcut?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function AddFeatureButton({
|
|
||||||
onAddFeature,
|
|
||||||
onQuickAdd,
|
|
||||||
onTemplateSelect,
|
|
||||||
templates,
|
|
||||||
compact = false,
|
|
||||||
fullWidth = false,
|
|
||||||
className,
|
|
||||||
testIdPrefix = 'add-feature',
|
|
||||||
shortcut,
|
|
||||||
}: AddFeatureButtonProps) {
|
|
||||||
const [dropdownOpen, setDropdownOpen] = useState(false);
|
|
||||||
|
|
||||||
// Filter to only enabled templates and sort by order
|
|
||||||
const enabledTemplates = templates
|
|
||||||
.filter((t) => t.enabled !== false)
|
|
||||||
.sort((a, b) => (a.order ?? 0) - (b.order ?? 0));
|
|
||||||
|
|
||||||
const handleTemplateClick = (template: FeatureTemplate) => {
|
|
||||||
setDropdownOpen(false);
|
|
||||||
onTemplateSelect(template);
|
|
||||||
};
|
|
||||||
|
|
||||||
if (compact) {
|
|
||||||
// Compact mode: Three small icon segments
|
|
||||||
return (
|
|
||||||
<div className={cn('flex', className)}>
|
|
||||||
{/* Segment 1: Add Feature */}
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className="h-6 w-6 p-0 rounded-r-none"
|
|
||||||
onClick={onAddFeature}
|
|
||||||
title="Add Feature"
|
|
||||||
data-testid={`${testIdPrefix}-button`}
|
|
||||||
>
|
|
||||||
<Plus className="w-3.5 h-3.5" />
|
|
||||||
</Button>
|
|
||||||
{/* Segment 2: Quick Add */}
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className="h-6 w-6 p-0 rounded-none border-l border-primary-foreground/20"
|
|
||||||
onClick={onQuickAdd}
|
|
||||||
title="Quick Add"
|
|
||||||
data-testid={`${testIdPrefix}-quick-add-button`}
|
|
||||||
>
|
|
||||||
<Zap className="w-3 h-3" />
|
|
||||||
</Button>
|
|
||||||
{/* Segment 3: Templates dropdown */}
|
|
||||||
{enabledTemplates.length > 0 && (
|
|
||||||
<DropdownMenu open={dropdownOpen} onOpenChange={setDropdownOpen}>
|
|
||||||
<DropdownMenuTrigger asChild>
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className="h-6 w-4 p-0 rounded-l-none border-l border-primary-foreground/20"
|
|
||||||
title="Templates"
|
|
||||||
data-testid={`${testIdPrefix}-dropdown-trigger`}
|
|
||||||
>
|
|
||||||
<ChevronDown className="w-3 h-3" />
|
|
||||||
</Button>
|
|
||||||
</DropdownMenuTrigger>
|
|
||||||
<DropdownMenuContent align="start" sideOffset={4}>
|
|
||||||
{enabledTemplates.map((template) => (
|
|
||||||
<DropdownMenuItem
|
|
||||||
key={template.id}
|
|
||||||
onClick={() => handleTemplateClick(template)}
|
|
||||||
data-testid={`template-menu-item-${template.id}`}
|
|
||||||
>
|
|
||||||
<FileText className="w-4 h-4 mr-2" />
|
|
||||||
<span className="truncate max-w-[200px]">{template.name}</span>
|
|
||||||
</DropdownMenuItem>
|
|
||||||
))}
|
|
||||||
</DropdownMenuContent>
|
|
||||||
</DropdownMenu>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Full mode: Three-segment button
|
|
||||||
return (
|
|
||||||
<div className={cn('flex justify-center', fullWidth && 'w-full', className)}>
|
|
||||||
{/* Segment 1: Add Feature */}
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className={cn('h-8 text-xs px-3 rounded-r-none', fullWidth && 'flex-1')}
|
|
||||||
onClick={onAddFeature}
|
|
||||||
data-testid={`${testIdPrefix}-button`}
|
|
||||||
>
|
|
||||||
<Plus className="w-3.5 h-3.5 mr-1.5" />
|
|
||||||
Add Feature
|
|
||||||
{shortcut && (
|
|
||||||
<span className="ml-auto pl-2 text-[10px] font-mono opacity-70 bg-black/20 px-1 py-0.5 rounded">
|
|
||||||
{shortcut}
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</Button>
|
|
||||||
{/* Segment 2: Quick Add */}
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className={cn(
|
|
||||||
'h-8 text-xs px-2.5 rounded-none border-l border-primary-foreground/20',
|
|
||||||
fullWidth && 'flex-shrink-0'
|
|
||||||
)}
|
|
||||||
onClick={onQuickAdd}
|
|
||||||
data-testid={`${testIdPrefix}-quick-add-button`}
|
|
||||||
>
|
|
||||||
<Zap className="w-3.5 h-3.5 mr-1" />
|
|
||||||
Quick
|
|
||||||
</Button>
|
|
||||||
{/* Segment 3: Templates dropdown */}
|
|
||||||
<DropdownMenu open={dropdownOpen} onOpenChange={setDropdownOpen}>
|
|
||||||
<DropdownMenuTrigger asChild>
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className={cn(
|
|
||||||
'h-8 rounded-l-none border-l border-primary-foreground/20',
|
|
||||||
enabledTemplates.length > 0 ? 'px-1.5' : 'w-7 p-0',
|
|
||||||
fullWidth && 'flex-shrink-0'
|
|
||||||
)}
|
|
||||||
aria-label="Templates"
|
|
||||||
title="Templates"
|
|
||||||
data-testid={`${testIdPrefix}-dropdown-trigger`}
|
|
||||||
>
|
|
||||||
<FileText className="w-3.5 h-3.5 mr-0.5" />
|
|
||||||
<ChevronDown className="w-2.5 h-2.5" />
|
|
||||||
</Button>
|
|
||||||
</DropdownMenuTrigger>
|
|
||||||
<DropdownMenuContent align="end" sideOffset={4}>
|
|
||||||
{enabledTemplates.length > 0 ? (
|
|
||||||
enabledTemplates.map((template) => (
|
|
||||||
<DropdownMenuItem
|
|
||||||
key={template.id}
|
|
||||||
onClick={() => handleTemplateClick(template)}
|
|
||||||
data-testid={`template-menu-item-${template.id}`}
|
|
||||||
>
|
|
||||||
<FileText className="w-4 h-4 mr-2" />
|
|
||||||
<span className="truncate max-w-[200px]">{template.name}</span>
|
|
||||||
</DropdownMenuItem>
|
|
||||||
))
|
|
||||||
) : (
|
|
||||||
<DropdownMenuItem disabled className="text-muted-foreground">
|
|
||||||
No templates configured
|
|
||||||
</DropdownMenuItem>
|
|
||||||
)}
|
|
||||||
</DropdownMenuContent>
|
|
||||||
</DropdownMenu>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -5,13 +5,12 @@ import { Button } from '@/components/ui/button';
|
|||||||
import { getBlockingDependencies } from '@automaker/dependency-resolver';
|
import { getBlockingDependencies } from '@automaker/dependency-resolver';
|
||||||
import { useAppStore, formatShortcut } from '@/store/app-store';
|
import { useAppStore, formatShortcut } from '@/store/app-store';
|
||||||
import type { Feature } from '@/store/app-store';
|
import type { Feature } from '@/store/app-store';
|
||||||
import type { PipelineConfig, FeatureStatusWithPipeline, FeatureTemplate } from '@automaker/types';
|
import type { PipelineConfig, FeatureStatusWithPipeline } from '@automaker/types';
|
||||||
import { ListHeader } from './list-header';
|
import { ListHeader } from './list-header';
|
||||||
import { ListRow, sortFeatures } from './list-row';
|
import { ListRow, sortFeatures } from './list-row';
|
||||||
import { createRowActionHandlers, type RowActionHandlers } from './row-actions';
|
import { createRowActionHandlers, type RowActionHandlers } from './row-actions';
|
||||||
import { getStatusOrder } from './status-badge';
|
import { getStatusOrder } from './status-badge';
|
||||||
import { getColumnsWithPipeline } from '../../constants';
|
import { getColumnsWithPipeline } from '../../constants';
|
||||||
import { AddFeatureButton } from '../add-feature-button';
|
|
||||||
import type { SortConfig, SortColumn } from '../../hooks/use-list-view-state';
|
import type { SortConfig, SortColumn } from '../../hooks/use-list-view-state';
|
||||||
|
|
||||||
/** Empty set constant to avoid creating new instances on each render */
|
/** Empty set constant to avoid creating new instances on each render */
|
||||||
@@ -66,12 +65,6 @@ export interface ListViewProps {
|
|||||||
pipelineConfig?: PipelineConfig | null;
|
pipelineConfig?: PipelineConfig | null;
|
||||||
/** Callback to add a new feature */
|
/** Callback to add a new feature */
|
||||||
onAddFeature?: () => void;
|
onAddFeature?: () => void;
|
||||||
/** Callback for quick add */
|
|
||||||
onQuickAdd?: () => void;
|
|
||||||
/** Callback for template selection */
|
|
||||||
onTemplateSelect?: (template: FeatureTemplate) => void;
|
|
||||||
/** Available feature templates */
|
|
||||||
templates?: FeatureTemplate[];
|
|
||||||
/** Whether selection mode is enabled */
|
/** Whether selection mode is enabled */
|
||||||
isSelectionMode?: boolean;
|
isSelectionMode?: boolean;
|
||||||
/** Set of selected feature IDs */
|
/** Set of selected feature IDs */
|
||||||
@@ -132,22 +125,7 @@ const StatusGroupHeader = memo(function StatusGroupHeader({
|
|||||||
/**
|
/**
|
||||||
* EmptyState displays a message when there are no features
|
* EmptyState displays a message when there are no features
|
||||||
*/
|
*/
|
||||||
const EmptyState = memo(function EmptyState({
|
const EmptyState = memo(function EmptyState({ onAddFeature }: { onAddFeature?: () => void }) {
|
||||||
onAddFeature,
|
|
||||||
onQuickAdd,
|
|
||||||
onTemplateSelect,
|
|
||||||
templates,
|
|
||||||
shortcut,
|
|
||||||
}: {
|
|
||||||
onAddFeature?: () => void;
|
|
||||||
onQuickAdd?: () => void;
|
|
||||||
onTemplateSelect?: (template: FeatureTemplate) => void;
|
|
||||||
templates?: FeatureTemplate[];
|
|
||||||
shortcut?: string;
|
|
||||||
}) {
|
|
||||||
// Only show AddFeatureButton if all required handlers are provided
|
|
||||||
const canShowSplitButton = onAddFeature && onQuickAdd && onTemplateSelect;
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
@@ -157,21 +135,12 @@ const EmptyState = memo(function EmptyState({
|
|||||||
data-testid="list-view-empty"
|
data-testid="list-view-empty"
|
||||||
>
|
>
|
||||||
<p className="text-sm mb-4">No features to display</p>
|
<p className="text-sm mb-4">No features to display</p>
|
||||||
{canShowSplitButton ? (
|
{onAddFeature && (
|
||||||
<AddFeatureButton
|
|
||||||
onAddFeature={onAddFeature}
|
|
||||||
onQuickAdd={onQuickAdd}
|
|
||||||
onTemplateSelect={onTemplateSelect}
|
|
||||||
templates={templates || []}
|
|
||||||
shortcut={shortcut}
|
|
||||||
testIdPrefix="list-view-empty-add-feature"
|
|
||||||
/>
|
|
||||||
) : onAddFeature ? (
|
|
||||||
<Button variant="default" size="sm" onClick={onAddFeature}>
|
<Button variant="default" size="sm" onClick={onAddFeature}>
|
||||||
<Plus className="w-4 h-4 mr-2" />
|
<Plus className="w-4 h-4 mr-2" />
|
||||||
Add Feature
|
Add Feature
|
||||||
</Button>
|
</Button>
|
||||||
) : null}
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -221,9 +190,6 @@ export const ListView = memo(function ListView({
|
|||||||
runningAutoTasks,
|
runningAutoTasks,
|
||||||
pipelineConfig = null,
|
pipelineConfig = null,
|
||||||
onAddFeature,
|
onAddFeature,
|
||||||
onQuickAdd,
|
|
||||||
onTemplateSelect,
|
|
||||||
templates = [],
|
|
||||||
isSelectionMode = false,
|
isSelectionMode = false,
|
||||||
selectedFeatureIds = EMPTY_SET,
|
selectedFeatureIds = EMPTY_SET,
|
||||||
onToggleFeatureSelection,
|
onToggleFeatureSelection,
|
||||||
@@ -422,13 +388,7 @@ export const ListView = memo(function ListView({
|
|||||||
if (totalFeatures === 0) {
|
if (totalFeatures === 0) {
|
||||||
return (
|
return (
|
||||||
<div className={cn('flex flex-col h-full bg-background', className)} data-testid="list-view">
|
<div className={cn('flex flex-col h-full bg-background', className)} data-testid="list-view">
|
||||||
<EmptyState
|
<EmptyState onAddFeature={onAddFeature} />
|
||||||
onAddFeature={onAddFeature}
|
|
||||||
onQuickAdd={onQuickAdd}
|
|
||||||
onTemplateSelect={onTemplateSelect}
|
|
||||||
templates={templates}
|
|
||||||
shortcut={formatShortcut(addFeatureShortcut, true)}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -492,17 +452,21 @@ export const ListView = memo(function ListView({
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Footer with Add Feature button, styled like board view */}
|
{/* Footer with Add Feature button, styled like board view */}
|
||||||
{onAddFeature && onQuickAdd && onTemplateSelect && (
|
{onAddFeature && (
|
||||||
<div className="border-t border-border px-4 py-2">
|
<div className="border-t border-border px-4 py-2">
|
||||||
<AddFeatureButton
|
<Button
|
||||||
onAddFeature={onAddFeature}
|
variant="default"
|
||||||
onQuickAdd={onQuickAdd}
|
size="sm"
|
||||||
onTemplateSelect={onTemplateSelect}
|
onClick={onAddFeature}
|
||||||
templates={templates}
|
className="w-full h-9 text-sm"
|
||||||
fullWidth
|
data-testid="list-view-add-feature"
|
||||||
shortcut={formatShortcut(addFeatureShortcut, true)}
|
>
|
||||||
testIdPrefix="list-view-add-feature"
|
<Plus className="w-4 h-4 mr-2" />
|
||||||
/>
|
Add Feature
|
||||||
|
<span className="ml-auto pl-2 text-[10px] font-mono opacity-70 bg-black/20 px-1.5 py-0.5 rounded">
|
||||||
|
{formatShortcut(addFeatureShortcut, true)}
|
||||||
|
</span>
|
||||||
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -26,14 +26,9 @@ import { useNavigate } from '@tanstack/react-router';
|
|||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import { modelSupportsThinking } from '@/lib/utils';
|
import { modelSupportsThinking } from '@/lib/utils';
|
||||||
import { useAppStore } from '@/store/app-store';
|
import { useAppStore, ThinkingLevel, FeatureImage, PlanningMode, Feature } from '@/store/app-store';
|
||||||
import type { ThinkingLevel, PlanningMode, Feature, FeatureImage } from '@/store/types';
|
|
||||||
import type { ReasoningEffort, PhaseModelEntry, AgentModel } from '@automaker/types';
|
import type { ReasoningEffort, PhaseModelEntry, AgentModel } from '@automaker/types';
|
||||||
import {
|
import { supportsReasoningEffort, isAdaptiveThinkingModel } from '@automaker/types';
|
||||||
supportsReasoningEffort,
|
|
||||||
normalizeThinkingLevelForModel,
|
|
||||||
getThinkingLevelsForModel,
|
|
||||||
} from '@automaker/types';
|
|
||||||
import {
|
import {
|
||||||
PrioritySelector,
|
PrioritySelector,
|
||||||
WorkModeSelector,
|
WorkModeSelector,
|
||||||
@@ -216,7 +211,6 @@ export function AddFeatureDialog({
|
|||||||
defaultRequirePlanApproval,
|
defaultRequirePlanApproval,
|
||||||
useWorktrees,
|
useWorktrees,
|
||||||
defaultFeatureModel,
|
defaultFeatureModel,
|
||||||
defaultThinkingLevel,
|
|
||||||
currentProject,
|
currentProject,
|
||||||
} = useAppStore();
|
} = useAppStore();
|
||||||
|
|
||||||
@@ -233,9 +227,9 @@ export function AddFeatureDialog({
|
|||||||
|
|
||||||
if (justOpened) {
|
if (justOpened) {
|
||||||
// Initialize with prefilled values if provided, otherwise use defaults
|
// Initialize with prefilled values if provided, otherwise use defaults
|
||||||
setTitle(prefilledTitle ?? '');
|
setTitle(prefilledTitle || '');
|
||||||
setDescription(prefilledDescription ?? '');
|
setDescription(prefilledDescription || '');
|
||||||
setCategory(prefilledCategory ?? '');
|
setCategory(prefilledCategory || '');
|
||||||
|
|
||||||
setSkipTests(defaultSkipTests);
|
setSkipTests(defaultSkipTests);
|
||||||
// When a non-main worktree is selected, use its branch name for custom mode
|
// When a non-main worktree is selected, use its branch name for custom mode
|
||||||
@@ -246,22 +240,7 @@ export function AddFeatureDialog({
|
|||||||
);
|
);
|
||||||
setPlanningMode(defaultPlanningMode);
|
setPlanningMode(defaultPlanningMode);
|
||||||
setRequirePlanApproval(defaultRequirePlanApproval);
|
setRequirePlanApproval(defaultRequirePlanApproval);
|
||||||
|
setModelEntry(effectiveDefaultFeatureModel);
|
||||||
// Apply defaultThinkingLevel from settings to the model entry.
|
|
||||||
// This ensures the "Quick-Select Defaults" thinking level setting is respected
|
|
||||||
// even when the user doesn't change the model in the dropdown.
|
|
||||||
const modelId =
|
|
||||||
typeof effectiveDefaultFeatureModel.model === 'string'
|
|
||||||
? effectiveDefaultFeatureModel.model
|
|
||||||
: '';
|
|
||||||
const availableLevels = getThinkingLevelsForModel(modelId);
|
|
||||||
const effectiveThinkingLevel = availableLevels.includes(defaultThinkingLevel)
|
|
||||||
? defaultThinkingLevel
|
|
||||||
: availableLevels[0];
|
|
||||||
setModelEntry({
|
|
||||||
...effectiveDefaultFeatureModel,
|
|
||||||
thinkingLevel: effectiveThinkingLevel,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Initialize description history (empty for new feature)
|
// Initialize description history (empty for new feature)
|
||||||
setDescriptionHistory([]);
|
setDescriptionHistory([]);
|
||||||
@@ -290,7 +269,6 @@ export function AddFeatureDialog({
|
|||||||
defaultPlanningMode,
|
defaultPlanningMode,
|
||||||
defaultRequirePlanApproval,
|
defaultRequirePlanApproval,
|
||||||
effectiveDefaultFeatureModel,
|
effectiveDefaultFeatureModel,
|
||||||
defaultThinkingLevel,
|
|
||||||
useWorktrees,
|
useWorktrees,
|
||||||
selectedNonMainWorktreeBranch,
|
selectedNonMainWorktreeBranch,
|
||||||
forceCurrentBranchMode,
|
forceCurrentBranchMode,
|
||||||
@@ -309,10 +287,20 @@ export function AddFeatureDialog({
|
|||||||
}, [planningMode]);
|
}, [planningMode]);
|
||||||
|
|
||||||
const handleModelChange = (entry: PhaseModelEntry) => {
|
const handleModelChange = (entry: PhaseModelEntry) => {
|
||||||
const modelId = typeof entry.model === 'string' ? entry.model : '';
|
// Normalize thinking level when switching between adaptive and non-adaptive models
|
||||||
const normalizedThinkingLevel = normalizeThinkingLevelForModel(modelId, entry.thinkingLevel);
|
const isNewModelAdaptive =
|
||||||
|
typeof entry.model === 'string' && isAdaptiveThinkingModel(entry.model);
|
||||||
|
const currentLevel = entry.thinkingLevel || 'none';
|
||||||
|
|
||||||
setModelEntry({ ...entry, thinkingLevel: normalizedThinkingLevel });
|
if (isNewModelAdaptive && currentLevel !== 'none' && currentLevel !== 'adaptive') {
|
||||||
|
// Switching TO Opus 4.6 with a manual level -> auto-switch to 'adaptive'
|
||||||
|
setModelEntry({ ...entry, thinkingLevel: 'adaptive' });
|
||||||
|
} else if (!isNewModelAdaptive && currentLevel === 'adaptive') {
|
||||||
|
// Switching FROM Opus 4.6 with adaptive -> auto-switch to 'high'
|
||||||
|
setModelEntry({ ...entry, thinkingLevel: 'high' });
|
||||||
|
} else {
|
||||||
|
setModelEntry(entry);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const buildFeatureData = (): FeatureData | null => {
|
const buildFeatureData = (): FeatureData | null => {
|
||||||
@@ -406,19 +394,7 @@ export function AddFeatureDialog({
|
|||||||
// When a non-main worktree is selected, use its branch name for custom mode
|
// When a non-main worktree is selected, use its branch name for custom mode
|
||||||
setBranchName(selectedNonMainWorktreeBranch || '');
|
setBranchName(selectedNonMainWorktreeBranch || '');
|
||||||
setPriority(2);
|
setPriority(2);
|
||||||
// Apply defaultThinkingLevel to the model entry (same logic as dialog open)
|
setModelEntry(effectiveDefaultFeatureModel);
|
||||||
const resetModelId =
|
|
||||||
typeof effectiveDefaultFeatureModel.model === 'string'
|
|
||||||
? effectiveDefaultFeatureModel.model
|
|
||||||
: '';
|
|
||||||
const resetAvailableLevels = getThinkingLevelsForModel(resetModelId);
|
|
||||||
const resetThinkingLevel = resetAvailableLevels.includes(defaultThinkingLevel)
|
|
||||||
? defaultThinkingLevel
|
|
||||||
: resetAvailableLevels[0];
|
|
||||||
setModelEntry({
|
|
||||||
...effectiveDefaultFeatureModel,
|
|
||||||
thinkingLevel: resetThinkingLevel,
|
|
||||||
});
|
|
||||||
setWorkMode(
|
setWorkMode(
|
||||||
getDefaultWorkMode(useWorktrees, selectedNonMainWorktreeBranch, forceCurrentBranchMode)
|
getDefaultWorkMode(useWorktrees, selectedNonMainWorktreeBranch, forceCurrentBranchMode)
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -29,10 +29,7 @@ import { useAppStore } from '@/store/app-store';
|
|||||||
/**
|
/**
|
||||||
* Normalize PhaseModelEntry or string to PhaseModelEntry
|
* Normalize PhaseModelEntry or string to PhaseModelEntry
|
||||||
*/
|
*/
|
||||||
function normalizeEntry(entry: PhaseModelEntry | string | undefined | null): PhaseModelEntry {
|
function normalizeEntry(entry: PhaseModelEntry | string): PhaseModelEntry {
|
||||||
if (!entry) {
|
|
||||||
return { model: 'claude-sonnet' as ModelAlias };
|
|
||||||
}
|
|
||||||
if (typeof entry === 'string') {
|
if (typeof entry === 'string') {
|
||||||
return { model: entry as ModelAlias | CursorModelId };
|
return { model: entry as ModelAlias | CursorModelId };
|
||||||
}
|
}
|
||||||
@@ -113,12 +110,7 @@ export function BacklogPlanDialog({
|
|||||||
// Use model override if set, otherwise use global default (extract model string from PhaseModelEntry)
|
// Use model override if set, otherwise use global default (extract model string from PhaseModelEntry)
|
||||||
const effectiveModelEntry = modelOverride || normalizeEntry(phaseModels.backlogPlanningModel);
|
const effectiveModelEntry = modelOverride || normalizeEntry(phaseModels.backlogPlanningModel);
|
||||||
const effectiveModel = effectiveModelEntry.model;
|
const effectiveModel = effectiveModelEntry.model;
|
||||||
const result = await api.backlogPlan.generate(
|
const result = await api.backlogPlan.generate(projectPath, prompt, effectiveModel);
|
||||||
projectPath,
|
|
||||||
prompt,
|
|
||||||
effectiveModel,
|
|
||||||
currentBranch
|
|
||||||
);
|
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
logger.error('Backlog plan generation failed to start', {
|
logger.error('Backlog plan generation failed to start', {
|
||||||
error: result.error,
|
error: result.error,
|
||||||
@@ -139,15 +131,7 @@ export function BacklogPlanDialog({
|
|||||||
});
|
});
|
||||||
setPrompt('');
|
setPrompt('');
|
||||||
onClose();
|
onClose();
|
||||||
}, [
|
}, [projectPath, prompt, modelOverride, phaseModels, setIsGeneratingPlan, onClose]);
|
||||||
projectPath,
|
|
||||||
prompt,
|
|
||||||
modelOverride,
|
|
||||||
phaseModels,
|
|
||||||
setIsGeneratingPlan,
|
|
||||||
onClose,
|
|
||||||
currentBranch,
|
|
||||||
]);
|
|
||||||
|
|
||||||
const handleApply = useCallback(async () => {
|
const handleApply = useCallback(async () => {
|
||||||
if (!pendingPlanResult) return;
|
if (!pendingPlanResult) return;
|
||||||
|
|||||||
@@ -1,197 +0,0 @@
|
|||||||
import { useState, useEffect, useCallback } from 'react';
|
|
||||||
import {
|
|
||||||
Dialog,
|
|
||||||
DialogContent,
|
|
||||||
DialogDescription,
|
|
||||||
DialogFooter,
|
|
||||||
DialogHeader,
|
|
||||||
DialogTitle,
|
|
||||||
} from '@/components/ui/dialog';
|
|
||||||
import { Button } from '@/components/ui/button';
|
|
||||||
import { Input } from '@/components/ui/input';
|
|
||||||
import { Label } from '@/components/ui/label';
|
|
||||||
import { GitPullRequest } from 'lucide-react';
|
|
||||||
import { Spinner } from '@/components/ui/spinner';
|
|
||||||
import { getElectronAPI } from '@/lib/electron';
|
|
||||||
import { toast } from 'sonner';
|
|
||||||
|
|
||||||
interface WorktreeInfo {
|
|
||||||
path: string;
|
|
||||||
branch: string;
|
|
||||||
isMain: boolean;
|
|
||||||
pr?: {
|
|
||||||
number: number;
|
|
||||||
url: string;
|
|
||||||
title: string;
|
|
||||||
state: string;
|
|
||||||
createdAt: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ChangePRNumberDialogProps {
|
|
||||||
open: boolean;
|
|
||||||
onOpenChange: (open: boolean) => void;
|
|
||||||
worktree: WorktreeInfo | null;
|
|
||||||
projectPath: string | null;
|
|
||||||
onChanged: () => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function ChangePRNumberDialog({
|
|
||||||
open,
|
|
||||||
onOpenChange,
|
|
||||||
worktree,
|
|
||||||
projectPath,
|
|
||||||
onChanged,
|
|
||||||
}: ChangePRNumberDialogProps) {
|
|
||||||
const [prNumberInput, setPrNumberInput] = useState('');
|
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
|
|
||||||
// Initialize with current PR number when dialog opens
|
|
||||||
useEffect(() => {
|
|
||||||
if (open && worktree?.pr?.number) {
|
|
||||||
setPrNumberInput(String(worktree.pr.number));
|
|
||||||
} else if (open) {
|
|
||||||
setPrNumberInput('');
|
|
||||||
}
|
|
||||||
setError(null);
|
|
||||||
}, [open, worktree]);
|
|
||||||
|
|
||||||
const handleSubmit = useCallback(async () => {
|
|
||||||
if (!worktree) return;
|
|
||||||
|
|
||||||
const trimmed = prNumberInput.trim();
|
|
||||||
if (!/^\d+$/.test(trimmed)) {
|
|
||||||
setError('Please enter a valid positive PR number');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const prNumber = Number(trimmed);
|
|
||||||
if (prNumber <= 0) {
|
|
||||||
setError('Please enter a valid positive PR number');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
setIsLoading(true);
|
|
||||||
setError(null);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const api = getElectronAPI();
|
|
||||||
if (!api?.worktree?.updatePRNumber) {
|
|
||||||
setError('Worktree API not available');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await api.worktree.updatePRNumber(
|
|
||||||
worktree.path,
|
|
||||||
prNumber,
|
|
||||||
projectPath || undefined
|
|
||||||
);
|
|
||||||
|
|
||||||
if (result.success) {
|
|
||||||
const prInfo = result.result?.prInfo;
|
|
||||||
toast.success('PR tracking updated', {
|
|
||||||
description: prInfo?.title
|
|
||||||
? `Now tracking PR #${prNumber}: ${prInfo.title}`
|
|
||||||
: `Now tracking PR #${prNumber}`,
|
|
||||||
});
|
|
||||||
onOpenChange(false);
|
|
||||||
onChanged();
|
|
||||||
} else {
|
|
||||||
setError(result.error || 'Failed to update PR number');
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
setError(err instanceof Error ? err.message : 'Failed to update PR number');
|
|
||||||
} finally {
|
|
||||||
setIsLoading(false);
|
|
||||||
}
|
|
||||||
}, [worktree, prNumberInput, projectPath, onOpenChange, onChanged]);
|
|
||||||
|
|
||||||
const handleKeyDown = useCallback(
|
|
||||||
(e: React.KeyboardEvent) => {
|
|
||||||
if (e.key === 'Enter' && !isLoading) {
|
|
||||||
e.preventDefault();
|
|
||||||
handleSubmit();
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[isLoading, handleSubmit]
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!worktree) return null;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Dialog
|
|
||||||
open={open}
|
|
||||||
onOpenChange={(isOpen) => {
|
|
||||||
if (!isLoading) {
|
|
||||||
onOpenChange(isOpen);
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<DialogContent className="sm:max-w-[400px]" onKeyDown={handleKeyDown}>
|
|
||||||
<DialogHeader>
|
|
||||||
<DialogTitle className="flex items-center gap-2">
|
|
||||||
<GitPullRequest className="w-5 h-5" />
|
|
||||||
Change Tracked PR Number
|
|
||||||
</DialogTitle>
|
|
||||||
<DialogDescription>
|
|
||||||
Update which pull request number is tracked for{' '}
|
|
||||||
<code className="font-mono bg-muted px-1 rounded">{worktree.branch}</code>.
|
|
||||||
{worktree.pr && (
|
|
||||||
<span className="block mt-1 text-xs">
|
|
||||||
Currently tracking PR #{worktree.pr.number}
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</DialogDescription>
|
|
||||||
</DialogHeader>
|
|
||||||
|
|
||||||
<div className="py-2 space-y-3">
|
|
||||||
<div className="space-y-2">
|
|
||||||
<Label htmlFor="pr-number">Pull Request Number</Label>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<span className="text-muted-foreground text-sm">#</span>
|
|
||||||
<Input
|
|
||||||
id="pr-number"
|
|
||||||
type="text"
|
|
||||||
inputMode="numeric"
|
|
||||||
placeholder="e.g. 42"
|
|
||||||
value={prNumberInput}
|
|
||||||
onChange={(e) => {
|
|
||||||
setPrNumberInput(e.target.value);
|
|
||||||
setError(null);
|
|
||||||
}}
|
|
||||||
disabled={isLoading}
|
|
||||||
autoFocus
|
|
||||||
className="flex-1"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<p className="text-xs text-muted-foreground">
|
|
||||||
Enter the GitHub PR number to associate with this worktree. The PR info will be
|
|
||||||
fetched from GitHub if available.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{error && <p className="text-sm text-destructive">{error}</p>}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<DialogFooter>
|
|
||||||
<Button variant="outline" onClick={() => onOpenChange(false)} disabled={isLoading}>
|
|
||||||
Cancel
|
|
||||||
</Button>
|
|
||||||
<Button onClick={handleSubmit} disabled={isLoading || !prNumberInput.trim()}>
|
|
||||||
{isLoading ? (
|
|
||||||
<>
|
|
||||||
<Spinner size="xs" className="mr-2" />
|
|
||||||
Updating...
|
|
||||||
</>
|
|
||||||
) : (
|
|
||||||
<>
|
|
||||||
<GitPullRequest className="w-4 h-4 mr-2" />
|
|
||||||
Update PR
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</Button>
|
|
||||||
</DialogFooter>
|
|
||||||
</DialogContent>
|
|
||||||
</Dialog>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useState, useEffect, useMemo, useCallback, useRef } from 'react';
|
import { useState, useEffect, useMemo, useCallback } from 'react';
|
||||||
import {
|
import {
|
||||||
Dialog,
|
Dialog,
|
||||||
DialogContent,
|
DialogContent,
|
||||||
@@ -30,17 +30,13 @@ import {
|
|||||||
ChevronDown,
|
ChevronDown,
|
||||||
ChevronRight,
|
ChevronRight,
|
||||||
Upload,
|
Upload,
|
||||||
RefreshCw,
|
|
||||||
} from 'lucide-react';
|
} from 'lucide-react';
|
||||||
import { Spinner } from '@/components/ui/spinner';
|
import { Spinner } from '@/components/ui/spinner';
|
||||||
import { getElectronAPI } from '@/lib/electron';
|
import { getElectronAPI } from '@/lib/electron';
|
||||||
import { getHttpApiClient } from '@/lib/http-api-client';
|
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import { useAppStore } from '@/store/app-store';
|
import { useAppStore } from '@/store/app-store';
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
|
||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import { TruncatedFilePath } from '@/components/ui/truncated-file-path';
|
import { TruncatedFilePath } from '@/components/ui/truncated-file-path';
|
||||||
import { ModelOverrideTrigger, useModelOverride } from '@/components/shared';
|
|
||||||
import type { FileStatus, MergeStateInfo } from '@/types/electron';
|
import type { FileStatus, MergeStateInfo } from '@/types/electron';
|
||||||
import { parseDiff, type ParsedFileDiff } from '@/lib/diff-utils';
|
import { parseDiff, type ParsedFileDiff } from '@/lib/diff-utils';
|
||||||
|
|
||||||
@@ -210,11 +206,6 @@ export function CommitWorktreeDialog({
|
|||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
const enableAiCommitMessages = useAppStore((state) => state.enableAiCommitMessages);
|
const enableAiCommitMessages = useAppStore((state) => state.enableAiCommitMessages);
|
||||||
|
|
||||||
// Commit message model override
|
|
||||||
const commitModelOverride = useModelOverride({ phase: 'commitMessageModel' });
|
|
||||||
const { effectiveModel: commitEffectiveModel, effectiveModelEntry: commitEffectiveModelEntry } =
|
|
||||||
commitModelOverride;
|
|
||||||
|
|
||||||
// File selection state
|
// File selection state
|
||||||
const [files, setFiles] = useState<FileStatus[]>([]);
|
const [files, setFiles] = useState<FileStatus[]>([]);
|
||||||
const [diffContent, setDiffContent] = useState('');
|
const [diffContent, setDiffContent] = useState('');
|
||||||
@@ -541,46 +532,6 @@ export function CommitWorktreeDialog({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Generate AI commit message
|
|
||||||
const generateCommitMessage = useCallback(async () => {
|
|
||||||
if (!worktree) return;
|
|
||||||
|
|
||||||
setIsGenerating(true);
|
|
||||||
try {
|
|
||||||
const resolvedCommitModel = resolveModelString(commitEffectiveModel);
|
|
||||||
const api = getHttpApiClient();
|
|
||||||
const result = await api.worktree.generateCommitMessage(
|
|
||||||
worktree.path,
|
|
||||||
resolvedCommitModel,
|
|
||||||
commitEffectiveModelEntry?.thinkingLevel,
|
|
||||||
commitEffectiveModelEntry?.providerId
|
|
||||||
);
|
|
||||||
|
|
||||||
if (result.success && result.message) {
|
|
||||||
setMessage(result.message);
|
|
||||||
} else {
|
|
||||||
console.warn('Failed to generate commit message:', result.error);
|
|
||||||
toast.error('Failed to generate commit message', {
|
|
||||||
description: result.error || 'Unknown error',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.warn('Error generating commit message:', err);
|
|
||||||
toast.error('Failed to generate commit message', {
|
|
||||||
description: err instanceof Error ? err.message : 'Unknown error',
|
|
||||||
});
|
|
||||||
} finally {
|
|
||||||
setIsGenerating(false);
|
|
||||||
}
|
|
||||||
}, [worktree, commitEffectiveModel, commitEffectiveModelEntry]);
|
|
||||||
|
|
||||||
// Keep a stable ref to generateCommitMessage so the open-dialog effect
|
|
||||||
// doesn't re-fire (and erase user edits) when the model override changes.
|
|
||||||
const generateCommitMessageRef = useRef(generateCommitMessage);
|
|
||||||
useEffect(() => {
|
|
||||||
generateCommitMessageRef.current = generateCommitMessage;
|
|
||||||
});
|
|
||||||
|
|
||||||
// Generate AI commit message when dialog opens (if enabled)
|
// Generate AI commit message when dialog opens (if enabled)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (open && worktree) {
|
if (open && worktree) {
|
||||||
@@ -592,7 +543,45 @@ export function CommitWorktreeDialog({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
generateCommitMessageRef.current();
|
setIsGenerating(true);
|
||||||
|
let cancelled = false;
|
||||||
|
|
||||||
|
const generateMessage = async () => {
|
||||||
|
try {
|
||||||
|
const api = getElectronAPI();
|
||||||
|
if (!api?.worktree?.generateCommitMessage) {
|
||||||
|
if (!cancelled) {
|
||||||
|
setIsGenerating(false);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await api.worktree.generateCommitMessage(worktree.path);
|
||||||
|
|
||||||
|
if (cancelled) return;
|
||||||
|
|
||||||
|
if (result.success && result.message) {
|
||||||
|
setMessage(result.message);
|
||||||
|
} else {
|
||||||
|
console.warn('Failed to generate commit message:', result.error);
|
||||||
|
setMessage('');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
if (cancelled) return;
|
||||||
|
console.warn('Error generating commit message:', err);
|
||||||
|
setMessage('');
|
||||||
|
} finally {
|
||||||
|
if (!cancelled) {
|
||||||
|
setIsGenerating(false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
generateMessage();
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
cancelled = true;
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}, [open, worktree, enableAiCommitMessages]);
|
}, [open, worktree, enableAiCommitMessages]);
|
||||||
|
|
||||||
@@ -600,12 +589,12 @@ export function CommitWorktreeDialog({
|
|||||||
|
|
||||||
const allSelected = selectedFiles.size === files.length && files.length > 0;
|
const allSelected = selectedFiles.size === files.length && files.length > 0;
|
||||||
|
|
||||||
// Prevent the dialog from being dismissed while a push or generation is in progress.
|
// Prevent the dialog from being dismissed while a push is in progress.
|
||||||
// Overlay clicks and Escape key both route through onOpenChange(false); we
|
// Overlay clicks and Escape key both route through onOpenChange(false); we
|
||||||
// intercept those here so the UI stays open until the operation completes.
|
// intercept those here so the UI stays open until the push completes.
|
||||||
const handleOpenChange = (nextOpen: boolean) => {
|
const handleOpenChange = (nextOpen: boolean) => {
|
||||||
if (!nextOpen && (isLoading || isPushing || isGenerating)) {
|
if (!nextOpen && isPushing) {
|
||||||
// Ignore close requests during an active commit, push, or generation.
|
// Ignore close requests during an active push.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
onOpenChange(nextOpen);
|
onOpenChange(nextOpen);
|
||||||
@@ -824,7 +813,6 @@ export function CommitWorktreeDialog({
|
|||||||
|
|
||||||
{/* Commit Message */}
|
{/* Commit Message */}
|
||||||
<div className="grid gap-1.5">
|
<div className="grid gap-1.5">
|
||||||
<div className="flex items-center justify-between">
|
|
||||||
<Label htmlFor="commit-message" className="flex items-center gap-2">
|
<Label htmlFor="commit-message" className="flex items-center gap-2">
|
||||||
Commit Message
|
Commit Message
|
||||||
{isGenerating && (
|
{isGenerating && (
|
||||||
@@ -834,36 +822,6 @@ export function CommitWorktreeDialog({
|
|||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
</Label>
|
</Label>
|
||||||
<div className="flex items-center gap-1">
|
|
||||||
{enableAiCommitMessages && (
|
|
||||||
<>
|
|
||||||
<Button
|
|
||||||
variant="ghost"
|
|
||||||
size="sm"
|
|
||||||
onClick={generateCommitMessage}
|
|
||||||
disabled={isGenerating || isLoading}
|
|
||||||
className="h-6 px-2 text-xs"
|
|
||||||
title="Regenerate commit message"
|
|
||||||
>
|
|
||||||
{isGenerating ? (
|
|
||||||
<Spinner size="xs" className="mr-1" />
|
|
||||||
) : (
|
|
||||||
<RefreshCw className="w-3 h-3 mr-1" />
|
|
||||||
)}
|
|
||||||
Regenerate
|
|
||||||
</Button>
|
|
||||||
<ModelOverrideTrigger
|
|
||||||
currentModelEntry={commitModelOverride.effectiveModelEntry}
|
|
||||||
onModelChange={commitModelOverride.setOverride}
|
|
||||||
phase="commitMessageModel"
|
|
||||||
isOverridden={commitModelOverride.isOverridden}
|
|
||||||
size="sm"
|
|
||||||
variant="icon"
|
|
||||||
/>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<Textarea
|
<Textarea
|
||||||
id="commit-message"
|
id="commit-message"
|
||||||
placeholder={
|
placeholder={
|
||||||
|
|||||||
@@ -26,8 +26,6 @@ import { getElectronAPI } from '@/lib/electron';
|
|||||||
import { getHttpApiClient } from '@/lib/http-api-client';
|
import { getHttpApiClient } from '@/lib/http-api-client';
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import { useWorktreeBranches } from '@/hooks/queries';
|
import { useWorktreeBranches } from '@/hooks/queries';
|
||||||
import { ModelOverrideTrigger, useModelOverride } from '@/components/shared';
|
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
|
||||||
|
|
||||||
interface RemoteInfo {
|
interface RemoteInfo {
|
||||||
name: string;
|
name: string;
|
||||||
@@ -94,9 +92,6 @@ export function CreatePRDialog({
|
|||||||
// Generate description state
|
// Generate description state
|
||||||
const [isGeneratingDescription, setIsGeneratingDescription] = useState(false);
|
const [isGeneratingDescription, setIsGeneratingDescription] = useState(false);
|
||||||
|
|
||||||
// PR description model override
|
|
||||||
const prDescriptionModelOverride = useModelOverride({ phase: 'prDescriptionModel' });
|
|
||||||
|
|
||||||
// Use React Query for branch fetching - only enabled when dialog is open
|
// Use React Query for branch fetching - only enabled when dialog is open
|
||||||
const { data: branchesData, isLoading: isLoadingBranches } = useWorktreeBranches(
|
const { data: branchesData, isLoading: isLoadingBranches } = useWorktreeBranches(
|
||||||
open ? worktree?.path : undefined,
|
open ? worktree?.path : undefined,
|
||||||
@@ -304,20 +299,10 @@ export function CreatePRDialog({
|
|||||||
const api = getHttpApiClient();
|
const api = getHttpApiClient();
|
||||||
// Resolve the display name to the actual branch name for the API
|
// Resolve the display name to the actual branch name for the API
|
||||||
const resolvedRef = branchFullRefMap.get(baseBranch) || baseBranch;
|
const resolvedRef = branchFullRefMap.get(baseBranch) || baseBranch;
|
||||||
// Only strip the remote prefix if the resolved ref differs from the original
|
const branchNameForApi = resolvedRef.includes('/')
|
||||||
// (indicating it was resolved from a full ref like "origin/main").
|
|
||||||
// This preserves local branch names that contain slashes (e.g. "release/1.0").
|
|
||||||
const branchNameForApi =
|
|
||||||
resolvedRef !== baseBranch && resolvedRef.includes('/')
|
|
||||||
? resolvedRef.substring(resolvedRef.indexOf('/') + 1)
|
? resolvedRef.substring(resolvedRef.indexOf('/') + 1)
|
||||||
: resolvedRef;
|
: resolvedRef;
|
||||||
const result = await api.worktree.generatePRDescription(
|
const result = await api.worktree.generatePRDescription(worktree.path, branchNameForApi);
|
||||||
worktree.path,
|
|
||||||
branchNameForApi,
|
|
||||||
resolveModelString(prDescriptionModelOverride.effectiveModel),
|
|
||||||
prDescriptionModelOverride.effectiveModelEntry.thinkingLevel,
|
|
||||||
prDescriptionModelOverride.effectiveModelEntry.providerId
|
|
||||||
);
|
|
||||||
|
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
if (result.title) {
|
if (result.title) {
|
||||||
@@ -359,11 +344,9 @@ export function CreatePRDialog({
|
|||||||
// since the backend handles branch resolution. However, if the full ref is
|
// since the backend handles branch resolution. However, if the full ref is
|
||||||
// available, we can use it for more precise targeting.
|
// available, we can use it for more precise targeting.
|
||||||
const resolvedBaseBranch = branchFullRefMap.get(baseBranch) || baseBranch;
|
const resolvedBaseBranch = branchFullRefMap.get(baseBranch) || baseBranch;
|
||||||
// Only strip the remote prefix if the resolved ref differs from the original
|
// Strip the remote prefix from the resolved ref for the API call
|
||||||
// (indicating it was resolved from a full ref like "origin/main").
|
// (e.g. "origin/main" → "main") since the backend expects the branch name only
|
||||||
// This preserves local branch names that contain slashes (e.g. "release/1.0").
|
const baseBranchForApi = resolvedBaseBranch.includes('/')
|
||||||
const baseBranchForApi =
|
|
||||||
resolvedBaseBranch !== baseBranch && resolvedBaseBranch.includes('/')
|
|
||||||
? resolvedBaseBranch.substring(resolvedBaseBranch.indexOf('/') + 1)
|
? resolvedBaseBranch.substring(resolvedBaseBranch.indexOf('/') + 1)
|
||||||
: resolvedBaseBranch;
|
: resolvedBaseBranch;
|
||||||
|
|
||||||
@@ -502,7 +485,7 @@ export function CreatePRDialog({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<Dialog open={open} onOpenChange={handleClose}>
|
<Dialog open={open} onOpenChange={handleClose}>
|
||||||
<DialogContent className="sm:max-w-[550px] max-h-[85vh] flex flex-col">
|
<DialogContent className="sm:max-w-[550px]">
|
||||||
<DialogHeader>
|
<DialogHeader>
|
||||||
<DialogTitle className="flex items-center gap-2">
|
<DialogTitle className="flex items-center gap-2">
|
||||||
<GitPullRequest className="w-5 h-5" />
|
<GitPullRequest className="w-5 h-5" />
|
||||||
@@ -576,7 +559,7 @@ export function CreatePRDialog({
|
|||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
<div className="grid gap-4 py-4 overflow-y-auto min-h-0 flex-1">
|
<div className="grid gap-4 py-4">
|
||||||
{worktree.hasChanges && (
|
{worktree.hasChanges && (
|
||||||
<div className="grid gap-2">
|
<div className="grid gap-2">
|
||||||
<Label htmlFor="commit-message">
|
<Label htmlFor="commit-message">
|
||||||
@@ -598,7 +581,6 @@ export function CreatePRDialog({
|
|||||||
<div className="grid gap-2">
|
<div className="grid gap-2">
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<Label htmlFor="pr-title">PR Title</Label>
|
<Label htmlFor="pr-title">PR Title</Label>
|
||||||
<div className="flex items-center gap-1">
|
|
||||||
<Button
|
<Button
|
||||||
variant="ghost"
|
variant="ghost"
|
||||||
size="sm"
|
size="sm"
|
||||||
@@ -623,15 +605,6 @@ export function CreatePRDialog({
|
|||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
</Button>
|
</Button>
|
||||||
<ModelOverrideTrigger
|
|
||||||
currentModelEntry={prDescriptionModelOverride.effectiveModelEntry}
|
|
||||||
onModelChange={prDescriptionModelOverride.setOverride}
|
|
||||||
phase="prDescriptionModel"
|
|
||||||
isOverridden={prDescriptionModelOverride.isOverridden}
|
|
||||||
size="sm"
|
|
||||||
variant="icon"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
<Input
|
<Input
|
||||||
id="pr-title"
|
id="pr-title"
|
||||||
@@ -760,7 +733,7 @@ export function CreatePRDialog({
|
|||||||
{error && <p className="text-sm text-destructive">{error}</p>}
|
{error && <p className="text-sm text-destructive">{error}</p>}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<DialogFooter className="shrink-0 pt-2 border-t">
|
<DialogFooter>
|
||||||
<Button variant="ghost" onClick={handleClose} disabled={isLoading}>
|
<Button variant="ghost" onClick={handleClose} disabled={isLoading}>
|
||||||
Cancel
|
Cancel
|
||||||
</Button>
|
</Button>
|
||||||
|
|||||||
@@ -10,27 +10,12 @@ import {
|
|||||||
import { Button } from '@/components/ui/button';
|
import { Button } from '@/components/ui/button';
|
||||||
import { Input } from '@/components/ui/input';
|
import { Input } from '@/components/ui/input';
|
||||||
import { Label } from '@/components/ui/label';
|
import { Label } from '@/components/ui/label';
|
||||||
import {
|
import { GitBranch, AlertCircle, ChevronDown, ChevronRight, Globe, RefreshCw } from 'lucide-react';
|
||||||
GitBranch,
|
|
||||||
AlertCircle,
|
|
||||||
ChevronDown,
|
|
||||||
ChevronRight,
|
|
||||||
Globe,
|
|
||||||
RefreshCw,
|
|
||||||
Cloud,
|
|
||||||
} from 'lucide-react';
|
|
||||||
import { Spinner } from '@/components/ui/spinner';
|
import { Spinner } from '@/components/ui/spinner';
|
||||||
import { getElectronAPI } from '@/lib/electron';
|
import { getElectronAPI } from '@/lib/electron';
|
||||||
import { getHttpApiClient } from '@/lib/http-api-client';
|
import { getHttpApiClient } from '@/lib/http-api-client';
|
||||||
import { BranchAutocomplete } from '@/components/ui/branch-autocomplete';
|
import { BranchAutocomplete } from '@/components/ui/branch-autocomplete';
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import {
|
|
||||||
Select,
|
|
||||||
SelectContent,
|
|
||||||
SelectItem,
|
|
||||||
SelectTrigger,
|
|
||||||
SelectValue,
|
|
||||||
} from '@/components/ui/select';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Parse git/worktree error messages and return user-friendly versions
|
* Parse git/worktree error messages and return user-friendly versions
|
||||||
@@ -128,19 +113,10 @@ export function CreateWorktreeDialog({
|
|||||||
// allow free-form branch entry via allowCreate as a fallback.
|
// allow free-form branch entry via allowCreate as a fallback.
|
||||||
const [branchFetchError, setBranchFetchError] = useState<string | null>(null);
|
const [branchFetchError, setBranchFetchError] = useState<string | null>(null);
|
||||||
|
|
||||||
// Remote selection state
|
|
||||||
const [selectedRemote, setSelectedRemote] = useState<string>('local');
|
|
||||||
const [availableRemotes, setAvailableRemotes] = useState<Array<{ name: string; url: string }>>(
|
|
||||||
[]
|
|
||||||
);
|
|
||||||
const [remoteBranches, setRemoteBranches] = useState<
|
|
||||||
Map<string, Array<{ name: string; fullRef: string }>>
|
|
||||||
>(new Map());
|
|
||||||
|
|
||||||
// AbortController ref so in-flight branch fetches can be cancelled when the dialog closes
|
// AbortController ref so in-flight branch fetches can be cancelled when the dialog closes
|
||||||
const branchFetchAbortRef = useRef<AbortController | null>(null);
|
const branchFetchAbortRef = useRef<AbortController | null>(null);
|
||||||
|
|
||||||
// Fetch available branches and remotes when the base branch section is expanded
|
// Fetch available branches (local + remote) when the base branch section is expanded
|
||||||
const fetchBranches = useCallback(
|
const fetchBranches = useCallback(
|
||||||
async (signal?: AbortSignal) => {
|
async (signal?: AbortSignal) => {
|
||||||
if (!projectPath) return;
|
if (!projectPath) return;
|
||||||
@@ -149,16 +125,13 @@ export function CreateWorktreeDialog({
|
|||||||
try {
|
try {
|
||||||
const api = getHttpApiClient();
|
const api = getHttpApiClient();
|
||||||
|
|
||||||
// Fetch both branches and remotes in parallel
|
// Fetch branches using the project path (use listBranches on the project root).
|
||||||
const [branchResult, remotesResult] = await Promise.all([
|
// Pass the AbortSignal so controller.abort() cancels the in-flight HTTP request.
|
||||||
api.worktree.listBranches(projectPath, true, signal),
|
const branchResult = await api.worktree.listBranches(projectPath, true, signal);
|
||||||
api.worktree.listRemotes(projectPath),
|
|
||||||
]);
|
|
||||||
|
|
||||||
// If the fetch was aborted while awaiting, bail out to avoid stale state writes
|
// If the fetch was aborted while awaiting, bail out to avoid stale state writes
|
||||||
if (signal?.aborted) return;
|
if (signal?.aborted) return;
|
||||||
|
|
||||||
// Process branches
|
|
||||||
if (branchResult.success && branchResult.result) {
|
if (branchResult.success && branchResult.result) {
|
||||||
setBranchFetchError(null);
|
setBranchFetchError(null);
|
||||||
setAvailableBranches(
|
setAvailableBranches(
|
||||||
@@ -174,30 +147,6 @@ export function CreateWorktreeDialog({
|
|||||||
setBranchFetchError(message);
|
setBranchFetchError(message);
|
||||||
setAvailableBranches([{ name: 'main', isRemote: false }]);
|
setAvailableBranches([{ name: 'main', isRemote: false }]);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Process remotes
|
|
||||||
if (remotesResult.success && remotesResult.result) {
|
|
||||||
const remotes = remotesResult.result.remotes;
|
|
||||||
setAvailableRemotes(
|
|
||||||
remotes.map((r: { name: string; url: string; branches: unknown[] }) => ({
|
|
||||||
name: r.name,
|
|
||||||
url: r.url,
|
|
||||||
}))
|
|
||||||
);
|
|
||||||
|
|
||||||
// Build remote branches map for filtering
|
|
||||||
const branchesMap = new Map<string, Array<{ name: string; fullRef: string }>>();
|
|
||||||
remotes.forEach(
|
|
||||||
(r: {
|
|
||||||
name: string;
|
|
||||||
url: string;
|
|
||||||
branches: Array<{ name: string; fullRef: string }>;
|
|
||||||
}) => {
|
|
||||||
branchesMap.set(r.name, r.branches || []);
|
|
||||||
}
|
|
||||||
);
|
|
||||||
setRemoteBranches(branchesMap);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
// If aborted, don't update state
|
// If aborted, don't update state
|
||||||
if (signal?.aborted) return;
|
if (signal?.aborted) return;
|
||||||
@@ -211,8 +160,6 @@ export function CreateWorktreeDialog({
|
|||||||
// and enable free-form entry (allowCreate) so the user can still type
|
// and enable free-form entry (allowCreate) so the user can still type
|
||||||
// any branch name when the remote list is unavailable.
|
// any branch name when the remote list is unavailable.
|
||||||
setAvailableBranches([{ name: 'main', isRemote: false }]);
|
setAvailableBranches([{ name: 'main', isRemote: false }]);
|
||||||
setAvailableRemotes([]);
|
|
||||||
setRemoteBranches(new Map());
|
|
||||||
} finally {
|
} finally {
|
||||||
if (!signal?.aborted) {
|
if (!signal?.aborted) {
|
||||||
setIsLoadingBranches(false);
|
setIsLoadingBranches(false);
|
||||||
@@ -251,30 +198,27 @@ export function CreateWorktreeDialog({
|
|||||||
setAvailableBranches([]);
|
setAvailableBranches([]);
|
||||||
setBranchFetchError(null);
|
setBranchFetchError(null);
|
||||||
setIsLoadingBranches(false);
|
setIsLoadingBranches(false);
|
||||||
setSelectedRemote('local');
|
|
||||||
setAvailableRemotes([]);
|
|
||||||
setRemoteBranches(new Map());
|
|
||||||
}
|
}
|
||||||
}, [open]);
|
}, [open]);
|
||||||
|
|
||||||
// Build branch name list for the autocomplete, filtered by selected remote
|
// Build branch name list for the autocomplete, with local branches first then remote
|
||||||
const branchNames = useMemo(() => {
|
const branchNames = useMemo(() => {
|
||||||
// If "local" is selected, show only local branches
|
const local: string[] = [];
|
||||||
if (selectedRemote === 'local') {
|
const remote: string[] = [];
|
||||||
return availableBranches.filter((b) => !b.isRemote).map((b) => b.name);
|
|
||||||
|
for (const b of availableBranches) {
|
||||||
|
if (b.isRemote) {
|
||||||
|
// Skip bare remote refs without a branch name (e.g. "origin" by itself)
|
||||||
|
if (!b.name.includes('/')) continue;
|
||||||
|
remote.push(b.name);
|
||||||
|
} else {
|
||||||
|
local.push(b.name);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If a specific remote is selected, show only branches from that remote
|
// Local branches first, then remote branches
|
||||||
const remoteBranchList = remoteBranches.get(selectedRemote);
|
return [...local, ...remote];
|
||||||
if (remoteBranchList) {
|
}, [availableBranches]);
|
||||||
return remoteBranchList.map((b) => b.fullRef);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback: filter from available branches by remote prefix
|
|
||||||
return availableBranches
|
|
||||||
.filter((b) => b.isRemote && b.name.startsWith(`${selectedRemote}/`))
|
|
||||||
.map((b) => b.name);
|
|
||||||
}, [availableBranches, selectedRemote, remoteBranches]);
|
|
||||||
|
|
||||||
// Determine if the selected base branch is a remote branch.
|
// Determine if the selected base branch is a remote branch.
|
||||||
// Also detect manually entered remote-style names (e.g. "origin/feature")
|
// Also detect manually entered remote-style names (e.g. "origin/feature")
|
||||||
@@ -340,33 +284,11 @@ export function CreateWorktreeDialog({
|
|||||||
|
|
||||||
if (result.success && result.worktree) {
|
if (result.success && result.worktree) {
|
||||||
const baseDesc = effectiveBaseBranch ? ` from ${effectiveBaseBranch}` : '';
|
const baseDesc = effectiveBaseBranch ? ` from ${effectiveBaseBranch}` : '';
|
||||||
const commitInfo = result.worktree.baseCommitHash
|
|
||||||
? ` (${result.worktree.baseCommitHash})`
|
|
||||||
: '';
|
|
||||||
|
|
||||||
// Show sync result feedback
|
|
||||||
const syncResult = result.worktree.syncResult;
|
|
||||||
if (syncResult?.diverged) {
|
|
||||||
// Branch had diverged — warn the user
|
|
||||||
toast.warning(`Worktree created for branch "${result.worktree.branch}"`, {
|
|
||||||
description: `${syncResult.message}`,
|
|
||||||
duration: 8000,
|
|
||||||
});
|
|
||||||
} else if (syncResult && !syncResult.synced && syncResult.message) {
|
|
||||||
// Sync was attempted but failed (network error, etc.)
|
|
||||||
toast.warning(`Worktree created for branch "${result.worktree.branch}"`, {
|
|
||||||
description: `Created with local copy. ${syncResult.message}`,
|
|
||||||
duration: 6000,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// Normal success — include commit info if available
|
|
||||||
toast.success(`Worktree created for branch "${result.worktree.branch}"`, {
|
toast.success(`Worktree created for branch "${result.worktree.branch}"`, {
|
||||||
description: result.worktree.isNew
|
description: result.worktree.isNew
|
||||||
? `New branch created${baseDesc}${commitInfo}`
|
? `New branch created${baseDesc}`
|
||||||
: `Using existing branch${commitInfo}`,
|
: 'Using existing branch',
|
||||||
});
|
});
|
||||||
}
|
|
||||||
|
|
||||||
onCreated({ path: result.worktree.path, branch: result.worktree.branch });
|
onCreated({ path: result.worktree.path, branch: result.worktree.branch });
|
||||||
onOpenChange(false);
|
onOpenChange(false);
|
||||||
setBranchName('');
|
setBranchName('');
|
||||||
@@ -474,47 +396,6 @@ export function CreateWorktreeDialog({
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Remote Selector */}
|
|
||||||
<div className="grid gap-1.5">
|
|
||||||
<Label htmlFor="remote-select" className="text-xs text-muted-foreground">
|
|
||||||
Source
|
|
||||||
</Label>
|
|
||||||
<Select
|
|
||||||
value={selectedRemote}
|
|
||||||
onValueChange={(value) => {
|
|
||||||
setSelectedRemote(value);
|
|
||||||
// Clear base branch when switching remotes
|
|
||||||
setBaseBranch('');
|
|
||||||
}}
|
|
||||||
disabled={isLoadingBranches}
|
|
||||||
>
|
|
||||||
<SelectTrigger id="remote-select" className="h-8">
|
|
||||||
<SelectValue placeholder="Select source..." />
|
|
||||||
</SelectTrigger>
|
|
||||||
<SelectContent>
|
|
||||||
<SelectItem value="local">
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<GitBranch className="w-3.5 h-3.5" />
|
|
||||||
<span>Local Branches</span>
|
|
||||||
</div>
|
|
||||||
</SelectItem>
|
|
||||||
{availableRemotes.map((remote) => (
|
|
||||||
<SelectItem key={remote.name} value={remote.name}>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<Cloud className="w-3.5 h-3.5" />
|
|
||||||
<span>{remote.name}</span>
|
|
||||||
{remote.url && (
|
|
||||||
<span className="text-xs text-muted-foreground truncate max-w-[150px]">
|
|
||||||
({remote.url})
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</SelectItem>
|
|
||||||
))}
|
|
||||||
</SelectContent>
|
|
||||||
</Select>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<BranchAutocomplete
|
<BranchAutocomplete
|
||||||
value={baseBranch}
|
value={baseBranch}
|
||||||
onChange={(value) => {
|
onChange={(value) => {
|
||||||
@@ -522,13 +403,9 @@ export function CreateWorktreeDialog({
|
|||||||
setError(null);
|
setError(null);
|
||||||
}}
|
}}
|
||||||
branches={branchNames}
|
branches={branchNames}
|
||||||
placeholder={
|
placeholder="Select base branch (default: HEAD)..."
|
||||||
selectedRemote === 'local'
|
|
||||||
? 'Select local branch (default: HEAD)...'
|
|
||||||
: `Select branch from ${selectedRemote}...`
|
|
||||||
}
|
|
||||||
disabled={isLoadingBranches}
|
disabled={isLoadingBranches}
|
||||||
allowCreate={!!branchFetchError || selectedRemote === 'local'}
|
allowCreate={!!branchFetchError}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
{isRemoteBaseBranch && (
|
{isRemoteBaseBranch && (
|
||||||
@@ -537,12 +414,6 @@ export function CreateWorktreeDialog({
|
|||||||
<span>Remote branch — will fetch latest before creating worktree</span>
|
<span>Remote branch — will fetch latest before creating worktree</span>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
{!isRemoteBaseBranch && baseBranch && !branchFetchError && (
|
|
||||||
<div className="flex items-center gap-1.5 text-xs text-muted-foreground">
|
|
||||||
<RefreshCw className="w-3 h-3" />
|
|
||||||
<span>Will sync with remote tracking branch if available</span>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
@@ -583,7 +454,7 @@ export function CreateWorktreeDialog({
|
|||||||
{isLoading ? (
|
{isLoading ? (
|
||||||
<>
|
<>
|
||||||
<Spinner size="sm" className="mr-2" />
|
<Spinner size="sm" className="mr-2" />
|
||||||
{baseBranch.trim() ? 'Syncing & Creating...' : 'Creating...'}
|
{isRemoteBaseBranch ? 'Fetching & Creating...' : 'Creating...'}
|
||||||
</>
|
</>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
|
|||||||
@@ -72,19 +72,9 @@ export function DeleteWorktreeDialog({
|
|||||||
? `Branch "${worktree.branch}" was also deleted`
|
? `Branch "${worktree.branch}" was also deleted`
|
||||||
: `Branch "${worktree.branch}" was kept`,
|
: `Branch "${worktree.branch}" was kept`,
|
||||||
});
|
});
|
||||||
// Close the dialog first, then notify the parent.
|
onDeleted(worktree, deleteBranch);
|
||||||
// This ensures the dialog unmounts before the parent
|
|
||||||
// triggers potentially heavy state updates (feature branch
|
|
||||||
// resets, worktree refresh), reducing concurrent re-renders
|
|
||||||
// that can cascade into React error #185.
|
|
||||||
onOpenChange(false);
|
onOpenChange(false);
|
||||||
setDeleteBranch(false);
|
setDeleteBranch(false);
|
||||||
try {
|
|
||||||
onDeleted(worktree, deleteBranch);
|
|
||||||
} catch (error) {
|
|
||||||
// Prevent errors in onDeleted from propagating to the error boundary
|
|
||||||
console.error('onDeleted callback failed:', error);
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
toast.error('Failed to delete worktree', {
|
toast.error('Failed to delete worktree', {
|
||||||
description: result.error,
|
description: result.error,
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
export { AddFeatureDialog } from './add-feature-dialog';
|
export { AddFeatureDialog } from './add-feature-dialog';
|
||||||
export { QuickAddDialog } from './quick-add-dialog';
|
|
||||||
export { AgentOutputModal } from './agent-output-modal';
|
export { AgentOutputModal } from './agent-output-modal';
|
||||||
export { BacklogPlanDialog } from './backlog-plan-dialog';
|
export { BacklogPlanDialog } from './backlog-plan-dialog';
|
||||||
export { CompletedFeaturesModal } from './completed-features-modal';
|
export { CompletedFeaturesModal } from './completed-features-modal';
|
||||||
@@ -25,7 +24,6 @@ export { ViewStashesDialog } from './view-stashes-dialog';
|
|||||||
export { StashApplyConflictDialog } from './stash-apply-conflict-dialog';
|
export { StashApplyConflictDialog } from './stash-apply-conflict-dialog';
|
||||||
export { CherryPickDialog } from './cherry-pick-dialog';
|
export { CherryPickDialog } from './cherry-pick-dialog';
|
||||||
export { GitPullDialog } from './git-pull-dialog';
|
export { GitPullDialog } from './git-pull-dialog';
|
||||||
export { ChangePRNumberDialog } from './change-pr-number-dialog';
|
|
||||||
export {
|
export {
|
||||||
BranchConflictDialog,
|
BranchConflictDialog,
|
||||||
type BranchConflictData,
|
type BranchConflictData,
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user