Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-05 09:33:07 +00:00)

Compare commits: 20 commits, 188b08ba7c ... refactor/a
| Author | SHA1 | Date |
|---|---|---|
| | a9d39b9320 | |
| | 9fd2cf2bc4 | |
| | ebc7987988 | |
| | 29b3eef500 | |
| | 010e516b0e | |
| | 00e4712ae7 | |
| | 4b4ae04fbe | |
| | 04775af561 | |
| | 5a5c56a4cf | |
| | bf82f92132 | |
| | adddcf71a2 | |
| | 6bb7b86487 | |
| | b8fa7fc579 | |
| | 7fb0d0f2ca | |
| | f15725f28a | |
| | 7d7d152d4e | |
| | 07f777da22 | |
| | b10501ea79 | |
| | 1a460c301a | |
| | c1f480fe49 | |
@@ -43,10 +43,14 @@ export function createInitGitHandler() {
       // .git doesn't exist, continue with initialization
     }

-    // Initialize git and create an initial empty commit
-    await execAsync(`git init && git commit --allow-empty -m "Initial commit"`, {
-      cwd: projectPath,
-    });
+    // Initialize git with 'main' as the default branch (matching GitHub's standard since 2020)
+    // and create an initial empty commit
+    await execAsync(
+      `git init --initial-branch=main && git commit --allow-empty -m "Initial commit"`,
+      {
+        cwd: projectPath,
+      }
+    );

     res.json({
       success: true,
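Worth noting for older environments: `git init --initial-branch` only exists in Git 2.28 and newer. A minimal fallback sketch (not part of this change; the helper name is hypothetical) could rename the branch after the first commit instead:

```ts
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

// Hypothetical fallback: prefer --initial-branch (Git 2.28+), otherwise rename after committing.
async function initRepoWithMainBranch(projectPath: string): Promise<void> {
  try {
    await execAsync(
      'git init --initial-branch=main && git commit --allow-empty -m "Initial commit"',
      { cwd: projectPath }
    );
  } catch {
    // Older Git: init with the configured default branch, commit, then force-rename to main.
    await execAsync('git init && git commit --allow-empty -m "Initial commit"', { cwd: projectPath });
    await execAsync('git branch -M main', { cwd: projectPath });
  }
}
```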
@@ -4,7 +4,7 @@

 import path from 'path';
 import type { ExecuteOptions, ParsedTask } from '@automaker/types';
-import { buildPromptWithImages, createLogger } from '@automaker/utils';
+import { buildPromptWithImages, createLogger, isAuthenticationError } from '@automaker/utils';
 import { getFeatureDir } from '@automaker/platform';
 import * as secureFs from '../lib/secure-fs.js';
 import { TypedEventBus } from './typed-event-bus.js';
@@ -180,9 +180,9 @@ export class AgentExecutor {
     }

     logger.info(`Starting stream for feature ${featureId}...`);
-    const stream = provider.executeQuery(executeOptions);

     try {
+      const stream = provider.executeQuery(executeOptions);
       streamLoop: for await (const msg of stream) {
         receivedAnyStreamMessage = true;
         appendRawEvent(msg);
@@ -206,14 +206,10 @@ export class AgentExecutor {
             responseText += '\n\n';
           }
           responseText += newText;
-          if (
-            block.text &&
-            (block.text.includes('Invalid API key') ||
-              block.text.includes('authentication_failed') ||
-              block.text.includes('Fix external API key'))
-          )
+          // Check for authentication errors using provider-agnostic utility
+          if (block.text && isAuthenticationError(block.text))
             throw new Error(
-              "Authentication failed: Invalid or expired API key. Please check your ANTHROPIC_API_KEY, or run 'claude login' to re-authenticate."
+              'Authentication failed: Invalid or expired API key. Please check your API key configuration or re-authenticate with your provider.'
             );
           scheduleWrite();
           const hasExplicitMarker = responseText.includes('[SPEC_GENERATED]'),
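The hard-coded, Anthropic-specific string checks are replaced by a shared `isAuthenticationError` helper from `@automaker/utils`. Its implementation is not part of this diff; a minimal sketch of such a provider-agnostic check, assuming it matches known auth-failure markers in the message text, might look like:

```ts
// Hypothetical sketch only; the real helper lives in @automaker/utils and is not shown here.
const AUTH_ERROR_MARKERS = [
  'invalid api key',
  'authentication_failed',
  'fix external api key',
  'unauthorized',
];

export function isAuthenticationError(text: string): boolean {
  const lower = text.toLowerCase();
  return AUTH_ERROR_MARKERS.some((marker) => lower.includes(marker));
}
```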
@@ -255,7 +251,7 @@ export class AgentExecutor {
               input: block.input,
             });
             if (responseText.length > 0 && !responseText.endsWith('\n')) responseText += '\n';
-            responseText += `\n Tool: ${block.name}\n`;
+            responseText += `\n🔧 Tool: ${block.name}\n`;
             if (block.input) responseText += `Input: ${JSON.stringify(block.input, null, 2)}\n`;
             scheduleWrite();
           }
@@ -502,10 +498,17 @@ export class AgentExecutor {
           planApproved = true;
           userFeedback = approvalResult.feedback;
           approvedPlanContent = approvalResult.editedPlan || currentPlanContent;
-          if (approvalResult.editedPlan)
+          if (approvalResult.editedPlan) {
+            // Re-parse tasks from edited plan to ensure we execute the updated tasks
+            const editedTasks = parseTasksFromSpec(approvalResult.editedPlan);
+            parsedTasks = editedTasks;
             await this.featureStateManager.updateFeaturePlanSpec(projectPath, featureId, {
               content: approvalResult.editedPlan,
+              tasks: editedTasks,
+              tasksTotal: editedTasks.length,
+              tasksCompleted: 0,
             });
+          }
           this.eventBus.emitAutoModeEvent('plan_approved', {
             featureId,
             projectPath,
@@ -183,7 +183,13 @@ export class AutoLoopCoordinator {
             nextFeature.id,
             projectState.config.useWorktrees,
             true
-          ).catch(() => {});
+          ).catch((error) => {
+            const errorInfo = classifyError(error);
+            logger.error(`Auto-loop feature ${nextFeature.id} failed:`, errorInfo.message);
+            if (this.trackFailureAndCheckPauseForProject(projectPath, branchName, errorInfo)) {
+              this.signalShouldPauseForProject(projectPath, branchName, errorInfo);
+            }
+          });
         }
         await this.sleep(2000, projectState.abortController.signal);
       } catch {
@@ -268,27 +274,64 @@ export class AutoLoopCoordinator {

   trackFailureAndCheckPauseForProject(
     projectPath: string,
-    errorInfo: { type: string; message: string }
+    branchNameOrError: string | null | { type: string; message: string },
+    errorInfo?: { type: string; message: string }
   ): boolean {
-    const projectState = this.autoLoopsByProject.get(getWorktreeAutoLoopKey(projectPath, null));
+    // Support both old (projectPath, errorInfo) and new (projectPath, branchName, errorInfo) signatures
+    let branchName: string | null;
+    let actualErrorInfo: { type: string; message: string };
+    if (
+      typeof branchNameOrError === 'object' &&
+      branchNameOrError !== null &&
+      'type' in branchNameOrError
+    ) {
+      // Old signature: (projectPath, errorInfo)
+      branchName = null;
+      actualErrorInfo = branchNameOrError;
+    } else {
+      // New signature: (projectPath, branchName, errorInfo)
+      branchName = branchNameOrError;
+      actualErrorInfo = errorInfo!;
+    }
+    const projectState = this.autoLoopsByProject.get(
+      getWorktreeAutoLoopKey(projectPath, branchName)
+    );
     if (!projectState) return false;
     const now = Date.now();
-    projectState.consecutiveFailures.push({ timestamp: now, error: errorInfo.message });
+    projectState.consecutiveFailures.push({ timestamp: now, error: actualErrorInfo.message });
     projectState.consecutiveFailures = projectState.consecutiveFailures.filter(
       (f) => now - f.timestamp < FAILURE_WINDOW_MS
     );
     return (
       projectState.consecutiveFailures.length >= CONSECUTIVE_FAILURE_THRESHOLD ||
-      errorInfo.type === 'quota_exhausted' ||
-      errorInfo.type === 'rate_limit'
+      actualErrorInfo.type === 'quota_exhausted' ||
+      actualErrorInfo.type === 'rate_limit'
     );
   }

   signalShouldPauseForProject(
     projectPath: string,
-    errorInfo: { type: string; message: string }
+    branchNameOrError: string | null | { type: string; message: string },
+    errorInfo?: { type: string; message: string }
   ): void {
-    const projectState = this.autoLoopsByProject.get(getWorktreeAutoLoopKey(projectPath, null));
+    // Support both old (projectPath, errorInfo) and new (projectPath, branchName, errorInfo) signatures
+    let branchName: string | null;
+    let actualErrorInfo: { type: string; message: string };
+    if (
+      typeof branchNameOrError === 'object' &&
+      branchNameOrError !== null &&
+      'type' in branchNameOrError
+    ) {
+      branchName = null;
+      actualErrorInfo = branchNameOrError;
+    } else {
+      branchName = branchNameOrError;
+      actualErrorInfo = errorInfo!;
+    }
+
+    const projectState = this.autoLoopsByProject.get(
+      getWorktreeAutoLoopKey(projectPath, branchName)
+    );
     if (!projectState || projectState.pausedDueToFailures) return;
     projectState.pausedDueToFailures = true;
     const failureCount = projectState.consecutiveFailures.length;
@@ -297,24 +340,29 @@ export class AutoLoopCoordinator {
         failureCount >= CONSECUTIVE_FAILURE_THRESHOLD
           ? `Auto Mode paused: ${failureCount} consecutive failures detected.`
           : 'Auto Mode paused: Usage limit or API error detected.',
-      errorType: errorInfo.type,
-      originalError: errorInfo.message,
+      errorType: actualErrorInfo.type,
+      originalError: actualErrorInfo.message,
       failureCount,
       projectPath,
+      branchName,
     });
-    this.stopAutoLoopForProject(projectPath);
+    this.stopAutoLoopForProject(projectPath, branchName);
   }

-  resetFailureTrackingForProject(projectPath: string): void {
-    const projectState = this.autoLoopsByProject.get(getWorktreeAutoLoopKey(projectPath, null));
+  resetFailureTrackingForProject(projectPath: string, branchName: string | null = null): void {
+    const projectState = this.autoLoopsByProject.get(
+      getWorktreeAutoLoopKey(projectPath, branchName)
+    );
     if (projectState) {
       projectState.consecutiveFailures = [];
       projectState.pausedDueToFailures = false;
     }
   }

-  recordSuccessForProject(projectPath: string): void {
-    const projectState = this.autoLoopsByProject.get(getWorktreeAutoLoopKey(projectPath, null));
+  recordSuccessForProject(projectPath: string, branchName: string | null = null): void {
+    const projectState = this.autoLoopsByProject.get(
+      getWorktreeAutoLoopKey(projectPath, branchName)
+    );
     if (projectState) projectState.consecutiveFailures = [];
   }

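Both methods above repeat the same argument-normalization logic so that callers which still pass `(projectPath, errorInfo)` keep working. A small sketch of that idiom pulled into a shared helper (an assumption for illustration, not something this change adds; the string/null case is checked first so the narrowing stays type-safe):

```ts
type ErrorInfo = { type: string; message: string };

// Hypothetical helper: detect which call shape was used and normalize the arguments.
function normalizeFailureArgs(
  branchNameOrError: string | null | ErrorInfo,
  errorInfo?: ErrorInfo
): { branchName: string | null; errorInfo: ErrorInfo } {
  if (typeof branchNameOrError === 'string' || branchNameOrError === null) {
    // New shape: (projectPath, branchName, errorInfo)
    return { branchName: branchNameOrError, errorInfo: errorInfo! };
  }
  // Old shape: (projectPath, errorInfo)
  return { branchName: null, errorInfo: branchNameOrError };
}
```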
@@ -14,8 +14,8 @@
 import path from 'path';
 import { exec } from 'child_process';
 import { promisify } from 'util';
-import type { Feature } from '@automaker/types';
-import { DEFAULT_MAX_CONCURRENCY } from '@automaker/types';
+import type { Feature, PlanningMode, ThinkingLevel } from '@automaker/types';
+import { DEFAULT_MAX_CONCURRENCY, stripProviderPrefix } from '@automaker/types';
 import { createLogger, loadContextFiles, classifyError } from '@automaker/utils';
 import { getFeatureDir } from '@automaker/platform';
 import * as secureFs from '../../lib/secure-fs.js';
@@ -32,6 +32,7 @@ import { RecoveryService } from '../recovery-service.js';
 import { PipelineOrchestrator } from '../pipeline-orchestrator.js';
 import { AgentExecutor } from '../agent-executor.js';
 import { TestRunnerService } from '../test-runner-service.js';
+import { ProviderFactory } from '../../providers/provider-factory.js';
 import { FeatureLoader } from '../feature-loader.js';
 import type { SettingsService } from '../settings-service.js';
 import type { EventEmitter } from '../../lib/events.js';
@@ -153,55 +154,162 @@ export class AutoModeServiceFacade {
       buildFeaturePrompt,
       (pPath, featureId, useWorktrees, _isAutoMode, _model, opts) =>
         facadeInstance!.executeFeature(featureId, useWorktrees, false, undefined, opts),
-      // runAgentFn stub - facade does not implement runAgent directly
-      async () => {
-        throw new Error('runAgentFn not implemented in facade');
+      // runAgentFn - delegates to AgentExecutor
+      async (
+        workDir: string,
+        featureId: string,
+        prompt: string,
+        abortController: AbortController,
+        pPath: string,
+        imagePaths?: string[],
+        model?: string,
+        opts?: Record<string, unknown>
+      ) => {
+        const resolvedModel = model || 'claude-sonnet-4-20250514';
+        const provider = ProviderFactory.getProviderForModel(resolvedModel);
+        const effectiveBareModel = stripProviderPrefix(resolvedModel);
+
+        await agentExecutor.execute(
+          {
+            workDir,
+            featureId,
+            prompt,
+            projectPath: pPath,
+            abortController,
+            imagePaths,
+            model: resolvedModel,
+            planningMode: opts?.planningMode as PlanningMode | undefined,
+            requirePlanApproval: opts?.requirePlanApproval as boolean | undefined,
+            previousContent: opts?.previousContent as string | undefined,
+            systemPrompt: opts?.systemPrompt as string | undefined,
+            autoLoadClaudeMd: opts?.autoLoadClaudeMd as boolean | undefined,
+            thinkingLevel: opts?.thinkingLevel as ThinkingLevel | undefined,
+            branchName: opts?.branchName as string | null | undefined,
+            provider,
+            effectiveBareModel,
+          },
+          {
+            waitForApproval: (fId, projPath) => planApprovalService.waitForApproval(fId, projPath),
+            saveFeatureSummary: (projPath, fId, summary) =>
+              featureStateManager.saveFeatureSummary(projPath, fId, summary),
+            updateFeatureSummary: (projPath, fId, summary) =>
+              featureStateManager.saveFeatureSummary(projPath, fId, summary),
+            buildTaskPrompt: (task, allTasks, taskIndex, _planContent, template, feedback) => {
+              let taskPrompt = template
+                .replace(/\{\{taskName\}\}/g, task.description)
+                .replace(/\{\{taskIndex\}\}/g, String(taskIndex + 1))
+                .replace(/\{\{totalTasks\}\}/g, String(allTasks.length))
+                .replace(/\{\{taskDescription\}\}/g, task.description || task.description);
+              if (feedback) {
+                taskPrompt = taskPrompt.replace(/\{\{userFeedback\}\}/g, feedback);
+              }
+              return taskPrompt;
+            },
+          }
+        );
       }
     );

-    // AutoLoopCoordinator - use shared if provided, otherwise create new
-    // Note: When using shared autoLoopCoordinator, callbacks are already set up by the global service
-    const autoLoopCoordinator =
-      sharedServices?.autoLoopCoordinator ??
-      new AutoLoopCoordinator(
+    // AutoLoopCoordinator - ALWAYS create new with proper execution callbacks
+    // NOTE: We don't use sharedServices.autoLoopCoordinator because it doesn't have
+    // execution callbacks. Each facade needs its own coordinator to execute features.
+    // The shared coordinator in GlobalAutoModeService is for monitoring only.
+    const autoLoopCoordinator = new AutoLoopCoordinator(
       eventBus,
       concurrencyManager,
       settingsService,
       // Callbacks
       (pPath, featureId, useWorktrees, isAutoMode) =>
         facadeInstance!.executeFeature(featureId, useWorktrees, isAutoMode),
       (pPath, branchName) =>
         featureLoader
           .getAll(pPath)
           .then((features) =>
             features.filter(
               (f) =>
                 (f.status === 'backlog' || f.status === 'ready') &&
                 (branchName === null
                   ? !f.branchName || f.branchName === 'main'
                   : f.branchName === branchName)
             )
           ),
       (pPath, branchName, maxConcurrency) =>
         facadeInstance!.saveExecutionStateForProject(branchName, maxConcurrency),
       (pPath, branchName) => facadeInstance!.clearExecutionState(branchName),
       (pPath) => featureStateManager.resetStuckFeatures(pPath),
       (feature) =>
         feature.status === 'completed' ||
         feature.status === 'verified' ||
         feature.status === 'waiting_approval',
       (featureId) => concurrencyManager.isRunning(featureId)
     );

-    // ExecutionService - runAgentFn is a stub
+    // ExecutionService - runAgentFn calls AgentExecutor.execute
     const executionService = new ExecutionService(
       eventBus,
       concurrencyManager,
       worktreeResolver,
       settingsService,
-      // Callbacks - runAgentFn stub
-      async () => {
-        throw new Error('runAgentFn not implemented in facade');
+      // runAgentFn - delegates to AgentExecutor
+      async (
+        workDir: string,
+        featureId: string,
+        prompt: string,
+        abortController: AbortController,
+        pPath: string,
+        imagePaths?: string[],
+        model?: string,
+        opts?: {
+          projectPath?: string;
+          planningMode?: PlanningMode;
+          requirePlanApproval?: boolean;
+          systemPrompt?: string;
+          autoLoadClaudeMd?: boolean;
+          thinkingLevel?: ThinkingLevel;
+          branchName?: string | null;
+        }
+      ) => {
+        const resolvedModel = model || 'claude-sonnet-4-20250514';
+        const provider = ProviderFactory.getProviderForModel(resolvedModel);
+        const effectiveBareModel = stripProviderPrefix(resolvedModel);
+
+        await agentExecutor.execute(
+          {
+            workDir,
+            featureId,
+            prompt,
+            projectPath: pPath,
+            abortController,
+            imagePaths,
+            model: resolvedModel,
+            planningMode: opts?.planningMode,
+            requirePlanApproval: opts?.requirePlanApproval,
+            systemPrompt: opts?.systemPrompt,
+            autoLoadClaudeMd: opts?.autoLoadClaudeMd,
+            thinkingLevel: opts?.thinkingLevel,
+            branchName: opts?.branchName,
+            provider,
+            effectiveBareModel,
+          },
+          {
+            waitForApproval: (fId, projPath) => planApprovalService.waitForApproval(fId, projPath),
+            saveFeatureSummary: (projPath, fId, summary) =>
+              featureStateManager.saveFeatureSummary(projPath, fId, summary),
+            updateFeatureSummary: (projPath, fId, summary) =>
+              featureStateManager.saveFeatureSummary(projPath, fId, summary),
+            buildTaskPrompt: (task, allTasks, taskIndex, planContent, template, feedback) => {
+              let taskPrompt = template
+                .replace(/\{\{taskName\}\}/g, task.description)
+                .replace(/\{\{taskIndex\}\}/g, String(taskIndex + 1))
+                .replace(/\{\{totalTasks\}\}/g, String(allTasks.length))
+                .replace(/\{\{taskDescription\}\}/g, task.description || task.description);
+              if (feedback) {
+                taskPrompt = taskPrompt.replace(/\{\{userFeedback\}\}/g, feedback);
+              }
+              return taskPrompt;
+            },
+          }
+        );
       },
       (context) => pipelineOrchestrator.executePipeline(context),
       (pPath, featureId, status) =>
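For reference, the `buildTaskPrompt` callbacks above do simple placeholder substitution. A hypothetical template and the string it would produce (all values invented for illustration; the real template is not shown in this diff):

```ts
// Illustrative only.
const template =
  'Task {{taskIndex}}/{{totalTasks}}: {{taskName}}\n{{taskDescription}}\nFeedback: {{userFeedback}}';

// With taskIndex = 0, allTasks.length = 3, task.description = 'Add login form',
// and feedback = 'Keep the existing CSS', the callback returns:
// 'Task 1/3: Add login form\nAdd login form\nFeedback: Keep the existing CSS'
// (note that both {{taskName}} and {{taskDescription}} are filled from task.description).
```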
@@ -220,8 +328,8 @@ export class AutoModeServiceFacade {
       (pPath, featureId, useWorktrees, _calledInternally) =>
         facadeInstance!.resumeFeature(featureId, useWorktrees, _calledInternally),
       (errorInfo) =>
-        autoLoopCoordinator.trackFailureAndCheckPauseForProject(projectPath, errorInfo),
-      (errorInfo) => autoLoopCoordinator.signalShouldPauseForProject(projectPath, errorInfo),
+        autoLoopCoordinator.trackFailureAndCheckPauseForProject(projectPath, null, errorInfo),
+      (errorInfo) => autoLoopCoordinator.signalShouldPauseForProject(projectPath, null, errorInfo),
       () => {
         /* recordSuccess - no-op */
       },
@@ -638,18 +746,14 @@ Address the follow-up instructions above. Review the previous work and make the
    * Get all active auto loop projects (unique project paths)
    */
   getActiveAutoLoopProjects(): string[] {
-    // This needs access to internal state - for now return empty
-    // Routes should migrate to getActiveAutoLoopWorktrees
-    return [];
+    return this.autoLoopCoordinator.getActiveProjects();
   }

   /**
    * Get all active auto loop worktrees
    */
   getActiveAutoLoopWorktrees(): Array<{ projectPath: string; branchName: string | null }> {
-    // This needs access to internal state - for now return empty
-    // Will be properly implemented when routes migrate
-    return [];
+    return this.autoLoopCoordinator.getActiveWorktrees();
   }

   /**
@@ -783,7 +887,7 @@ Address the follow-up instructions above. Review the previous work and make the
    * @param featureId - The feature ID
    */
   hasPendingApproval(featureId: string): boolean {
-    return this.planApprovalService.hasPendingApproval(featureId);
+    return this.planApprovalService.hasPendingApproval(featureId, this.projectPath);
   }

   /**
@@ -791,7 +895,7 @@ Address the follow-up instructions above. Review the previous work and make the
    * @param featureId - The feature ID
    */
   cancelPlanApproval(featureId: string): void {
-    this.planApprovalService.cancelApproval(featureId);
+    this.planApprovalService.cancelApproval(featureId, this.projectPath);
   }

   // ===========================================================================
@@ -51,15 +51,19 @@ export class GlobalAutoModeService {
     this.featureStateManager = new FeatureStateManager(events, featureLoader);

     // Create AutoLoopCoordinator with callbacks
-    // These callbacks use placeholders since GlobalAutoModeService doesn't execute features
-    // Feature execution is done via facades
+    // IMPORTANT: This coordinator is for MONITORING ONLY (getActiveProjects, getActiveWorktrees).
+    // Facades MUST create their own AutoLoopCoordinator for actual execution.
+    // The executeFeatureFn here is a safety guard - it should never be called.
     this.autoLoopCoordinator = new AutoLoopCoordinator(
       this.eventBus,
       this.concurrencyManager,
       settingsService,
-      // executeFeatureFn - not used by global service, routes handle execution
+      // executeFeatureFn - throws because facades must use their own coordinator for execution
       async () => {
-        throw new Error('executeFeatureFn not available in GlobalAutoModeService');
+        throw new Error(
+          'executeFeatureFn not available in GlobalAutoModeService. ' +
+            'Facades must create their own AutoLoopCoordinator for execution.'
+        );
       },
       // getBacklogFeaturesFn
       (pPath, branchName) =>
@@ -138,9 +138,9 @@ export type ExecuteFeatureFn = (
   projectPath: string,
   featureId: string,
   useWorktrees: boolean,
-  useScreenshots: boolean,
-  model?: string,
-  options?: { _calledInternally?: boolean }
+  isAutoMode: boolean,
+  providedWorktreePath?: string,
+  options?: { continuationPrompt?: string; _calledInternally?: boolean }
 ) => Promise<void>;

 /**
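A hypothetical call site for the updated signature (all argument values are illustrative; `ExecuteFeatureFn` is the type defined above):

```ts
// Sketch only: shows the new parameter order and the continuationPrompt option.
declare const executeFeature: ExecuteFeatureFn;

async function resumeWithFollowUp(): Promise<void> {
  await executeFeature(
    '/path/to/project',
    'feature-123',
    true,       // useWorktrees
    false,      // isAutoMode
    undefined,  // providedWorktreePath
    { continuationPrompt: 'Address the follow-up instructions above.', _calledInternally: true }
  );
}
```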
@@ -60,8 +60,12 @@ export class FeatureStateManager {
     const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      return JSON.parse(data);
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+      return result.data;
     } catch {
       return null;
     }
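`readJsonWithRecovery` comes from `@automaker/utils` and its definition is not part of this diff. Based on how it is called here and mocked in the tests further down, its result appears to carry the parsed value plus a recovery flag; a sketch of that assumed shape:

```ts
// Assumed shape, inferred from usage in this diff; the real type lives in @automaker/utils.
interface RecoveryReadResult<T> {
  data: T;            // parsed JSON, or the supplied default when reading/parsing fails
  recovered: boolean; // true when the value had to be restored from a backup copy
}
```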
apps/server/src/services/merge-service.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
+/**
+ * MergeService - Direct merge operations without HTTP
+ *
+ * Extracted from worktree merge route to allow internal service calls.
+ */
+
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { createLogger } from '@automaker/utils';
+import { spawnProcess } from '@automaker/platform';
+
+const execAsync = promisify(exec);
+const logger = createLogger('MergeService');
+
+export interface MergeOptions {
+  squash?: boolean;
+  message?: string;
+  deleteWorktreeAndBranch?: boolean;
+}
+
+export interface MergeServiceResult {
+  success: boolean;
+  error?: string;
+  hasConflicts?: boolean;
+  mergedBranch?: string;
+  targetBranch?: string;
+  deleted?: {
+    worktreeDeleted: boolean;
+    branchDeleted: boolean;
+  };
+}
+
+/**
+ * Execute git command with array arguments to prevent command injection.
+ */
+async function execGitCommand(args: string[], cwd: string): Promise<string> {
+  const result = await spawnProcess({
+    command: 'git',
+    args,
+    cwd,
+  });
+
+  if (result.exitCode === 0) {
+    return result.stdout;
+  } else {
+    const errorMessage = result.stderr || `Git command failed with code ${result.exitCode}`;
+    throw new Error(errorMessage);
+  }
+}
+
+/**
+ * Validate branch name to prevent command injection.
+ */
+function isValidBranchName(name: string): boolean {
+  return /^[a-zA-Z0-9._\-/]+$/.test(name) && name.length < 250;
+}
+
+/**
+ * Perform a git merge operation directly without HTTP.
+ *
+ * @param projectPath - Path to the git repository
+ * @param branchName - Source branch to merge
+ * @param worktreePath - Path to the worktree (used for deletion if requested)
+ * @param targetBranch - Branch to merge into (defaults to 'main')
+ * @param options - Merge options (squash, message, deleteWorktreeAndBranch)
+ */
+export async function performMerge(
+  projectPath: string,
+  branchName: string,
+  worktreePath: string,
+  targetBranch: string = 'main',
+  options?: MergeOptions
+): Promise<MergeServiceResult> {
+  if (!projectPath || !branchName || !worktreePath) {
+    return {
+      success: false,
+      error: 'projectPath, branchName, and worktreePath are required',
+    };
+  }
+
+  const mergeTo = targetBranch || 'main';
+
+  // Validate source branch exists
+  try {
+    await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
+  } catch {
+    return {
+      success: false,
+      error: `Branch "${branchName}" does not exist`,
+    };
+  }
+
+  // Validate target branch exists
+  try {
+    await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath });
+  } catch {
+    return {
+      success: false,
+      error: `Target branch "${mergeTo}" does not exist`,
+    };
+  }
+
+  // Merge the feature branch into the target branch
+  const mergeCmd = options?.squash
+    ? `git merge --squash ${branchName}`
+    : `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`;
+
+  try {
+    await execAsync(mergeCmd, { cwd: projectPath });
+  } catch (mergeError: unknown) {
+    // Check if this is a merge conflict
+    const err = mergeError as { stdout?: string; stderr?: string; message?: string };
+    const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
+    const hasConflicts = output.includes('CONFLICT') || output.includes('Automatic merge failed');
+
+    if (hasConflicts) {
+      return {
+        success: false,
+        error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. Please resolve conflicts manually.`,
+        hasConflicts: true,
+      };
+    }
+
+    // Re-throw non-conflict errors
+    throw mergeError;
+  }
+
+  // If squash merge, need to commit
+  if (options?.squash) {
+    await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, {
+      cwd: projectPath,
+    });
+  }
+
+  // Optionally delete the worktree and branch after merging
+  let worktreeDeleted = false;
+  let branchDeleted = false;
+
+  if (options?.deleteWorktreeAndBranch) {
+    // Remove the worktree
+    try {
+      await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
+      worktreeDeleted = true;
+    } catch {
+      // Try with prune if remove fails
+      try {
+        await execGitCommand(['worktree', 'prune'], projectPath);
+        worktreeDeleted = true;
+      } catch {
+        logger.warn(`Failed to remove worktree: ${worktreePath}`);
+      }
+    }
+
+    // Delete the branch (but not main/master)
+    if (branchName !== 'main' && branchName !== 'master') {
+      if (!isValidBranchName(branchName)) {
+        logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`);
+      } else {
+        try {
+          await execGitCommand(['branch', '-D', branchName], projectPath);
+          branchDeleted = true;
+        } catch {
+          logger.warn(`Failed to delete branch: ${branchName}`);
+        }
+      }
+    }
+  }
+
+  return {
+    success: true,
+    mergedBranch: branchName,
+    targetBranch: mergeTo,
+    deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined,
+  };
+}
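A usage sketch, mirroring how `PipelineOrchestrator.attemptMerge` calls the new service in the next hunk (the paths and branch name are illustrative):

```ts
import { performMerge } from './merge-service.js';

async function autoMergeFeatureBranch(): Promise<void> {
  const result = await performMerge(
    '/path/to/project',   // projectPath
    'feature/my-branch',  // source branch
    '/path/to/worktree',  // worktree path (only used when deletion is requested)
    'main',               // target branch
    { deleteWorktreeAndBranch: false }
  );

  if (!result.success) {
    if (result.hasConflicts) {
      // e.g. mark the feature as merge_conflict and hand off for manual/agent resolution
      return;
    }
    throw new Error(result.error);
  }
}
```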
@@ -27,6 +27,7 @@ import type { SettingsService } from './settings-service.js';
 import type { ConcurrencyManager } from './concurrency-manager.js';
 import { pipelineService } from './pipeline-service.js';
 import type { TestRunnerService, TestRunStatus } from './test-runner-service.js';
+import { performMerge } from './merge-service.js';
 import type {
   PipelineContext,
   PipelineStatusInfo,
@@ -65,8 +66,7 @@ export class PipelineOrchestrator {
     private loadContextFilesFn: typeof loadContextFiles,
     private buildFeaturePromptFn: BuildFeaturePromptFn,
     private executeFeatureFn: ExecuteFeatureFn,
-    private runAgentFn: RunAgentFn,
-    private serverPort = 3008
+    private runAgentFn: RunAgentFn
   ) {}

   async executePipeline(ctx: PipelineContext): Promise<void> {
@@ -483,25 +483,19 @@ export class PipelineOrchestrator {

     logger.info(`Attempting auto-merge for feature ${featureId} (branch: ${branchName})`);
     try {
-      const response = await fetch(`http://localhost:${this.serverPort}/api/worktree/merge`, {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({
-          projectPath,
-          branchName,
-          worktreePath,
-          targetBranch: 'main',
-          options: { deleteWorktreeAndBranch: false },
-        }),
-      });
+      // Call merge service directly instead of HTTP fetch
+      const result = await performMerge(
+        projectPath,
+        branchName,
+        worktreePath || projectPath,
+        'main',
+        {
+          deleteWorktreeAndBranch: false,
+        }
+      );

-      const data = (await response.json()) as {
-        success: boolean;
-        hasConflicts?: boolean;
-        error?: string;
-      };
-      if (!response.ok) {
-        if (data.hasConflicts) {
+      if (!result.success) {
+        if (result.hasConflicts) {
           await this.updateFeatureStatusFn(projectPath, featureId, 'merge_conflict');
           this.eventBus.emitAutoModeEvent('pipeline_merge_conflict', {
             featureId,
@@ -510,7 +504,7 @@ export class PipelineOrchestrator {
           });
           return { success: false, hasConflicts: true, needsAgentResolution: true };
         }
-        return { success: false, error: data.error };
+        return { success: false, error: result.error };
       }

       logger.info(`Auto-merge successful for feature ${featureId}`);
@@ -66,12 +66,18 @@ export class PlanApprovalService {
     this.settingsService = settingsService;
   }

+  /** Generate project-scoped key to prevent collisions across projects */
+  private approvalKey(projectPath: string, featureId: string): string {
+    return `${projectPath}::${featureId}`;
+  }
+
   /** Wait for plan approval with timeout (default 30 min). Rejects on timeout/cancellation. */
   async waitForApproval(featureId: string, projectPath: string): Promise<PlanApprovalResult> {
     const timeoutMs = await this.getTimeoutMs(projectPath);
     const timeoutMinutes = Math.round(timeoutMs / 60000);
+    const key = this.approvalKey(projectPath, featureId);

-    logger.info(`Registering pending approval for feature ${featureId}`);
+    logger.info(`Registering pending approval for feature ${featureId} in project ${projectPath}`);
     logger.info(
       `Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
     );
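The point of the `projectPath::featureId` key is that the same feature ID can be pending in two projects at once; a bare feature-ID key would let one entry overwrite the other. A tiny illustration (values hypothetical):

```ts
const pendingApprovals = new Map<string, { featureId: string }>();
const approvalKey = (projectPath: string, featureId: string) => `${projectPath}::${featureId}`;

// Two projects awaiting approval for the same featureId coexist under scoped keys.
pendingApprovals.set(approvalKey('/work/project-a', 'feature-123'), { featureId: 'feature-123' });
pendingApprovals.set(approvalKey('/work/project-b', 'feature-123'), { featureId: 'feature-123' });
console.log(pendingApprovals.size); // 2 - with bare 'feature-123' keys this would have been 1
```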
@@ -80,12 +86,12 @@ export class PlanApprovalService {
       // Set up timeout to prevent indefinite waiting and memory leaks
       // timeoutId stored in closure, NOT in PendingApproval object
       const timeoutId = setTimeout(() => {
-        const pending = this.pendingApprovals.get(featureId);
+        const pending = this.pendingApprovals.get(key);
         if (pending) {
           logger.warn(
             `Plan approval for feature ${featureId} timed out after ${timeoutMinutes} minutes`
           );
-          this.pendingApprovals.delete(featureId);
+          this.pendingApprovals.delete(key);
           reject(
             new Error(
               `Plan approval timed out after ${timeoutMinutes} minutes - feature execution cancelled`
@@ -106,7 +112,7 @@ export class PlanApprovalService {
         reject(error);
       };

-      this.pendingApprovals.set(featureId, {
+      this.pendingApprovals.set(key, {
         resolve: wrappedResolve,
         reject: wrappedReject,
         featureId,
@@ -132,7 +138,23 @@ export class PlanApprovalService {
       `Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
     );

-    const pending = this.pendingApprovals.get(featureId);
+    // Try to find pending approval using project-scoped key if projectPath is available
+    let foundKey: string | undefined;
+    let pending: PendingApproval | undefined;
+
+    if (projectPathFromClient) {
+      foundKey = this.approvalKey(projectPathFromClient, featureId);
+      pending = this.pendingApprovals.get(foundKey);
+    } else {
+      // Fallback: search by featureId (backward compatibility)
+      for (const [key, approval] of this.pendingApprovals) {
+        if (approval.featureId === featureId) {
+          foundKey = key;
+          pending = approval;
+          break;
+        }
+      }
+    }
+
     if (!pending) {
       logger.info(`No pending approval in Map for feature ${featureId}`);
@@ -219,32 +241,60 @@ export class PlanApprovalService {
     // Resolve the promise with all data including feedback
     // This triggers the wrapped resolve which clears the timeout
     pending.resolve({ approved, editedPlan, feedback });
-    this.pendingApprovals.delete(featureId);
+    if (foundKey) {
+      this.pendingApprovals.delete(foundKey);
+    }
+
     return { success: true };
   }

   /** Cancel approval (e.g., when feature stopped). Timeout cleared via wrapped reject. */
-  cancelApproval(featureId: string): void {
+  cancelApproval(featureId: string, projectPath?: string): void {
     logger.info(`cancelApproval called for feature ${featureId}`);
     logger.info(
       `Current pending approvals: ${Array.from(this.pendingApprovals.keys()).join(', ') || 'none'}`
     );

-    const pending = this.pendingApprovals.get(featureId);
-    if (pending) {
+    // If projectPath provided, use project-scoped key; otherwise search by featureId
+    let foundKey: string | undefined;
+    let pending: PendingApproval | undefined;
+
+    if (projectPath) {
+      foundKey = this.approvalKey(projectPath, featureId);
+      pending = this.pendingApprovals.get(foundKey);
+    } else {
+      // Fallback: search for any approval with this featureId (backward compatibility)
+      for (const [key, approval] of this.pendingApprovals) {
+        if (approval.featureId === featureId) {
+          foundKey = key;
+          pending = approval;
+          break;
+        }
+      }
+    }
+
+    if (pending && foundKey) {
       logger.info(`Found and cancelling pending approval for feature ${featureId}`);
       // Wrapped reject clears timeout automatically
       pending.reject(new Error('Plan approval cancelled - feature was stopped'));
-      this.pendingApprovals.delete(featureId);
+      this.pendingApprovals.delete(foundKey);
     } else {
       logger.info(`No pending approval to cancel for feature ${featureId}`);
     }
   }

   /** Check if a feature has a pending plan approval. */
-  hasPendingApproval(featureId: string): boolean {
-    return this.pendingApprovals.has(featureId);
+  hasPendingApproval(featureId: string, projectPath?: string): boolean {
+    if (projectPath) {
+      return this.pendingApprovals.has(this.approvalKey(projectPath, featureId));
+    }
+    // Fallback: search by featureId (backward compatibility)
+    for (const approval of this.pendingApprovals.values()) {
+      if (approval.featureId === featureId) {
+        return true;
+      }
+    }
+    return false;
   }

   /** Get timeout from project settings or default (30 min). */
@@ -20,8 +20,8 @@ export interface TestRepo {
 export async function createTestGitRepo(): Promise<TestRepo> {
   const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'automaker-test-'));

-  // Initialize git repo
-  await execAsync('git init', { cwd: tmpDir });
+  // Initialize git repo with 'main' as the default branch (matching GitHub's standard)
+  await execAsync('git init --initial-branch=main', { cwd: tmpDir });

   // Use environment variables instead of git config to avoid affecting user's git config
   // These env vars override git config without modifying it
@@ -38,9 +38,6 @@ export async function createTestGitRepo(): Promise<TestRepo> {
   await execAsync('git add .', { cwd: tmpDir, env: gitEnv });
   await execAsync('git commit -m "Initial commit"', { cwd: tmpDir, env: gitEnv });

-  // Create main branch explicitly
-  await execAsync('git branch -M main', { cwd: tmpDir });
-
   return {
     path: tmpDir,
     cleanup: async () => {
@@ -14,7 +14,8 @@ describe('worktree create route - repositories without commits', () => {

   async function initRepoWithoutCommit() {
     repoPath = await fs.mkdtemp(path.join(os.tmpdir(), 'automaker-no-commit-'));
-    await execAsync('git init', { cwd: repoPath });
+    // Initialize with 'main' as the default branch (matching GitHub's standard)
+    await execAsync('git init --initial-branch=main', { cwd: repoPath });
     // Don't set git config - use environment variables in commit operations instead
     // to avoid affecting user's git config
     // Intentionally skip creating an initial commit
@@ -30,11 +30,16 @@ import net from 'net';

 describe('dev-server-service.ts', () => {
   let testDir: string;
+  let originalHostname: string | undefined;

   beforeEach(async () => {
     vi.clearAllMocks();
     vi.resetModules();

+    // Store and set HOSTNAME for consistent test behavior
+    originalHostname = process.env.HOSTNAME;
+    process.env.HOSTNAME = 'localhost';
+
     testDir = path.join(os.tmpdir(), `dev-server-test-${Date.now()}`);
     await fs.mkdir(testDir, { recursive: true });

@@ -56,6 +61,13 @@ describe('dev-server-service.ts', () => {
   });

   afterEach(async () => {
+    // Restore original HOSTNAME
+    if (originalHostname === undefined) {
+      delete process.env.HOSTNAME;
+    } else {
+      process.env.HOSTNAME = originalHostname;
+    }
+
     try {
       await fs.rm(testDir, { recursive: true, force: true });
     } catch {
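The save/restore dance around `process.env.HOSTNAME` is a common test pattern; a small reusable sketch (an assumption for illustration, not something this change adds) would be:

```ts
// Hypothetical helper: set an env var for a test and return a function that restores it.
function stashEnv(name: string, value: string): () => void {
  const original = process.env[name];
  process.env[name] = value;
  return () => {
    if (original === undefined) delete process.env[name];
    else process.env[name] = original;
  };
}

// Usage: const restore = stashEnv('HOSTNAME', 'localhost'); ...run assertions...; restore();
```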
@@ -1,5 +1,11 @@
 import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import path from 'path';
 import type { Feature } from '@automaker/types';
+
+/**
+ * Helper to normalize paths for cross-platform test compatibility.
+ */
+const normalizePath = (p: string): string => path.resolve(p);
 import {
   ExecutionService,
   type RunAgentFn,
@@ -931,8 +937,8 @@ describe('execution-service.ts', () => {
       // Should still run agent, just with project path
       expect(mockRunAgentFn).toHaveBeenCalled();
       const callArgs = mockRunAgentFn.mock.calls[0];
-      // First argument is workDir - should end with /test/project
-      expect(callArgs[0]).toMatch(/\/test\/project$/);
+      // First argument is workDir - should be normalized path to /test/project
+      expect(callArgs[0]).toBe(normalizePath('/test/project'));
     });

     it('skips worktree resolution when useWorktrees is false', async () => {
@@ -1,4 +1,5 @@
 import { describe, it, expect, beforeEach, vi, type Mock } from 'vitest';
+import path from 'path';
 import { FeatureStateManager } from '@/services/feature-state-manager.js';
 import type { Feature } from '@automaker/types';
 import type { EventEmitter } from '@/lib/events.js';
@@ -8,6 +9,12 @@ import { atomicWriteJson, readJsonWithRecovery } from '@automaker/utils';
 import { getFeatureDir, getFeaturesDir } from '@automaker/platform';
 import { getNotificationService } from '@/services/notification-service.js';

+/**
+ * Helper to normalize paths for cross-platform test compatibility.
+ * Uses path.normalize (not path.resolve) to match path.join behavior in production code.
+ */
+const normalizePath = (p: string): string => path.normalize(p);
+
 // Mock dependencies
 vi.mock('@/lib/secure-fs.js', () => ({
   readFile: vi.fn(),
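The two test files intentionally pick different helpers: the execution-service test uses `path.resolve`, presumably because the resolved workDir is absolute, while this file uses `path.normalize` to match what `path.join` produces in `FeatureStateManager`. On POSIX the two behave the same for these paths; on Windows the difference is visible:

```ts
import path from 'path';

// On POSIX both calls return '/project/.automaker/features/feature-123/feature.json' unchanged.
// On Windows (win32 path rules):
path.normalize('/project/.automaker/features/feature-123/feature.json');
// -> '\\project\\.automaker\\features\\feature-123\\feature.json' (separators flipped, no drive added)

path.resolve('/project/.automaker/features/feature-123/feature.json');
// -> 'C:\\project\\...\\feature.json' (also anchored to the current drive, e.g. C:)
```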
@@ -71,20 +78,21 @@ describe('FeatureStateManager', () => {

   describe('loadFeature', () => {
     it('should load feature from disk', async () => {
-      (secureFs.readFile as Mock).mockResolvedValue(JSON.stringify(mockFeature));
+      (readJsonWithRecovery as Mock).mockResolvedValue({ data: mockFeature, recovered: false });

       const feature = await manager.loadFeature('/project', 'feature-123');

       expect(feature).toEqual(mockFeature);
       expect(getFeatureDir).toHaveBeenCalledWith('/project', 'feature-123');
-      expect(secureFs.readFile).toHaveBeenCalledWith(
-        '/project/.automaker/features/feature-123/feature.json',
-        'utf-8'
+      expect(readJsonWithRecovery).toHaveBeenCalledWith(
+        normalizePath('/project/.automaker/features/feature-123/feature.json'),
+        null,
+        expect.objectContaining({ autoRestore: true })
       );
     });

     it('should return null if feature does not exist', async () => {
-      (secureFs.readFile as Mock).mockRejectedValue(new Error('ENOENT'));
+      (readJsonWithRecovery as Mock).mockRejectedValue(new Error('ENOENT'));

       const feature = await manager.loadFeature('/project', 'non-existent');

@@ -92,7 +100,8 @@ describe('FeatureStateManager', () => {
     });

     it('should return null if feature JSON is invalid', async () => {
-      (secureFs.readFile as Mock).mockResolvedValue('not valid json');
+      // readJsonWithRecovery returns null as the default value when JSON is invalid
+      (readJsonWithRecovery as Mock).mockResolvedValue({ data: null, recovered: false });

       const feature = await manager.loadFeature('/project', 'feature-123');

@@ -35,6 +35,13 @@ vi.mock('../../../src/services/pipeline-service.js', () => ({
   },
 }));

+// Mock merge-service
+vi.mock('../../../src/services/merge-service.js', () => ({
+  performMerge: vi.fn(),
+}));
+
+import { performMerge } from '../../../src/services/merge-service.js';
+
 // Mock secureFs
 vi.mock('../../../src/lib/secure-fs.js', () => ({
   readFile: vi.fn(),
@@ -470,36 +477,26 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
global.fetch = vi.fn();
|
vi.mocked(performMerge).mockReset();
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
it('should call performMerge with correct parameters', async () => {
|
||||||
vi.mocked(global.fetch).mockReset();
|
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
||||||
});
|
|
||||||
|
|
||||||
it('should call merge endpoint with correct parameters', async () => {
|
|
||||||
vi.mocked(global.fetch).mockResolvedValue({
|
|
||||||
ok: true,
|
|
||||||
json: vi.fn().mockResolvedValue({ success: true }),
|
|
||||||
} as never);
|
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
await orchestrator.attemptMerge(context);
|
await orchestrator.attemptMerge(context);
|
||||||
|
|
||||||
expect(global.fetch).toHaveBeenCalledWith(
|
expect(performMerge).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('/api/worktree/merge'),
|
'/test/project',
|
||||||
expect.objectContaining({
|
'feature/test-1',
|
||||||
method: 'POST',
|
'/test/worktree',
|
||||||
body: expect.stringContaining('feature/test-1'),
|
'main',
|
||||||
})
|
{ deleteWorktreeAndBranch: false }
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success on clean merge', async () => {
|
it('should return success on clean merge', async () => {
|
||||||
vi.mocked(global.fetch).mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
||||||
ok: true,
|
|
||||||
json: vi.fn().mockResolvedValue({ success: true }),
|
|
||||||
} as never);
|
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
const result = await orchestrator.attemptMerge(context);
|
const result = await orchestrator.attemptMerge(context);
|
||||||
@@ -509,10 +506,11 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should set merge_conflict status when hasConflicts is true', async () => {
|
it('should set merge_conflict status when hasConflicts is true', async () => {
|
||||||
vi.mocked(global.fetch).mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({
|
||||||
ok: false,
|
success: false,
|
||||||
json: vi.fn().mockResolvedValue({ success: false, hasConflicts: true }),
|
hasConflicts: true,
|
||||||
} as never);
|
error: 'Merge conflict',
|
||||||
|
});
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
await orchestrator.attemptMerge(context);
|
await orchestrator.attemptMerge(context);
|
||||||
@@ -525,10 +523,11 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should emit pipeline_merge_conflict event on conflict', async () => {
|
it('should emit pipeline_merge_conflict event on conflict', async () => {
|
||||||
vi.mocked(global.fetch).mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({
|
||||||
ok: false,
|
success: false,
|
||||||
json: vi.fn().mockResolvedValue({ success: false, hasConflicts: true }),
|
hasConflicts: true,
|
||||||
} as never);
|
error: 'Merge conflict',
|
||||||
|
});
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
await orchestrator.attemptMerge(context);
|
await orchestrator.attemptMerge(context);
|
||||||
@@ -540,10 +539,7 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should emit auto_mode_feature_complete on success', async () => {
|
it('should emit auto_mode_feature_complete on success', async () => {
|
||||||
vi.mocked(global.fetch).mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
||||||
ok: true,
|
|
||||||
json: vi.fn().mockResolvedValue({ success: true }),
|
|
||||||
} as never);
|
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
await orchestrator.attemptMerge(context);
|
await orchestrator.attemptMerge(context);
|
||||||
@@ -555,10 +551,11 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should return needsAgentResolution true on conflict', async () => {
|
it('should return needsAgentResolution true on conflict', async () => {
|
||||||
vi.mocked(global.fetch).mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({
|
||||||
ok: false,
|
success: false,
|
||||||
json: vi.fn().mockResolvedValue({ success: false, hasConflicts: true }),
|
hasConflicts: true,
|
||||||
} as never);
|
error: 'Merge conflict',
|
||||||
|
});
|
||||||
|
|
||||||
const context = createMergeContext();
|
const context = createMergeContext();
|
||||||
const result = await orchestrator.attemptMerge(context);
|
const result = await orchestrator.attemptMerge(context);
|
||||||
@@ -728,10 +725,7 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
global.fetch = vi.fn().mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
||||||
ok: true,
|
|
||||||
json: vi.fn().mockResolvedValue({ success: true }),
|
|
||||||
} as never);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should execute steps in sequence', async () => {
|
it('should execute steps in sequence', async () => {
|
||||||
@@ -792,9 +786,12 @@ describe('PipelineOrchestrator', () => {
|
|||||||
const context = createPipelineContext();
|
const context = createPipelineContext();
|
||||||
await orchestrator.executePipeline(context);
|
await orchestrator.executePipeline(context);
|
||||||
|
|
||||||
expect(global.fetch).toHaveBeenCalledWith(
|
expect(performMerge).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('/api/worktree/merge'),
|
'/test/project',
|
||||||
expect.any(Object)
|
'feature/test-1',
|
||||||
|
'/test/project', // Falls back to projectPath when worktreePath is null
|
||||||
|
'main',
|
||||||
|
{ deleteWorktreeAndBranch: false }
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -816,10 +813,7 @@ describe('PipelineOrchestrator', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
global.fetch = vi.fn().mockResolvedValue({
|
vi.mocked(performMerge).mockResolvedValue({ success: true });
|
||||||
ok: true,
|
|
||||||
json: vi.fn().mockResolvedValue({ success: true }),
|
|
||||||
} as never);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('builds PipelineContext with correct fields from executeFeature', async () => {
|
it('builds PipelineContext with correct fields from executeFeature', async () => {
|
||||||
@@ -845,11 +839,12 @@ describe('PipelineOrchestrator', () => {
|
|||||||
await orchestrator.executePipeline(context);
|
await orchestrator.executePipeline(context);
|
||||||
|
|
||||||
// Merge should receive the worktree path
|
// Merge should receive the worktree path
|
||||||
expect(global.fetch).toHaveBeenCalledWith(
|
expect(performMerge).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('/api/worktree/merge'),
|
'/test/project',
|
||||||
expect.objectContaining({
|
'feature/test-1',
|
||||||
body: expect.stringContaining('/test/custom-worktree'),
|
'/test/custom-worktree',
|
||||||
})
|
'main',
|
||||||
|
{ deleteWorktreeAndBranch: false }
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -860,11 +855,12 @@ describe('PipelineOrchestrator', () => {
|
|||||||
|
|
||||||
await orchestrator.executePipeline(context);
|
await orchestrator.executePipeline(context);
|
||||||
|
|
||||||
expect(global.fetch).toHaveBeenCalledWith(
|
expect(performMerge).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('/api/worktree/merge'),
|
'/test/project',
|
||||||
expect.objectContaining({
|
'feature/custom-branch',
|
||||||
body: expect.stringContaining('feature/custom-branch'),
|
'/test/worktree',
|
||||||
})
|
'main',
|
||||||
|
{ deleteWorktreeAndBranch: false }
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
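The tests above pin down how the orchestrator now calls the merge service directly instead of going through fetch('/api/worktree/merge'). A rough sketch of the signature those assertions assume follows; the argument order and result fields are inferred from the expectations, not copied from the actual merge-service module.

interface MergeResult {
  success: boolean;
  hasConflicts?: boolean;
  error?: string;
}

// Hypothetical stand-in matching the call shape asserted above.
async function performMergeSketch(
  projectPath: string,
  featureBranch: string,
  worktreePath: string,
  baseBranch: string,
  options: { deleteWorktreeAndBranch: boolean }
): Promise<MergeResult> {
  // The real service presumably runs git inside the worktree; this stub only documents the contract.
  void projectPath; void featureBranch; void worktreePath; void baseBranch; void options;
  return { success: true };
}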
@@ -54,6 +54,8 @@ describe('PlanApprovalService', () => {

   it('should timeout and reject after configured period', async () => {
     const approvalPromise = service.waitForApproval('feature-1', '/project');
+    // Attach catch to prevent unhandled rejection warning (will be properly asserted below)
+    approvalPromise.catch(() => {});
     // Flush the async initialization
     await vi.advanceTimersByTimeAsync(0);

@@ -73,6 +75,8 @@ describe('PlanApprovalService', () => {
     } as never);

     const approvalPromise = service.waitForApproval('feature-1', '/project');
+    // Attach catch to prevent unhandled rejection warning (will be properly asserted below)
+    approvalPromise.catch(() => {});
     // Flush the async initialization
     await vi.advanceTimersByTimeAsync(0);

@@ -93,6 +97,8 @@ describe('PlanApprovalService', () => {
     );

     const approvalPromise = serviceNoSettings.waitForApproval('feature-1', '/project');
+    // Attach catch to prevent unhandled rejection warning (will be properly asserted below)
+    approvalPromise.catch(() => {});
     // Flush async
     await vi.advanceTimersByTimeAsync(0);

@@ -417,6 +423,8 @@ describe('PlanApprovalService', () => {
     } as never);

     const approvalPromise = service.waitForApproval('feature-1', '/project');
+    // Attach catch to prevent unhandled rejection warning (will be properly asserted below)
+    approvalPromise.catch(() => {});
     await vi.advanceTimersByTimeAsync(0);

     // Should not timeout at 4 minutes
@@ -432,6 +440,8 @@ describe('PlanApprovalService', () => {
     vi.mocked(mockSettingsService!.getProjectSettings).mockRejectedValue(new Error('Failed'));

     const approvalPromise = service.waitForApproval('feature-1', '/project');
+    // Attach catch to prevent unhandled rejection warning (will be properly asserted below)
+    approvalPromise.catch(() => {});
     await vi.advanceTimersByTimeAsync(0);

     // Should use default 30 minute timeout
@@ -448,6 +458,8 @@ describe('PlanApprovalService', () => {
     } as never);

     const approvalPromise = service.waitForApproval('feature-1', '/project');
+    // Attach catch to prevent unhandled rejection warning (will be properly asserted below)
+    approvalPromise.catch(() => {});
     await vi.advanceTimersByTimeAsync(0);

     // Should use default 30 minute timeout
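The repeated approvalPromise.catch(() => {}) lines follow a standard fake-timer pattern: a promise that will reject once timers are advanced needs a handler attached up front, otherwise the runtime reports an unhandled rejection before the test can assert on it. A small, self-contained illustration of the same pattern (generic timeout, not the service under test):

import { describe, it, expect, vi } from 'vitest';

describe('timeout pattern', () => {
  it('rejects after the timeout fires', async () => {
    vi.useFakeTimers();
    const pending = new Promise<never>((_resolve, reject) =>
      setTimeout(() => reject(new Error('timed out')), 30 * 60 * 1000)
    );
    // Mark the rejection as handled so advancing timers does not trigger an unhandled-rejection warning.
    pending.catch(() => {});

    await vi.advanceTimersByTimeAsync(30 * 60 * 1000);
    await expect(pending).rejects.toThrow('timed out');
    vi.useRealTimers();
  });
});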
@@ -9,9 +9,16 @@
  */

 import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import path from 'path';
 import { RecoveryService, DEFAULT_EXECUTION_STATE } from '@/services/recovery-service.js';
 import type { Feature } from '@automaker/types';

+/**
+ * Helper to normalize paths for cross-platform test compatibility.
+ * Uses path.normalize (not path.resolve) to match path.join behavior in production code.
+ */
+const normalizePath = (p: string): string => path.normalize(p);
+
 // Mock dependencies
 vi.mock('@automaker/utils', () => ({
   createLogger: () => ({
@@ -288,7 +295,7 @@ describe('recovery-service.ts', () => {

       expect(result).toBe(true);
       expect(secureFs.access).toHaveBeenCalledWith(
-        '/test/project/.automaker/features/feature-1/agent-output.md'
+        normalizePath('/test/project/.automaker/features/feature-1/agent-output.md')
       );
     });

@@ -1,12 +1,20 @@
 import { describe, it, expect, beforeEach, vi, type Mock } from 'vitest';
 import { WorktreeResolver, type WorktreeInfo } from '@/services/worktree-resolver.js';
 import { exec } from 'child_process';
+import path from 'path';

 // Mock child_process
 vi.mock('child_process', () => ({
   exec: vi.fn(),
 }));

+/**
+ * Helper to normalize paths for cross-platform test compatibility.
+ * On Windows, path.resolve('/Users/dev/project') returns 'C:\Users\dev\project' (with current drive).
+ * This helper ensures test expectations match the actual platform behavior.
+ */
+const normalizePath = (p: string): string => path.resolve(p);
+
 // Create promisified mock helper
 const mockExecAsync = (
   impl: (cmd: string, options?: { cwd?: string }) => Promise<{ stdout: string; stderr: string }>
@@ -94,9 +102,9 @@ branch refs/heads/feature-y
   it('should find worktree by branch name', async () => {
     mockExecAsync(async () => ({ stdout: porcelainOutput, stderr: '' }));

-    const path = await resolver.findWorktreeForBranch('/Users/dev/project', 'feature-x');
+    const result = await resolver.findWorktreeForBranch('/Users/dev/project', 'feature-x');

-    expect(path).toBe('/Users/dev/project/.worktrees/feature-x');
+    expect(result).toBe(normalizePath('/Users/dev/project/.worktrees/feature-x'));
   });

   it('should return null when branch not found', async () => {
@@ -120,9 +128,9 @@ branch refs/heads/feature-y
   it('should find main worktree', async () => {
     mockExecAsync(async () => ({ stdout: porcelainOutput, stderr: '' }));

-    const path = await resolver.findWorktreeForBranch('/Users/dev/project', 'main');
+    const result = await resolver.findWorktreeForBranch('/Users/dev/project', 'main');

-    expect(path).toBe('/Users/dev/project');
+    expect(result).toBe(normalizePath('/Users/dev/project'));
   });

   it('should handle porcelain output without trailing newline', async () => {
@@ -134,9 +142,9 @@ branch refs/heads/feature-x`;

     mockExecAsync(async () => ({ stdout: noTrailingNewline, stderr: '' }));

-    const path = await resolver.findWorktreeForBranch('/Users/dev/project', 'feature-x');
+    const result = await resolver.findWorktreeForBranch('/Users/dev/project', 'feature-x');

-    expect(path).toBe('/Users/dev/project/.worktrees/feature-x');
+    expect(result).toBe(normalizePath('/Users/dev/project/.worktrees/feature-x'));
   });

   it('should resolve relative paths to absolute', async () => {
@@ -151,8 +159,8 @@ branch refs/heads/feature-relative

     const result = await resolver.findWorktreeForBranch('/Users/dev/project', 'feature-relative');

-    // Should resolve to absolute path
-    expect(result).toBe('/Users/dev/project/.worktrees/feature-relative');
+    // Should resolve to absolute path (platform-specific)
+    expect(result).toBe(normalizePath('/Users/dev/project/.worktrees/feature-relative'));
   });

   it('should use projectPath as cwd for git command', async () => {
@@ -186,17 +194,17 @@ branch refs/heads/feature-y

     expect(worktrees).toHaveLength(3);
     expect(worktrees[0]).toEqual({
-      path: '/Users/dev/project',
+      path: normalizePath('/Users/dev/project'),
       branch: 'main',
       isMain: true,
     });
     expect(worktrees[1]).toEqual({
-      path: '/Users/dev/project/.worktrees/feature-x',
+      path: normalizePath('/Users/dev/project/.worktrees/feature-x'),
       branch: 'feature-x',
       isMain: false,
     });
     expect(worktrees[2]).toEqual({
-      path: '/Users/dev/project/.worktrees/feature-y',
+      path: normalizePath('/Users/dev/project/.worktrees/feature-y'),
       branch: 'feature-y',
       isMain: false,
     });
@@ -226,7 +234,7 @@ detached

     expect(worktrees).toHaveLength(2);
     expect(worktrees[1]).toEqual({
-      path: '/Users/dev/project/.worktrees/detached-wt',
+      path: normalizePath('/Users/dev/project/.worktrees/detached-wt'),
       branch: null, // Detached HEAD has no branch
       isMain: false,
     });
@@ -264,7 +272,7 @@ branch refs/heads/relative-branch

     const worktrees = await resolver.listWorktrees('/Users/dev/project');

-    expect(worktrees[1].path).toBe('/Users/dev/project/.worktrees/relative-wt');
+    expect(worktrees[1].path).toBe(normalizePath('/Users/dev/project/.worktrees/relative-wt'));
   });

   it('should handle single worktree (main only)', async () => {
@@ -278,7 +286,7 @@ branch refs/heads/main

     expect(worktrees).toHaveLength(1);
     expect(worktrees[0]).toEqual({
-      path: '/Users/dev/project',
+      path: normalizePath('/Users/dev/project'),
       branch: 'main',
       isMain: true,
     });
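Two different helpers appear across these test files, and the distinction matters: the feature-state and recovery tests normalize because production builds paths with path.join, while the worktree tests resolve because production returns absolute paths. A quick reminder of the Node.js behavior involved (standard path semantics, not project code):

import path from 'path';

// path.normalize only fixes separators and '..' segments; it stays relative to its input.
path.normalize('/project/.automaker/feature.json');
// POSIX:   '/project/.automaker/feature.json'
// Windows: '\\project\\.automaker\\feature.json'

// path.resolve also anchors the result to an absolute path, so on Windows it
// picks up the drive letter of the current working directory.
path.resolve('/Users/dev/project');
// POSIX:   '/Users/dev/project'
// Windows: 'C:\\Users\\dev\\project' (drive depends on the cwd)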
@@ -7,8 +7,8 @@
  */

 import { execSync } from 'child_process';
-import { cpSync, existsSync, mkdirSync, rmSync, writeFileSync, readFileSync } from 'fs';
-import { join, dirname } from 'path';
+import { cpSync, existsSync, mkdirSync, rmSync, writeFileSync, readFileSync, lstatSync } from 'fs';
+import { join, dirname, resolve } from 'path';
 import { fileURLToPath } from 'url';

 const __filename = fileURLToPath(import.meta.url);
@@ -112,6 +112,29 @@ execSync('npm install --omit=dev', {
   },
 });

+// Step 6b: Replace symlinks for local packages with real copies
+// npm install creates symlinks for file: references, but these break when packaged by electron-builder
+console.log('🔗 Replacing symlinks with real directory copies...');
+const nodeModulesAutomaker = join(BUNDLE_DIR, 'node_modules', '@automaker');
+for (const pkgName of LOCAL_PACKAGES) {
+  const pkgDir = pkgName.replace('@automaker/', '');
+  const nmPkgPath = join(nodeModulesAutomaker, pkgDir);
+  try {
+    // lstatSync does not follow symlinks, allowing us to check for broken ones
+    if (lstatSync(nmPkgPath).isSymbolicLink()) {
+      const realPath = resolve(BUNDLE_DIR, 'libs', pkgDir);
+      rmSync(nmPkgPath);
+      cpSync(realPath, nmPkgPath, { recursive: true });
+      console.log(`  ✓ Replaced symlink: ${pkgName}`);
+    }
+  } catch (error) {
+    // If the path doesn't exist, lstatSync throws ENOENT. We can safely ignore this.
+    if (error.code !== 'ENOENT') {
+      throw error;
+    }
+  }
+}
+
 // Step 7: Rebuild native modules for current architecture
 // This is critical for modules like node-pty that have native bindings
 console.log('🔨 Rebuilding native modules for current architecture...');
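The new step hinges on the lstat/stat distinction: lstatSync inspects the link entry itself, while statSync follows it to the target. A minimal, generic illustration (scratch paths are invented for the example; creating symlinks on Windows may require developer mode or elevation):

import { mkdtempSync, mkdirSync, symlinkSync, lstatSync, statSync } from 'fs';
import { join } from 'path';
import { tmpdir } from 'os';

const scratch = mkdtempSync(join(tmpdir(), 'symlink-demo-'));
mkdirSync(join(scratch, 'real-pkg'));
symlinkSync(join(scratch, 'real-pkg'), join(scratch, 'link-pkg'));

lstatSync(join(scratch, 'link-pkg')).isSymbolicLink(); // true  -- looks at the link itself
statSync(join(scratch, 'link-pkg')).isSymbolicLink();  // false -- follows through to the directory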
@@ -1,7 +1,7 @@
 import { useState, useCallback, useEffect } from 'react';
 import { Plus, Bug, FolderOpen, BookOpen } from 'lucide-react';
 import { useNavigate, useLocation } from '@tanstack/react-router';
-import { cn } from '@/lib/utils';
+import { cn, isMac } from '@/lib/utils';
 import { useAppStore } from '@/store/app-store';
 import { useOSDetection } from '@/hooks/use-os-detection';
 import { ProjectSwitcherItem } from './components/project-switcher-item';
@@ -11,9 +11,12 @@ import { NotificationBell } from './components/notification-bell';
 import { NewProjectModal } from '@/components/dialogs/new-project-modal';
 import { OnboardingDialog } from '@/components/layout/sidebar/dialogs';
 import { useProjectCreation } from '@/components/layout/sidebar/hooks';
-import { SIDEBAR_FEATURE_FLAGS } from '@/components/layout/sidebar/constants';
+import {
+  MACOS_ELECTRON_TOP_PADDING_CLASS,
+  SIDEBAR_FEATURE_FLAGS,
+} from '@/components/layout/sidebar/constants';
 import type { Project } from '@/lib/electron';
-import { getElectronAPI } from '@/lib/electron';
+import { getElectronAPI, isElectron } from '@/lib/electron';
 import { initializeProject, hasAppSpec, hasAutomakerDir } from '@/lib/project-init';
 import { toast } from 'sonner';
 import { CreateSpecDialog } from '@/components/views/spec-view/dialogs';
@@ -279,7 +282,12 @@ export function ProjectSwitcher() {
       data-testid="project-switcher"
     >
       {/* Automaker Logo and Version */}
-      <div className="flex flex-col items-center pt-3 pb-2 px-2">
+      <div
+        className={cn(
+          'flex flex-col items-center pb-2 px-2',
+          isMac && isElectron() ? MACOS_ELECTRON_TOP_PADDING_CLASS : 'pt-3'
+        )}
+      >
         <button
           onClick={() => navigate({ to: '/dashboard' })}
           className="group flex flex-col items-center gap-0.5"
@@ -6,6 +6,7 @@ import type { LucideIcon } from 'lucide-react';
 import { cn, isMac } from '@/lib/utils';
 import { formatShortcut } from '@/store/app-store';
 import { isElectron, type Project } from '@/lib/electron';
+import { MACOS_ELECTRON_TOP_PADDING_CLASS } from '../constants';
 import { getAuthenticatedImageUrl } from '@/lib/api-fetch';
 import { useAppStore } from '@/store/app-store';
 import {
@@ -89,7 +90,7 @@ export function SidebarHeader({
       <div
         className={cn(
           'shrink-0 flex flex-col items-center relative px-2 pt-3 pb-2',
-          isMac && isElectron() && 'pt-[10px]'
+          isMac && isElectron() && MACOS_ELECTRON_TOP_PADDING_CLASS
         )}
       >
         <Tooltip>
@@ -240,7 +241,7 @@ export function SidebarHeader({
       <div
         className={cn(
           'shrink-0 flex flex-col relative px-3 pt-3 pb-2',
-          isMac && isElectron() && 'pt-[10px]'
+          isMac && isElectron() && MACOS_ELECTRON_TOP_PADDING_CLASS
         )}
       >
         {/* Header with logo and project dropdown */}
@@ -3,7 +3,9 @@ import type { NavigateOptions } from '@tanstack/react-router';
 import { ChevronDown, Wrench, Github, Folder } from 'lucide-react';
 import * as LucideIcons from 'lucide-react';
 import type { LucideIcon } from 'lucide-react';
-import { cn } from '@/lib/utils';
+import { cn, isMac } from '@/lib/utils';
+import { isElectron } from '@/lib/electron';
+import { MACOS_ELECTRON_TOP_PADDING_CLASS } from '../constants';
 import { formatShortcut, useAppStore } from '@/store/app-store';
 import { getAuthenticatedImageUrl } from '@/lib/api-fetch';
 import type { NavSection } from '../types';
@@ -117,7 +119,12 @@ export function SidebarNavigation({
       className={cn(
         'flex-1 overflow-y-auto scrollbar-hide px-3 pb-2',
         // Add top padding in discord mode since there's no header
-        sidebarStyle === 'discord' ? 'pt-3' : 'mt-1'
+        // Extra padding for macOS Electron to avoid traffic light overlap
+        sidebarStyle === 'discord'
+          ? isMac && isElectron()
+            ? MACOS_ELECTRON_TOP_PADDING_CLASS
+            : 'pt-3'
+          : 'mt-1'
       )}
     >
       {/* Project name display for classic/discord mode */}
@@ -1,5 +1,11 @@
 import { darkThemes, lightThemes } from '@/config/theme-options';

+/**
+ * Tailwind class for top padding on macOS Electron to avoid overlapping with traffic light window controls.
+ * This padding is applied conditionally when running on macOS in Electron.
+ */
+export const MACOS_ELECTRON_TOP_PADDING_CLASS = 'pt-[38px]';
+
 /**
  * Shared constants for theme submenu positioning and layout.
  * Used across project-context-menu and project-selector-with-options components
@@ -116,9 +116,8 @@ const PROVIDER_ICON_DEFINITIONS: Record<ProviderIconKey, ProviderIconDefinition>
   },
   copilot: {
     viewBox: '0 0 98 96',
-    // Official GitHub Octocat logo mark
+    // Official GitHub Octocat logo mark (theme-aware via currentColor)
     path: 'M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z',
-    fill: '#ffffff',
   },
 };

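Dropping the hard-coded fill leaves the glyph to inherit whatever color the surrounding text has, which is what makes it theme-aware. A generic sketch of how such a definition is typically rendered; the component and prop names below are illustrative, not the project's actual icon component:

import * as React from 'react';

type GlyphProps = { path: string; viewBox: string; className?: string };

// fill="currentColor" makes the SVG follow the CSS `color` of its parent,
// so the same path renders dark on light themes and light on dark themes.
export function ProviderGlyph({ path, viewBox, className }: GlyphProps) {
  return (
    <svg viewBox={viewBox} fill="currentColor" className={className} aria-hidden="true">
      <path d={path} />
    </svg>
  );
}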
@@ -1275,8 +1275,10 @@ export function BoardView() {
           maxConcurrency={maxConcurrency}
           runningAgentsCount={runningAutoTasks.length}
           onConcurrencyChange={(newMaxConcurrency) => {
-            if (currentProject && selectedWorktree) {
-              const branchName = selectedWorktree.isMain ? null : selectedWorktree.branch;
+            if (currentProject) {
+              // If selectedWorktree is undefined or it's the main worktree, branchName will be null.
+              // Otherwise, use the branch name.
+              const branchName = selectedWorktree?.isMain === false ? selectedWorktree.branch : null;
               setMaxConcurrencyForWorktree(currentProject.id, branchName, newMaxConcurrency);

               // Persist to server settings so capacity checks use the correct value
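The guard change is subtle: with optional chaining, a missing selectedWorktree no longer blocks the update, it simply maps to the main-worktree case. Spelled out in isolation (types simplified for illustration):

type Worktree = { isMain: boolean; branch: string };

function branchNameFor(selectedWorktree: Worktree | undefined): string | null {
  // undefined                      -> null (treated as the main worktree)
  // { isMain: true, ... }          -> null
  // { isMain: false, branch: 'x' } -> 'x'
  return selectedWorktree?.isMain === false ? selectedWorktree.branch : null;
}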
@@ -750,6 +750,9 @@ export function electronUserDataWriteFileSync(
     throw new Error('[SystemPaths] Electron userData path not initialized');
   }
   const fullPath = path.join(electronUserDataPath, relativePath);
+  // Ensure parent directory exists (may not exist on first launch)
+  const dir = path.dirname(fullPath);
+  fsSync.mkdirSync(dir, { recursive: true });
   fsSync.writeFileSync(fullPath, data, options);
 }

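Creating the parent directory with { recursive: true } is idempotent, so the write path works both on first launch and on every later call. A minimal standalone version of the same pattern (generic Node.js; the path in the usage line is invented for the example):

import * as fsSync from 'fs';
import * as path from 'path';

function writeFileEnsuringDir(fullPath: string, data: string): void {
  // mkdirSync with recursive: true succeeds even if the directory already exists.
  fsSync.mkdirSync(path.dirname(fullPath), { recursive: true });
  fsSync.writeFileSync(fullPath, data, 'utf-8');
}

writeFileEnsuringDir('/tmp/example-user-data/settings.json', '{}');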