Mirror of https://github.com/eyaltoledano/claude-task-master.git (synced 2026-01-30 06:12:05 +00:00)

fix: improve branch naming logic for workflow commands (#1491)
.changeset/three-webs-drive.md (new file, 8 lines added)

@@ -0,0 +1,8 @@
+---
+"task-master-ai": patch
+---
+
+Add `tm/` prefix to autopilot branch names
+
+- Team mode branches now follow the `tm/<org-slug>/task-<id>` naming convention for better organization.
+- Solves an issue where some users could not start a workflow on the master Taskmaster tag
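For context, the convention described in the changeset can be illustrated with a short sketch. The helper below is hypothetical (the real logic lives inside @tm/core's workflow domain); it only shows how a `tm/`-prefixed branch name could be composed from an org slug and a task ID.

```typescript
// Hypothetical illustration of the `tm/` branch naming convention from this changeset.
// Not the repository's actual implementation.
function buildAutopilotBranchName(taskId: string, orgSlug?: string): string {
  const id = taskId.replace(/\./g, '-'); // subtask IDs like "1.2" become "1-2"
  const scope = orgSlug ? `${orgSlug}/` : ''; // team mode adds the org slug segment
  return `tm/${scope}task-${id}`; // e.g. "tm/acme/task-1-2" or "tm/task-7"
}
```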
.gitignore (vendored, 4 lines changed)

@@ -104,7 +104,3 @@ apps/extension/vsix-build/
 .scannerwork
 
 # OS specific
-
-# Task files
-tasks.json
-tasks/
apps/cli/src/commands/autopilot/abort.command.ts

@@ -2,17 +2,11 @@
  * @fileoverview Abort Command - Safely terminate workflow
  */
 
-import { WorkflowOrchestrator } from '@tm/core';
+import { createTmCore } from '@tm/core';
 import { Command } from 'commander';
 import inquirer from 'inquirer';
 import { getProjectRoot } from '../../utils/project-root.js';
-import {
-  AutopilotBaseOptions,
-  OutputFormatter,
-  deleteWorkflowState,
-  hasWorkflowState,
-  loadWorkflowState
-} from './shared.js';
+import { AutopilotBaseOptions, OutputFormatter } from './shared.js';
 
 interface AbortOptions extends AutopilotBaseOptions {
   force?: boolean;
@@ -58,27 +52,19 @@ export class AbortCommand extends Command {
        ...mergedOptions,
        projectRoot
      };
 
-      // Check for workflow state
-      const hasState = await hasWorkflowState(mergedOptions.projectRoot!);
-      if (!hasState) {
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
+
+      // Check if workflow exists
+      if (!(await tmCore.workflow.hasWorkflow())) {
        formatter.warning('No active workflow to abort');
        return;
      }
 
-      // Load state
-      const state = await loadWorkflowState(mergedOptions.projectRoot!);
-      if (!state) {
-        formatter.error('Failed to load workflow state');
-        process.exit(1);
-      }
-
-      // Restore orchestrator
-      const orchestrator = new WorkflowOrchestrator(state.context);
-      orchestrator.restoreState(state);
-
-      // Get progress before abort
-      const progress = orchestrator.getProgress();
-      const currentSubtask = orchestrator.getCurrentSubtask();
+      // Resume workflow to get status
+      await tmCore.workflow.resume();
+      const status = tmCore.workflow.getStatus();
 
      // Confirm abort if not forced or in JSON mode
      if (!mergedOptions.force && !mergedOptions.json) {
@@ -87,8 +73,8 @@ export class AbortCommand extends Command {
          type: 'confirm',
          name: 'confirmed',
          message:
-            `This will abort the workflow for task ${state.context.taskId}. ` +
-            `Progress: ${progress.completed}/${progress.total} subtasks completed. ` +
+            `This will abort the workflow for task ${status.taskId}. ` +
+            `Progress: ${status.progress?.completed || 0}/${status.progress?.total || 0} subtasks completed. ` +
            `Continue?`,
          default: false
        }
@@ -100,24 +86,18 @@ export class AbortCommand extends Command {
        }
      }
 
-      // Trigger abort in orchestrator
-      orchestrator.transition({ type: 'ABORT' });
-
-      // Delete workflow state
-      await deleteWorkflowState(mergedOptions.projectRoot!);
+      // Abort workflow (cleans up state internally)
+      await tmCore.workflow.abort();
 
      // Output result
      formatter.success('Workflow aborted', {
-        taskId: state.context.taskId,
-        branchName: state.context.branchName,
-        progress: {
-          completed: progress.completed,
-          total: progress.total
-        },
-        lastSubtask: currentSubtask
+        taskId: status.taskId,
+        branchName: status.branchName,
+        progress: status.progress,
+        lastSubtask: status.currentSubtask
          ? {
-              id: currentSubtask.id,
-              title: currentSubtask.title
+              id: status.currentSubtask.id,
+              title: status.currentSubtask.title
            }
          : null,
        note: 'Branch and commits remain. Clean up manually if needed.'
apps/cli/src/commands/autopilot/commit.command.ts

@@ -2,7 +2,7 @@
  * @fileoverview Commit Command - Create commit with enhanced message generation
  */
 
-import { CommitMessageGenerator, GitAdapter, WorkflowService } from '@tm/core';
+import { CommitMessageGenerator, GitAdapter, createTmCore } from '@tm/core';
 import { Command } from 'commander';
 import { getProjectRoot } from '../../utils/project-root.js';
 import { AutopilotBaseOptions, OutputFormatter } from './shared.js';
@@ -39,21 +39,21 @@ export class CommitCommand extends Command {
    try {
      const projectRoot = mergedOptions.projectRoot!;
 
-      // Create workflow service (manages WorkflowStateManager internally)
-      const workflowService = new WorkflowService(projectRoot);
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
 
      // Check if workflow exists
-      if (!(await workflowService.hasWorkflow())) {
+      if (!(await tmCore.workflow.hasWorkflow())) {
        formatter.error('No active workflow', {
          suggestion: 'Start a workflow with: autopilot start <taskId>'
        });
        process.exit(1);
      }
 
-      // Resume workflow (loads state with single WorkflowStateManager instance)
-      await workflowService.resumeWorkflow();
-      const status = workflowService.getStatus();
-      const workflowContext = workflowService.getContext();
+      // Resume workflow
+      await tmCore.workflow.resume();
+      const status = tmCore.workflow.getStatus();
+      const workflowContext = tmCore.workflow.getContext();
 
      // Verify in COMMIT phase
      if (status.tddPhase !== 'COMMIT') {
@@ -116,8 +116,8 @@ export class CommitCommand extends Command {
      const lastCommit = await gitAdapter.getLastCommit();
 
      // Complete COMMIT phase and advance workflow
-      // This handles all transitions internally with a single WorkflowStateManager
-      const newStatus = await workflowService.commit();
+      // Status updates (subtask → done) are handled internally by WorkflowService
+      const newStatus = await tmCore.workflow.commit();
 
      const isComplete = newStatus.phase === 'COMPLETE';
 
apps/cli/src/commands/autopilot/complete.command.ts

@@ -2,15 +2,10 @@
  * @fileoverview Complete Command - Complete current TDD phase with validation
  */
 
-import { type TestResult, WorkflowOrchestrator } from '@tm/core';
+import { type TestResult, createTmCore } from '@tm/core';
 import { Command } from 'commander';
 import { getProjectRoot } from '../../utils/project-root.js';
-import {
-  type AutopilotBaseOptions,
-  OutputFormatter,
-  hasWorkflowState,
-  loadWorkflowState
-} from './shared.js';
+import { type AutopilotBaseOptions, OutputFormatter } from './shared.js';
 
 interface CompleteOptions extends AutopilotBaseOptions {
   results?: string;
@@ -49,37 +44,30 @@ export class CompleteCommand extends Command {
    const formatter = new OutputFormatter(mergedOptions.json || false);
 
    try {
-      // Check for workflow state
-      const hasState = await hasWorkflowState(mergedOptions.projectRoot!);
-      if (!hasState) {
+      const projectRoot = mergedOptions.projectRoot!;
+
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
+
+      // Check if workflow exists
+      if (!(await tmCore.workflow.hasWorkflow())) {
        formatter.error('No active workflow', {
          suggestion: 'Start a workflow with: autopilot start <taskId>'
        });
        process.exit(1);
      }
 
-      // Load state
-      const state = await loadWorkflowState(mergedOptions.projectRoot!);
-      if (!state) {
-        formatter.error('Failed to load workflow state');
-        process.exit(1);
-      }
-
-      // Restore orchestrator with persistence
-      const { saveWorkflowState } = await import('./shared.js');
-      const orchestrator = new WorkflowOrchestrator(state.context);
-      orchestrator.restoreState(state);
-      orchestrator.enableAutoPersist(async (newState) => {
-        await saveWorkflowState(mergedOptions.projectRoot!, newState);
-      });
+      // Resume workflow
+      await tmCore.workflow.resume();
+      const status = tmCore.workflow.getStatus();
 
      // Get current phase
-      const tddPhase = orchestrator.getCurrentTDDPhase();
-      const currentSubtask = orchestrator.getCurrentSubtask();
+      const tddPhase = status.tddPhase;
+      const currentSubtask = status.currentSubtask;
 
      if (!tddPhase) {
        formatter.error('Not in a TDD phase', {
-          phase: orchestrator.getCurrentPhase()
+          phase: status.phase
        });
        process.exit(1);
      }
@@ -136,24 +124,18 @@ export class CompleteCommand extends Command {
        process.exit(1);
      }
 
-      // Complete phase with test results
+      // Complete phase with test results using tmCore facade
+      const newStatus = await tmCore.workflow.completePhase(testResults);
+
      if (tddPhase === 'RED') {
-        orchestrator.transition({
-          type: 'RED_PHASE_COMPLETE',
-          testResults
-        });
        formatter.success('RED phase completed', {
-          nextPhase: 'GREEN',
+          nextPhase: newStatus.tddPhase || 'GREEN',
          testResults,
          subtask: currentSubtask?.title
        });
      } else {
-        orchestrator.transition({
-          type: 'GREEN_PHASE_COMPLETE',
-          testResults
-        });
        formatter.success('GREEN phase completed', {
-          nextPhase: 'COMMIT',
+          nextPhase: newStatus.tddPhase || 'COMMIT',
          testResults,
          subtask: currentSubtask?.title,
          suggestion: 'Run: autopilot commit'
apps/cli/src/commands/autopilot/finalize.command.ts (new file, 98 lines added)

@@ -0,0 +1,98 @@
+/**
+ * @fileoverview Finalize Command - Complete the TDD workflow
+ */
+
+import { createTmCore } from '@tm/core';
+import { Command } from 'commander';
+import { getProjectRoot } from '../../utils/project-root.js';
+import { AutopilotBaseOptions, OutputFormatter } from './shared.js';
+
+type FinalizeOptions = AutopilotBaseOptions;
+
+/**
+ * Finalize Command - Complete the workflow after all subtasks are done
+ */
+export class FinalizeCommand extends Command {
+  constructor() {
+    super('finalize');
+
+    this.description(
+      'Finalize and complete the workflow. Validates working tree is clean.'
+    ).action(async (options: FinalizeOptions) => {
+      await this.execute(options);
+    });
+  }
+
+  private async execute(options: FinalizeOptions): Promise<void> {
+    // Inherit parent options
+    const parentOpts = this.parent?.opts() as AutopilotBaseOptions;
+
+    // Initialize mergedOptions with defaults (projectRoot will be set in try block)
+    let mergedOptions: FinalizeOptions = {
+      ...parentOpts,
+      ...options,
+      projectRoot: '' // Will be set in try block
+    };
+
+    const formatter = new OutputFormatter(
+      options.json || parentOpts?.json || false
+    );
+
+    try {
+      // Resolve project root inside try block to catch any errors
+      const projectRoot = getProjectRoot(
+        options.projectRoot || parentOpts?.projectRoot
+      );
+
+      // Update mergedOptions with resolved project root
+      mergedOptions = {
+        ...mergedOptions,
+        projectRoot
+      };
+
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
+
+      // Check if workflow exists
+      if (!(await tmCore.workflow.hasWorkflow())) {
+        formatter.error('No active workflow', {
+          suggestion: 'Start a workflow with: autopilot start <taskId>'
+        });
+        process.exit(1);
+      }
+
+      // Resume workflow to get status
+      await tmCore.workflow.resume();
+      const currentStatus = tmCore.workflow.getStatus();
+
+      // Verify we're in FINALIZE phase
+      if (currentStatus.phase !== 'FINALIZE') {
+        formatter.error(
+          `Cannot finalize: workflow is in ${currentStatus.phase} phase`,
+          {
+            suggestion: 'Complete all subtasks first'
+          }
+        );
+        process.exit(1);
+      }
+
+      // Finalize workflow
+      formatter.info('Validating working tree and finalizing workflow...');
+      const newStatus = await tmCore.workflow.finalize();
+
+      // Output result
+      formatter.success('Workflow completed', {
+        taskId: newStatus.taskId,
+        phase: newStatus.phase,
+        branchName: newStatus.branchName,
+        progress: newStatus.progress
+      });
+    } catch (error) {
+      formatter.error((error as Error).message);
+      if (mergedOptions.verbose) {
+        console.error((error as Error).stack);
+      }
+      process.exit(1);
+    }
+  }
+}
apps/cli/src/commands/autopilot/index.ts

@@ -8,6 +8,7 @@ import { Command } from 'commander';
 import { AbortCommand } from './abort.command.js';
 import { CommitCommand } from './commit.command.js';
 import { CompleteCommand } from './complete.command.js';
+import { FinalizeCommand } from './finalize.command.js';
 import { NextCommand } from './next.command.js';
 import { ResumeCommand } from './resume.command.js';
 import { StartCommand } from './start.command.js';
@@ -66,6 +67,9 @@ export class AutopilotCommand extends Command {
    // Show status
    this.addCommand(new StatusCommand());
 
+    // Finalize workflow
+    this.addCommand(new FinalizeCommand());
+
    // Abort workflow
    this.addCommand(new AbortCommand());
  }
apps/cli/src/commands/autopilot/next.command.ts

@@ -2,15 +2,10 @@
  * @fileoverview Next Command - Get next action in TDD workflow
  */
 
-import { WorkflowOrchestrator } from '@tm/core';
+import { createTmCore } from '@tm/core';
 import { Command } from 'commander';
 import { getProjectRoot } from '../../utils/project-root.js';
-import {
-  type AutopilotBaseOptions,
-  OutputFormatter,
-  hasWorkflowState,
-  loadWorkflowState
-} from './shared.js';
+import { type AutopilotBaseOptions, OutputFormatter } from './shared.js';
 
 type NextOptions = AutopilotBaseOptions;
 
@@ -54,112 +49,54 @@ export class NextCommand extends Command {
        ...mergedOptions,
        projectRoot
      };
 
-      // Check for workflow state
-      const hasState = await hasWorkflowState(mergedOptions.projectRoot!);
-      if (!hasState) {
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
+
+      // Check if workflow exists
+      if (!(await tmCore.workflow.hasWorkflow())) {
        formatter.error('No active workflow', {
          suggestion: 'Start a workflow with: autopilot start <taskId>'
        });
        process.exit(1);
      }
 
-      // Load state
-      const state = await loadWorkflowState(mergedOptions.projectRoot!);
-      if (!state) {
-        formatter.error('Failed to load workflow state');
-        process.exit(1);
-      }
-
-      // Restore orchestrator
-      const orchestrator = new WorkflowOrchestrator(state.context);
-      orchestrator.restoreState(state);
-
-      // Get current phase and subtask
-      const phase = orchestrator.getCurrentPhase();
-      const tddPhase = orchestrator.getCurrentTDDPhase();
-      const currentSubtask = orchestrator.getCurrentSubtask();
-
-      // Determine next action based on phase
-      let actionType: string;
-      let actionDescription: string;
-      let actionDetails: Record<string, unknown> = {};
+      // Resume workflow and get next action
+      await tmCore.workflow.resume();
+      const status = tmCore.workflow.getStatus();
+      const nextAction = tmCore.workflow.getNextAction();
+      const context = tmCore.workflow.getContext();
+
+      // Get current phase info
+      const phase = status.phase;
+      const tddPhase = status.tddPhase;
+      const currentSubtask = status.currentSubtask;
 
      if (phase === 'COMPLETE') {
        formatter.success('Workflow complete', {
          message: 'All subtasks have been completed',
-          taskId: state.context.taskId
+          taskId: status.taskId
        });
        return;
      }
 
-      if (phase === 'SUBTASK_LOOP' && tddPhase) {
-        switch (tddPhase) {
-          case 'RED':
-            actionType = 'generate_test';
-            actionDescription = 'Write failing test for current subtask';
-            actionDetails = {
-              subtask: currentSubtask
-                ? {
-                    id: currentSubtask.id,
-                    title: currentSubtask.title,
-                    attempts: currentSubtask.attempts
-                  }
-                : null,
-              testCommand: 'npm test', // Could be customized based on config
-              expectedOutcome: 'Test should fail'
-            };
-            break;
-
-          case 'GREEN':
-            actionType = 'implement_code';
-            actionDescription = 'Implement code to pass the failing test';
-            actionDetails = {
-              subtask: currentSubtask
-                ? {
-                    id: currentSubtask.id,
-                    title: currentSubtask.title,
-                    attempts: currentSubtask.attempts
-                  }
-                : null,
-              testCommand: 'npm test',
-              expectedOutcome: 'All tests should pass',
-              lastTestResults: state.context.lastTestResults
-            };
-            break;
-
-          case 'COMMIT':
-            actionType = 'commit_changes';
-            actionDescription = 'Commit the changes';
-            actionDetails = {
-              subtask: currentSubtask
-                ? {
-                    id: currentSubtask.id,
-                    title: currentSubtask.title,
-                    attempts: currentSubtask.attempts
-                  }
-                : null,
-              suggestion: 'Use: autopilot commit'
-            };
-            break;
-
-          default:
-            actionType = 'unknown';
-            actionDescription = 'Unknown TDD phase';
-        }
-      } else {
-        actionType = 'workflow_phase';
-        actionDescription = `Currently in ${phase} phase`;
-      }
-
-      // Output next action
+      // Output next action using the facade's guidance
      const output = {
-        action: actionType,
-        description: actionDescription,
+        action: nextAction.action,
+        description: nextAction.description,
        phase,
        tddPhase,
-        taskId: state.context.taskId,
-        branchName: state.context.branchName,
-        ...actionDetails
+        taskId: status.taskId,
+        branchName: status.branchName,
+        subtask: currentSubtask
+          ? {
+              id: currentSubtask.id,
+              title: currentSubtask.title,
+              attempts: currentSubtask.attempts
+            }
+          : null,
+        nextSteps: nextAction.nextSteps,
+        lastTestResults: context.lastTestResults
      };
 
      if (mergedOptions.json) {
apps/cli/src/commands/autopilot/resume.command.ts

@@ -2,15 +2,10 @@
  * @fileoverview Resume Command - Restore and resume TDD workflow
  */
 
-import { WorkflowOrchestrator } from '@tm/core';
+import { createTmCore } from '@tm/core';
 import { Command } from 'commander';
 import { getProjectRoot } from '../../utils/project-root.js';
-import {
-  AutopilotBaseOptions,
-  OutputFormatter,
-  hasWorkflowState,
-  loadWorkflowState
-} from './shared.js';
+import { AutopilotBaseOptions, OutputFormatter } from './shared.js';
 
 type ResumeOptions = AutopilotBaseOptions;
 
@@ -42,63 +37,35 @@ export class ResumeCommand extends Command {
    const formatter = new OutputFormatter(mergedOptions.json || false);
 
    try {
-      // Check for workflow state
-      const hasState = await hasWorkflowState(mergedOptions.projectRoot!);
-      if (!hasState) {
+      const projectRoot = mergedOptions.projectRoot!;
+
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
+
+      // Check if workflow exists
+      if (!(await tmCore.workflow.hasWorkflow())) {
        formatter.error('No workflow state found', {
          suggestion: 'Start a new workflow with: autopilot start <taskId>'
        });
        process.exit(1);
      }
 
-      // Load state
+      // Resume workflow
      formatter.info('Loading workflow state...');
-      const state = await loadWorkflowState(mergedOptions.projectRoot!);
-
-      if (!state) {
-        formatter.error('Failed to load workflow state');
-        process.exit(1);
-      }
-
-      // Validate state can be resumed
-      const orchestrator = new WorkflowOrchestrator(state.context);
-      if (!orchestrator.canResumeFromState(state)) {
-        formatter.error('Invalid workflow state', {
-          suggestion:
-            'State file may be corrupted. Consider starting a new workflow.'
-        });
-        process.exit(1);
-      }
-
-      // Restore state
-      orchestrator.restoreState(state);
-
-      // Re-enable auto-persistence
-      const { saveWorkflowState } = await import('./shared.js');
-      orchestrator.enableAutoPersist(async (newState) => {
-        await saveWorkflowState(mergedOptions.projectRoot!, newState);
-      });
-
-      // Get progress
-      const progress = orchestrator.getProgress();
-      const currentSubtask = orchestrator.getCurrentSubtask();
+      const status = await tmCore.workflow.resume();
 
      // Output success
      formatter.success('Workflow resumed', {
-        taskId: state.context.taskId,
-        phase: orchestrator.getCurrentPhase(),
-        tddPhase: orchestrator.getCurrentTDDPhase(),
-        branchName: state.context.branchName,
-        progress: {
-          completed: progress.completed,
-          total: progress.total,
-          percentage: progress.percentage
-        },
-        currentSubtask: currentSubtask
+        taskId: status.taskId,
+        phase: status.phase,
+        tddPhase: status.tddPhase,
+        branchName: status.branchName,
+        progress: status.progress,
+        currentSubtask: status.currentSubtask
          ? {
-              id: currentSubtask.id,
-              title: currentSubtask.title,
-              attempts: currentSubtask.attempts
+              id: status.currentSubtask.id,
+              title: status.currentSubtask.title,
+              attempts: status.currentSubtask.attempts
            }
          : null
      });
apps/cli/src/commands/autopilot/shared.ts

@@ -2,13 +2,6 @@
  * @fileoverview Shared utilities for autopilot commands
  */
 
-import {
-  CommitMessageGenerator,
-  GitAdapter,
-  WorkflowOrchestrator,
-  WorkflowStateManager
-} from '@tm/core';
-import type { SubtaskInfo, WorkflowContext, WorkflowState } from '@tm/core';
 import chalk from 'chalk';
 
 /**
@@ -20,93 +13,6 @@ export interface AutopilotBaseOptions {
   verbose?: boolean;
 }
 
-/**
- * Load workflow state from disk using WorkflowStateManager
- */
-export async function loadWorkflowState(
-  projectRoot: string
-): Promise<WorkflowState | null> {
-  const stateManager = new WorkflowStateManager(projectRoot);
-
-  if (!(await stateManager.exists())) {
-    return null;
-  }
-
-  try {
-    return await stateManager.load();
-  } catch (error) {
-    throw new Error(
-      `Failed to load workflow state: ${(error as Error).message}`
-    );
-  }
-}
-
-/**
- * Save workflow state to disk using WorkflowStateManager
- */
-export async function saveWorkflowState(
-  projectRoot: string,
-  state: WorkflowState
-): Promise<void> {
-  const stateManager = new WorkflowStateManager(projectRoot);
-
-  try {
-    await stateManager.save(state);
-  } catch (error) {
-    throw new Error(
-      `Failed to save workflow state: ${(error as Error).message}`
-    );
-  }
-}
-
-/**
- * Delete workflow state from disk using WorkflowStateManager
- */
-export async function deleteWorkflowState(projectRoot: string): Promise<void> {
-  const stateManager = new WorkflowStateManager(projectRoot);
-  await stateManager.delete();
-}
-
-/**
- * Check if workflow state exists using WorkflowStateManager
- */
-export async function hasWorkflowState(projectRoot: string): Promise<boolean> {
-  const stateManager = new WorkflowStateManager(projectRoot);
-  return await stateManager.exists();
-}
-
-/**
- * Initialize WorkflowOrchestrator with persistence
- */
-export function createOrchestrator(
-  context: WorkflowContext,
-  projectRoot: string
-): WorkflowOrchestrator {
-  const orchestrator = new WorkflowOrchestrator(context);
-  const stateManager = new WorkflowStateManager(projectRoot);
-
-  // Enable auto-persistence
-  orchestrator.enableAutoPersist(async (state: WorkflowState) => {
-    await stateManager.save(state);
-  });
-
-  return orchestrator;
-}
-
-/**
- * Initialize GitAdapter for project
- */
-export function createGitAdapter(projectRoot: string): GitAdapter {
-  return new GitAdapter(projectRoot);
-}
-
-/**
- * Initialize CommitMessageGenerator
- */
-export function createCommitMessageGenerator(): CommitMessageGenerator {
-  return new CommitMessageGenerator();
-}
-
 /**
  * Output formatter for JSON and text modes
  */
@@ -231,32 +137,3 @@ export class OutputFormatter {
    console.log(chalk.blue(`ℹ ${message}`));
  }
 }
-
-/**
- * Validate task ID format
- */
-export function validateTaskId(taskId: string): boolean {
-  // Task ID should be in format: number or number.number (e.g., "1" or "1.2")
-  const pattern = /^\d+(\.\d+)*$/;
-  return pattern.test(taskId);
-}
-
-/**
- * Parse subtasks from task data
- */
-export function parseSubtasks(
-  task: any,
-  maxAttempts: number = 3
-): SubtaskInfo[] {
-  if (!task.subtasks || !Array.isArray(task.subtasks)) {
-    return [];
-  }
-
-  return task.subtasks.map((subtask: any) => ({
-    id: subtask.id,
-    title: subtask.title,
-    status: subtask.status === 'done' ? 'completed' : 'pending',
-    attempts: 0,
-    maxAttempts
-  }));
-}
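The helper functions removed above (`loadWorkflowState`, `saveWorkflowState`, `deleteWorkflowState`, `hasWorkflowState`, `createOrchestrator`, and related factories) are superseded by the `tmCore.workflow` facade used throughout this commit. Below is a condensed sketch of the shared pattern, assembled from the command diffs in this commit rather than copied verbatim from the repository; the wrapper function name is illustrative only.

```typescript
import { createTmCore } from '@tm/core';

// Condensed from the autopilot command diffs above; option parsing and
// output formatting are omitted.
async function withWorkflowStatus(projectRoot: string) {
  const tmCore = await createTmCore({ projectPath: projectRoot });

  if (!(await tmCore.workflow.hasWorkflow())) {
    throw new Error('No active workflow');
  }

  await tmCore.workflow.resume();
  // Status exposes taskId, phase, tddPhase, branchName, progress, currentSubtask.
  return tmCore.workflow.getStatus();
}
```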
apps/cli/src/commands/autopilot/start.command.ts

@@ -2,18 +2,10 @@
  * @fileoverview Start Command - Initialize and start TDD workflow
  */
 
-import { type WorkflowContext, createTmCore } from '@tm/core';
+import { createTmCore, MainTaskIdSchema } from '@tm/core';
 import { Command } from 'commander';
 import { getProjectRoot } from '../../utils/project-root.js';
-import {
-  AutopilotBaseOptions,
-  OutputFormatter,
-  createGitAdapter,
-  createOrchestrator,
-  hasWorkflowState,
-  parseSubtasks,
-  validateTaskId
-} from './shared.js';
+import { AutopilotBaseOptions, OutputFormatter } from './shared.js';
 
 interface StartOptions extends AutopilotBaseOptions {
   force?: boolean;
@@ -36,7 +28,10 @@ export class StartCommand extends Command {
    });
  }
 
-  private async execute(taskId: string, options: StartOptions): Promise<void> {
+  private async execute(
+    rawTaskId: string,
+    options: StartOptions
+  ): Promise<void> {
    // Inherit parent options
    const parentOpts = this.parent?.opts() as AutopilotBaseOptions;
    const mergedOptions: StartOptions = {
@@ -50,17 +45,24 @@ export class StartCommand extends Command {
    const formatter = new OutputFormatter(mergedOptions.json || false);
 
    try {
-      // Validate task ID
-      if (!validateTaskId(taskId)) {
+      // Validate and normalize task ID
+      const parseResult = MainTaskIdSchema.safeParse(rawTaskId);
+      if (!parseResult.success) {
        formatter.error('Invalid task ID format', {
-          taskId,
-          expected: 'Format: number or number.number (e.g., "1" or "1.2")'
+          taskId: rawTaskId,
+          error: parseResult.error.issues[0]?.message
        });
        process.exit(1);
      }
+      const taskId = parseResult.data;
+
+      const projectRoot = mergedOptions.projectRoot!;
+
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
 
      // Check for existing workflow state
-      const hasState = await hasWorkflowState(mergedOptions.projectRoot!);
+      const hasState = await tmCore.workflow.hasWorkflow();
      if (hasState && !mergedOptions.force) {
        formatter.error(
          'Workflow state already exists. Use --force to overwrite or resume with "autopilot resume"'
@@ -68,14 +70,13 @@ export class StartCommand extends Command {
        process.exit(1);
      }
 
-      // Initialize Task Master Core
-      const tmCore = await createTmCore({
-        projectPath: mergedOptions.projectRoot!
-      });
-
      // Get current tag from ConfigManager
      const currentTag = tmCore.config.getActiveTag();
 
+      // Get org slug from auth context (for API storage mode)
+      const authContext = tmCore.auth.getContext();
+      const orgSlug = authContext?.orgSlug;
+
      // Load task
      formatter.info(`Loading task ${taskId}...`);
      const { task } = await tmCore.tasks.get(taskId);
@@ -94,68 +95,36 @@ export class StartCommand extends Command {
        process.exit(1);
      }
 
-      // Initialize Git adapter
-      const gitAdapter = createGitAdapter(mergedOptions.projectRoot!);
-      await gitAdapter.ensureGitRepository();
-      await gitAdapter.ensureCleanWorkingTree();
-
-      // Parse subtasks
+      // Parse max attempts
      const maxAttempts = parseInt(mergedOptions.maxAttempts || '3', 10);
-      const subtasks = parseSubtasks(task, maxAttempts);
 
-      // Create workflow context
-      const context: WorkflowContext = {
-        taskId: task.id,
-        subtasks,
-        currentSubtaskIndex: 0,
-        errors: [],
-        metadata: {
-          startedAt: new Date().toISOString(),
-          tags: task.tags || []
-        }
-      };
-
-      // Create orchestrator with persistence
-      const orchestrator = createOrchestrator(
-        context,
-        mergedOptions.projectRoot!
-      );
-
-      // Complete PREFLIGHT phase
-      orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
-
-      // Generate descriptive branch name
-      const sanitizedTitle = task.title
-        .toLowerCase()
-        .replace(/[^a-z0-9]+/g, '-')
-        .replace(/^-+|-+$/g, '')
-        .substring(0, 50);
-      const formattedTaskId = taskId.replace(/\./g, '-');
-      const tagPrefix = currentTag ? `${currentTag}/` : '';
-      const branchName = `${tagPrefix}task-${formattedTaskId}-${sanitizedTitle}`;
-
-      // Create and checkout branch
-      formatter.info(`Creating branch: ${branchName}`);
-      await gitAdapter.createAndCheckoutBranch(branchName);
-
-      // Transition to SUBTASK_LOOP
-      orchestrator.transition({
-        type: 'BRANCH_CREATED',
-        branchName
+      // Start workflow via tmCore facade (handles git, orchestrator, status updates internally)
+      formatter.info('Starting TDD workflow...');
+      const status = await tmCore.workflow.start({
+        taskId,
+        taskTitle: task.title,
+        subtasks: task.subtasks.map((st: any) => ({
+          id: st.id,
+          title: st.title,
+          status: st.status,
+          maxAttempts
+        })),
+        maxAttempts,
+        force: mergedOptions.force,
+        tag: currentTag,
+        orgSlug
      });
 
      // Output success
      formatter.success('TDD workflow started', {
-        taskId: task.id,
+        taskId: status.taskId,
        title: task.title,
-        phase: orchestrator.getCurrentPhase(),
-        tddPhase: orchestrator.getCurrentTDDPhase(),
-        branchName,
-        subtasks: subtasks.length,
-        currentSubtask: subtasks[0]?.title
+        phase: status.phase,
+        tddPhase: status.tddPhase,
+        branchName: status.branchName,
+        subtasks: task.subtasks.length,
+        currentSubtask: status.currentSubtask?.title
      });
 
-      // Clean up
    } catch (error) {
      formatter.error((error as Error).message);
      if (mergedOptions.verbose) {
apps/cli/src/commands/autopilot/status.command.ts

@@ -2,15 +2,10 @@
  * @fileoverview Status Command - Show workflow progress
  */
 
-import { WorkflowOrchestrator } from '@tm/core';
+import { createTmCore } from '@tm/core';
 import { Command } from 'commander';
 import { getProjectRoot } from '../../utils/project-root.js';
-import {
-  AutopilotBaseOptions,
-  OutputFormatter,
-  hasWorkflowState,
-  loadWorkflowState
-} from './shared.js';
+import { AutopilotBaseOptions, OutputFormatter } from './shared.js';
 
 type StatusOptions = AutopilotBaseOptions;
 
@@ -42,62 +37,56 @@ export class StatusCommand extends Command {
    const formatter = new OutputFormatter(mergedOptions.json || false);
 
    try {
-      // Check for workflow state
-      const hasState = await hasWorkflowState(mergedOptions.projectRoot!);
-      if (!hasState) {
-        formatter.error('No active workflow', {
-          suggestion: 'Start a workflow with: autopilot start <taskId>'
+      const projectRoot = mergedOptions.projectRoot!;
+
+      // Initialize TmCore facade
+      const tmCore = await createTmCore({ projectPath: projectRoot });
+
+      // Check if workflow exists
+      if (!(await tmCore.workflow.hasWorkflow())) {
+        if (mergedOptions.json) {
+          formatter.output({
+            active: false,
+            message: 'No active workflow'
          });
-        process.exit(1);
+        } else {
+          formatter.info('No active workflow');
+          console.log('Start a workflow with: autopilot start <taskId>');
+        }
+        return;
      }
 
-      // Load state
-      const state = await loadWorkflowState(mergedOptions.projectRoot!);
-      if (!state) {
-        formatter.error('Failed to load workflow state');
-        process.exit(1);
-      }
-
-      // Restore orchestrator
-      const orchestrator = new WorkflowOrchestrator(state.context);
-      orchestrator.restoreState(state);
-
-      // Get status information
-      const phase = orchestrator.getCurrentPhase();
-      const tddPhase = orchestrator.getCurrentTDDPhase();
-      const progress = orchestrator.getProgress();
-      const currentSubtask = orchestrator.getCurrentSubtask();
-      const errors = state.context.errors ?? [];
+      // Resume workflow and get status
+      await tmCore.workflow.resume();
+      const workflowStatus = tmCore.workflow.getStatus();
+      const context = tmCore.workflow.getContext();
 
      // Build status output
      const status = {
-        taskId: state.context.taskId,
-        phase,
-        tddPhase,
-        branchName: state.context.branchName,
-        progress: {
-          completed: progress.completed,
-          total: progress.total,
-          current: progress.current,
-          percentage: progress.percentage
-        },
-        currentSubtask: currentSubtask
+        taskId: workflowStatus.taskId,
+        phase: workflowStatus.phase,
+        tddPhase: workflowStatus.tddPhase,
+        branchName: workflowStatus.branchName,
+        progress: workflowStatus.progress,
+        currentSubtask: workflowStatus.currentSubtask
          ? {
-              id: currentSubtask.id,
-              title: currentSubtask.title,
-              status: currentSubtask.status,
-              attempts: currentSubtask.attempts,
-              maxAttempts: currentSubtask.maxAttempts
+              id: workflowStatus.currentSubtask.id,
+              title: workflowStatus.currentSubtask.title,
+              attempts: workflowStatus.currentSubtask.attempts,
+              maxAttempts: workflowStatus.currentSubtask.maxAttempts
            }
          : null,
-        subtasks: state.context.subtasks.map((st) => ({
+        subtasks: context.subtasks.map((st) => ({
          id: st.id,
          title: st.title,
          status: st.status,
          attempts: st.attempts
        })),
-        errors: errors.length > 0 ? errors : undefined,
-        metadata: state.context.metadata
+        errors:
+          context.errors && context.errors.length > 0
+            ? context.errors
+            : undefined,
+        metadata: context.metadata
      };
 
      if (mergedOptions.json) {
set-status.command.ts

@@ -7,7 +7,7 @@ import {
   type TaskStatus,
   type TmCore,
   createTmCore,
-  normalizeDisplayId
+  TaskIdSchema
 } from '@tm/core';
 import type { StorageType } from '@tm/core';
 import boxen from 'boxen';
@@ -150,11 +150,21 @@ export class SetStatusCommand extends Command {
      projectPath: getProjectRoot(options.project)
    });
 
-    // Parse task IDs (handle comma-separated values)
-    // Normalize display IDs (e.g., "ham31" → "HAM-31")
-    const taskIds = options.id
-      .split(',')
-      .map((id) => normalizeDisplayId(id.trim()));
+    // Parse and validate task IDs (handle comma-separated values)
+    const rawIds = options.id.split(',').map((id) => id.trim());
+    const taskIds: string[] = [];
+    for (const rawId of rawIds) {
+      const parseResult = TaskIdSchema.safeParse(rawId);
+      if (!parseResult.success) {
+        console.error(
+          chalk.red(`Invalid task ID: ${rawId}`),
+          chalk.gray(`- ${parseResult.error.issues[0]?.message}`)
+        );
+        process.exit(1);
+      }
+      taskIds.push(parseResult.data);
+    }
 
    // Update each task
    const updatedTasks: Array<{
show.command.ts

@@ -3,12 +3,7 @@
  * Extends Commander.Command for better integration with the framework
  */
 
-import {
-  type Task,
-  type TmCore,
-  createTmCore,
-  normalizeDisplayId
-} from '@tm/core';
+import { type Task, type TmCore, createTmCore, TaskIdSchema } from '@tm/core';
 import type { StorageType, Subtask } from '@tm/core';
 import boxen from 'boxen';
 import chalk from 'chalk';
@@ -106,12 +101,24 @@ export class ShowCommand extends Command {
      process.exit(1);
    }
 
-    // Check if multiple IDs are provided (comma-separated)
-    // Normalize display IDs (e.g., "ham31" → "HAM-31")
-    const taskIds = idArg
+    // Parse and validate task IDs (handle comma-separated values)
+    const rawIds = idArg
      .split(',')
-      .map((id) => normalizeDisplayId(id.trim()))
+      .map((id) => id.trim())
      .filter((id) => id.length > 0);
+    const taskIds: string[] = [];
+
+    for (const rawId of rawIds) {
+      const parseResult = TaskIdSchema.safeParse(rawId);
+      if (!parseResult.success) {
+        console.error(
+          chalk.red(`Invalid task ID: ${rawId}`),
+          chalk.gray(`- ${parseResult.error.issues[0]?.message}`)
+        );
+        process.exit(1);
+      }
+      taskIds.push(parseResult.data);
+    }
 
    // Get tasks from core
    const result =
Deleted file: integration tests for autopilot workflow commands (540 lines removed; the view below is cut off partway through)

@@ -1,540 +0,0 @@
-/**
- * @fileoverview Integration tests for autopilot workflow commands
- */
-
-import type { WorkflowState } from '@tm/core';
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
-
-// Track file system state in memory - must be in vi.hoisted() for mock access
-const {
-  mockFileSystem,
-  pathExistsFn,
-  readJSONFn,
-  writeJSONFn,
-  ensureDirFn,
-  removeFn
-} = vi.hoisted(() => {
-  const mockFileSystem = new Map<string, string>();
-
-  return {
-    mockFileSystem,
-    pathExistsFn: vi.fn((path: string) =>
-      Promise.resolve(mockFileSystem.has(path))
-    ),
-    readJSONFn: vi.fn((path: string) => {
-      const data = mockFileSystem.get(path);
-      return data
-        ? Promise.resolve(JSON.parse(data))
-        : Promise.reject(new Error('File not found'));
-    }),
-    writeJSONFn: vi.fn((path: string, data: any) => {
-      mockFileSystem.set(path, JSON.stringify(data));
-      return Promise.resolve();
-    }),
-    ensureDirFn: vi.fn(() => Promise.resolve()),
-    removeFn: vi.fn((path: string) => {
-      mockFileSystem.delete(path);
-      return Promise.resolve();
-    })
-  };
-});
-
-// Mock fs-extra before any imports
-vi.mock('fs-extra', () => ({
-  default: {
-    pathExists: pathExistsFn,
-    readJSON: readJSONFn,
-    writeJSON: writeJSONFn,
-    ensureDir: ensureDirFn,
-    remove: removeFn
-  }
-}));
-
-vi.mock('@tm/core', () => ({
-  WorkflowOrchestrator: vi.fn().mockImplementation((context) => ({
-    getCurrentPhase: vi.fn().mockReturnValue('SUBTASK_LOOP'),
-    getCurrentTDDPhase: vi.fn().mockReturnValue('RED'),
-    getContext: vi.fn().mockReturnValue(context),
-    transition: vi.fn(),
-    restoreState: vi.fn(),
-    getState: vi.fn().mockReturnValue({ phase: 'SUBTASK_LOOP', context }),
-    enableAutoPersist: vi.fn(),
-    canResumeFromState: vi.fn().mockReturnValue(true),
-    getCurrentSubtask: vi.fn().mockReturnValue({
-      id: '1',
-      title: 'Test Subtask',
-      status: 'pending',
-      attempts: 0
-    }),
-    getProgress: vi.fn().mockReturnValue({
-      completed: 0,
-      total: 3,
-      current: 1,
-      percentage: 0
-    }),
-    canProceed: vi.fn().mockReturnValue(false)
-  })),
-  GitAdapter: vi.fn().mockImplementation(() => ({
-    ensureGitRepository: vi.fn().mockResolvedValue(undefined),
-    ensureCleanWorkingTree: vi.fn().mockResolvedValue(undefined),
-    createAndCheckoutBranch: vi.fn().mockResolvedValue(undefined),
-    hasStagedChanges: vi.fn().mockResolvedValue(true),
-    getStatus: vi.fn().mockResolvedValue({
-      staged: ['file1.ts'],
-      modified: ['file2.ts']
-    }),
-    createCommit: vi.fn().mockResolvedValue(undefined),
-    getLastCommit: vi.fn().mockResolvedValue({
-      hash: 'abc123def456',
-      message: 'test commit'
-    }),
-    stageFiles: vi.fn().mockResolvedValue(undefined)
-  })),
-  CommitMessageGenerator: vi.fn().mockImplementation(() => ({
-    generateMessage: vi.fn().mockReturnValue('feat: test commit message')
-  })),
-  createTaskMasterCore: vi.fn().mockResolvedValue({
-    getTaskWithSubtask: vi.fn().mockResolvedValue({
-      task: {
-        id: '1',
-        title: 'Test Task',
-        subtasks: [
-          { id: '1', title: 'Subtask 1', status: 'pending' },
-          { id: '2', title: 'Subtask 2', status: 'pending' },
-          { id: '3', title: 'Subtask 3', status: 'pending' }
-        ],
-        tag: 'test'
-      }
-    }),
-    close: vi.fn().mockResolvedValue(undefined)
-  })
-}));
-
-// Import after mocks are set up
-import { Command } from 'commander';
-import { AutopilotCommand } from '../../../../src/commands/autopilot/index.js';
-
-describe('Autopilot Workflow Integration Tests', () => {
-  const projectRoot = '/test/project';
-  let program: Command;
-
-  beforeEach(() => {
-    mockFileSystem.clear();
-
-    // Clear mock call history
-    pathExistsFn.mockClear();
-    readJSONFn.mockClear();
-    writeJSONFn.mockClear();
-    ensureDirFn.mockClear();
-    removeFn.mockClear();
-
-    program = new Command();
-    AutopilotCommand.register(program);
-
-    // Use exitOverride to handle Commander exits in tests
-    program.exitOverride();
-  });
-
-  afterEach(() => {
-    mockFileSystem.clear();
-    vi.restoreAllMocks();
-  });
-
-  describe('start command', () => {
-    it('should initialize workflow and create branch', async () => {
-      const consoleLogSpy = vi
-        .spyOn(console, 'log')
-        .mockImplementation(() => {});
-
-      await program.parseAsync([
-        'node',
-        'test',
-        'autopilot',
-        'start',
-        '1',
-        '--project-root',
-        projectRoot,
-        '--json'
-      ]);
-
-      // Verify writeJSON was called with state
-      expect(writeJSONFn).toHaveBeenCalledWith(
-        expect.stringContaining('workflow-state.json'),
-        expect.objectContaining({
-          phase: expect.any(String),
-          context: expect.any(Object)
-        }),
-        expect.any(Object)
-      );
-
-      consoleLogSpy.mockRestore();
-    });
-
-    it('should reject invalid task ID', async () => {
-      const consoleErrorSpy = vi
-        .spyOn(console, 'error')
-        .mockImplementation(() => {});
-
-      await expect(
-        program.parseAsync([
-          'node',
-          'test',
-          'autopilot',
-          'start',
-          'invalid',
-          '--project-root',
-          projectRoot,
-          '--json'
-        ])
-      ).rejects.toMatchObject({ exitCode: 1 });
-
-      consoleErrorSpy.mockRestore();
-    });
-
-    it('should reject starting when workflow exists without force', async () => {
-      // Create existing state
-      const mockState: WorkflowState = {
-        phase: 'SUBTASK_LOOP',
-        context: {
-          taskId: '1',
-          subtasks: [],
-          currentSubtaskIndex: 0,
-          errors: [],
-          metadata: {}
-        }
-      };
-
-      mockFileSystem.set(
-        `${projectRoot}/.taskmaster/workflow-state.json`,
-        JSON.stringify(mockState)
-      );
-
-      const consoleErrorSpy = vi
-        .spyOn(console, 'error')
-        .mockImplementation(() => {});
-
-      await expect(
-        program.parseAsync([
-          'node',
-          'test',
-          'autopilot',
-          'start',
-          '1',
-          '--project-root',
-          projectRoot,
-          '--json'
-        ])
-      ).rejects.toMatchObject({ exitCode: 1 });
-
-      consoleErrorSpy.mockRestore();
-    });
-  });
-
-  describe('resume command', () => {
-    beforeEach(() => {
-      // Create saved state
-      const mockState: WorkflowState = {
-        phase: 'SUBTASK_LOOP',
-        context: {
-          taskId: '1',
-          subtasks: [
-            {
-              id: '1',
-              title: 'Test Subtask',
-              status: 'pending',
-              attempts: 0
-            }
-          ],
-          currentSubtaskIndex: 0,
-          currentTDDPhase: 'RED',
-          branchName: 'task-1',
-          errors: [],
-          metadata: {}
-        }
-      };
-
-      mockFileSystem.set(
-        `${projectRoot}/.taskmaster/workflow-state.json`,
-        JSON.stringify(mockState)
-      );
-    });
-
-    it('should restore workflow from saved state', async () => {
-      const consoleLogSpy = vi
-        .spyOn(console, 'log')
-        .mockImplementation(() => {});
-
-      await program.parseAsync([
-        'node',
-        'test',
-        'autopilot',
-        'resume',
-        '--project-root',
-        projectRoot,
-        '--json'
-      ]);
-
-      expect(consoleLogSpy).toHaveBeenCalled();
-      const output = JSON.parse(consoleLogSpy.mock.calls[0][0]);
-      expect(output.success).toBe(true);
-      expect(output.taskId).toBe('1');
-
-      consoleLogSpy.mockRestore();
-    });
-
-    it('should error when no state exists', async () => {
-      mockFileSystem.clear();
-
-      const consoleErrorSpy = vi
-        .spyOn(console, 'error')
-        .mockImplementation(() => {});
-
-      await expect(
-        program.parseAsync([
-          'node',
-          'test',
-          'autopilot',
-          'resume',
-          '--project-root',
-          projectRoot,
-          '--json'
-        ])
-      ).rejects.toMatchObject({ exitCode: 1 });
-
-      consoleErrorSpy.mockRestore();
-    });
-  });
-
-  describe('next command', () => {
-    beforeEach(() => {
-      const mockState: WorkflowState = {
-        phase: 'SUBTASK_LOOP',
-        context: {
-          taskId: '1',
-          subtasks: [
-            {
-              id: '1',
-              title: 'Test Subtask',
-              status: 'pending',
-              attempts: 0
-            }
-          ],
-          currentSubtaskIndex: 0,
-          currentTDDPhase: 'RED',
-          branchName: 'task-1',
-          errors: [],
-          metadata: {}
-        }
-      };
-
-      mockFileSystem.set(
-        `${projectRoot}/.taskmaster/workflow-state.json`,
-        JSON.stringify(mockState)
-      );
-    });
-
-    it('should return next action in JSON format', async () => {
-      const consoleLogSpy = vi
-        .spyOn(console, 'log')
-        .mockImplementation(() => {});
-
-      await program.parseAsync([
-        'node',
-        'test',
-        'autopilot',
-        'next',
-        '--project-root',
-        projectRoot,
-        '--json'
-      ]);
-
-      expect(consoleLogSpy).toHaveBeenCalled();
-      const output = JSON.parse(consoleLogSpy.mock.calls[0][0]);
-      expect(output.action).toBe('generate_test');
-      expect(output.phase).toBe('SUBTASK_LOOP');
-      expect(output.tddPhase).toBe('RED');
-
-      consoleLogSpy.mockRestore();
-    });
-  });
-
-  describe('status command', () => {
-    beforeEach(() => {
-      const mockState: WorkflowState = {
-        phase: 'SUBTASK_LOOP',
-        context: {
-          taskId: '1',
-          subtasks: [
|
|
||||||
{ id: '1', title: 'Subtask 1', status: 'completed', attempts: 1 },
|
|
||||||
{ id: '2', title: 'Subtask 2', status: 'pending', attempts: 0 },
|
|
||||||
{ id: '3', title: 'Subtask 3', status: 'pending', attempts: 0 }
|
|
||||||
],
|
|
||||||
currentSubtaskIndex: 1,
|
|
||||||
currentTDDPhase: 'RED',
|
|
||||||
branchName: 'task-1',
|
|
||||||
errors: [],
|
|
||||||
metadata: {}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
mockFileSystem.set(
|
|
||||||
`${projectRoot}/.taskmaster/workflow-state.json`,
|
|
||||||
JSON.stringify(mockState)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should display workflow progress', async () => {
|
|
||||||
const consoleLogSpy = vi
|
|
||||||
.spyOn(console, 'log')
|
|
||||||
.mockImplementation(() => {});
|
|
||||||
|
|
||||||
await program.parseAsync([
|
|
||||||
'node',
|
|
||||||
'test',
|
|
||||||
'autopilot',
|
|
||||||
'status',
|
|
||||||
'--project-root',
|
|
||||||
projectRoot,
|
|
||||||
'--json'
|
|
||||||
]);
|
|
||||||
|
|
||||||
expect(consoleLogSpy).toHaveBeenCalled();
|
|
||||||
const output = JSON.parse(consoleLogSpy.mock.calls[0][0]);
|
|
||||||
expect(output.taskId).toBe('1');
|
|
||||||
expect(output.phase).toBe('SUBTASK_LOOP');
|
|
||||||
expect(output.progress).toBeDefined();
|
|
||||||
expect(output.subtasks).toHaveLength(3);
|
|
||||||
|
|
||||||
consoleLogSpy.mockRestore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('complete command', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
const mockState: WorkflowState = {
|
|
||||||
phase: 'SUBTASK_LOOP',
|
|
||||||
context: {
|
|
||||||
taskId: '1',
|
|
||||||
subtasks: [
|
|
||||||
{
|
|
||||||
id: '1',
|
|
||||||
title: 'Test Subtask',
|
|
||||||
status: 'in-progress',
|
|
||||||
attempts: 0
|
|
||||||
}
|
|
||||||
],
|
|
||||||
currentSubtaskIndex: 0,
|
|
||||||
currentTDDPhase: 'RED',
|
|
||||||
branchName: 'task-1',
|
|
||||||
errors: [],
|
|
||||||
metadata: {}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
mockFileSystem.set(
|
|
||||||
`${projectRoot}/.taskmaster/workflow-state.json`,
|
|
||||||
JSON.stringify(mockState)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should validate RED phase has failures', async () => {
|
|
||||||
const consoleErrorSpy = vi
|
|
||||||
.spyOn(console, 'error')
|
|
||||||
.mockImplementation(() => {});
|
|
||||||
|
|
||||||
await expect(
|
|
||||||
program.parseAsync([
|
|
||||||
'node',
|
|
||||||
'test',
|
|
||||||
'autopilot',
|
|
||||||
'complete',
|
|
||||||
'--project-root',
|
|
||||||
projectRoot,
|
|
||||||
'--results',
|
|
||||||
'{"total":10,"passed":10,"failed":0,"skipped":0}',
|
|
||||||
'--json'
|
|
||||||
])
|
|
||||||
).rejects.toMatchObject({ exitCode: 1 });
|
|
||||||
|
|
||||||
consoleErrorSpy.mockRestore();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should complete RED phase with failures', async () => {
|
|
||||||
const consoleLogSpy = vi
|
|
||||||
.spyOn(console, 'log')
|
|
||||||
.mockImplementation(() => {});
|
|
||||||
|
|
||||||
await program.parseAsync([
|
|
||||||
'node',
|
|
||||||
'test',
|
|
||||||
'autopilot',
|
|
||||||
'complete',
|
|
||||||
'--project-root',
|
|
||||||
projectRoot,
|
|
||||||
'--results',
|
|
||||||
'{"total":10,"passed":9,"failed":1,"skipped":0}',
|
|
||||||
'--json'
|
|
||||||
]);
|
|
||||||
|
|
||||||
expect(consoleLogSpy).toHaveBeenCalled();
|
|
||||||
const output = JSON.parse(consoleLogSpy.mock.calls[0][0]);
|
|
||||||
expect(output.success).toBe(true);
|
|
||||||
expect(output.nextPhase).toBe('GREEN');
|
|
||||||
|
|
||||||
consoleLogSpy.mockRestore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('abort command', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
const mockState: WorkflowState = {
|
|
||||||
phase: 'SUBTASK_LOOP',
|
|
||||||
context: {
|
|
||||||
taskId: '1',
|
|
||||||
subtasks: [
|
|
||||||
{
|
|
||||||
id: '1',
|
|
||||||
title: 'Test Subtask',
|
|
||||||
status: 'pending',
|
|
||||||
attempts: 0
|
|
||||||
}
|
|
||||||
],
|
|
||||||
currentSubtaskIndex: 0,
|
|
||||||
currentTDDPhase: 'RED',
|
|
||||||
branchName: 'task-1',
|
|
||||||
errors: [],
|
|
||||||
metadata: {}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
mockFileSystem.set(
|
|
||||||
`${projectRoot}/.taskmaster/workflow-state.json`,
|
|
||||||
JSON.stringify(mockState)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should abort workflow and delete state', async () => {
|
|
||||||
const consoleLogSpy = vi
|
|
||||||
.spyOn(console, 'log')
|
|
||||||
.mockImplementation(() => {});
|
|
||||||
|
|
||||||
await program.parseAsync([
|
|
||||||
'node',
|
|
||||||
'test',
|
|
||||||
'autopilot',
|
|
||||||
'abort',
|
|
||||||
'--project-root',
|
|
||||||
projectRoot,
|
|
||||||
'--force',
|
|
||||||
'--json'
|
|
||||||
]);
|
|
||||||
|
|
||||||
// Verify remove was called
|
|
||||||
expect(removeFn).toHaveBeenCalledWith(
|
|
||||||
expect.stringContaining('workflow-state.json')
|
|
||||||
);
|
|
||||||
|
|
||||||
consoleLogSpy.mockRestore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
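The integration tests above rely on Commander's `exitOverride()` so that CLI failures surface as promise rejections instead of terminating the test runner. A minimal, self-contained sketch of that pattern, assuming a hypothetical `demo` program with one required argument (not part of the Task Master CLI):

```ts
import { Command } from 'commander';
import { describe, expect, it } from 'vitest';

describe('exitOverride pattern (sketch)', () => {
	it('turns a Commander-initiated exit into a rejection', async () => {
		const program = new Command();
		program
			.name('demo')
			.argument('<id>', 'hypothetical required argument')
			.action(() => {});
		// Without exitOverride, Commander would call process.exit() and stop the test runner.
		// Commander still prints the error message to stderr, so tests may also spy console.error.
		program.exitOverride();

		await expect(
			program.parseAsync(['node', 'demo']) // missing the required <id>
		).rejects.toMatchObject({ exitCode: 1 });
	});
});
```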
@@ -3,27 +3,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||||
import {
|
import { OutputFormatter } from '../../../../src/commands/autopilot/shared.js';
|
||||||
OutputFormatter,
|
|
||||||
parseSubtasks,
|
|
||||||
validateTaskId
|
|
||||||
} from '../../../../src/commands/autopilot/shared.js';
|
|
||||||
|
|
||||||
// Mock fs-extra
|
|
||||||
vi.mock('fs-extra', () => ({
|
|
||||||
default: {
|
|
||||||
pathExists: vi.fn(),
|
|
||||||
readJSON: vi.fn(),
|
|
||||||
writeJSON: vi.fn(),
|
|
||||||
ensureDir: vi.fn(),
|
|
||||||
remove: vi.fn()
|
|
||||||
},
|
|
||||||
pathExists: vi.fn(),
|
|
||||||
readJSON: vi.fn(),
|
|
||||||
writeJSON: vi.fn(),
|
|
||||||
ensureDir: vi.fn(),
|
|
||||||
remove: vi.fn()
|
|
||||||
}));
|
|
||||||
|
|
||||||
describe('Autopilot Shared Utilities', () => {
|
describe('Autopilot Shared Utilities', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
@@ -34,82 +14,6 @@ describe('Autopilot Shared Utilities', () => {
|
|||||||
vi.restoreAllMocks();
|
vi.restoreAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('validateTaskId', () => {
|
|
||||||
it('should validate simple task IDs', () => {
|
|
||||||
expect(validateTaskId('1')).toBe(true);
|
|
||||||
expect(validateTaskId('10')).toBe(true);
|
|
||||||
expect(validateTaskId('999')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should validate subtask IDs', () => {
|
|
||||||
expect(validateTaskId('1.1')).toBe(true);
|
|
||||||
expect(validateTaskId('1.2')).toBe(true);
|
|
||||||
expect(validateTaskId('10.5')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should validate nested subtask IDs', () => {
|
|
||||||
expect(validateTaskId('1.1.1')).toBe(true);
|
|
||||||
expect(validateTaskId('1.2.3')).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should reject invalid formats', () => {
|
|
||||||
expect(validateTaskId('')).toBe(false);
|
|
||||||
expect(validateTaskId('abc')).toBe(false);
|
|
||||||
expect(validateTaskId('1.')).toBe(false);
|
|
||||||
expect(validateTaskId('.1')).toBe(false);
|
|
||||||
expect(validateTaskId('1..2')).toBe(false);
|
|
||||||
expect(validateTaskId('1.2.3.')).toBe(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('parseSubtasks', () => {
|
|
||||||
it('should parse subtasks from task data', () => {
|
|
||||||
const task = {
|
|
||||||
id: '1',
|
|
||||||
title: 'Test Task',
|
|
||||||
subtasks: [
|
|
||||||
{ id: '1', title: 'Subtask 1', status: 'pending' },
|
|
||||||
{ id: '2', title: 'Subtask 2', status: 'done' },
|
|
||||||
{ id: '3', title: 'Subtask 3', status: 'in-progress' }
|
|
||||||
]
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = parseSubtasks(task, 5);
|
|
||||||
|
|
||||||
expect(result).toHaveLength(3);
|
|
||||||
expect(result[0]).toEqual({
|
|
||||||
id: '1',
|
|
||||||
title: 'Subtask 1',
|
|
||||||
status: 'pending',
|
|
||||||
attempts: 0,
|
|
||||||
maxAttempts: 5
|
|
||||||
});
|
|
||||||
expect(result[1]).toEqual({
|
|
||||||
id: '2',
|
|
||||||
title: 'Subtask 2',
|
|
||||||
status: 'completed',
|
|
||||||
attempts: 0,
|
|
||||||
maxAttempts: 5
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return empty array for missing subtasks', () => {
|
|
||||||
const task = { id: '1', title: 'Test Task' };
|
|
||||||
expect(parseSubtasks(task)).toEqual([]);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should use default maxAttempts', () => {
|
|
||||||
const task = {
|
|
||||||
subtasks: [{ id: '1', title: 'Subtask 1', status: 'pending' }]
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = parseSubtasks(task);
|
|
||||||
expect(result[0].maxAttempts).toBe(3);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// State persistence tests omitted - covered in integration tests
|
|
||||||
|
|
||||||
describe('OutputFormatter', () => {
|
describe('OutputFormatter', () => {
|
||||||
let consoleLogSpy: any;
|
let consoleLogSpy: any;
|
||||||
let consoleErrorSpy: any;
|
let consoleErrorSpy: any;
|
||||||
@@ -183,11 +87,22 @@ describe('Autopilot Shared Utilities', () => {
|
|||||||
formatter.warning('Warning message');
|
formatter.warning('Warning message');
|
||||||
|
|
||||||
expect(consoleWarnSpy).toHaveBeenCalledWith(
|
expect(consoleWarnSpy).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('⚠ Warning message')
|
expect.stringContaining('⚠️ Warning message')
|
||||||
);
|
);
|
||||||
consoleWarnSpy.mockRestore();
|
consoleWarnSpy.mockRestore();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should output formatted text for info', () => {
|
||||||
|
const formatter = new OutputFormatter(false);
|
||||||
|
formatter.info('Info message');
|
||||||
|
|
||||||
|
expect(consoleLogSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('ℹ Info message')
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('info suppression', () => {
|
||||||
it('should not output info in JSON mode', () => {
|
it('should not output info in JSON mode', () => {
|
||||||
const formatter = new OutputFormatter(true);
|
const formatter = new OutputFormatter(true);
|
||||||
formatter.info('Info message');
|
formatter.info('Info message');
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
* Abort a running TDD workflow and clean up state
|
* Abort a running TDD workflow and clean up state
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -28,16 +27,14 @@ export function registerAutopilotAbortTool(server: FastMCP) {
|
|||||||
parameters: AbortSchema,
|
parameters: AbortSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-abort',
|
'autopilot-abort',
|
||||||
async (args: AbortArgs, { log }: ToolContext) => {
|
async (args: AbortArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot } = args;
|
const { projectRoot } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Aborting autopilot workflow in ${projectRoot}`);
|
log.info(`Aborting autopilot workflow in ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
const hasWorkflow = await workflowService.hasWorkflow();
|
const hasWorkflow = await tmCore.workflow.hasWorkflow();
|
||||||
|
|
||||||
if (!hasWorkflow) {
|
if (!hasWorkflow) {
|
||||||
log.warn('No active workflow to abort');
|
log.warn('No active workflow to abort');
|
||||||
@@ -55,11 +52,11 @@ export function registerAutopilotAbortTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get info before aborting
|
// Get info before aborting
|
||||||
await workflowService.resumeWorkflow();
|
await tmCore.workflow.resume();
|
||||||
const status = workflowService.getStatus();
|
const status = tmCore.workflow.getStatus();
|
||||||
|
|
||||||
// Abort workflow
|
// Abort workflow
|
||||||
await workflowService.abortWorkflow();
|
await tmCore.workflow.abort();
|
||||||
|
|
||||||
log.info('Workflow state deleted');
|
log.info('Workflow state deleted');
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
* Create a git commit with automatic staging and message generation
|
* Create a git commit with automatic staging and message generation
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { CommitMessageGenerator, GitAdapter, WorkflowService } from '@tm/core';
|
import { CommitMessageGenerator, GitAdapter } from '@tm/core';
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -38,16 +38,14 @@ export function registerAutopilotCommitTool(server: FastMCP) {
|
|||||||
parameters: CommitSchema,
|
parameters: CommitSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-commit',
|
'autopilot-commit',
|
||||||
async (args: CommitArgs, { log }: ToolContext) => {
|
async (args: CommitArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot, files, customMessage } = args;
|
const { projectRoot, files, customMessage } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Creating commit for workflow in ${projectRoot}`);
|
log.info(`Creating commit for workflow in ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
if (!(await workflowService.hasWorkflow())) {
|
if (!(await tmCore.workflow.hasWorkflow())) {
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -62,9 +60,9 @@ export function registerAutopilotCommitTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resume workflow
|
// Resume workflow
|
||||||
await workflowService.resumeWorkflow();
|
await tmCore.workflow.resume();
|
||||||
const status = workflowService.getStatus();
|
const status = tmCore.workflow.getStatus();
|
||||||
const workflowContext = workflowService.getContext();
|
const workflowContext = tmCore.workflow.getContext();
|
||||||
|
|
||||||
// Verify we're in COMMIT phase
|
// Verify we're in COMMIT phase
|
||||||
if (status.tddPhase !== 'COMMIT') {
|
if (status.tddPhase !== 'COMMIT') {
|
||||||
@@ -187,7 +185,8 @@ export function registerAutopilotCommitTool(server: FastMCP) {
|
|||||||
const lastCommit = await gitAdapter.getLastCommit();
|
const lastCommit = await gitAdapter.getLastCommit();
|
||||||
|
|
||||||
// Complete COMMIT phase and advance workflow
|
// Complete COMMIT phase and advance workflow
|
||||||
const newStatus = await workflowService.commit();
|
// Status updates (subtask → done) are handled internally by tmCore.workflow
|
||||||
|
const newStatus = await tmCore.workflow.commit();
|
||||||
|
|
||||||
log.info(
|
log.info(
|
||||||
`Commit completed. Current phase: ${newStatus.tddPhase || newStatus.phase}`
|
`Commit completed. Current phase: ${newStatus.tddPhase || newStatus.phase}`
|
||||||
@@ -196,7 +195,7 @@ export function registerAutopilotCommitTool(server: FastMCP) {
|
|||||||
const isComplete = newStatus.phase === 'COMPLETE';
|
const isComplete = newStatus.phase === 'COMPLETE';
|
||||||
|
|
||||||
// Get next action with guidance
|
// Get next action with guidance
|
||||||
const nextAction = workflowService.getNextAction();
|
const nextAction = tmCore.workflow.getNextAction();
|
||||||
|
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
* Complete the current TDD phase with test result validation
|
* Complete the current TDD phase with test result validation
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -36,16 +35,14 @@ export function registerAutopilotCompleteTool(server: FastMCP) {
|
|||||||
parameters: CompletePhaseSchema,
|
parameters: CompletePhaseSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-complete-phase',
|
'autopilot-complete-phase',
|
||||||
async (args: CompletePhaseArgs, { log }: ToolContext) => {
|
async (args: CompletePhaseArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot, testResults } = args;
|
const { projectRoot, testResults } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Completing current phase in workflow for ${projectRoot}`);
|
log.info(`Completing current phase in workflow for ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
if (!(await workflowService.hasWorkflow())) {
|
if (!(await tmCore.workflow.hasWorkflow())) {
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -60,8 +57,8 @@ export function registerAutopilotCompleteTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resume workflow to get current state
|
// Resume workflow to get current state
|
||||||
await workflowService.resumeWorkflow();
|
await tmCore.workflow.resume();
|
||||||
const currentStatus = workflowService.getStatus();
|
const currentStatus = tmCore.workflow.getStatus();
|
||||||
|
|
||||||
// Validate that we're in a TDD phase (RED or GREEN)
|
// Validate that we're in a TDD phase (RED or GREEN)
|
||||||
if (!currentStatus.tddPhase) {
|
if (!currentStatus.tddPhase) {
|
||||||
@@ -105,8 +102,8 @@ export function registerAutopilotCompleteTool(server: FastMCP) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Complete phase with test results
|
// Complete phase with test results
|
||||||
const status = await workflowService.completePhase(fullTestResults);
|
const status = await tmCore.workflow.completePhase(fullTestResults);
|
||||||
const nextAction = workflowService.getNextAction();
|
const nextAction = tmCore.workflow.getNextAction();
|
||||||
|
|
||||||
log.info(
|
log.info(
|
||||||
`Phase completed. New phase: ${status.tddPhase || status.phase}`
|
`Phase completed. New phase: ${status.tddPhase || status.phase}`
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
* Finalize and complete the workflow with working tree validation
|
* Finalize and complete the workflow with working tree validation
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -28,16 +27,14 @@ export function registerAutopilotFinalizeTool(server: FastMCP) {
|
|||||||
parameters: FinalizeSchema,
|
parameters: FinalizeSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-finalize',
|
'autopilot-finalize',
|
||||||
async (args: FinalizeArgs, { log }: ToolContext) => {
|
async (args: FinalizeArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot } = args;
|
const { projectRoot } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Finalizing workflow in ${projectRoot}`);
|
log.info(`Finalizing workflow in ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
if (!(await workflowService.hasWorkflow())) {
|
if (!(await tmCore.workflow.hasWorkflow())) {
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -52,8 +49,8 @@ export function registerAutopilotFinalizeTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resume workflow
|
// Resume workflow
|
||||||
await workflowService.resumeWorkflow();
|
await tmCore.workflow.resume();
|
||||||
const currentStatus = workflowService.getStatus();
|
const currentStatus = tmCore.workflow.getStatus();
|
||||||
|
|
||||||
// Verify we're in FINALIZE phase
|
// Verify we're in FINALIZE phase
|
||||||
if (currentStatus.phase !== 'FINALIZE') {
|
if (currentStatus.phase !== 'FINALIZE') {
|
||||||
@@ -70,12 +67,13 @@ export function registerAutopilotFinalizeTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Finalize workflow (validates clean working tree)
|
// Finalize workflow (validates clean working tree)
|
||||||
const newStatus = await workflowService.finalizeWorkflow();
|
// Status updates (main task → done) are handled internally by tmCore.workflow
|
||||||
|
const newStatus = await tmCore.workflow.finalize();
|
||||||
|
|
||||||
log.info('Workflow finalized successfully');
|
log.info('Workflow finalized successfully');
|
||||||
|
|
||||||
// Get next action
|
// Get next action
|
||||||
const nextAction = workflowService.getNextAction();
|
const nextAction = tmCore.workflow.getNextAction();
|
||||||
|
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
* Get the next action to perform in the TDD workflow
|
* Get the next action to perform in the TDD workflow
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -28,16 +27,14 @@ export function registerAutopilotNextTool(server: FastMCP) {
|
|||||||
parameters: NextActionSchema,
|
parameters: NextActionSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-next',
|
'autopilot-next',
|
||||||
async (args: NextActionArgs, { log }: ToolContext) => {
|
async (args: NextActionArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot } = args;
|
const { projectRoot } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Getting next action for workflow in ${projectRoot}`);
|
log.info(`Getting next action for workflow in ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
if (!(await workflowService.hasWorkflow())) {
|
if (!(await tmCore.workflow.hasWorkflow())) {
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -52,11 +49,11 @@ export function registerAutopilotNextTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resume to load state
|
// Resume to load state
|
||||||
await workflowService.resumeWorkflow();
|
await tmCore.workflow.resume();
|
||||||
|
|
||||||
// Get next action
|
// Get next action
|
||||||
const nextAction = workflowService.getNextAction();
|
const nextAction = tmCore.workflow.getNextAction();
|
||||||
const status = workflowService.getStatus();
|
const status = tmCore.workflow.getStatus();
|
||||||
|
|
||||||
log.info(`Next action determined: ${nextAction.action}`);
|
log.info(`Next action determined: ${nextAction.action}`);
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
* Resume a previously started TDD workflow from saved state
|
* Resume a previously started TDD workflow from saved state
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -28,16 +27,14 @@ export function registerAutopilotResumeTool(server: FastMCP) {
|
|||||||
parameters: ResumeWorkflowSchema,
|
parameters: ResumeWorkflowSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-resume',
|
'autopilot-resume',
|
||||||
async (args: ResumeWorkflowArgs, { log }: ToolContext) => {
|
async (args: ResumeWorkflowArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot } = args;
|
const { projectRoot } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Resuming autopilot workflow in ${projectRoot}`);
|
log.info(`Resuming autopilot workflow in ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
if (!(await workflowService.hasWorkflow())) {
|
if (!(await tmCore.workflow.hasWorkflow())) {
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -52,8 +49,8 @@ export function registerAutopilotResumeTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resume workflow
|
// Resume workflow
|
||||||
const status = await workflowService.resumeWorkflow();
|
const status = await tmCore.workflow.resume();
|
||||||
const nextAction = workflowService.getNextAction();
|
const nextAction = tmCore.workflow.getNextAction();
|
||||||
|
|
||||||
log.info(`Workflow resumed successfully for task ${status.taskId}`);
|
log.info(`Workflow resumed successfully for task ${status.taskId}`);
|
||||||
|
|
||||||
|
|||||||
@@ -3,16 +3,14 @@
|
|||||||
* Initialize and start a new TDD workflow for a task
|
* Initialize and start a new TDD workflow for a task
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
import { MainTaskIdSchemaForMcp, normalizeDisplayId } from '@tm/core';
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
import { handleApiResult, withToolContext } from '../../shared/utils.js';
|
import { handleApiResult, withToolContext } from '../../shared/utils.js';
|
||||||
|
|
||||||
const StartWorkflowSchema = z.object({
|
const StartWorkflowSchema = z.object({
|
||||||
taskId: z
|
taskId: MainTaskIdSchemaForMcp.describe(
|
||||||
.string()
|
|
||||||
.describe(
|
|
||||||
'Main task ID to start workflow for (e.g., "1", "2", "HAM-123"). Subtask IDs (e.g., "2.3", "1.1") are not allowed.'
|
'Main task ID to start workflow for (e.g., "1", "2", "HAM-123"). Subtask IDs (e.g., "2.3", "1.1") are not allowed.'
|
||||||
),
|
),
|
||||||
projectRoot: z
|
projectRoot: z
|
||||||
@@ -32,18 +30,6 @@ const StartWorkflowSchema = z.object({
|
|||||||
|
|
||||||
type StartWorkflowArgs = z.infer<typeof StartWorkflowSchema>;
|
type StartWorkflowArgs = z.infer<typeof StartWorkflowSchema>;
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if a task ID is a main task (not a subtask)
|
|
||||||
* Main tasks: "1", "2", "HAM-123", "PROJ-456"
|
|
||||||
* Subtasks: "1.1", "2.3", "HAM-123.1"
|
|
||||||
*/
|
|
||||||
function isMainTaskId(taskId: string): boolean {
|
|
||||||
// A main task has no dots in the ID after the optional prefix
|
|
||||||
// Examples: "1" ✓, "HAM-123" ✓, "1.1" ✗, "HAM-123.1" ✗
|
|
||||||
const parts = taskId.split('.');
|
|
||||||
return parts.length === 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Register the autopilot_start tool with the MCP server
|
* Register the autopilot_start tool with the MCP server
|
||||||
*/
|
*/
|
||||||
@@ -56,30 +42,22 @@ export function registerAutopilotStartTool(server: FastMCP) {
|
|||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-start',
|
'autopilot-start',
|
||||||
async (args: StartWorkflowArgs, { log, tmCore }: ToolContext) => {
|
async (args: StartWorkflowArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { taskId, projectRoot, maxAttempts, force } = args;
|
const { taskId: rawTaskId, projectRoot, maxAttempts, force } = args;
|
||||||
|
// Normalize task ID (e.g., "ham1" → "HAM-1")
|
||||||
|
const taskId = normalizeDisplayId(rawTaskId);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(
|
log.info(
|
||||||
`Starting autopilot workflow for task ${taskId} in ${projectRoot}`
|
`Starting autopilot workflow for task ${taskId} in ${projectRoot}`
|
||||||
);
|
);
|
||||||
|
|
||||||
// Validate that taskId is a main task (not a subtask)
|
|
||||||
if (!isMainTaskId(taskId)) {
|
|
||||||
return handleApiResult({
|
|
||||||
result: {
|
|
||||||
success: false,
|
|
||||||
error: {
|
|
||||||
message: `Task ID "${taskId}" is a subtask. Autopilot workflows can only be started for main tasks (e.g., "1", "2", "HAM-123"). Please provide the parent task ID instead.`
|
|
||||||
}
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
projectRoot
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get current tag from ConfigManager
|
// Get current tag from ConfigManager
|
||||||
const currentTag = tmCore.config.getActiveTag();
|
const currentTag = tmCore.config.getActiveTag();
|
||||||
|
|
||||||
|
// Get org slug from auth context (for API storage mode)
|
||||||
|
const authContext = tmCore.auth.getContext();
|
||||||
|
const orgSlug = authContext?.orgSlug;
|
||||||
|
|
||||||
const taskResult = await tmCore.tasks.get(taskId);
|
const taskResult = await tmCore.tasks.get(taskId);
|
||||||
|
|
||||||
if (!taskResult || !taskResult.task) {
|
if (!taskResult || !taskResult.task) {
|
||||||
@@ -109,11 +87,8 @@ export function registerAutopilotStartTool(server: FastMCP) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize workflow service
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check for existing workflow
|
// Check for existing workflow
|
||||||
const hasWorkflow = await workflowService.hasWorkflow();
|
const hasWorkflow = await tmCore.workflow.hasWorkflow();
|
||||||
if (hasWorkflow && !force) {
|
if (hasWorkflow && !force) {
|
||||||
log.warn('Workflow state already exists');
|
log.warn('Workflow state already exists');
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
@@ -129,8 +104,8 @@ export function registerAutopilotStartTool(server: FastMCP) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Start workflow
|
// Start workflow via tmCore facade (handles status updates internally)
|
||||||
const status = await workflowService.startWorkflow({
|
const status = await tmCore.workflow.start({
|
||||||
taskId,
|
taskId,
|
||||||
taskTitle: task.title,
|
taskTitle: task.title,
|
||||||
subtasks: task.subtasks.map((st: any) => ({
|
subtasks: task.subtasks.map((st: any) => ({
|
||||||
@@ -141,13 +116,14 @@ export function registerAutopilotStartTool(server: FastMCP) {
|
|||||||
})),
|
})),
|
||||||
maxAttempts,
|
maxAttempts,
|
||||||
force,
|
force,
|
||||||
tag: currentTag // Pass current tag for branch naming
|
tag: currentTag, // Pass current tag for branch naming (local storage)
|
||||||
|
orgSlug // Pass org slug for branch naming (API storage, takes precedence)
|
||||||
});
|
});
|
||||||
|
|
||||||
log.info(`Workflow started successfully for task ${taskId}`);
|
log.info(`Workflow started successfully for task ${taskId}`);
|
||||||
|
|
||||||
// Get next action with guidance from WorkflowService
|
// Get next action with guidance
|
||||||
const nextAction = workflowService.getNextAction();
|
const nextAction = tmCore.workflow.getNextAction();
|
||||||
|
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
* Get comprehensive workflow status and progress information
|
* Get comprehensive workflow status and progress information
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { WorkflowService } from '@tm/core';
|
|
||||||
import type { FastMCP } from 'fastmcp';
|
import type { FastMCP } from 'fastmcp';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import type { ToolContext } from '../../shared/types.js';
|
import type { ToolContext } from '../../shared/types.js';
|
||||||
@@ -28,16 +27,14 @@ export function registerAutopilotStatusTool(server: FastMCP) {
|
|||||||
parameters: StatusSchema,
|
parameters: StatusSchema,
|
||||||
execute: withToolContext(
|
execute: withToolContext(
|
||||||
'autopilot-status',
|
'autopilot-status',
|
||||||
async (args: StatusArgs, { log }: ToolContext) => {
|
async (args: StatusArgs, { log, tmCore }: ToolContext) => {
|
||||||
const { projectRoot } = args;
|
const { projectRoot } = args;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
log.info(`Getting workflow status for ${projectRoot}`);
|
log.info(`Getting workflow status for ${projectRoot}`);
|
||||||
|
|
||||||
const workflowService = new WorkflowService(projectRoot);
|
|
||||||
|
|
||||||
// Check if workflow exists
|
// Check if workflow exists
|
||||||
if (!(await workflowService.hasWorkflow())) {
|
if (!(await tmCore.workflow.hasWorkflow())) {
|
||||||
return handleApiResult({
|
return handleApiResult({
|
||||||
result: {
|
result: {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -52,10 +49,10 @@ export function registerAutopilotStatusTool(server: FastMCP) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Resume to load state
|
// Resume to load state
|
||||||
await workflowService.resumeWorkflow();
|
await tmCore.workflow.resume();
|
||||||
|
|
||||||
// Get status
|
// Get status
|
||||||
const status = workflowService.getStatus();
|
const status = tmCore.workflow.getStatus();
|
||||||
|
|
||||||
log.info(`Workflow status retrieved for task ${status.taskId}`);
|
log.info(`Workflow status retrieved for task ${status.taskId}`);
|
||||||
|
|
||||||
|
|||||||
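The MCP tool changes above all converge on the same `tmCore.workflow` facade flow: check `hasWorkflow()`, `resume()` to load persisted state, then read `getStatus()` / `getNextAction()` before acting. A condensed sketch of that flow, assuming a `tmCore` instance like the one `withToolContext` injects (the loose typing and logger shape are assumptions, not the actual interfaces):

```ts
// Condensed sketch of the shared tmCore.workflow flow used by the tools above.
async function inspectWorkflow(tmCore: any, log: { info: (msg: string) => void }) {
	// 1. Bail out early when no workflow state exists on disk
	if (!(await tmCore.workflow.hasWorkflow())) {
		log.info('No active workflow');
		return null;
	}

	// 2. Resume to load the persisted state, then read derived info
	await tmCore.workflow.resume();
	const status = tmCore.workflow.getStatus();
	const nextAction = tmCore.workflow.getNextAction();

	log.info(`Task ${status.taskId} is in phase ${status.tddPhase ?? status.phase}`);
	return { status, nextAction };
}
```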
174
package-lock.json
generated
@@ -18,7 +18,7 @@
|
|||||||
"@ai-sdk/anthropic": "^2.0.18",
|
"@ai-sdk/anthropic": "^2.0.18",
|
||||||
"@ai-sdk/azure": "^2.0.34",
|
"@ai-sdk/azure": "^2.0.34",
|
||||||
"@ai-sdk/google": "^2.0.16",
|
"@ai-sdk/google": "^2.0.16",
|
||||||
"@ai-sdk/google-vertex": "^3.0.29",
|
"@ai-sdk/google-vertex": "^3.0.86",
|
||||||
"@ai-sdk/groq": "^2.0.21",
|
"@ai-sdk/groq": "^2.0.21",
|
||||||
"@ai-sdk/mistral": "^2.0.16",
|
"@ai-sdk/mistral": "^2.0.16",
|
||||||
"@ai-sdk/openai": "^2.0.34",
|
"@ai-sdk/openai": "^2.0.34",
|
||||||
@@ -109,7 +109,7 @@
|
|||||||
"node": ">=20.0.0"
|
"node": ">=20.0.0"
|
||||||
},
|
},
|
||||||
"optionalDependencies": {
|
"optionalDependencies": {
|
||||||
"@anthropic-ai/claude-code": "^1.0.88",
|
"@anthropic-ai/claude-code": "^2.0.58",
|
||||||
"@biomejs/cli-linux-x64": "^1.9.4"
|
"@biomejs/cli-linux-x64": "^1.9.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -441,16 +441,16 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@ai-sdk/google-vertex": {
|
"node_modules/@ai-sdk/google-vertex": {
|
||||||
"version": "3.0.81",
|
"version": "3.0.86",
|
||||||
"resolved": "https://registry.npmjs.org/@ai-sdk/google-vertex/-/google-vertex-3.0.81.tgz",
|
"resolved": "https://registry.npmjs.org/@ai-sdk/google-vertex/-/google-vertex-3.0.86.tgz",
|
||||||
"integrity": "sha512-yrl5Ug0Mqwo9ya45oxczgy2RWgpEA/XQQCSFYP+3NZMQ4yA3Iim1vkOjVCsGaZZ8rjVk395abi1ZMZV0/6rqVA==",
|
"integrity": "sha512-rdXVXURmmb8A6ma8aud0xqVujbZ9E7Gt68xj6sMw9erZ+HXpBZHJRcx+LYuoLvLoWw9bSLwQJ0QbaXFM68xG4g==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@ai-sdk/anthropic": "2.0.50",
|
"@ai-sdk/anthropic": "2.0.53",
|
||||||
"@ai-sdk/google": "2.0.44",
|
"@ai-sdk/google": "2.0.44",
|
||||||
"@ai-sdk/provider": "2.0.0",
|
"@ai-sdk/provider": "2.0.0",
|
||||||
"@ai-sdk/provider-utils": "3.0.18",
|
"@ai-sdk/provider-utils": "3.0.18",
|
||||||
"google-auth-library": "^9.15.0"
|
"google-auth-library": "^10.5.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18"
|
"node": ">=18"
|
||||||
@@ -459,6 +459,91 @@
|
|||||||
"zod": "^3.25.76 || ^4.1.8"
|
"zod": "^3.25.76 || ^4.1.8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@ai-sdk/google-vertex/node_modules/@ai-sdk/anthropic": {
|
||||||
|
"version": "2.0.53",
|
||||||
|
"resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-2.0.53.tgz",
|
||||||
|
"integrity": "sha512-ih7NV+OFSNWZCF+tYYD7ovvvM+gv7TRKQblpVohg2ipIwC9Y0TirzocJVREzZa/v9luxUwFbsPji++DUDWWxsg==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@ai-sdk/provider": "2.0.0",
|
||||||
|
"@ai-sdk/provider-utils": "3.0.18"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"zod": "^3.25.76 || ^4.1.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@ai-sdk/google-vertex/node_modules/gaxios": {
|
||||||
|
"version": "7.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.3.tgz",
|
||||||
|
"integrity": "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"extend": "^3.0.2",
|
||||||
|
"https-proxy-agent": "^7.0.1",
|
||||||
|
"node-fetch": "^3.3.2",
|
||||||
|
"rimraf": "^5.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@ai-sdk/google-vertex/node_modules/gcp-metadata": {
|
||||||
|
"version": "8.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-8.1.2.tgz",
|
||||||
|
"integrity": "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"gaxios": "^7.0.0",
|
||||||
|
"google-logging-utils": "^1.0.0",
|
||||||
|
"json-bigint": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@ai-sdk/google-vertex/node_modules/google-auth-library": {
|
||||||
|
"version": "10.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-10.5.0.tgz",
|
||||||
|
"integrity": "sha512-7ABviyMOlX5hIVD60YOfHw4/CxOfBhyduaYB+wbFWCWoni4N7SLcV46hrVRktuBbZjFC9ONyqamZITN7q3n32w==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"base64-js": "^1.3.0",
|
||||||
|
"ecdsa-sig-formatter": "^1.0.11",
|
||||||
|
"gaxios": "^7.0.0",
|
||||||
|
"gcp-metadata": "^8.0.0",
|
||||||
|
"google-logging-utils": "^1.0.0",
|
||||||
|
"gtoken": "^8.0.0",
|
||||||
|
"jws": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@ai-sdk/google-vertex/node_modules/google-logging-utils": {
|
||||||
|
"version": "1.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-1.1.3.tgz",
|
||||||
|
"integrity": "sha512-eAmLkjDjAFCVXg7A1unxHsLf961m6y17QFqXqAXGj/gVkKFrEICfStRfwUlGNfeCEjNRa32JEWOUTlYXPyyKvA==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@ai-sdk/google-vertex/node_modules/gtoken": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/gtoken/-/gtoken-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-+CqsMbHPiSTdtSO14O51eMNlrp9N79gmeqmXeouJOhfucAedHw9noVe/n5uJk3tbKE6a+6ZCQg3RPhVhHByAIw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"gaxios": "^7.0.0",
|
||||||
|
"jws": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@ai-sdk/groq": {
|
"node_modules/@ai-sdk/groq": {
|
||||||
"version": "2.0.32",
|
"version": "2.0.32",
|
||||||
"resolved": "https://registry.npmjs.org/@ai-sdk/groq/-/groq-2.0.32.tgz",
|
"resolved": "https://registry.npmjs.org/@ai-sdk/groq/-/groq-2.0.32.tgz",
|
||||||
@@ -642,9 +727,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@anthropic-ai/claude-code": {
|
"node_modules/@anthropic-ai/claude-code": {
|
||||||
"version": "1.0.128",
|
"version": "2.0.58",
|
||||||
"resolved": "https://registry.npmjs.org/@anthropic-ai/claude-code/-/claude-code-1.0.128.tgz",
|
"resolved": "https://registry.npmjs.org/@anthropic-ai/claude-code/-/claude-code-2.0.58.tgz",
|
||||||
"integrity": "sha512-uUg5cFMJfeQetQzFw76Vpbro6DAXst2Lpu8aoZWRFSoQVYu5ZSAnbBoxaWmW/IgnHSqIIvtMwzCoqmcA9j9rNQ==",
|
"integrity": "sha512-6/n+PrMrU6QuA0rV23oimJK6R3BxefXeBLnxTumVabmzRX5oYjZLGLIdP0PCTA6rKuSeXUjjGd1yb55B0clO+w==",
|
||||||
"license": "SEE LICENSE IN README.md",
|
"license": "SEE LICENSE IN README.md",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"bin": {
|
"bin": {
|
||||||
@@ -659,6 +744,8 @@
|
|||||||
"@img/sharp-linux-arm": "^0.33.5",
|
"@img/sharp-linux-arm": "^0.33.5",
|
||||||
"@img/sharp-linux-arm64": "^0.33.5",
|
"@img/sharp-linux-arm64": "^0.33.5",
|
||||||
"@img/sharp-linux-x64": "^0.33.5",
|
"@img/sharp-linux-x64": "^0.33.5",
|
||||||
|
"@img/sharp-linuxmusl-arm64": "^0.33.5",
|
||||||
|
"@img/sharp-linuxmusl-x64": "^0.33.5",
|
||||||
"@img/sharp-win32-x64": "^0.33.5"
|
"@img/sharp-win32-x64": "^0.33.5"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -9941,7 +10028,6 @@
|
|||||||
"version": "0.11.0",
|
"version": "0.11.0",
|
||||||
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
|
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
|
||||||
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
|
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
@@ -31305,6 +31391,72 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/rimraf": {
|
||||||
|
"version": "5.0.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
|
||||||
|
"integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"glob": "^10.3.7"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"rimraf": "dist/esm/bin.mjs"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/rimraf/node_modules/glob": {
|
||||||
|
"version": "10.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
|
||||||
|
"integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"foreground-child": "^3.1.0",
|
||||||
|
"jackspeak": "^3.1.2",
|
||||||
|
"minimatch": "^9.0.4",
|
||||||
|
"minipass": "^7.1.2",
|
||||||
|
"package-json-from-dist": "^1.0.0",
|
||||||
|
"path-scurry": "^1.11.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"glob": "dist/esm/bin.mjs"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/rimraf/node_modules/jackspeak": {
|
||||||
|
"version": "3.4.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
|
||||||
|
"integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
|
||||||
|
"license": "BlueOak-1.0.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@isaacs/cliui": "^8.0.2"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@pkgjs/parseargs": "^0.11.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/rimraf/node_modules/path-scurry": {
|
||||||
|
"version": "1.11.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
|
||||||
|
"integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
|
||||||
|
"license": "BlueOak-1.0.0",
|
||||||
|
"dependencies": {
|
||||||
|
"lru-cache": "^10.2.0",
|
||||||
|
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=16 || 14 >=14.18"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/rolldown": {
|
"node_modules/rolldown": {
|
||||||
"version": "1.0.0-beta.45",
|
"version": "1.0.0-beta.45",
|
||||||
"resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-beta.45.tgz",
|
"resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-beta.45.tgz",
|
||||||
|
|||||||
@@ -57,7 +57,7 @@
|
|||||||
"@ai-sdk/anthropic": "^2.0.18",
|
"@ai-sdk/anthropic": "^2.0.18",
|
||||||
"@ai-sdk/azure": "^2.0.34",
|
"@ai-sdk/azure": "^2.0.34",
|
||||||
"@ai-sdk/google": "^2.0.16",
|
"@ai-sdk/google": "^2.0.16",
|
||||||
"@ai-sdk/google-vertex": "^3.0.29",
|
"@ai-sdk/google-vertex": "^3.0.86",
|
||||||
"@ai-sdk/groq": "^2.0.21",
|
"@ai-sdk/groq": "^2.0.21",
|
||||||
"@ai-sdk/mistral": "^2.0.16",
|
"@ai-sdk/mistral": "^2.0.16",
|
||||||
"@ai-sdk/openai": "^2.0.34",
|
"@ai-sdk/openai": "^2.0.34",
|
||||||
@@ -115,7 +115,7 @@
|
|||||||
"zod": "^4.1.12"
|
"zod": "^4.1.12"
|
||||||
},
|
},
|
||||||
"optionalDependencies": {
|
"optionalDependencies": {
|
||||||
"@anthropic-ai/claude-code": "^1.0.88",
|
"@anthropic-ai/claude-code": "^2.0.59",
|
||||||
"@biomejs/cli-linux-x64": "^1.9.4"
|
"@biomejs/cli-linux-x64": "^1.9.4"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
|
|||||||
13
packages/tm-core/src/common/schemas/index.ts
Normal file
@@ -0,0 +1,13 @@
/**
 * @fileoverview Zod schemas for validation
 */

export {
	TaskIdSchema,
	MainTaskIdSchema,
	TaskIdSchemaForMcp,
	MainTaskIdSchemaForMcp,
	normalizeDisplayId,
	type TaskId,
	type MainTaskId
} from './task-id.schema.js';
201
packages/tm-core/src/common/schemas/task-id.schema.spec.ts
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Unit tests for task ID schemas
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, expect, it } from 'vitest';
|
||||||
|
import {
|
||||||
|
MainTaskIdSchema,
|
||||||
|
TaskIdSchema,
|
||||||
|
normalizeDisplayId
|
||||||
|
} from './task-id.schema.js';
|
||||||
|
|
||||||
|
describe('normalizeDisplayId', () => {
|
||||||
|
describe('file storage IDs (numeric)', () => {
|
||||||
|
it('should return numeric main task IDs unchanged', () => {
|
||||||
|
expect(normalizeDisplayId('1')).toBe('1');
|
||||||
|
expect(normalizeDisplayId('123')).toBe('123');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return numeric subtask IDs unchanged', () => {
|
||||||
|
expect(normalizeDisplayId('1.1')).toBe('1.1');
|
||||||
|
expect(normalizeDisplayId('123.45')).toBe('123.45');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should trim whitespace', () => {
|
||||||
|
expect(normalizeDisplayId(' 1 ')).toBe('1');
|
||||||
|
expect(normalizeDisplayId(' 1.2 ')).toBe('1.2');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('API storage IDs (prefixed)', () => {
|
||||||
|
it('should normalize lowercase without hyphen', () => {
|
||||||
|
expect(normalizeDisplayId('ham1')).toBe('HAM-1');
|
||||||
|
expect(normalizeDisplayId('ham123')).toBe('HAM-123');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should normalize uppercase without hyphen', () => {
|
||||||
|
expect(normalizeDisplayId('HAM1')).toBe('HAM-1');
|
||||||
|
expect(normalizeDisplayId('HAM123')).toBe('HAM-123');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should normalize lowercase with hyphen', () => {
|
||||||
|
expect(normalizeDisplayId('ham-1')).toBe('HAM-1');
|
||||||
|
expect(normalizeDisplayId('ham-123')).toBe('HAM-123');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should keep uppercase with hyphen unchanged', () => {
|
||||||
|
expect(normalizeDisplayId('HAM-1')).toBe('HAM-1');
|
||||||
|
expect(normalizeDisplayId('HAM-123')).toBe('HAM-123');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should normalize mixed case', () => {
|
||||||
|
expect(normalizeDisplayId('Ham-1')).toBe('HAM-1');
|
||||||
|
expect(normalizeDisplayId('hAm1')).toBe('HAM-1');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should trim whitespace', () => {
|
||||||
|
expect(normalizeDisplayId(' ham1 ')).toBe('HAM-1');
|
||||||
|
expect(normalizeDisplayId(' HAM-1 ')).toBe('HAM-1');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('edge cases', () => {
|
||||||
|
it('should return empty string for empty input', () => {
|
||||||
|
expect(normalizeDisplayId('')).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return null/undefined as-is', () => {
|
||||||
|
expect(normalizeDisplayId(null as any)).toBe(null);
|
||||||
|
expect(normalizeDisplayId(undefined as any)).toBe(undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return unmatched patterns as-is', () => {
|
||||||
|
expect(normalizeDisplayId('abc')).toBe('abc');
|
||||||
|
expect(normalizeDisplayId('HAMSTER-1')).toBe('HAMSTER-1'); // 7 letters, not 3
|
||||||
|
expect(normalizeDisplayId('AB-1')).toBe('AB-1'); // 2 letters, not 3
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('TaskIdSchema', () => {
|
||||||
|
describe('file storage IDs', () => {
|
||||||
|
it('should accept numeric main task IDs', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('1').success).toBe(true);
|
||||||
|
expect(TaskIdSchema.safeParse('123').success).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept numeric subtask IDs (one level)', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('1.1').success).toBe(true);
|
||||||
|
expect(TaskIdSchema.safeParse('123.45').success).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject deeply nested IDs', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('1.2.3').success).toBe(false);
|
||||||
|
expect(TaskIdSchema.safeParse('1.2.3.4').success).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return normalized value', () => {
|
||||||
|
const result = TaskIdSchema.safeParse(' 1 ');
|
||||||
|
expect(result.success).toBe(true);
|
||||||
|
if (result.success) {
|
||||||
|
expect(result.data).toBe('1');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('API storage IDs', () => {
|
||||||
|
it('should accept prefixed IDs with hyphen', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('HAM-1').success).toBe(true);
|
||||||
|
expect(TaskIdSchema.safeParse('ham-1').success).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept prefixed IDs without hyphen', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('HAM1').success).toBe(true);
|
||||||
|
expect(TaskIdSchema.safeParse('ham1').success).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject prefixed subtask IDs', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('HAM-1.2').success).toBe(false);
|
||||||
|
expect(TaskIdSchema.safeParse('ham1.2').success).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should normalize to uppercase with hyphen', () => {
|
||||||
|
const result = TaskIdSchema.safeParse('ham1');
|
||||||
|
expect(result.success).toBe(true);
|
||||||
|
if (result.success) {
|
||||||
|
expect(result.data).toBe('HAM-1');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('invalid inputs', () => {
|
||||||
|
it('should reject empty string', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('').success).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject whitespace only', () => {
|
||||||
|
expect(TaskIdSchema.safeParse(' ').success).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject invalid formats', () => {
|
||||||
|
expect(TaskIdSchema.safeParse('abc').success).toBe(false);
|
||||||
|
expect(TaskIdSchema.safeParse('HAMSTER-1').success).toBe(false);
|
||||||
|
expect(TaskIdSchema.safeParse('AB-1').success).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('MainTaskIdSchema', () => {
|
||||||
|
describe('valid main tasks', () => {
|
||||||
|
it('should accept numeric main task IDs', () => {
|
||||||
|
expect(MainTaskIdSchema.safeParse('1').success).toBe(true);
|
||||||
|
expect(MainTaskIdSchema.safeParse('123').success).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept prefixed main task IDs', () => {
|
||||||
|
expect(MainTaskIdSchema.safeParse('HAM-1').success).toBe(true);
|
||||||
|
expect(MainTaskIdSchema.safeParse('ham-1').success).toBe(true);
|
||||||
|
expect(MainTaskIdSchema.safeParse('ham1').success).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should normalize prefixed IDs', () => {
|
||||||
|
const result = MainTaskIdSchema.safeParse('ham1');
|
||||||
|
expect(result.success).toBe(true);
|
||||||
|
if (result.success) {
|
||||||
|
expect(result.data).toBe('HAM-1');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('invalid subtasks', () => {
|
||||||
|
it('should reject numeric subtask IDs', () => {
|
||||||
|
const result = MainTaskIdSchema.safeParse('1.2');
|
||||||
|
expect(result.success).toBe(false);
|
||||||
|
if (!result.success) {
|
||||||
|
expect(result.error.issues[0].message).toContain('Subtask');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject prefixed subtask IDs', () => {
|
||||||
|
expect(MainTaskIdSchema.safeParse('HAM-1.2').success).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('error messages', () => {
|
||||||
|
it('should provide helpful error for invalid format', () => {
|
||||||
|
const result = MainTaskIdSchema.safeParse('invalid');
|
||||||
|
expect(result.success).toBe(false);
|
||||||
|
if (!result.success) {
|
||||||
|
expect(result.error.issues[0].message).toContain('Invalid task ID');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should provide helpful error for subtask', () => {
|
||||||
|
const result = MainTaskIdSchema.safeParse('1.2');
|
||||||
|
expect(result.success).toBe(false);
|
||||||
|
if (!result.success) {
|
||||||
|
expect(result.error.issues[0].message).toContain('Subtask');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
217
packages/tm-core/src/common/schemas/task-id.schema.ts
Normal file
217
packages/tm-core/src/common/schemas/task-id.schema.ts
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Zod schemas for task ID validation
|
||||||
|
* Provides type-safe validation and normalization of task IDs
|
||||||
|
*
|
||||||
|
* Task ID Formats:
|
||||||
|
*
|
||||||
|
* FILE STORAGE (local):
|
||||||
|
* - Main tasks: "1", "2", "3" (numeric only)
|
||||||
|
* - Subtasks: "1.1", "1.2" (one level only, no "1.2.3")
|
||||||
|
*
|
||||||
|
* API STORAGE (Hamster):
|
||||||
|
* - Main tasks only: "HAM-1", "HAM-2" (3 letters + hyphen + number)
|
||||||
|
* - Input accepts: "ham-1", "HAM-1", "ham1", "HAM1" (permissive input)
|
||||||
|
* - Output always: "HAM-1" format (uppercase with hyphen)
|
||||||
|
* - No subtasks: Never "HAM-1.2"
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { z } from 'zod';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalizes a display ID to the standard format
|
||||||
|
*
|
||||||
|
* API Storage IDs: Always uppercase with hyphen (HAM-1)
|
||||||
|
* - "ham1" → "HAM-1"
|
||||||
|
* - "HAM1" → "HAM-1"
|
||||||
|
* - "ham-1" → "HAM-1"
|
||||||
|
* - "HAM-1" → "HAM-1"
|
||||||
|
*
|
||||||
|
* File Storage IDs: Unchanged
|
||||||
|
* - "1" → "1"
|
||||||
|
* - "1.1" → "1.1"
|
||||||
|
*
|
||||||
|
* @param id - The display ID to normalize
|
||||||
|
* @returns The normalized display ID
|
||||||
|
*/
|
||||||
|
export function normalizeDisplayId(id: string): string {
|
||||||
|
if (!id) return id;
|
||||||
|
|
||||||
|
const trimmed = id.trim();
|
||||||
|
|
||||||
|
// File storage: numeric (main or subtask) - return as-is
|
||||||
|
if (/^\d+(\.\d+)?$/.test(trimmed)) {
|
||||||
|
return trimmed;
|
||||||
|
}
|
||||||
|
|
||||||
|
// API storage: 3 letters + optional hyphen + number
|
||||||
|
// e.g., "ham1", "HAM1", "ham-1", "HAM-1"
|
||||||
|
const apiPattern = /^([a-zA-Z]{3})-?(\d+)$/;
|
||||||
|
const apiMatch = trimmed.match(apiPattern);
|
||||||
|
if (apiMatch) {
|
||||||
|
const prefix = apiMatch[1].toUpperCase();
|
||||||
|
const number = apiMatch[2];
|
||||||
|
return `${prefix}-${number}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No pattern matched, return as-is
|
||||||
|
return trimmed;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pattern for file storage main task: "1", "2", "123"
|
||||||
|
*/
|
||||||
|
const FILE_MAIN_PATTERN = /^\d+$/;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pattern for file storage subtask: "1.1", "2.3" (exactly one dot, one level)
|
||||||
|
*/
|
||||||
|
const FILE_SUBTASK_PATTERN = /^\d+\.\d+$/;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pattern for API storage main task: "HAM-1", "ham-1", "HAM1", "ham1"
|
||||||
|
* Accepts with or without hyphen, normalizes to "HAM-1" format
|
||||||
|
*/
|
||||||
|
const API_MAIN_PATTERN = /^[a-zA-Z]{3}-?\d+$/;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a task ID format is valid
|
||||||
|
* Accepts: "1", "1.1", "HAM-1", "ham-1", "HAM1", "ham1"
|
||||||
|
* Rejects: "1.2.3", "HAM-1.2"
|
||||||
|
* Note: All API IDs normalize to "HAM-1" format (uppercase with hyphen)
|
||||||
|
*/
|
||||||
|
function isValidTaskIdFormat(id: string): boolean {
|
||||||
|
if (!id) return false;
|
||||||
|
const trimmed = id.trim();
|
||||||
|
|
||||||
|
// File storage: numeric main or subtask (one level only)
|
||||||
|
if (FILE_MAIN_PATTERN.test(trimmed) || FILE_SUBTASK_PATTERN.test(trimmed)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// API storage: prefixed main task only (no subtasks in API storage)
|
||||||
|
if (API_MAIN_PATTERN.test(trimmed)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a task ID is a main task (not a subtask)
|
||||||
|
* Main tasks: "1", "2", "HAM-1"
|
||||||
|
* Subtasks: "1.1", "2.3" (file storage only)
|
||||||
|
*/
|
||||||
|
function isMainTask(taskId: string): boolean {
|
||||||
|
if (!taskId) return false;
|
||||||
|
const trimmed = taskId.trim();
|
||||||
|
|
||||||
|
// File storage main task
|
||||||
|
if (FILE_MAIN_PATTERN.test(trimmed)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// API storage main task (always main, no subtasks in API)
|
||||||
|
if (API_MAIN_PATTERN.test(trimmed)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base schema for any task ID (main task or subtask) - validation only
|
||||||
|
* Use this for MCP tool schemas (JSON Schema can't represent transforms)
|
||||||
|
* Call normalizeDisplayId() manually after validation if needed
|
||||||
|
*/
|
||||||
|
const taskIdBaseSchema = z.string().trim().refine(isValidTaskIdFormat, {
|
||||||
|
message:
|
||||||
|
'Invalid task ID format. Expected: numeric ("1", "1.2") or prefixed with hyphen ("HAM-1")'
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base schema for main task IDs only - validation only
|
||||||
|
* Use this for MCP tool schemas (JSON Schema can't represent transforms)
|
||||||
|
* Call normalizeDisplayId() manually after validation if needed
|
||||||
|
*/
|
||||||
|
const mainTaskIdBaseSchema = z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.refine(isValidTaskIdFormat, {
|
||||||
|
message:
|
||||||
|
'Invalid task ID format. Expected: numeric ("1") or prefixed with hyphen ("HAM-1")'
|
||||||
|
})
|
||||||
|
.refine(isMainTask, {
|
||||||
|
message:
|
||||||
|
'Subtask IDs are not allowed. Please provide a main task ID (e.g., "1", "HAM-1")'
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Zod schema for any task ID (main task or subtask)
|
||||||
|
* Validates format and transforms to normalized form
|
||||||
|
*
|
||||||
|
* NOTE: For MCP tools, use TaskIdSchemaForMcp instead (no transform)
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* // File storage
|
||||||
|
* TaskIdSchema.safeParse('1'); // { success: true, data: '1' }
|
||||||
|
* TaskIdSchema.safeParse('1.2'); // { success: true, data: '1.2' }
|
||||||
|
*
|
||||||
|
* // API storage
|
||||||
|
* TaskIdSchema.safeParse('ham-1'); // { success: true, data: 'HAM-1' }
|
||||||
|
* TaskIdSchema.safeParse('HAM-1'); // { success: true, data: 'HAM-1' }
|
||||||
|
*
|
||||||
|
* // Permissive input, normalized output
|
||||||
|
* TaskIdSchema.safeParse('ham1'); // { success: true, data: 'HAM-1' }
|
||||||
|
* TaskIdSchema.safeParse('HAM1'); // { success: true, data: 'HAM-1' }
|
||||||
|
*
|
||||||
|
* // Invalid
|
||||||
|
* TaskIdSchema.safeParse('1.2.3'); // { success: false } - too deep
|
||||||
|
* TaskIdSchema.safeParse('HAM-1.2'); // { success: false } - no API subtasks
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export const TaskIdSchema = taskIdBaseSchema.transform(normalizeDisplayId);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Zod schema for main task IDs only (no subtasks)
|
||||||
|
* Validates format, ensures no subtask part, and transforms to normalized form
|
||||||
|
*
|
||||||
|
* NOTE: For MCP tools, use MainTaskIdSchemaForMcp instead (no transform)
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* // Valid main tasks
|
||||||
|
* MainTaskIdSchema.safeParse('1'); // { success: true, data: '1' }
|
||||||
|
* MainTaskIdSchema.safeParse('ham-1'); // { success: true, data: 'HAM-1' }
|
||||||
|
* MainTaskIdSchema.safeParse('ham1'); // { success: true, data: 'HAM-1' }
|
||||||
|
*
|
||||||
|
* // Invalid (subtasks)
|
||||||
|
* MainTaskIdSchema.safeParse('1.2'); // { success: false }
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export const MainTaskIdSchema =
|
||||||
|
mainTaskIdBaseSchema.transform(normalizeDisplayId);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Zod schema for any task ID - validation only, no transform
|
||||||
|
* Use this for MCP tool parameter schemas (JSON Schema can't represent transforms)
|
||||||
|
* Call normalizeDisplayId() manually after validation
|
||||||
|
*/
|
||||||
|
export const TaskIdSchemaForMcp = taskIdBaseSchema;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Zod schema for main task IDs - validation only, no transform
|
||||||
|
* Use this for MCP tool parameter schemas (JSON Schema can't represent transforms)
|
||||||
|
* Call normalizeDisplayId() manually after validation
|
||||||
|
*/
|
||||||
|
export const MainTaskIdSchemaForMcp = mainTaskIdBaseSchema;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Type for a validated and normalized task ID
|
||||||
|
*/
|
||||||
|
export type TaskId = z.output<typeof TaskIdSchema>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Type for a validated and normalized main task ID
|
||||||
|
*/
|
||||||
|
export type MainTaskId = z.output<typeof MainTaskIdSchema>;
|
||||||
@@ -4,14 +4,14 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
// Export ID generation utilities
|
// Export ID generation utilities
|
||||||
|
// Note: normalizeDisplayId is now exported from common/schemas/task-id.schema.ts
|
||||||
export {
|
export {
|
||||||
generateTaskId as generateId, // Alias for backward compatibility
|
generateTaskId as generateId, // Alias for backward compatibility
|
||||||
generateTaskId,
|
generateTaskId,
|
||||||
generateSubtaskId,
|
generateSubtaskId,
|
||||||
isValidTaskId,
|
isValidTaskId,
|
||||||
isValidSubtaskId,
|
isValidSubtaskId,
|
||||||
getParentTaskId,
|
getParentTaskId
|
||||||
normalizeDisplayId
|
|
||||||
} from './id-generator.js';
|
} from './id-generator.js';
|
||||||
|
|
||||||
// Export git utilities
|
// Export git utilities
|
||||||
|
|||||||
@@ -58,6 +58,9 @@ export * from './utils/time.utils.js';
|
|||||||
// Task validation schemas
|
// Task validation schemas
|
||||||
export * from './modules/tasks/validation/index.js';
|
export * from './modules/tasks/validation/index.js';
|
||||||
|
|
||||||
|
// Zod schemas for validation
|
||||||
|
export * from './common/schemas/index.js';
|
||||||
|
|
||||||
// ========== Domain-Specific Type Exports ==========
|
// ========== Domain-Specific Type Exports ==========
|
||||||
|
|
||||||
// Task types
|
// Task types
|
||||||
@@ -168,7 +171,11 @@ export { BriefService } from './modules/briefs/services/brief-service.js';
|
|||||||
// Workflow - Advanced
|
// Workflow - Advanced
|
||||||
export { WorkflowOrchestrator } from './modules/workflow/orchestrators/workflow-orchestrator.js';
|
export { WorkflowOrchestrator } from './modules/workflow/orchestrators/workflow-orchestrator.js';
|
||||||
export { WorkflowStateManager } from './modules/workflow/managers/workflow-state-manager.js';
|
export { WorkflowStateManager } from './modules/workflow/managers/workflow-state-manager.js';
|
||||||
export { WorkflowService } from './modules/workflow/services/workflow.service.js';
|
export {
|
||||||
|
WorkflowService,
|
||||||
|
type TaskStatusUpdater,
|
||||||
|
type WorkflowServiceOptions
|
||||||
|
} from './modules/workflow/services/workflow.service.js';
|
||||||
export type { SubtaskInfo } from './modules/workflow/types.js';
|
export type { SubtaskInfo } from './modules/workflow/types.js';
|
||||||
|
|
||||||
// Git - Advanced
|
// Git - Advanced
|
||||||
|
|||||||
@@ -51,10 +51,24 @@ describe('TemplateEngine', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should handle missing variables by leaving placeholder', () => {
|
it('should handle missing variables by leaving placeholder', () => {
|
||||||
|
const engineWithPreserve = new TemplateEngine({
|
||||||
|
preservePlaceholders: true
|
||||||
|
});
|
||||||
|
const template = 'Hello {{name}} from {{location}}';
|
||||||
|
const result = engineWithPreserve.render(
|
||||||
|
'test',
|
||||||
|
{ name: 'Alice' },
|
||||||
|
template
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result).toBe('Hello Alice from {{location}}');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace missing variables with empty string by default', () => {
|
||||||
const template = 'Hello {{name}} from {{location}}';
|
const template = 'Hello {{name}} from {{location}}';
|
||||||
const result = templateEngine.render('test', { name: 'Alice' }, template);
|
const result = templateEngine.render('test', { name: 'Alice' }, template);
|
||||||
|
|
||||||
expect(result).toBe('Hello Alice from {{location}}');
|
expect(result).toBe('Hello Alice from ');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle empty variable values', () => {
|
it('should handle empty variable values', () => {
|
||||||
@@ -215,11 +229,21 @@ describe('TemplateEngine', () => {
|
|||||||
expect(result).toBe('Static text');
|
expect(result).toBe('Static text');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle empty variables object with preservePlaceholders', () => {
|
||||||
|
const engineWithPreserve = new TemplateEngine({
|
||||||
|
preservePlaceholders: true
|
||||||
|
});
|
||||||
|
const template = 'Hello {{name}}';
|
||||||
|
const result = engineWithPreserve.render('test', {}, template);
|
||||||
|
|
||||||
|
expect(result).toBe('Hello {{name}}');
|
||||||
|
});
|
||||||
|
|
||||||
it('should handle empty variables object', () => {
|
it('should handle empty variables object', () => {
|
||||||
const template = 'Hello {{name}}';
|
const template = 'Hello {{name}}';
|
||||||
const result = templateEngine.render('test', {}, template);
|
const result = templateEngine.render('test', {}, template);
|
||||||
|
|
||||||
expect(result).toBe('Hello {{name}}');
|
expect(result).toBe('Hello ');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle special characters in values', () => {
|
it('should handle special characters in values', () => {
|
||||||
@@ -3,27 +3,31 @@
|
|||||||
* Provides validation for task IDs used in MCP tools and CLI
|
* Provides validation for task IDs used in MCP tools and CLI
|
||||||
*
|
*
|
||||||
* Supported formats:
|
* Supported formats:
|
||||||
* - Simple numeric: "1", "2", "15" (local file storage)
|
*
|
||||||
* - Numeric subtask: "1.2", "15.3" (local file storage, dot notation)
|
* FILE STORAGE (local):
|
||||||
* - Numeric sub-subtask: "1.2.3", "15.3.1" (local file storage, dot notation)
|
* - Main tasks: "1", "2", "15"
|
||||||
* - Alphanumeric display IDs: "HAM-123", "PROJ-456" (remote API storage)
|
* - Subtasks: "1.2", "15.3" (one level only)
|
||||||
* Note: In remote mode, subtasks also use alphanumeric IDs (HAM-2, HAM-3),
|
*
|
||||||
* they don't use dot notation like local storage.
|
* API STORAGE (Hamster):
|
||||||
|
* - Main tasks: "HAM-1", "ham-1", "HAM1", "ham1" (all normalized to "HAM-1")
|
||||||
|
* - No subtasks (API doesn't use dot notation)
|
||||||
*
|
*
|
||||||
* NOT supported:
|
* NOT supported:
|
||||||
* - Alphanumeric with dot notation: "HAM-123.2" (doesn't exist in any mode)
|
* - Deep nesting: "1.2.3" (file storage only has one subtask level)
|
||||||
|
* - API subtasks: "HAM-1.2" (doesn't exist)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
import { normalizeDisplayId } from '../../../common/schemas/task-id.schema.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Pattern for validating a single task ID
|
* Pattern for validating a single task ID
|
||||||
* Supports:
|
* Permissive input - accepts with or without hyphen for API IDs
|
||||||
* - Numeric: "1", "15", "999"
|
* - Numeric: "1", "15", "999"
|
||||||
* - Numeric subtasks: "1.2", "15.3.1"
|
* - Numeric subtasks: "1.2" (one level only)
|
||||||
* - Alphanumeric display IDs: "HAM-123", "PROJ-456" (main tasks only, no subtask notation)
|
* - API display IDs: "HAM-1", "ham-1", "HAM1", "ham1"
|
||||||
*/
|
*/
|
||||||
export const TASK_ID_PATTERN = /^(\d+(\.\d+)*|[A-Za-z]+-\d+)$/;
|
export const TASK_ID_PATTERN = /^(\d+(\.\d+)?|[A-Za-z]{3}-?\d+)$/;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validates a single task ID string
|
* Validates a single task ID string
|
||||||
@@ -34,12 +38,12 @@ export const TASK_ID_PATTERN = /^(\d+(\.\d+)*|[A-Za-z]+-\d+)$/;
|
|||||||
* @example
|
* @example
|
||||||
* ```typescript
|
* ```typescript
|
||||||
* isValidTaskIdFormat("1"); // true
|
* isValidTaskIdFormat("1"); // true
|
||||||
* isValidTaskIdFormat("15.2"); // true
|
* isValidTaskIdFormat("1.2"); // true
|
||||||
* isValidTaskIdFormat("1.2.3"); // true
|
* isValidTaskIdFormat("HAM-1"); // true
|
||||||
* isValidTaskIdFormat("HAM-123"); // true
|
* isValidTaskIdFormat("ham1"); // true (permissive input)
|
||||||
* isValidTaskIdFormat("HAM-123.2"); // false (alphanumeric subtasks not supported)
|
* isValidTaskIdFormat("1.2.3"); // false (too deep)
|
||||||
|
* isValidTaskIdFormat("HAM-1.2"); // false (no API subtasks)
|
||||||
* isValidTaskIdFormat("abc"); // false
|
* isValidTaskIdFormat("abc"); // false
|
||||||
* isValidTaskIdFormat(""); // false
|
|
||||||
* ```
|
* ```
|
||||||
*/
|
*/
|
||||||
export function isValidTaskIdFormat(id: string): boolean {
|
export function isValidTaskIdFormat(id: string): boolean {
|
||||||
@@ -49,6 +53,8 @@ export function isValidTaskIdFormat(id: string): boolean {
|
|||||||
/**
|
/**
|
||||||
* Zod schema for a single task ID
|
* Zod schema for a single task ID
|
||||||
* Validates format: numeric, alphanumeric display ID, or numeric subtask
|
* Validates format: numeric, alphanumeric display ID, or numeric subtask
|
||||||
|
* Note: Use parseTaskIds() for normalization (e.g., "ham1" → "HAM-1")
|
||||||
|
* This schema is used in MCP tool definitions which can't have transforms.
|
||||||
*/
|
*/
|
||||||
export const taskIdSchema = z
|
export const taskIdSchema = z
|
||||||
.string()
|
.string()
|
||||||
@@ -61,6 +67,7 @@ export const taskIdSchema = z
|
|||||||
/**
|
/**
|
||||||
* Zod schema for comma-separated task IDs
|
* Zod schema for comma-separated task IDs
|
||||||
* Validates that each ID in the comma-separated list is valid
|
* Validates that each ID in the comma-separated list is valid
|
||||||
|
* Permissive input - accepts "ham1", "HAM1", "ham-1" etc.
|
||||||
*
|
*
|
||||||
* @example
|
* @example
|
||||||
* ```typescript
|
* ```typescript
|
||||||
@@ -68,8 +75,9 @@ export const taskIdSchema = z
|
|||||||
* taskIdsSchema.parse("1,2,3"); // valid
|
* taskIdsSchema.parse("1,2,3"); // valid
|
||||||
* taskIdsSchema.parse("1.2, 3.4"); // valid (spaces trimmed)
|
* taskIdsSchema.parse("1.2, 3.4"); // valid (spaces trimmed)
|
||||||
* taskIdsSchema.parse("HAM-123"); // valid
|
* taskIdsSchema.parse("HAM-123"); // valid
|
||||||
|
* taskIdsSchema.parse("ham1"); // valid (permissive input)
|
||||||
* taskIdsSchema.parse("abc"); // throws
|
* taskIdsSchema.parse("abc"); // throws
|
||||||
* taskIdsSchema.parse("HAM-123.2"); // throws (alphanumeric subtasks not supported)
|
* taskIdsSchema.parse("HAM-1.2"); // throws (API subtasks not supported)
|
||||||
* ```
|
* ```
|
||||||
*/
|
*/
|
||||||
export const taskIdsSchema = z
|
export const taskIdsSchema = z
|
||||||
@@ -91,9 +99,10 @@ export const taskIdsSchema = z
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Parse and validate comma-separated task IDs
|
* Parse and validate comma-separated task IDs
|
||||||
|
* Returns normalized IDs (e.g., "ham1" → "HAM-1")
|
||||||
*
|
*
|
||||||
* @param input - Comma-separated task ID string
|
* @param input - Comma-separated task ID string
|
||||||
* @returns Array of validated task IDs
|
* @returns Array of validated and normalized task IDs
|
||||||
* @throws Error if any ID is invalid
|
* @throws Error if any ID is invalid
|
||||||
*
|
*
|
||||||
* @example
|
* @example
|
||||||
@@ -101,8 +110,9 @@ export const taskIdsSchema = z
|
|||||||
* parseTaskIds("1, 2, 3"); // ["1", "2", "3"]
|
* parseTaskIds("1, 2, 3"); // ["1", "2", "3"]
|
||||||
* parseTaskIds("1.2,3.4"); // ["1.2", "3.4"]
|
* parseTaskIds("1.2,3.4"); // ["1.2", "3.4"]
|
||||||
* parseTaskIds("HAM-123"); // ["HAM-123"]
|
* parseTaskIds("HAM-123"); // ["HAM-123"]
|
||||||
|
* parseTaskIds("ham1,ham2"); // ["HAM-1", "HAM-2"] (normalized)
|
||||||
* parseTaskIds("invalid"); // throws Error
|
* parseTaskIds("invalid"); // throws Error
|
||||||
* parseTaskIds("HAM-123.2"); // throws Error (alphanumeric subtasks not supported)
|
* parseTaskIds("HAM-1.2"); // throws Error (API subtasks not supported)
|
||||||
* ```
|
* ```
|
||||||
*/
|
*/
|
||||||
export function parseTaskIds(input: string): string[] {
|
export function parseTaskIds(input: string): string[] {
|
||||||
@@ -122,7 +132,8 @@ export function parseTaskIds(input: string): string[] {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return ids;
|
// Normalize all IDs (e.g., "ham1" → "HAM-1")
|
||||||
|
return ids.map(normalizeDisplayId);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -89,10 +89,10 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
expect(orchestrator.getCurrentPhase()).toBe('COMPLETE');
|
expect(orchestrator.getCurrentPhase()).toBe('COMPLETE');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should reject invalid transitions', () => {
|
it('should reject invalid transitions', async () => {
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({ type: 'FINALIZE_COMPLETE' });
|
orchestrator.transition({ type: 'FINALIZE_COMPLETE' })
|
||||||
}).toThrow('Invalid transition');
|
).rejects.toThrow('Invalid transition');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -433,7 +433,7 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe('Phase Transition Guards and Validation', () => {
|
describe('Phase Transition Guards and Validation', () => {
|
||||||
it('should enforce guard conditions on transitions', () => {
|
it('should enforce guard conditions on transitions', async () => {
|
||||||
// Create orchestrator with guard condition that should fail
|
// Create orchestrator with guard condition that should fail
|
||||||
const guardedContext: WorkflowContext = {
|
const guardedContext: WorkflowContext = {
|
||||||
taskId: 'task-1',
|
taskId: 'task-1',
|
||||||
@@ -450,14 +450,14 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
return context.subtasks.length > 0;
|
return context.subtasks.length > 0;
|
||||||
});
|
});
|
||||||
|
|
||||||
guardedOrchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
await guardedOrchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
|
|
||||||
expect(() => {
|
await expect(
|
||||||
guardedOrchestrator.transition({
|
guardedOrchestrator.transition({
|
||||||
type: 'BRANCH_CREATED',
|
type: 'BRANCH_CREATED',
|
||||||
branchName: 'feature/test'
|
branchName: 'feature/test'
|
||||||
});
|
})
|
||||||
}).toThrow('Guard condition failed');
|
).rejects.toThrow('Guard condition failed');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should allow transition when guard condition passes', () => {
|
it('should allow transition when guard condition passes', () => {
|
||||||
@@ -486,28 +486,31 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
expect(guardedOrchestrator.getCurrentPhase()).toBe('SUBTASK_LOOP');
|
expect(guardedOrchestrator.getCurrentPhase()).toBe('SUBTASK_LOOP');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should validate test results before GREEN phase transition', () => {
|
it('should validate test results before GREEN phase transition', async () => {
|
||||||
orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
await orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
orchestrator.transition({
|
await orchestrator.transition({
|
||||||
type: 'BRANCH_CREATED',
|
type: 'BRANCH_CREATED',
|
||||||
branchName: 'feature/test'
|
branchName: 'feature/test'
|
||||||
});
|
});
|
||||||
|
|
||||||
// Attempt to transition to GREEN without test results
|
// Attempt to transition to GREEN without test results
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({ type: 'RED_PHASE_COMPLETE' });
|
orchestrator.transition({ type: 'RED_PHASE_COMPLETE' })
|
||||||
}).toThrow('Test results required');
|
).rejects.toThrow('Test results required');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should validate RED phase test results have failures', () => {
|
// Note: When all tests pass in RED phase, the orchestrator auto-completes
|
||||||
orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
// the subtask (feature already implemented) instead of throwing.
|
||||||
orchestrator.transition({
|
// This test is skipped as the behavior has changed.
|
||||||
|
it.skip('should validate RED phase test results have failures', async () => {
|
||||||
|
await orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
|
await orchestrator.transition({
|
||||||
type: 'BRANCH_CREATED',
|
type: 'BRANCH_CREATED',
|
||||||
branchName: 'feature/test'
|
branchName: 'feature/test'
|
||||||
});
|
});
|
||||||
|
|
||||||
// Provide passing test results (should fail RED phase validation)
|
// Provide passing test results (should fail RED phase validation)
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({
|
orchestrator.transition({
|
||||||
type: 'RED_PHASE_COMPLETE',
|
type: 'RED_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
@@ -517,8 +520,8 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
skipped: 0,
|
skipped: 0,
|
||||||
phase: 'RED'
|
phase: 'RED'
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
}).toThrow('RED phase must have at least one failing test');
|
).rejects.toThrow('RED phase must have at least one failing test');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should allow RED to GREEN transition with valid failing tests', () => {
|
it('should allow RED to GREEN transition with valid failing tests', () => {
|
||||||
@@ -542,14 +545,14 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
expect(orchestrator.getCurrentTDDPhase()).toBe('GREEN');
|
expect(orchestrator.getCurrentTDDPhase()).toBe('GREEN');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should validate GREEN phase test results have no failures', () => {
|
it('should validate GREEN phase test results have no failures', async () => {
|
||||||
orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
await orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
orchestrator.transition({
|
await orchestrator.transition({
|
||||||
type: 'BRANCH_CREATED',
|
type: 'BRANCH_CREATED',
|
||||||
branchName: 'feature/test'
|
branchName: 'feature/test'
|
||||||
});
|
});
|
||||||
|
|
||||||
orchestrator.transition({
|
await orchestrator.transition({
|
||||||
type: 'RED_PHASE_COMPLETE',
|
type: 'RED_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
total: 5,
|
total: 5,
|
||||||
@@ -561,7 +564,7 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Provide test results with failures (should fail GREEN phase validation)
|
// Provide test results with failures (should fail GREEN phase validation)
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({
|
orchestrator.transition({
|
||||||
type: 'GREEN_PHASE_COMPLETE',
|
type: 'GREEN_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
@@ -571,8 +574,8 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
skipped: 0,
|
skipped: 0,
|
||||||
phase: 'GREEN'
|
phase: 'GREEN'
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
}).toThrow('GREEN phase must have zero failures');
|
).rejects.toThrow('GREEN phase must have zero failures');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should allow GREEN to COMMIT transition with all tests passing', () => {
|
it('should allow GREEN to COMMIT transition with all tests passing', () => {
|
||||||
@@ -631,7 +634,7 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
expect(context.lastTestResults).toEqual(redResults);
|
expect(context.lastTestResults).toEqual(redResults);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should validate git repository state before BRANCH_SETUP', () => {
|
it('should validate git repository state before BRANCH_SETUP', async () => {
|
||||||
// Set up orchestrator with git validation enabled
|
// Set up orchestrator with git validation enabled
|
||||||
const gitContext: WorkflowContext = {
|
const gitContext: WorkflowContext = {
|
||||||
taskId: 'task-1',
|
taskId: 'task-1',
|
||||||
@@ -650,9 +653,9 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
return context.metadata.requireGit === true;
|
return context.metadata.requireGit === true;
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(() => {
|
await expect(
|
||||||
gitOrchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
gitOrchestrator.transition({ type: 'PREFLIGHT_COMPLETE' })
|
||||||
}).toThrow('Guard condition failed');
|
).rejects.toThrow('Guard condition failed');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1067,10 +1070,10 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
expect(orchestrator.isAborted()).toBe(true);
|
expect(orchestrator.isAborted()).toBe(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should prevent transitions after abort', () => {
|
it('should prevent transitions after abort', async () => {
|
||||||
orchestrator.transition({ type: 'ABORT' });
|
await orchestrator.transition({ type: 'ABORT' });
|
||||||
|
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({
|
orchestrator.transition({
|
||||||
type: 'RED_PHASE_COMPLETE',
|
type: 'RED_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
@@ -1080,8 +1083,8 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
skipped: 0,
|
skipped: 0,
|
||||||
phase: 'RED'
|
phase: 'RED'
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
}).toThrow('Workflow has been aborted');
|
).rejects.toThrow('Workflow has been aborted');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should allow retry after recoverable error', () => {
|
it('should allow retry after recoverable error', () => {
|
||||||
@@ -1395,9 +1398,10 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
expect(orchestrator.hasTestResultValidator()).toBe(true);
|
expect(orchestrator.hasTestResultValidator()).toBe(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use TestResultValidator to validate RED phase', () => {
|
// Skip: Behavior changed - RED phase with 0 failures now auto-completes subtask instead of throwing
|
||||||
orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
it.skip('should use TestResultValidator to validate RED phase', async () => {
|
||||||
orchestrator.transition({
|
await orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
|
await orchestrator.transition({
|
||||||
type: 'BRANCH_CREATED',
|
type: 'BRANCH_CREATED',
|
||||||
branchName: 'feature/test'
|
branchName: 'feature/test'
|
||||||
});
|
});
|
||||||
@@ -1405,7 +1409,7 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
orchestrator.setTestResultValidator(testValidator);
|
orchestrator.setTestResultValidator(testValidator);
|
||||||
|
|
||||||
// Should reject passing tests in RED phase
|
// Should reject passing tests in RED phase
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({
|
orchestrator.transition({
|
||||||
type: 'RED_PHASE_COMPLETE',
|
type: 'RED_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
@@ -1415,20 +1419,20 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
skipped: 0,
|
skipped: 0,
|
||||||
phase: 'RED'
|
phase: 'RED'
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
}).toThrow('RED phase must have at least one failing test');
|
).rejects.toThrow('RED phase must have at least one failing test');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should use TestResultValidator to validate GREEN phase', () => {
|
it('should use TestResultValidator to validate GREEN phase', async () => {
|
||||||
orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
await orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
orchestrator.transition({
|
await orchestrator.transition({
|
||||||
type: 'BRANCH_CREATED',
|
type: 'BRANCH_CREATED',
|
||||||
branchName: 'feature/test'
|
branchName: 'feature/test'
|
||||||
});
|
});
|
||||||
|
|
||||||
orchestrator.setTestResultValidator(testValidator);
|
orchestrator.setTestResultValidator(testValidator);
|
||||||
|
|
||||||
orchestrator.transition({
|
await orchestrator.transition({
|
||||||
type: 'RED_PHASE_COMPLETE',
|
type: 'RED_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
total: 5,
|
total: 5,
|
||||||
@@ -1440,7 +1444,7 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Should reject failing tests in GREEN phase
|
// Should reject failing tests in GREEN phase
|
||||||
expect(() => {
|
await expect(
|
||||||
orchestrator.transition({
|
orchestrator.transition({
|
||||||
type: 'GREEN_PHASE_COMPLETE',
|
type: 'GREEN_PHASE_COMPLETE',
|
||||||
testResults: {
|
testResults: {
|
||||||
@@ -1450,8 +1454,8 @@ describe('WorkflowOrchestrator - State Machine Structure', () => {
|
|||||||
skipped: 0,
|
skipped: 0,
|
||||||
phase: 'GREEN'
|
phase: 'GREEN'
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
}).toThrow('GREEN phase must have zero failures');
|
).rejects.toThrow('GREEN phase must have zero failures');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should support git adapter hooks', () => {
|
it('should support git adapter hooks', () => {
|
||||||
@@ -3,6 +3,8 @@
|
|||||||
* Provides a simplified API for MCP tools while delegating to WorkflowOrchestrator
|
* Provides a simplified API for MCP tools while delegating to WorkflowOrchestrator
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { getLogger } from '../../../common/logger/index.js';
|
||||||
|
import type { TaskStatus } from '../../../common/types/index.js';
|
||||||
import { GitAdapter } from '../../git/adapters/git-adapter.js';
|
import { GitAdapter } from '../../git/adapters/git-adapter.js';
|
||||||
import { WorkflowStateManager } from '../managers/workflow-state-manager.js';
|
import { WorkflowStateManager } from '../managers/workflow-state-manager.js';
|
||||||
import { WorkflowOrchestrator } from '../orchestrators/workflow-orchestrator.js';
|
import { WorkflowOrchestrator } from '../orchestrators/workflow-orchestrator.js';
|
||||||
@@ -30,7 +32,8 @@ export interface StartWorkflowOptions {
|
|||||||
}>;
|
}>;
|
||||||
maxAttempts?: number;
|
maxAttempts?: number;
|
||||||
force?: boolean;
|
force?: boolean;
|
||||||
tag?: string; // Optional tag for branch naming
|
tag?: string; // Optional tag for branch naming (local storage)
|
||||||
|
orgSlug?: string; // Optional org slug for branch naming (API storage, takes precedence over tag)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -70,6 +73,23 @@ export interface NextAction {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Interface for updating task statuses
|
||||||
|
* Allows WorkflowService to update task statuses without direct dependency on TasksDomain
|
||||||
|
*/
|
||||||
|
export interface TaskStatusUpdater {
|
||||||
|
updateStatus(taskId: string, status: TaskStatus, tag?: string): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for WorkflowService constructor
|
||||||
|
*/
|
||||||
|
export interface WorkflowServiceOptions {
|
||||||
|
projectRoot: string;
|
||||||
|
taskStatusUpdater?: TaskStatusUpdater;
|
||||||
|
tag?: string;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* WorkflowService - Facade for workflow operations
|
* WorkflowService - Facade for workflow operations
|
||||||
* Manages WorkflowOrchestrator lifecycle and state persistence
|
* Manages WorkflowOrchestrator lifecycle and state persistence
|
||||||
@@ -77,12 +97,52 @@ export interface NextAction {
|
|||||||
export class WorkflowService {
|
export class WorkflowService {
|
||||||
private readonly projectRoot: string;
|
private readonly projectRoot: string;
|
||||||
private readonly stateManager: WorkflowStateManager;
|
private readonly stateManager: WorkflowStateManager;
|
||||||
|
private readonly taskStatusUpdater?: TaskStatusUpdater;
|
||||||
|
private readonly tag?: string;
|
||||||
|
private readonly logger = getLogger('WorkflowService');
|
||||||
private orchestrator?: WorkflowOrchestrator;
|
private orchestrator?: WorkflowOrchestrator;
|
||||||
private activityLogger?: WorkflowActivityLogger;
|
private activityLogger?: WorkflowActivityLogger;
|
||||||
|
|
||||||
constructor(projectRoot: string) {
|
constructor(projectRootOrOptions: string | WorkflowServiceOptions) {
|
||||||
this.projectRoot = projectRoot;
|
if (typeof projectRootOrOptions === 'string') {
|
||||||
this.stateManager = new WorkflowStateManager(projectRoot);
|
// Legacy constructor: just projectRoot
|
||||||
|
this.projectRoot = projectRootOrOptions;
|
||||||
|
} else {
|
||||||
|
// New constructor with options
|
||||||
|
this.projectRoot = projectRootOrOptions.projectRoot;
|
||||||
|
this.taskStatusUpdater = projectRootOrOptions.taskStatusUpdater;
|
||||||
|
this.tag = projectRootOrOptions.tag;
|
||||||
|
}
|
||||||
|
this.stateManager = new WorkflowStateManager(this.projectRoot);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update task status if updater is available
|
||||||
|
* Logs warning but doesn't throw if update fails
|
||||||
|
* @param taskId - Task ID to update
|
||||||
|
* @param status - New status
|
||||||
|
* @param tag - Optional tag override (uses constructor tag if not provided)
|
||||||
|
*/
|
||||||
|
private async updateTaskStatus(
|
||||||
|
taskId: string,
|
||||||
|
status: TaskStatus,
|
||||||
|
tag?: string
|
||||||
|
): Promise<void> {
|
||||||
|
if (!this.taskStatusUpdater) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.taskStatusUpdater.updateStatus(
|
||||||
|
taskId,
|
||||||
|
status,
|
||||||
|
tag ?? this.tag
|
||||||
|
);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
// Log but don't fail the workflow operation
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.warn(`Failed to update task ${taskId} status: ${message}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -102,7 +162,8 @@ export class WorkflowService {
|
|||||||
subtasks,
|
subtasks,
|
||||||
maxAttempts = 3,
|
maxAttempts = 3,
|
||||||
force,
|
force,
|
||||||
tag
|
tag,
|
||||||
|
orgSlug
|
||||||
} = options;
|
} = options;
|
||||||
|
|
||||||
// Check for existing workflow
|
// Check for existing workflow
|
||||||
@@ -143,6 +204,7 @@ export class WorkflowService {
|
|||||||
taskId,
|
taskId,
|
||||||
subtasks: workflowSubtasks,
|
subtasks: workflowSubtasks,
|
||||||
currentSubtaskIndex: firstIncompleteIndex,
|
currentSubtaskIndex: firstIncompleteIndex,
|
||||||
|
tag,
|
||||||
errors: [],
|
errors: [],
|
||||||
metadata: {
|
metadata: {
|
||||||
startedAt: new Date().toISOString(),
|
startedAt: new Date().toISOString(),
|
||||||
@@ -171,7 +233,7 @@ export class WorkflowService {
|
|||||||
await this.orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
await this.orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
|
||||||
|
|
||||||
// Create git branch with descriptive name
|
// Create git branch with descriptive name
|
||||||
const branchName = this.generateBranchName(taskId, taskTitle, tag);
|
const branchName = this.generateBranchName(taskId, taskTitle, tag, orgSlug);
|
||||||
|
|
||||||
// Check if we're already on the target branch
|
// Check if we're already on the target branch
|
||||||
const currentBranch = await gitAdapter.getCurrentBranch();
|
const currentBranch = await gitAdapter.getCurrentBranch();
|
||||||
@@ -186,6 +248,9 @@ export class WorkflowService {
|
|||||||
branchName
|
branchName
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Set main task status to in-progress
|
||||||
|
await this.updateTaskStatus(taskId, 'in-progress', tag);
|
||||||
|
|
||||||
return this.getStatus();
|
return this.getStatus();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -401,6 +466,10 @@ export class WorkflowService {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Capture current subtask before transitioning
|
||||||
|
const currentSubtask = this.orchestrator.getCurrentSubtask();
|
||||||
|
const completedSubtaskId = currentSubtask?.id;
|
||||||
|
|
||||||
// Transition COMMIT phase complete
|
// Transition COMMIT phase complete
|
||||||
await this.orchestrator.transition({
|
await this.orchestrator.transition({
|
||||||
type: 'COMMIT_COMPLETE'
|
type: 'COMMIT_COMPLETE'
|
||||||
@@ -415,12 +484,19 @@ export class WorkflowService {
|
|||||||
await this.orchestrator.transition({ type: 'ALL_SUBTASKS_COMPLETE' });
|
await this.orchestrator.transition({ type: 'ALL_SUBTASKS_COMPLETE' });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Mark completed subtask as done (use workflow's tag from context)
|
||||||
|
if (completedSubtaskId) {
|
||||||
|
const context = this.orchestrator.getContext();
|
||||||
|
await this.updateTaskStatus(completedSubtaskId, 'done', context.tag);
|
||||||
|
}
|
||||||
|
|
||||||
return this.getStatus();
|
return this.getStatus();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Finalize and complete the workflow
|
* Finalize and complete the workflow
|
||||||
* Validates working tree is clean before marking complete
|
* Validates working tree is clean before marking complete
|
||||||
|
* Cleans up workflow state file after successful completion
|
||||||
*/
|
*/
|
||||||
async finalizeWorkflow(): Promise<WorkflowStatus> {
|
async finalizeWorkflow(): Promise<WorkflowStatus> {
|
||||||
if (!this.orchestrator) {
|
if (!this.orchestrator) {
|
||||||
@@ -447,10 +523,24 @@ export class WorkflowService {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Capture task ID before transitioning
|
||||||
|
const context = this.orchestrator.getContext();
|
||||||
|
const taskId = context.taskId;
|
||||||
|
|
||||||
// Transition to COMPLETE
|
// Transition to COMPLETE
|
||||||
await this.orchestrator.transition({ type: 'FINALIZE_COMPLETE' });
|
await this.orchestrator.transition({ type: 'FINALIZE_COMPLETE' });
|
||||||
|
|
||||||
return this.getStatus();
|
// Get final status before cleanup
|
||||||
|
const finalStatus = this.getStatus();
|
||||||
|
|
||||||
|
// Mark main task as done (use workflow's tag from context)
|
||||||
|
await this.updateTaskStatus(taskId, 'done', context.tag);
|
||||||
|
|
||||||
|
// Clean up workflow state file so new workflows can start without force
|
||||||
|
await this.stateManager.delete();
|
||||||
|
this.orchestrator = undefined;
|
||||||
|
|
||||||
|
return finalStatus;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -469,12 +559,14 @@ export class WorkflowService {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Generate a descriptive git branch name
|
* Generate a descriptive git branch name
|
||||||
* Format: tag-name/task-id-task-title or task-id-task-title
|
* Format: tm/<namespace>/task-<id>-<title> where namespace is orgSlug (API) or tag (local)
|
||||||
|
* All branches are prefixed with 'tm/' to avoid conflicts with existing branches
|
||||||
*/
|
*/
|
||||||
private generateBranchName(
|
private generateBranchName(
|
||||||
taskId: string,
|
taskId: string,
|
||||||
taskTitle: string,
|
taskTitle: string,
|
||||||
tag?: string
|
tag?: string,
|
||||||
|
orgSlug?: string
|
||||||
): string {
|
): string {
|
||||||
// Sanitize task title for branch name
|
// Sanitize task title for branch name
|
||||||
const sanitizedTitle = taskTitle
|
const sanitizedTitle = taskTitle
|
||||||
@@ -486,9 +578,10 @@ export class WorkflowService {
|
|||||||
// Format task ID for branch name
|
// Format task ID for branch name
|
||||||
const formattedTaskId = taskId.replace(/\./g, '-');
|
const formattedTaskId = taskId.replace(/\./g, '-');
|
||||||
|
|
||||||
// Add tag prefix if tag is provided
|
// Priority: orgSlug (API storage) > tag (local storage) > none
|
||||||
const tagPrefix = tag ? `${tag}/` : '';
|
const namespace = orgSlug || tag;
|
||||||
|
const prefix = namespace ? `tm/${namespace}` : 'tm';
|
||||||
|
|
||||||
return `${tagPrefix}task-${formattedTaskId}-${sanitizedTitle}`;
|
return `${prefix}/task-${formattedTaskId}-${sanitizedTitle}`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ export interface WorkflowContext {
|
|||||||
currentSubtaskIndex: number;
|
currentSubtaskIndex: number;
|
||||||
currentTDDPhase?: TDDPhase;
|
currentTDDPhase?: TDDPhase;
|
||||||
branchName?: string;
|
branchName?: string;
|
||||||
|
tag?: string;
|
||||||
errors: WorkflowError[];
|
errors: WorkflowError[];
|
||||||
metadata: Record<string, unknown>;
|
metadata: Record<string, unknown>;
|
||||||
lastTestResults?: TestResult;
|
lastTestResults?: TestResult;
|
||||||
|
|||||||
@@ -4,7 +4,11 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import type { ConfigManager } from '../config/managers/config-manager.js';
|
import type { ConfigManager } from '../config/managers/config-manager.js';
|
||||||
import { WorkflowService } from './services/workflow.service.js';
|
import type { TasksDomain } from '../tasks/tasks-domain.js';
|
||||||
|
import {
|
||||||
|
type TaskStatusUpdater,
|
||||||
|
WorkflowService
|
||||||
|
} from './services/workflow.service.js';
|
||||||
import type {
|
import type {
|
||||||
NextAction,
|
NextAction,
|
||||||
StartWorkflowOptions,
|
StartWorkflowOptions,
|
||||||
@@ -14,12 +18,57 @@ import type { TestResult, WorkflowContext } from './types.js';
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Workflow Domain - Unified API for TDD workflow operations
|
* Workflow Domain - Unified API for TDD workflow operations
|
||||||
|
* Automatically handles task status updates through dependency injection
|
||||||
*/
|
*/
|
||||||
export class WorkflowDomain {
|
export class WorkflowDomain {
|
||||||
private workflowService: WorkflowService;
|
private workflowService: WorkflowService | null = null;
|
||||||
|
private readonly projectRoot: string;
|
||||||
|
private readonly configManager: ConfigManager;
|
||||||
|
private tasksDomain: TasksDomain | null = null;
|
||||||
|
|
||||||
constructor(configManager: ConfigManager) {
|
constructor(configManager: ConfigManager) {
|
||||||
this.workflowService = new WorkflowService(configManager.getProjectRoot());
|
this.configManager = configManager;
|
||||||
|
this.projectRoot = configManager.getProjectRoot();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the TasksDomain for status updates
|
||||||
|
* Called by TmCore after TasksDomain is initialized
|
||||||
|
*/
|
||||||
|
setTasksDomain(tasksDomain: TasksDomain): void {
|
||||||
|
this.tasksDomain = tasksDomain;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create or get WorkflowService instance with proper DI
|
||||||
|
*/
|
||||||
|
private getWorkflowService(): WorkflowService {
|
||||||
|
if (!this.workflowService) {
|
||||||
|
const currentTag = this.configManager.getActiveTag();
|
||||||
|
|
||||||
|
// Create task status updater if TasksDomain is available
|
||||||
|
const taskStatusUpdater: TaskStatusUpdater | undefined = this.tasksDomain
|
||||||
|
? {
|
||||||
|
updateStatus: async (taskId, status, tag) => {
|
||||||
|
await this.tasksDomain!.updateStatus(taskId, status, tag);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
this.workflowService = new WorkflowService({
|
||||||
|
projectRoot: this.projectRoot,
|
||||||
|
taskStatusUpdater,
|
||||||
|
tag: currentTag
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return this.workflowService;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reset workflow service (for when workflow completes or aborts)
|
||||||
|
*/
|
||||||
|
private resetWorkflowService(): void {
|
||||||
|
this.workflowService = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// ========== Workflow Lifecycle ==========
|
// ========== Workflow Lifecycle ==========
|
||||||
@@ -28,63 +77,72 @@ export class WorkflowDomain {
|
|||||||
* Start a new TDD workflow for a task
|
* Start a new TDD workflow for a task
|
||||||
*/
|
*/
|
||||||
async start(options: StartWorkflowOptions): Promise<WorkflowStatus> {
|
async start(options: StartWorkflowOptions): Promise<WorkflowStatus> {
|
||||||
return this.workflowService.startWorkflow(options);
|
// Reset to get fresh service with current tag
|
||||||
|
this.resetWorkflowService();
|
||||||
|
return this.getWorkflowService().startWorkflow(options);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Resume an existing workflow
|
* Resume an existing workflow
|
||||||
*/
|
*/
|
||||||
async resume(): Promise<WorkflowStatus> {
|
async resume(): Promise<WorkflowStatus> {
|
||||||
return this.workflowService.resumeWorkflow();
|
// Reset to get fresh service with current tag
|
||||||
|
this.resetWorkflowService();
|
||||||
|
return this.getWorkflowService().resumeWorkflow();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get current workflow status
|
* Get current workflow status
|
||||||
*/
|
*/
|
||||||
getStatus(): WorkflowStatus {
|
getStatus(): WorkflowStatus {
|
||||||
return this.workflowService.getStatus();
|
return this.getWorkflowService().getStatus();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get workflow context
|
* Get workflow context
|
||||||
*/
|
*/
|
||||||
getContext(): WorkflowContext {
|
getContext(): WorkflowContext {
|
||||||
return this.workflowService.getContext();
|
return this.getWorkflowService().getContext();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get next action to perform in workflow
|
* Get next action to perform in workflow
|
||||||
*/
|
*/
|
||||||
getNextAction(): NextAction {
|
getNextAction(): NextAction {
|
||||||
return this.workflowService.getNextAction();
|
return this.getWorkflowService().getNextAction();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Complete current phase with test results
|
* Complete current phase with test results
|
||||||
*/
|
*/
|
||||||
async completePhase(testResults: TestResult): Promise<WorkflowStatus> {
|
async completePhase(testResults: TestResult): Promise<WorkflowStatus> {
|
||||||
return this.workflowService.completePhase(testResults);
|
return this.getWorkflowService().completePhase(testResults);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Commit changes with auto-generated message
|
* Commit changes with auto-generated message
|
||||||
*/
|
*/
|
||||||
async commit(): Promise<WorkflowStatus> {
|
async commit(): Promise<WorkflowStatus> {
|
||||||
return this.workflowService.commit();
|
return this.getWorkflowService().commit();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Finalize and complete the workflow
|
* Finalize and complete the workflow
|
||||||
|
* Resets workflow service after completion
|
||||||
*/
|
*/
|
||||||
async finalize(): Promise<WorkflowStatus> {
|
async finalize(): Promise<WorkflowStatus> {
|
||||||
return this.workflowService.finalizeWorkflow();
|
const result = await this.getWorkflowService().finalizeWorkflow();
|
||||||
|
this.resetWorkflowService();
|
||||||
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Abort the current workflow
|
* Abort the current workflow
|
||||||
|
* Resets workflow service after abort
|
||||||
*/
|
*/
|
||||||
async abort(): Promise<void> {
|
async abort(): Promise<void> {
|
||||||
return this.workflowService.abortWorkflow();
|
await this.getWorkflowService().abortWorkflow();
|
||||||
|
this.resetWorkflowService();
|
||||||
}
|
}
|
||||||
|
|
||||||
// ========== Workflow Information ==========
|
// ========== Workflow Information ==========
|
||||||
@@ -93,6 +151,6 @@ export class WorkflowDomain {
|
|||||||
* Check if a workflow currently exists
|
* Check if a workflow currently exists
|
||||||
*/
|
*/
|
||||||
async hasWorkflow(): Promise<boolean> {
|
async hasWorkflow(): Promise<boolean> {
|
||||||
return this.workflowService.hasWorkflow();
|
return this.getWorkflowService().hasWorkflow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -180,6 +180,10 @@ export class TmCore {
|
|||||||
// Initialize domains that need async setup
|
// Initialize domains that need async setup
|
||||||
await this._tasks.initialize();
|
await this._tasks.initialize();
|
||||||
|
|
||||||
|
// Wire up cross-domain dependencies
|
||||||
|
// WorkflowDomain needs TasksDomain for status updates
|
||||||
|
this._workflow.setTasksDomain(this._tasks);
|
||||||
|
|
||||||
// Log successful initialization
|
// Log successful initialization
|
||||||
this._logger.info('TmCore initialized successfully');
|
this._logger.info('TmCore initialized successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -0,0 +1,866 @@
/**
* @fileoverview Integration tests for autopilot workflow state machine
*
* Tests the full workflow lifecycle through WorkflowService:
* - Start workflow and verify state file creation
* - TDD phase transitions (RED → GREEN → COMMIT)
* - State persistence and resume
* - Auto-complete subtask when RED phase has 0 failures
* - Workflow finalization and abort
*
* These tests create temporary project directories and verify the workflow
* state machine operates correctly with actual file I/O.
*
* NOTE: Workflow state is stored in ~/.taskmaster/{project-id}/sessions/
* based on the project path. Tests clean up their state files in afterEach.
*
* @integration
*/

import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

// Mock the logger to reduce noise in tests
vi.mock('../../../src/common/logger/index.js', () => ({
getLogger: () => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
child: vi.fn().mockReturnThis()
})
}));

import { WorkflowStateManager } from '../../../src/modules/workflow/managers/workflow-state-manager.js';
import { WorkflowService } from '../../../src/modules/workflow/services/workflow.service.js';
import type { WorkflowState } from '../../../src/modules/workflow/types.js';

// Store original HOME to restore after tests
const originalHome = process.env.HOME;

describe('Autopilot Workflow Integration', () => {
let testProjectDir: string;
let testHomeDir: string;
let stateManager: WorkflowStateManager;
let workflowService: WorkflowService;

/**
* Read the workflow state file directly from disk
*/
const readWorkflowState = (): WorkflowState | null => {
const statePath = stateManager.getStatePath();
try {
const content = fs.readFileSync(statePath, 'utf-8');
return JSON.parse(content);
} catch {
return null;
}
};

/**
* Check if workflow state file exists
*/
const workflowStateExists = (): boolean => {
return fs.existsSync(stateManager.getStatePath());
};

/**
* Get the expected state file path
*/
const getExpectedStatePath = (): string => {
return stateManager.getStatePath();
};

beforeEach(() => {
// Create temp directories for isolation
testHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-workflow-home-'));
testProjectDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'tm-workflow-project-')
);

// Override HOME so os.homedir() returns our temp directory
// This prevents tests from polluting the real ~/.taskmaster/
process.env.HOME = testHomeDir;

// Create state manager AFTER setting HOME (uses os.homedir() internally)
stateManager = new WorkflowStateManager(testProjectDir);

// Initialize git in the project directory (required for workflow)
execSync('git init', { cwd: testProjectDir, stdio: 'pipe' });
execSync('git config user.email "test@test.com"', {
cwd: testProjectDir,
stdio: 'pipe'
});
execSync('git config user.name "Test User"', {
cwd: testProjectDir,
stdio: 'pipe'
});
// Disable GPG/SSH signing to avoid 1Password and other signing tool interference
execSync('git config commit.gpgsign false', {
cwd: testProjectDir,
stdio: 'pipe'
});

// Create an initial commit (git needs at least one commit)
fs.writeFileSync(
path.join(testProjectDir, 'README.md'),
'# Test Project\n'
);
execSync('git add .', { cwd: testProjectDir, stdio: 'pipe' });
execSync('git commit -m "Initial commit"', {
cwd: testProjectDir,
stdio: 'pipe'
});

// Create workflow service
workflowService = new WorkflowService(testProjectDir);
});

afterEach(() => {
// Restore original HOME
process.env.HOME = originalHome;

// Clean up temp directories
if (testProjectDir && fs.existsSync(testProjectDir)) {
fs.rmSync(testProjectDir, { recursive: true, force: true });
}
if (testHomeDir && fs.existsSync(testHomeDir)) {
fs.rmSync(testHomeDir, { recursive: true, force: true });
}
});

describe('Workflow State File Location', () => {
it('should store workflow state in isolated temp home directory', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Test Task',
subtasks: [
{ id: '1.1', title: 'Subtask 1', status: 'pending' },
{ id: '1.2', title: 'Subtask 2', status: 'pending' }
]
});

const statePath = getExpectedStatePath();

// State file should be in temp home directory (not real ~/.taskmaster/)
expect(statePath).toContain(testHomeDir);
expect(statePath).toContain('.taskmaster');
expect(statePath).toContain('sessions');
expect(statePath).toContain('workflow-state.json');

// State file should exist
expect(workflowStateExists()).toBe(true);
});

it('should create project-specific directory based on project path', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Test Task',
subtasks: [{ id: '1.1', title: 'Subtask 1', status: 'pending' }]
});

const statePath = getExpectedStatePath();

// Should contain sanitized project path as identifier
// The path should be like: ~/.taskmaster/-tmp-...-tm-workflow-project-.../sessions/workflow-state.json
expect(statePath).toMatch(
/\.taskmaster\/-[^/]+\/sessions\/workflow-state\.json$/
);
});
});

describe('Start Workflow', () => {
it('should initialize workflow and create state file', async () => {
const status = await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Implement Feature',
subtasks: [
{ id: '1.1', title: 'Write Tests', status: 'pending' },
{ id: '1.2', title: 'Implement Code', status: 'pending' }
]
});

expect(status.taskId).toBe('1');
expect(status.phase).toBe('SUBTASK_LOOP');
expect(status.tddPhase).toBe('RED');
expect(status.currentSubtask?.id).toBe('1.1');
expect(status.progress.total).toBe(2);
expect(status.progress.completed).toBe(0);

// Verify state file
const state = readWorkflowState();
expect(state).not.toBeNull();
expect(state?.phase).toBe('SUBTASK_LOOP');
expect(state?.context.taskId).toBe('1');
expect(state?.context.currentTDDPhase).toBe('RED');
});

it('should create git branch with proper naming', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Add User Authentication',
subtasks: [{ id: '1.1', title: 'Setup auth', status: 'pending' }]
});

const currentBranch = execSync('git branch --show-current', {
cwd: testProjectDir,
encoding: 'utf-8'
}).trim();

expect(currentBranch).toBe('tm/task-1-add-user-authentication');
});

it('should include tag in branch name when provided', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Feature X',
subtasks: [{ id: '1.1', title: 'Do thing', status: 'pending' }],
tag: 'sprint-1'
});

const currentBranch = execSync('git branch --show-current', {
cwd: testProjectDir,
encoding: 'utf-8'
}).trim();

expect(currentBranch).toBe('tm/sprint-1/task-1-feature-x');
});

it('should skip already completed subtasks', async () => {
const status = await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Resume Task',
subtasks: [
{ id: '1.1', title: 'Already Done', status: 'done' },
{ id: '1.2', title: 'Next Up', status: 'pending' }
]
});

// Should start at subtask 1.2 since 1.1 is done
expect(status.currentSubtask?.id).toBe('1.2');
expect(status.progress.completed).toBe(1);
expect(status.progress.current).toBe(2);
});

it('should reject when no subtasks to work on', async () => {
await expect(
workflowService.startWorkflow({
taskId: '1',
taskTitle: 'All Done',
subtasks: [
{ id: '1.1', title: 'Done 1', status: 'done' },
{ id: '1.2', title: 'Done 2', status: 'done' }
]
})
).rejects.toThrow('All subtasks for task 1 are already completed');
});

it('should reject when workflow already exists without force', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'First Workflow',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

// Create new service instance (simulating new command invocation)
const newService = new WorkflowService(testProjectDir);

await expect(
newService.startWorkflow({
taskId: '2',
taskTitle: 'Second Workflow',
subtasks: [{ id: '2.1', title: 'Task', status: 'pending' }]
})
).rejects.toThrow('Workflow already exists');
});

it('should allow force restart when workflow exists', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'First Workflow',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

// Create new service instance and force restart
const newService = new WorkflowService(testProjectDir);

const status = await newService.startWorkflow({
taskId: '2',
taskTitle: 'Second Workflow',
subtasks: [{ id: '2.1', title: 'New Task', status: 'pending' }],
force: true
});

expect(status.taskId).toBe('2');
expect(status.currentSubtask?.id).toBe('2.1');
});
});

describe('TDD Phase Transitions', () => {
beforeEach(async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'TDD Test',
subtasks: [
{ id: '1.1', title: 'First Subtask', status: 'pending' },
{ id: '1.2', title: 'Second Subtask', status: 'pending' }
]
});
});

it('should transition from RED to GREEN phase', async () => {
// Initial state should be RED
let status = workflowService.getStatus();
expect(status.tddPhase).toBe('RED');

// Complete RED phase with failing tests
status = await workflowService.completePhase({
total: 5,
passed: 2,
failed: 3,
skipped: 0,
phase: 'RED'
});

expect(status.tddPhase).toBe('GREEN');

// Verify state file updated
const state = readWorkflowState();
expect(state?.context.currentTDDPhase).toBe('GREEN');
});

it('should transition from GREEN to COMMIT phase', async () => {
// Complete RED phase
await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});

// Complete GREEN phase with all tests passing
const status = await workflowService.completePhase({
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'GREEN'
});

expect(status.tddPhase).toBe('COMMIT');
});

it('should reject GREEN phase with failing tests', async () => {
// Complete RED phase
await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});

// Try to complete GREEN with failures
await expect(
workflowService.completePhase({
total: 5,
passed: 3,
failed: 2,
skipped: 0,
phase: 'GREEN'
})
).rejects.toThrow('GREEN phase must have zero failures');
});

it('should advance to next subtask after COMMIT', async () => {
// Complete full TDD cycle for first subtask
await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});

await workflowService.completePhase({
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'GREEN'
});

// Complete COMMIT phase
const status = await workflowService.commit();

// Should be on second subtask in RED phase
expect(status.currentSubtask?.id).toBe('1.2');
expect(status.tddPhase).toBe('RED');
expect(status.progress.completed).toBe(1);
expect(status.progress.current).toBe(2);
});

it('should store test results in state', async () => {
const testResults = {
total: 10,
passed: 3,
failed: 7,
skipped: 0,
phase: 'RED' as const
};

await workflowService.completePhase(testResults);

const state = readWorkflowState();
expect(state?.context.lastTestResults).toEqual(testResults);
});
});

describe('Auto-Complete Subtask (RED with 0 failures)', () => {
it('should auto-complete subtask when RED phase has no failures', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Auto-Complete Test',
subtasks: [
{ id: '1.1', title: 'Already Implemented', status: 'pending' },
{ id: '1.2', title: 'Needs Work', status: 'pending' }
]
});

// Complete RED phase with all tests passing (feature already implemented)
const status = await workflowService.completePhase({
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'RED'
});

// Should have auto-advanced to next subtask
expect(status.currentSubtask?.id).toBe('1.2');
expect(status.tddPhase).toBe('RED');
expect(status.progress.completed).toBe(1);
});
});

describe('Resume Workflow', () => {
it('should resume workflow from saved state', async () => {
// Start workflow and progress through RED phase
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Resume Test',
subtasks: [
{ id: '1.1', title: 'First', status: 'pending' },
{ id: '1.2', title: 'Second', status: 'pending' }
]
});

await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});

// Verify we're in GREEN phase
expect(workflowService.getStatus().tddPhase).toBe('GREEN');

// Create new service instance (simulating new session)
const newService = new WorkflowService(testProjectDir);

// Resume workflow
const status = await newService.resumeWorkflow();

expect(status.taskId).toBe('1');
expect(status.phase).toBe('SUBTASK_LOOP');
expect(status.tddPhase).toBe('GREEN'); // Should resume in GREEN phase
expect(status.currentSubtask?.id).toBe('1.1');
});

it('should preserve progress when resuming', async () => {
// Start workflow and complete first subtask
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Progress Test',
subtasks: [
{ id: '1.1', title: 'First', status: 'pending' },
{ id: '1.2', title: 'Second', status: 'pending' }
]
});

// Complete first subtask
await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});
await workflowService.completePhase({
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'GREEN'
});
await workflowService.commit();

// Resume in new session
const newService = new WorkflowService(testProjectDir);
const status = await newService.resumeWorkflow();

expect(status.progress.completed).toBe(1);
expect(status.progress.current).toBe(2);
expect(status.currentSubtask?.id).toBe('1.2');
});

it('should error when no workflow exists to resume', async () => {
await expect(workflowService.resumeWorkflow()).rejects.toThrow(
'Workflow state file not found'
);
});
});

describe('Finalize Workflow', () => {
it('should finalize when all subtasks are complete', async () => {
// Start workflow with single subtask
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Finalize Test',
subtasks: [{ id: '1.1', title: 'Only Task', status: 'pending' }]
});

// Complete the subtask
await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});
await workflowService.completePhase({
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'GREEN'
});

// Make a commit in git to clean working tree
fs.writeFileSync(
path.join(testProjectDir, 'feature.ts'),
'export const x = 1;\n'
);
execSync('git add .', { cwd: testProjectDir, stdio: 'pipe' });
execSync('git commit -m "Implement feature"', {
cwd: testProjectDir,
stdio: 'pipe'
});

// Complete commit phase
await workflowService.commit();

// Should now be in FINALIZE phase
let status = workflowService.getStatus();
expect(status.phase).toBe('FINALIZE');

// Finalize workflow
status = await workflowService.finalizeWorkflow();

expect(status.phase).toBe('COMPLETE');
expect(status.progress.percentage).toBe(100);
});

it('should reject finalize with uncommitted changes', async () => {
// Start and complete all subtasks
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Dirty Tree Test',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

await workflowService.completePhase({
total: 1,
passed: 0,
failed: 1,
skipped: 0,
phase: 'RED'
});
await workflowService.completePhase({
total: 1,
passed: 1,
failed: 0,
skipped: 0,
phase: 'GREEN'
});
await workflowService.commit();

// Create uncommitted changes
fs.writeFileSync(
path.join(testProjectDir, 'uncommitted.ts'),
'const x = 1;\n'
);

// Should fail to finalize
await expect(workflowService.finalizeWorkflow()).rejects.toThrow(
'working tree has uncommitted changes'
);
});
});

describe('Abort Workflow', () => {
it('should abort workflow and delete state file', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Abort Test',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

expect(workflowStateExists()).toBe(true);

await workflowService.abortWorkflow();

expect(workflowStateExists()).toBe(false);
});

it('should not error when aborting non-existent workflow', async () => {
// Should not throw
await expect(workflowService.abortWorkflow()).resolves.not.toThrow();
});
});

describe('Next Action Recommendations', () => {
it('should recommend correct action for RED phase', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Next Action Test',
subtasks: [{ id: '1.1', title: 'Write auth tests', status: 'pending' }]
});

const nextAction = workflowService.getNextAction();

expect(nextAction.action).toBe('generate_test');
expect(nextAction.tddPhase).toBe('RED');
expect(nextAction.subtask?.id).toBe('1.1');
expect(nextAction.nextSteps).toContain('Write failing tests');
});

it('should recommend correct action for GREEN phase', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Next Action Test',
subtasks: [{ id: '1.1', title: 'Implement feature', status: 'pending' }]
});

await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});

const nextAction = workflowService.getNextAction();

expect(nextAction.action).toBe('implement_code');
expect(nextAction.tddPhase).toBe('GREEN');
expect(nextAction.nextSteps).toContain('Implement code');
});

it('should recommend correct action for COMMIT phase', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Next Action Test',
subtasks: [{ id: '1.1', title: 'Commit changes', status: 'pending' }]
});

await workflowService.completePhase({
total: 5,
passed: 0,
failed: 5,
skipped: 0,
phase: 'RED'
});
await workflowService.completePhase({
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'GREEN'
});

const nextAction = workflowService.getNextAction();

expect(nextAction.action).toBe('commit_changes');
expect(nextAction.tddPhase).toBe('COMMIT');
expect(nextAction.nextSteps).toContain('commit');
});

it('should recommend finalize for FINALIZE phase', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Finalize Test',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

// Make git commit to have clean tree
fs.writeFileSync(
path.join(testProjectDir, 'feature.ts'),
'export const x = 1;\n'
);
execSync('git add .', { cwd: testProjectDir, stdio: 'pipe' });
execSync('git commit -m "Feature"', {
cwd: testProjectDir,
stdio: 'pipe'
});

// Complete the workflow to FINALIZE phase
await workflowService.completePhase({
total: 1,
passed: 0,
failed: 1,
skipped: 0,
phase: 'RED'
});
await workflowService.completePhase({
total: 1,
passed: 1,
failed: 0,
skipped: 0,
phase: 'GREEN'
});
await workflowService.commit();

const nextAction = workflowService.getNextAction();

expect(nextAction.action).toBe('finalize_workflow');
expect(nextAction.phase).toBe('FINALIZE');
});
});

describe('State File Evolution', () => {
it('should track full workflow state evolution', async () => {
// Start workflow
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Evolution Test',
subtasks: [
{ id: '1.1', title: 'First', status: 'pending' },
{ id: '1.2', title: 'Second', status: 'pending' }
]
});

// Verify initial state
let state = readWorkflowState();
expect(state?.phase).toBe('SUBTASK_LOOP');
expect(state?.context.currentSubtaskIndex).toBe(0);
expect(state?.context.currentTDDPhase).toBe('RED');

// Complete RED phase
await workflowService.completePhase({
total: 3,
passed: 0,
failed: 3,
skipped: 0,
phase: 'RED'
});

state = readWorkflowState();
expect(state?.context.currentTDDPhase).toBe('GREEN');
expect(state?.context.lastTestResults?.failed).toBe(3);

// Complete GREEN phase
await workflowService.completePhase({
total: 3,
passed: 3,
failed: 0,
skipped: 0,
phase: 'GREEN'
});

state = readWorkflowState();
expect(state?.context.currentTDDPhase).toBe('COMMIT');

// Complete commit and advance to next subtask
await workflowService.commit();

state = readWorkflowState();
expect(state?.context.currentSubtaskIndex).toBe(1);
expect(state?.context.currentTDDPhase).toBe('RED');
expect(state?.context.subtasks[0].status).toBe('completed');
});
});

describe('hasWorkflow', () => {
it('should return false when no workflow exists', async () => {
const exists = await workflowService.hasWorkflow();
expect(exists).toBe(false);
});

it('should return true when workflow exists', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Exists Test',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

const exists = await workflowService.hasWorkflow();
expect(exists).toBe(true);
});

it('should return false after workflow is aborted', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Abort Test',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }]
});

await workflowService.abortWorkflow();

const exists = await workflowService.hasWorkflow();
expect(exists).toBe(false);
});
});

describe('Team/API Storage', () => {
it('should use orgSlug for branch naming when provided (API storage mode)', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Team Feature',
subtasks: [{ id: 'HAM-2', title: 'Implement', status: 'pending' }],
orgSlug: 'acme-corp'
});

const currentBranch = execSync('git branch --show-current', {
cwd: testProjectDir,
encoding: 'utf-8'
}).trim();

expect(currentBranch).toBe('tm/acme-corp/task-1-team-feature');
});

it('should prioritize orgSlug over tag for branch naming', async () => {
await workflowService.startWorkflow({
taskId: '1',
taskTitle: 'Priority Test',
subtasks: [{ id: '1.1', title: 'Task', status: 'pending' }],
tag: 'local-tag',
orgSlug: 'team-slug'
});

const currentBranch = execSync('git branch --show-current', {
cwd: testProjectDir,
encoding: 'utf-8'
}).trim();

// orgSlug should take precedence over tag
expect(currentBranch).toBe('tm/team-slug/task-1-priority-test');
expect(currentBranch).not.toContain('local-tag');
});
});
});