feat: implement tdd workflow (#1309)

Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
This commit is contained in:
Ralph Khreish
2025-10-18 16:29:03 +02:00
committed by GitHub
parent b8830d9508
commit ccb87a516a
106 changed files with 25097 additions and 1541 deletions

View File

@@ -31,11 +31,15 @@
},
"dependencies": {
"@supabase/supabase-js": "^2.57.4",
"fs-extra": "^11.3.2",
"simple-git": "^3.28.0",
"zod": "^4.1.11"
},
"devDependencies": {
"@types/fs-extra": "^11.0.4",
"@types/node": "^22.10.5",
"@vitest/coverage-v8": "^3.2.4",
"strip-literal": "^3.1.0",
"typescript": "^5.9.2",
"vitest": "^3.2.4",
"strip-literal": "3.1.0"

View File

@@ -25,10 +25,12 @@ import { getLogger } from '../logger/index.js';
*/
export class AuthManager {
private static instance: AuthManager | null = null;
private static readonly staticLogger = getLogger('AuthManager');
private credentialStore: CredentialStore;
private oauthService: OAuthService;
private supabaseClient: SupabaseAuthClient;
private organizationService?: OrganizationService;
private readonly logger = getLogger('AuthManager');
private constructor(config?: Partial<AuthConfig>) {
this.credentialStore = CredentialStore.getInstance(config);
@@ -50,8 +52,7 @@ export class AuthManager {
await this.supabaseClient.initialize();
} catch (error) {
// Log but don't throw - session might not exist yet
const logger = getLogger('AuthManager');
logger.debug('No existing session to restore');
this.logger.debug('No existing session to restore');
}
}
@@ -63,8 +64,7 @@ export class AuthManager {
AuthManager.instance = new AuthManager(config);
} else if (config) {
// Warn if config is provided after initialization
const logger = getLogger('AuthManager');
logger.warn(
AuthManager.staticLogger.warn(
'getInstance called with config after initialization; config is ignored.'
);
}
@@ -159,7 +159,7 @@ export class AuthManager {
await this.supabaseClient.signOut();
} catch (error) {
// Log but don't throw - we still want to clear local credentials
getLogger('AuthManager').warn('Failed to sign out from Supabase:', error);
this.logger.warn('Failed to sign out from Supabase:', error);
}
// Always clear local credentials (removes auth.json file)

View File

@@ -27,6 +27,7 @@ export type {
ProviderConfig,
TaskSettings,
TagSettings,
WorkflowSettings,
StorageSettings,
RetrySettings,
LoggingSettings,

View File

@@ -38,6 +38,35 @@ export class ConfigLoader {
main: DEFAULT_CONFIG_VALUES.MODELS.MAIN,
fallback: DEFAULT_CONFIG_VALUES.MODELS.FALLBACK
},
workflow: {
enableAutopilot: DEFAULT_CONFIG_VALUES.WORKFLOW.ENABLE_AUTOPILOT,
maxPhaseAttempts: DEFAULT_CONFIG_VALUES.WORKFLOW.MAX_PHASE_ATTEMPTS,
branchPattern: DEFAULT_CONFIG_VALUES.WORKFLOW.BRANCH_PATTERN,
requireCleanWorkingTree:
DEFAULT_CONFIG_VALUES.WORKFLOW.REQUIRE_CLEAN_WORKING_TREE,
autoStageChanges: DEFAULT_CONFIG_VALUES.WORKFLOW.AUTO_STAGE_CHANGES,
includeCoAuthor: DEFAULT_CONFIG_VALUES.WORKFLOW.INCLUDE_CO_AUTHOR,
coAuthorName: DEFAULT_CONFIG_VALUES.WORKFLOW.CO_AUTHOR_NAME,
coAuthorEmail: DEFAULT_CONFIG_VALUES.WORKFLOW.CO_AUTHOR_EMAIL,
testThresholds: {
minTests: DEFAULT_CONFIG_VALUES.WORKFLOW.MIN_TESTS,
maxFailuresInGreen:
DEFAULT_CONFIG_VALUES.WORKFLOW.MAX_FAILURES_IN_GREEN
},
commitMessageTemplate:
DEFAULT_CONFIG_VALUES.WORKFLOW.COMMIT_MESSAGE_TEMPLATE,
allowedCommitTypes: [
...DEFAULT_CONFIG_VALUES.WORKFLOW.ALLOWED_COMMIT_TYPES
],
defaultCommitType: DEFAULT_CONFIG_VALUES.WORKFLOW.DEFAULT_COMMIT_TYPE,
operationTimeout: DEFAULT_CONFIG_VALUES.WORKFLOW.OPERATION_TIMEOUT,
enableActivityLogging:
DEFAULT_CONFIG_VALUES.WORKFLOW.ENABLE_ACTIVITY_LOGGING,
activityLogPath: DEFAULT_CONFIG_VALUES.WORKFLOW.ACTIVITY_LOG_PATH,
enableStateBackup: DEFAULT_CONFIG_VALUES.WORKFLOW.ENABLE_STATE_BACKUP,
maxStateBackups: DEFAULT_CONFIG_VALUES.WORKFLOW.MAX_STATE_BACKUPS,
abortOnMaxAttempts: DEFAULT_CONFIG_VALUES.WORKFLOW.ABORT_ON_MAX_ATTEMPTS
},
storage: {
type: DEFAULT_CONFIG_VALUES.STORAGE.TYPE,
encoding: DEFAULT_CONFIG_VALUES.STORAGE.ENCODING,

View File

@@ -10,6 +10,7 @@ import {
ERROR_CODES,
TaskMasterError
} from '../../errors/task-master-error.js';
import { getLogger } from '../../logger/index.js';
/**
* Persistence options
@@ -30,6 +31,7 @@ export interface PersistenceOptions {
export class ConfigPersistence {
private localConfigPath: string;
private backupDir: string;
private readonly logger = getLogger('ConfigPersistence');
constructor(projectRoot: string) {
this.localConfigPath = path.join(projectRoot, '.taskmaster', 'config.json');
@@ -94,7 +96,7 @@ export class ConfigPersistence {
return backupPath;
} catch (error) {
console.warn('Failed to create backup:', error);
this.logger.warn('Failed to create backup:', error);
throw error;
}
}
@@ -116,7 +118,7 @@ export class ConfigPersistence {
await fs.unlink(path.join(this.backupDir, file));
}
} catch (error) {
console.warn('Failed to clean old backups:', error);
this.logger.warn('Failed to clean old backups:', error);
}
}

View File

@@ -4,6 +4,7 @@
*/
import type { PartialConfiguration } from '../../interfaces/configuration.interface.js';
import { getLogger } from '../../logger/index.js';
/**
* Environment variable mapping definition
@@ -24,6 +25,8 @@ interface EnvMapping {
* Single responsibility: Environment variable configuration extraction
*/
export class EnvironmentConfigProvider {
private readonly logger = getLogger('EnvironmentConfigProvider');
/**
* Default environment variable mappings
*/
@@ -75,7 +78,7 @@ export class EnvironmentConfigProvider {
// Validate value if validator is provided
if (mapping.validate && !mapping.validate(value)) {
console.warn(`Invalid value for ${mapping.env}: ${value}`);
this.logger.warn(`Invalid value for ${mapping.env}: ${value}`);
continue;
}

View File

@@ -10,6 +10,7 @@ import {
TaskMasterError
} from '../../errors/task-master-error.js';
import { DEFAULT_CONFIG_VALUES } from '../../interfaces/configuration.interface.js';
import { getLogger } from '../../logger/index.js';
/**
* Runtime state data structure
@@ -30,6 +31,7 @@ export interface RuntimeState {
export class RuntimeStateManager {
private stateFilePath: string;
private currentState: RuntimeState;
private readonly logger = getLogger('RuntimeStateManager');
constructor(projectRoot: string) {
this.stateFilePath = path.join(projectRoot, '.taskmaster', 'state.json');
@@ -66,7 +68,7 @@ export class RuntimeStateManager {
} catch (error: any) {
if (error.code === 'ENOENT') {
// State file doesn't exist, use defaults
console.debug('No state.json found, using default state');
this.logger.debug('No state.json found, using default state');
// Check environment variable
if (process.env.TASKMASTER_TAG) {
@@ -76,7 +78,8 @@ export class RuntimeStateManager {
return this.currentState;
}
console.warn('Failed to load state file:', error.message);
// Failed to load, use defaults
this.logger.warn('Failed to load state file:', error.message);
return this.currentState;
}
}

View File

@@ -0,0 +1,67 @@
import { describe, it, expect } from 'vitest';
import {
generateBranchName,
sanitizeBranchName
} from './branch-name-generator.js';
// Unit tests for the branch-name helpers: sanitization rules first,
// then end-to-end name generation from patterns.
describe('Branch Name Generator', () => {
	describe('sanitizeBranchName', () => {
		it('should remove invalid characters', () => {
			expect(sanitizeBranchName('feature/my feature!')).toBe(
				'feature-my-feature'
			);
		});

		it('should replace spaces with hyphens', () => {
			expect(sanitizeBranchName('my feature branch')).toBe(
				'my-feature-branch'
			);
		});

		it('should convert to lowercase', () => {
			expect(sanitizeBranchName('MyFeature')).toBe('myfeature');
		});

		it('should remove consecutive hyphens', () => {
			expect(sanitizeBranchName('my---feature')).toBe('my-feature');
		});

		it('should handle empty string', () => {
			expect(sanitizeBranchName('')).toBe('branch');
		});
	});

	describe('generateBranchName', () => {
		it('should generate branch name from task ID', () => {
			// No description given, so a timestamp suffix follows the prefix.
			expect(generateBranchName({ taskId: '2.7' })).toMatch(/^task-2-7-/);
		});

		it('should include description in branch name', () => {
			const branch = generateBranchName({
				taskId: '2.7',
				description: 'Add Feature'
			});
			expect(branch).toContain('task-2-7');
			expect(branch).toContain('add-feature');
		});

		it('should handle custom pattern', () => {
			// Slashes in patterns are flattened to hyphens by sanitization.
			expect(
				generateBranchName({ taskId: '2.7', pattern: 'feature/{taskId}' })
			).toBe('feature-2-7');
		});

		it('should truncate long descriptions', () => {
			const branch = generateBranchName({
				taskId: '2.7',
				description: 'a'.repeat(100)
			});
			expect(branch.length).toBeLessThan(80);
		});
	});
});

View File

@@ -0,0 +1,69 @@
/**
* Branch Name Generator - Generates valid git branch names from patterns
* @module branch-name-generator
*/
/**
 * Sanitizes a string to be a valid git branch name.
 * Converts to lowercase, replaces invalid characters and slashes with
 * hyphens, collapses hyphen runs, and strips leading/trailing hyphens.
 * Falls back to 'branch' when the input is empty or sanitizes to nothing.
 *
 * @param {string} name - Name to sanitize
 * @returns {string} Sanitized branch name (never empty)
 */
export function sanitizeBranchName(name: string): string {
	if (!name || name.trim() === '') {
		return 'branch';
	}

	const sanitized = name
		.toLowerCase()
		.replace(/[^a-z0-9-_.\/]/g, '-') // Replace invalid chars with hyphens
		.replace(/\//g, '-') // Replace slashes with hyphens
		.replace(/-+/g, '-') // Remove consecutive hyphens
		.replace(/^-+|-+$/g, ''); // Remove leading/trailing hyphens

	// Inputs made up entirely of invalid characters (e.g. '!!!') sanitize
	// down to an empty string; fall back so callers always get a usable name.
	return sanitized || 'branch';
}
/**
 * Generates a branch name from a pattern and variables.
 *
 * @param {Object} options - Generation options
 * @param {string} options.taskId - Task ID to include (dots become hyphens)
 * @param {string} [options.description] - Description to include; a timestamp
 *   is substituted when omitted so generated names stay unique
 * @param {string} [options.pattern] - Pattern with {taskId}/{description}
 *   placeholders (default: 'task-{taskId}-{description}')
 * @param {number} [options.maxLength=50] - Maximum branch name length
 * @returns {string} Generated, sanitized branch name
 */
export function generateBranchName(options: {
	taskId: string;
	description?: string;
	pattern?: string;
	maxLength?: number;
}): string {
	const limit = options.maxLength || 50;
	const template = options.pattern || 'task-{taskId}-{description}';

	// Task IDs such as '2.7' become '2-7'.
	const idPart = sanitizeBranchName(options.taskId.replace(/\./g, '-'));

	// Use the description when given, otherwise a timestamp placeholder.
	const descPart = sanitizeBranchName(
		options.description || Date.now().toString()
	);

	// Fill the pattern, then sanitize the assembled result as a whole.
	const assembled = sanitizeBranchName(
		template.replace(/{taskId}/g, idPart).replace(/{description}/g, descPart)
	);

	// Enforce the length limit without leaving a dangling hyphen.
	return assembled.length > limit
		? assembled.substring(0, limit).replace(/-+$/, '')
		: assembled;
}

View File

@@ -0,0 +1,319 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { CommitMessageGenerator } from './commit-message-generator.js';
// Unit tests for CommitMessageGenerator: message generation, conventional
// commit validation, parsing, and edge cases.
describe('CommitMessageGenerator', () => {
	let generator: CommitMessageGenerator;

	beforeEach(() => {
		// Fresh instance per test (default templates and scope mappings).
		generator = new CommitMessageGenerator();
	});

	// generateMessage: renders the header, optional body, and task/test metadata.
	describe('generateMessage', () => {
		it('should generate basic conventional commit message', () => {
			const message = generator.generateMessage({
				type: 'feat',
				description: 'add user authentication',
				changedFiles: ['packages/tm-core/src/auth/auth-manager.ts']
			});

			expect(message).toContain('feat(core): add user authentication');
		});

		it('should include scope from changed files', () => {
			const message = generator.generateMessage({
				type: 'fix',
				description: 'resolve CLI argument parsing',
				changedFiles: ['packages/cli/src/commands/start.ts']
			});

			expect(message).toContain('fix(cli): resolve CLI argument parsing');
		});

		it('should include task metadata in commit body', () => {
			const message = generator.generateMessage({
				type: 'feat',
				description: 'implement feature',
				changedFiles: ['packages/tm-core/src/index.ts'],
				taskId: '5.3',
				phase: 'GREEN'
			});

			expect(message).toContain('Task: 5.3');
			expect(message).toContain('Phase: GREEN');
		});

		it('should include test results metadata', () => {
			const message = generator.generateMessage({
				type: 'test',
				description: 'add unit tests',
				changedFiles: ['packages/tm-core/src/auth/auth.test.ts'],
				testsPassing: 42,
				testsFailing: 0
			});

			expect(message).toContain('Tests: 42 passing');
		});

		it('should include failing test count when present', () => {
			const message = generator.generateMessage({
				type: 'fix',
				description: 'fix test failures',
				changedFiles: ['packages/tm-core/src/index.ts'],
				testsPassing: 40,
				testsFailing: 2
			});

			expect(message).toContain('Tests: 40 passing, 2 failing');
		});

		it('should include custom body text', () => {
			const message = generator.generateMessage({
				type: 'feat',
				description: 'add new feature',
				changedFiles: ['packages/tm-core/src/index.ts'],
				body: 'This is a detailed explanation\nof the changes made.'
			});

			expect(message).toContain('This is a detailed explanation');
			expect(message).toContain('of the changes made.');
		});

		it('should handle multiple changed files with different scopes', () => {
			const message = generator.generateMessage({
				type: 'refactor',
				description: 'reorganize code structure',
				changedFiles: [
					'packages/cli/src/index.ts',
					'packages/tm-core/src/index.ts'
				]
			});

			// Should use CLI scope (higher priority due to count or priority)
			expect(message).toMatch(/refactor\((cli|core)\):/);
		});

		it('should handle test files and detect test scope', () => {
			const message = generator.generateMessage({
				type: 'test',
				description: 'add integration tests',
				changedFiles: ['packages/tm-core/src/workflow/workflow.test.ts']
			});

			expect(message).toContain('test(test):');
		});

		it('should handle docs changes', () => {
			const message = generator.generateMessage({
				type: 'docs',
				description: 'update README',
				changedFiles: ['README.md', 'docs/guide.md']
			});

			expect(message).toContain('docs(docs):');
		});

		it('should omit scope if not detected', () => {
			// With no changed files the generator falls back to the 'repo' scope,
			// per the assertion below.
			const message = generator.generateMessage({
				type: 'chore',
				description: 'update dependencies',
				changedFiles: []
			});

			expect(message).toContain('chore(repo): update dependencies');
		});

		it('should support manual scope override', () => {
			const message = generator.generateMessage({
				type: 'feat',
				description: 'add feature',
				changedFiles: ['packages/tm-core/src/index.ts'],
				scope: 'api'
			});

			expect(message).toContain('feat(api): add feature');
		});

		it('should handle breaking changes indicator', () => {
			const message = generator.generateMessage({
				type: 'feat',
				description: 'change API structure',
				changedFiles: ['packages/tm-core/src/index.ts'],
				breaking: true
			});

			expect(message).toContain('feat(core)!: change API structure');
		});

		it('should format complete message with all metadata', () => {
			const message = generator.generateMessage({
				type: 'feat',
				description: 'implement TDD workflow',
				changedFiles: ['packages/tm-core/src/workflow/orchestrator.ts'],
				body: 'Implemented complete RED-GREEN-COMMIT cycle with state persistence.',
				taskId: '4.1',
				phase: 'GREEN',
				testsPassing: 74,
				testsFailing: 0
			});

			expect(message).toContain('feat(core): implement TDD workflow');
			expect(message).toContain('Implemented complete RED-GREEN-COMMIT cycle');
			expect(message).toContain('Task: 4.1');
			expect(message).toContain('Phase: GREEN');
			expect(message).toContain('Tests: 74 passing');
		});
	});

	// validateConventionalCommit: header-format checks and error reporting.
	describe('validateConventionalCommit', () => {
		it('should validate correct conventional commit format', () => {
			const message = 'feat(core): add feature\n\nDetails here.';
			const result = generator.validateConventionalCommit(message);

			expect(result.isValid).toBe(true);
			expect(result.errors).toEqual([]);
		});

		it('should detect missing type', () => {
			const message = 'add feature';
			const result = generator.validateConventionalCommit(message);

			expect(result.isValid).toBe(false);
			expect(result.errors.length).toBeGreaterThan(0);
			expect(result.errors[0]).toContain('Invalid conventional commit format');
		});

		it('should detect invalid type', () => {
			const message = 'invalid(core): add feature';
			const result = generator.validateConventionalCommit(message);

			expect(result.isValid).toBe(false);
			expect(result.errors.length).toBeGreaterThan(0);
		});

		it('should detect missing description', () => {
			// A header with nothing after the colon fails the format check
			// outright rather than producing a 'Missing description' error.
			const message = 'feat(core):';
			const result = generator.validateConventionalCommit(message);

			expect(result.isValid).toBe(false);
			expect(result.errors.length).toBeGreaterThan(0);
			expect(result.errors[0]).toContain('Invalid conventional commit format');
		});

		it('should accept valid types', () => {
			const validTypes = [
				'feat',
				'fix',
				'docs',
				'style',
				'refactor',
				'test',
				'chore'
			];

			for (const type of validTypes) {
				const message = `${type}(core): do something`;
				const result = generator.validateConventionalCommit(message);
				expect(result.isValid).toBe(true);
			}
		});

		it('should accept breaking change indicator', () => {
			const message = 'feat(core)!: breaking change';
			const result = generator.validateConventionalCommit(message);

			expect(result.isValid).toBe(true);
		});

		it('should accept message without scope', () => {
			const message = 'fix: resolve issue';
			const result = generator.validateConventionalCommit(message);

			expect(result.isValid).toBe(true);
		});
	});

	// parseCommitMessage: decomposes a message into type/scope/description/body.
	describe('parseCommitMessage', () => {
		it('should parse conventional commit message', () => {
			const message = 'feat(core): add feature\n\nDetailed explanation.';
			const parsed = generator.parseCommitMessage(message);

			expect(parsed.type).toBe('feat');
			expect(parsed.scope).toBe('core');
			expect(parsed.description).toBe('add feature');
			expect(parsed.body).toContain('Detailed explanation.');
			expect(parsed.breaking).toBe(false);
		});

		it('should parse breaking change indicator', () => {
			const message = 'feat(core)!: breaking change';
			const parsed = generator.parseCommitMessage(message);

			expect(parsed.type).toBe('feat');
			expect(parsed.breaking).toBe(true);
		});

		it('should parse message without scope', () => {
			const message = 'fix: resolve issue';
			const parsed = generator.parseCommitMessage(message);

			expect(parsed.type).toBe('fix');
			expect(parsed.scope).toBeUndefined();
			expect(parsed.description).toBe('resolve issue');
		});

		it('should handle multiline body', () => {
			const message = 'feat: add feature\n\nLine 1\nLine 2\nLine 3';
			const parsed = generator.parseCommitMessage(message);

			expect(parsed.body).toContain('Line 1');
			expect(parsed.body).toContain('Line 2');
			expect(parsed.body).toContain('Line 3');
		});
	});

	// Edge cases: empty input lists, oversized or special-character
	// descriptions, and zero passing tests.
	describe('edge cases', () => {
		it('should handle empty changed files list', () => {
			const message = generator.generateMessage({
				type: 'chore',
				description: 'general maintenance',
				changedFiles: []
			});

			expect(message).toContain('chore(repo):');
		});

		it('should handle very long description', () => {
			const longDesc = 'a'.repeat(200);
			const message = generator.generateMessage({
				type: 'feat',
				description: longDesc,
				changedFiles: ['packages/tm-core/src/index.ts']
			});

			expect(message).toContain(longDesc);
		});

		it('should handle special characters in description', () => {
			const message = generator.generateMessage({
				type: 'fix',
				description: 'resolve issue with $special @characters #123',
				changedFiles: ['packages/tm-core/src/index.ts']
			});

			expect(message).toContain('$special @characters #123');
		});

		it('should handle zero passing tests', () => {
			const message = generator.generateMessage({
				type: 'test',
				description: 'add failing test',
				changedFiles: ['test.ts'],
				testsPassing: 0,
				testsFailing: 1
			});

			expect(message).toContain('Tests: 0 passing, 1 failing');
		});
	});
});

View File

@@ -0,0 +1,205 @@
/**
* CommitMessageGenerator - Generate conventional commit messages with metadata
*
* Combines TemplateEngine and ScopeDetector to create structured commit messages
* that follow conventional commits specification and include task metadata.
*/
import { TemplateEngine } from './template-engine.js';
import { ScopeDetector } from './scope-detector.js';
/**
 * Options accepted by {@link CommitMessageGenerator.generateMessage}.
 */
export interface CommitMessageOptions {
	/** Conventional commit type (e.g. 'feat', 'fix'). */
	type: string;
	/** Imperative summary used as the header description. */
	description: string;
	/** Changed file paths; used to auto-detect the scope. */
	changedFiles: string[];
	/** Manual scope override; bypasses auto-detection when provided. */
	scope?: string;
	/** Optional free-form body appended after the header. */
	body?: string;
	/** When true the header carries the '!' breaking-change marker. */
	breaking?: boolean;
	/** Task ID recorded in the message metadata. */
	taskId?: string;
	/** Workflow phase (e.g. 'RED', 'GREEN') recorded in the metadata. */
	phase?: string;
	/** Tag recorded in the metadata. */
	tag?: string;
	/** Number of passing tests recorded in the metadata. */
	testsPassing?: number;
	/** Number of failing tests recorded in the metadata. */
	testsFailing?: number;
	/** Coverage percentage recorded in the metadata. */
	coveragePercent?: number;
}

/** Outcome of validating a message against the conventional commit format. */
export interface ValidationResult {
	isValid: boolean;
	errors: string[];
}

/** Structured pieces of a parsed conventional commit message. */
export interface ParsedCommitMessage {
	type: string;
	scope?: string;
	breaking: boolean;
	description: string;
	body?: string;
}

/** Commit types permitted by the conventional commits specification. */
const CONVENTIONAL_COMMIT_TYPES: readonly string[] = [
	'feat',
	'fix',
	'docs',
	'style',
	'refactor',
	'perf',
	'test',
	'build',
	'ci',
	'chore',
	'revert'
];

/**
 * Header shape: type(scope)?!?: description
 * Capture groups: [1] type, [2] scope, [3] '!' breaking marker, [4] description.
 * Shared by validation and parsing so the two can never drift apart.
 */
const HEADER_REGEX = /^(\w+)(?:\(([^)]+)\))?(!)?:\s*(.+)$/;

/**
 * CommitMessageGenerator - Generate conventional commit messages with metadata.
 *
 * Combines TemplateEngine and ScopeDetector to create structured commit
 * messages that follow the conventional commits specification and include
 * task metadata.
 */
export class CommitMessageGenerator {
	private templateEngine: TemplateEngine;
	private scopeDetector: ScopeDetector;

	/**
	 * @param customTemplates - Optional template overrides for the TemplateEngine
	 * @param customScopeMappings - Optional path-to-scope overrides for the ScopeDetector
	 * @param customScopePriorities - Optional scope priority overrides
	 */
	constructor(
		customTemplates?: Record<string, string>,
		customScopeMappings?: Record<string, string>,
		customScopePriorities?: Record<string, number>
	) {
		this.templateEngine = new TemplateEngine(customTemplates);
		this.scopeDetector = new ScopeDetector(
			customScopeMappings,
			customScopePriorities
		);
	}

	/**
	 * Generate a conventional commit message with metadata.
	 *
	 * @param options - Message content and metadata
	 * @returns The rendered commit message
	 */
	generateMessage(options: CommitMessageOptions): string {
		const {
			type,
			description,
			changedFiles,
			scope: manualScope,
			body,
			breaking = false,
			taskId,
			phase,
			tag,
			testsPassing,
			testsFailing,
			coveragePercent
		} = options;

		// Manual scope wins; otherwise detect it from the changed file paths.
		const scope = manualScope ?? this.scopeDetector.detectScope(changedFiles);

		// Variables consumed by the 'commitMessage' template.
		const variables = {
			type,
			scope,
			breaking: breaking ? '!' : '',
			description,
			body,
			taskId,
			phase,
			tag,
			testsPassing,
			testsFailing,
			coveragePercent
		};

		return this.templateEngine.render('commitMessage', variables);
	}

	/**
	 * Validate that a commit message follows conventional commits format.
	 * Only the first line (the header) is checked.
	 *
	 * @param message - Full commit message (header + optional body)
	 * @returns Validation outcome with all errors found
	 */
	validateConventionalCommit(message: string): ValidationResult {
		const errors: string[] = [];

		const lines = message.split('\n');
		const header = lines[0];

		if (!header) {
			errors.push('Missing commit message');
			return { isValid: false, errors };
		}

		const match = header.match(HEADER_REGEX);
		if (!match) {
			errors.push(
				'Invalid conventional commit format. Expected: type(scope): description'
			);
			return { isValid: false, errors };
		}

		const [, type, , , description] = match;

		if (!CONVENTIONAL_COMMIT_TYPES.includes(type)) {
			errors.push(
				`Invalid commit type "${type}". Must be one of: ${CONVENTIONAL_COMMIT_TYPES.join(', ')}`
			);
		}

		if (!description || description.trim().length === 0) {
			errors.push('Missing description');
		}

		return {
			isValid: errors.length === 0,
			errors
		};
	}

	/**
	 * Parse a conventional commit message into its components.
	 *
	 * @param message - Full commit message
	 * @returns Parsed header fields and optional body
	 * @throws {Error} If the header is not a valid conventional commit
	 */
	parseCommitMessage(message: string): ParsedCommitMessage {
		const lines = message.split('\n');
		const header = lines[0];

		const match = header.match(HEADER_REGEX);
		if (!match) {
			throw new Error('Invalid conventional commit format');
		}

		const [, type, scope, breaking, description] = match;

		// The body is everything after the first blank line.
		const bodyStartIndex = lines.findIndex((line, i) => i > 0 && line === '');
		const body =
			bodyStartIndex !== -1
				? lines
						.slice(bodyStartIndex + 1)
						.join('\n')
						.trim()
				: undefined;

		return {
			type,
			scope,
			breaking: breaking === '!',
			description,
			body
		};
	}

	/**
	 * Get the scope detector instance (for testing/customization)
	 */
	getScopeDetector(): ScopeDetector {
		return this.scopeDetector;
	}

	/**
	 * Get the template engine instance (for testing/customization)
	 */
	getTemplateEngine(): TemplateEngine {
		return this.templateEngine;
	}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,780 @@
/**
* GitAdapter - Safe git operations wrapper with validation and safety checks.
* Handles all git operations (branching, committing, pushing) with built-in safety gates.
*
* @module git-adapter
*/
import { simpleGit, type SimpleGit } from 'simple-git';
import fs from 'fs-extra';
import path from 'path';
/**
* GitAdapter class for safe git operations
*/
export class GitAdapter {
public projectPath: string;
public git: SimpleGit;
/**
* Creates a new GitAdapter instance.
*
* @param {string} projectPath - Absolute path to the project directory
* @throws {Error} If projectPath is invalid or not absolute
*
* @example
* const git = new GitAdapter('/path/to/project');
* await git.ensureGitRepository();
*/
constructor(projectPath: string) {
// Validate project path
if (!projectPath) {
throw new Error('Project path is required');
}
if (!path.isAbsolute(projectPath)) {
throw new Error('Project path must be an absolute path');
}
// Normalize path
this.projectPath = path.normalize(projectPath);
// Initialize simple-git
this.git = simpleGit(this.projectPath);
}
/**
* Checks if the current directory is a git repository.
* Looks for .git directory or file (worktree/submodule).
*
* @returns {Promise<boolean>} True if in a git repository
*
* @example
* const isRepo = await git.isGitRepository();
* if (!isRepo) {
* console.log('Not a git repository');
* }
*/
async isGitRepository(): Promise<boolean> {
try {
// Check if .git exists (directory or file for submodules/worktrees)
const gitPath = path.join(this.projectPath, '.git');
if (await fs.pathExists(gitPath)) {
return true;
}
// Try to find git root from subdirectory
try {
await this.git.revparse(['--git-dir']);
return true;
} catch {
return false;
}
} catch (error) {
return false;
}
}
/**
* Validates that git is installed and accessible.
* Checks git binary availability and version.
*
* @returns {Promise<void>}
* @throws {Error} If git is not installed or not accessible
*
* @example
* await git.validateGitInstallation();
* console.log('Git is installed');
*/
async validateGitInstallation(): Promise<void> {
try {
await this.git.version();
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
throw new Error(
`Git is not installed or not accessible: ${errorMessage}`
);
}
}
/**
* Gets the git version information.
*
* @returns {Promise<{major: number, minor: number, patch: number, agent: string}>}
*
* @example
* const version = await git.getGitVersion();
* console.log(`Git version: ${version.major}.${version.minor}.${version.patch}`);
*/
async getGitVersion(): Promise<{
major: number;
minor: number;
patch: number;
agent: string;
}> {
const versionResult = await this.git.version();
return {
major: versionResult.major,
minor: versionResult.minor,
patch:
typeof versionResult.patch === 'string'
? parseInt(versionResult.patch)
: versionResult.patch || 0,
agent: versionResult.agent
};
}
/**
* Gets the repository root path.
* Works even when called from a subdirectory.
*
* @returns {Promise<string>} Absolute path to repository root
* @throws {Error} If not in a git repository
*
* @example
* const root = await git.getRepositoryRoot();
* console.log(`Repository root: ${root}`);
*/
async getRepositoryRoot(): Promise<string> {
try {
const result = await this.git.revparse(['--show-toplevel']);
return path.normalize(result.trim());
} catch (error) {
throw new Error(`not a git repository: ${this.projectPath}`);
}
}
/**
* Validates the repository state.
* Checks for corruption and basic integrity.
*
* @returns {Promise<void>}
* @throws {Error} If repository is corrupted or invalid
*
* @example
* await git.validateRepository();
* console.log('Repository is valid');
*/
async validateRepository(): Promise<void> {
// Check if it's a git repository
const isRepo = await this.isGitRepository();
if (!isRepo) {
throw new Error(`not a git repository: ${this.projectPath}`);
}
// Try to get repository status to verify it's not corrupted
try {
await this.git.status();
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
throw new Error(`Repository validation failed: ${errorMessage}`);
}
}
/**
* Ensures we're in a valid git repository before performing operations.
* Convenience method that throws descriptive errors.
*
* @returns {Promise<void>}
* @throws {Error} If not in a valid git repository
*
* @example
* await git.ensureGitRepository();
* // Safe to perform git operations after this
*/
async ensureGitRepository(): Promise<void> {
const isRepo = await this.isGitRepository();
if (!isRepo) {
throw new Error(
`not a git repository: ${this.projectPath}\n` +
`Please run this command from within a git repository, or initialize one with 'git init'.`
);
}
}
/**
* Checks if the working tree is clean (no uncommitted changes).
* A clean working tree has no staged, unstaged, or untracked files.
*
* @returns {Promise<boolean>} True if working tree is clean
*
* @example
* const isClean = await git.isWorkingTreeClean();
* if (!isClean) {
* console.log('Working tree has uncommitted changes');
* }
*/
async isWorkingTreeClean(): Promise<boolean> {
const status = await this.git.status();
return status.isClean();
}
/**
* Gets the detailed status of the working tree.
* Returns raw status from simple-git with all file changes.
*
* @returns {Promise<import('simple-git').StatusResult>} Detailed status object
*
* @example
* const status = await git.getStatus();
* console.log('Modified files:', status.modified);
* console.log('Staged files:', status.staged);
*/
async getStatus(): Promise<import('simple-git').StatusResult> {
return await this.git.status();
}
/**
* Checks if there are any uncommitted changes in the working tree.
* Includes staged, unstaged, and untracked files.
*
* @returns {Promise<boolean>} True if there are uncommitted changes
*
* @example
* const hasChanges = await git.hasUncommittedChanges();
* if (hasChanges) {
* console.log('Please commit your changes before proceeding');
* }
*/
async hasUncommittedChanges(): Promise<boolean> {
const status = await this.git.status();
return !status.isClean();
}
/**
* Checks if there are any staged changes ready to commit.
*
* @returns {Promise<boolean>} True if there are staged changes
*
* @example
* const hasStaged = await git.hasStagedChanges();
* if (hasStaged) {
* console.log('Ready to commit');
* }
*/
async hasStagedChanges(): Promise<boolean> {
const status = await this.git.status();
return status.staged.length > 0;
}
/**
* Checks if there are any untracked files in the working tree.
*
* @returns {Promise<boolean>} True if there are untracked files
*
* @example
* const hasUntracked = await git.hasUntrackedFiles();
* if (hasUntracked) {
* console.log('You have untracked files');
* }
*/
async hasUntrackedFiles(): Promise<boolean> {
const status = await this.git.status();
return status.not_added.length > 0;
}
/**
* Gets a summary of the working tree status with counts.
*
* @returns {Promise<{isClean: boolean, staged: number, modified: number, deleted: number, untracked: number, totalChanges: number}>}
*
* @example
* const summary = await git.getStatusSummary();
* console.log(`${summary.totalChanges} total changes`);
*/
async getStatusSummary(): Promise<{
isClean: boolean;
staged: number;
modified: number;
deleted: number;
untracked: number;
totalChanges: number;
}> {
const status = await this.git.status();
const staged = status.staged.length;
const modified = status.modified.length;
const deleted = status.deleted.length;
const untracked = status.not_added.length;
const totalChanges = staged + modified + deleted + untracked;
return {
isClean: status.isClean(),
staged,
modified,
deleted,
untracked,
totalChanges
};
}
/**
* Ensures the working tree is clean before performing operations.
* Throws an error with details if there are uncommitted changes.
*
* @returns {Promise<void>}
* @throws {Error} If working tree is not clean
*
* @example
* await git.ensureCleanWorkingTree();
* // Safe to perform git operations that require clean state
*/
async ensureCleanWorkingTree(): Promise<void> {
const status = await this.git.status();
if (!status.isClean()) {
const summary = await this.getStatusSummary();
throw new Error(
`working tree is not clean: ${this.projectPath}\n` +
`Staged: ${summary.staged}, Modified: ${summary.modified}, ` +
`Deleted: ${summary.deleted}, Untracked: ${summary.untracked}\n` +
`Please commit or stash your changes before proceeding.`
);
}
}
/**
* Gets the name of the current branch.
*
* @returns {Promise<string>} Current branch name
* @throws {Error} If unable to determine current branch
*
* @example
* const branch = await git.getCurrentBranch();
* console.log(`Currently on: ${branch}`);
*/
async getCurrentBranch(): Promise<string> {
const status = await this.git.status();
return status.current || 'HEAD';
}
/**
* Lists all local branches in the repository.
*
* @returns {Promise<string[]>} Array of branch names
*
* @example
* const branches = await git.listBranches();
* console.log('Available branches:', branches);
*/
async listBranches(): Promise<string[]> {
const branchSummary = await this.git.branchLocal();
return Object.keys(branchSummary.branches);
}
/**
* Checks if a branch exists in the repository.
*
* @param {string} branchName - Name of branch to check
* @returns {Promise<boolean>} True if branch exists
*
* @example
* const exists = await git.branchExists('feature-branch');
* if (!exists) {
* console.log('Branch does not exist');
* }
*/
async branchExists(branchName: string): Promise<boolean> {
const branches = await this.listBranches();
return branches.includes(branchName);
}
/**
* Creates a new branch without checking it out.
*
* @param {string} branchName - Name for the new branch
* @param {Object} options - Branch creation options
* @param {boolean} options.checkout - Whether to checkout after creation
* @returns {Promise<void>}
* @throws {Error} If branch already exists or working tree is dirty (when checkout=true)
*
* @example
* await git.createBranch('feature-branch');
* await git.createBranch('feature-branch', { checkout: true });
*/
async createBranch(
branchName: string,
options: { checkout?: boolean } = {}
): Promise<void> {
// Check if branch already exists
const exists = await this.branchExists(branchName);
if (exists) {
throw new Error(`branch already exists: ${branchName}`);
}
// If checkout is requested, ensure working tree is clean
if (options.checkout) {
await this.ensureCleanWorkingTree();
}
// Create the branch
await this.git.branch([branchName]);
// Checkout if requested
if (options.checkout) {
await this.git.checkout(branchName);
}
}
/**
* Checks out an existing branch.
*
* @param {string} branchName - Name of branch to checkout
* @param {Object} options - Checkout options
* @param {boolean} options.force - Force checkout even with uncommitted changes
* @returns {Promise<void>}
* @throws {Error} If branch doesn't exist or working tree is dirty (unless force=true)
*
* @example
* await git.checkoutBranch('feature-branch');
* await git.checkoutBranch('feature-branch', { force: true });
*/
async checkoutBranch(
branchName: string,
options: { force?: boolean } = {}
): Promise<void> {
// Check if branch exists
const exists = await this.branchExists(branchName);
if (!exists) {
throw new Error(`branch does not exist: ${branchName}`);
}
// Ensure clean working tree unless force is specified
if (!options.force) {
await this.ensureCleanWorkingTree();
}
// Checkout the branch
const checkoutOptions = options.force ? ['-f', branchName] : [branchName];
await this.git.checkout(checkoutOptions);
}
/**
* Creates a new branch and checks it out.
* Convenience method combining createBranch and checkoutBranch.
*
* @param {string} branchName - Name for the new branch
* @returns {Promise<void>}
* @throws {Error} If branch already exists or working tree is dirty
*
* @example
* await git.createAndCheckoutBranch('new-feature');
*/
async createAndCheckoutBranch(branchName: string): Promise<void> {
// Ensure working tree is clean
await this.ensureCleanWorkingTree();
// Check if branch already exists
const exists = await this.branchExists(branchName);
if (exists) {
throw new Error(`branch already exists: ${branchName}`);
}
// Create and checkout the branch
await this.git.checkoutLocalBranch(branchName);
}
/**
* Deletes a branch.
*
* @param {string} branchName - Name of branch to delete
* @param {Object} options - Delete options
* @param {boolean} options.force - Force delete even if unmerged
* @returns {Promise<void>}
* @throws {Error} If branch doesn't exist or is currently checked out
*
* @example
* await git.deleteBranch('old-feature');
* await git.deleteBranch('unmerged-feature', { force: true });
*/
async deleteBranch(
branchName: string,
options: { force?: boolean } = {}
): Promise<void> {
// Check if branch exists
const exists = await this.branchExists(branchName);
if (!exists) {
throw new Error(`branch does not exist: ${branchName}`);
}
// Check if trying to delete current branch
const current = await this.getCurrentBranch();
if (current === branchName) {
throw new Error(`cannot delete current branch: ${branchName}`);
}
// Delete the branch
const deleteOptions = options.force
? ['-D', branchName]
: ['-d', branchName];
await this.git.branch(deleteOptions);
}
/**
* Stages files for commit.
*
* @param {string[]} files - Array of file paths to stage
* @returns {Promise<void>}
*
* @example
* await git.stageFiles(['file1.txt', 'file2.txt']);
* await git.stageFiles(['.']); // Stage all changes
*/
async stageFiles(files: string[]): Promise<void> {
await this.git.add(files);
}
/**
* Unstages files that were previously staged.
*
* @param {string[]} files - Array of file paths to unstage
* @returns {Promise<void>}
*
* @example
* await git.unstageFiles(['file1.txt']);
*/
async unstageFiles(files: string[]): Promise<void> {
await this.git.reset(['HEAD', '--', ...files]);
}
/**
* Creates a commit with optional metadata embedding.
*
* @param {string} message - Commit message
* @param {Object} options - Commit options
* @param {Object} options.metadata - Metadata to embed in commit message
* @param {boolean} options.allowEmpty - Allow empty commits
* @param {boolean} options.enforceNonDefaultBranch - Prevent commits on default branch
* @param {boolean} options.force - Force commit even on default branch
* @returns {Promise<void>}
* @throws {Error} If no staged changes (unless allowEmpty), or on default branch (unless force)
*
* @example
* await git.createCommit('Add feature');
* await git.createCommit('Add feature', {
* metadata: { taskId: '2.4', phase: 'implementation' }
* });
* await git.createCommit('Add feature', {
* enforceNonDefaultBranch: true
* });
*/
async createCommit(
message: string,
options: {
metadata?: Record<string, string>;
allowEmpty?: boolean;
enforceNonDefaultBranch?: boolean;
force?: boolean;
} = {}
): Promise<void> {
// Check if on default branch and enforcement is requested
if (options.enforceNonDefaultBranch && !options.force) {
const currentBranch = await this.getCurrentBranch();
const defaultBranches = ['main', 'master', 'develop'];
if (defaultBranches.includes(currentBranch)) {
throw new Error(
`cannot commit to default branch: ${currentBranch}\n` +
`Please create a feature branch or use force option.`
);
}
}
// Check for staged changes unless allowEmpty
if (!options.allowEmpty) {
const hasStaged = await this.hasStagedChanges();
if (!hasStaged) {
throw new Error('no staged changes to commit');
}
}
// Build commit arguments
const commitArgs: string[] = ['commit'];
// Add message
commitArgs.push('-m', message);
// Add metadata as separate commit message lines
if (options.metadata) {
commitArgs.push('-m', ''); // Empty line separator
for (const [key, value] of Object.entries(options.metadata)) {
commitArgs.push('-m', `[${key}:${value}]`);
}
}
// Add flags
commitArgs.push('--no-gpg-sign');
if (options.allowEmpty) {
commitArgs.push('--allow-empty');
}
await this.git.raw(commitArgs);
}
/**
* Gets the commit log history.
*
* @param {Object} options - Log options
* @param {number} options.maxCount - Maximum number of commits to return
* @returns {Promise<Array>} Array of commit objects
*
* @example
* const log = await git.getCommitLog();
* const recentLog = await git.getCommitLog({ maxCount: 10 });
*/
async getCommitLog(options: { maxCount?: number } = {}): Promise<any[]> {
const logOptions: any = {
format: {
hash: '%H',
date: '%ai',
message: '%B', // Full commit message including body
author_name: '%an',
author_email: '%ae'
}
};
if (options.maxCount) {
logOptions.maxCount = options.maxCount;
}
const log = await this.git.log(logOptions);
return [...log.all];
}
/**
* Gets the last commit.
*
* @returns {Promise<any>} Last commit object
*
* @example
* const lastCommit = await git.getLastCommit();
* console.log(lastCommit.message);
*/
async getLastCommit(): Promise<any> {
const log = await this.git.log({
maxCount: 1,
format: {
hash: '%H',
date: '%ai',
message: '%B', // Full commit message including body
author_name: '%an',
author_email: '%ae'
}
});
return log.latest;
}
/**
* Detects the default branch for the repository.
* Returns the current branch name, assuming it's the default if it's main/master/develop.
*
* @returns {Promise<string>} Default branch name
*
* @example
* const defaultBranch = await git.getDefaultBranch();
* console.log(`Default branch: ${defaultBranch}`);
*/
async getDefaultBranch(): Promise<string> {
const currentBranch = await this.getCurrentBranch();
const defaultBranches = ['main', 'master', 'develop'];
if (defaultBranches.includes(currentBranch)) {
return currentBranch;
}
// If not on a default branch, check which default branches exist
const branches = await this.listBranches();
for (const defaultBranch of defaultBranches) {
if (branches.includes(defaultBranch)) {
return defaultBranch;
}
}
// Fallback to main
return 'main';
}
/**
* Checks if a given branch name is considered a default branch.
* Default branches are: main, master, develop.
*
* @param {string} branchName - Branch name to check
* @returns {Promise<boolean>} True if branch is a default branch
*
* @example
* const isDefault = await git.isDefaultBranch('main');
* if (isDefault) {
* console.log('This is a default branch');
* }
*/
async isDefaultBranch(branchName: string): Promise<boolean> {
const defaultBranches = ['main', 'master', 'develop'];
return defaultBranches.includes(branchName);
}
/**
* Checks if currently on a default branch.
*
* @returns {Promise<boolean>} True if on a default branch
*
* @example
* const onDefault = await git.isOnDefaultBranch();
* if (onDefault) {
* console.log('Warning: You are on a default branch');
* }
*/
async isOnDefaultBranch(): Promise<boolean> {
const currentBranch = await this.getCurrentBranch();
return await this.isDefaultBranch(currentBranch);
}
/**
* Ensures the current branch is not a default branch.
* Throws an error if on a default branch.
*
* @returns {Promise<void>}
* @throws {Error} If currently on a default branch
*
* @example
* await git.ensureNotOnDefaultBranch();
* // Safe to perform operations that shouldn't happen on default branches
*/
async ensureNotOnDefaultBranch(): Promise<void> {
const onDefault = await this.isOnDefaultBranch();
if (onDefault) {
const currentBranch = await this.getCurrentBranch();
throw new Error(
`currently on default branch: ${currentBranch}\n` +
`Please create a feature branch before proceeding.`
);
}
}
/**
* Checks if the repository has any remotes configured.
*
* @returns {Promise<boolean>} True if remotes exist
*
* @example
* const hasRemote = await git.hasRemote();
* if (!hasRemote) {
* console.log('No remotes configured');
* }
*/
async hasRemote(): Promise<boolean> {
const remotes = await this.git.getRemotes();
return remotes.length > 0;
}
/**
* Gets all configured remotes.
*
* @returns {Promise<Array>} Array of remote objects
*
* @example
* const remotes = await git.getRemotes();
* console.log('Remotes:', remotes);
*/
async getRemotes(): Promise<any[]> {
return await this.git.getRemotes(true);
}
}

View File

@@ -0,0 +1,13 @@
/**
 * @fileoverview Git operations layer for the tm-core package.
 * Barrel module: re-exports all git-related classes and helpers so callers
 * can import from the package's `git` entry point.
 */
// GitAdapter: wrapper around simple-git for repository operations
export { GitAdapter } from './git-adapter.js';
// Branch name utilities: generation and sanitization helpers
export {
	generateBranchName,
	sanitizeBranchName
} from './branch-name-generator.js';

View File

@@ -0,0 +1,284 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { ScopeDetector } from './scope-detector.js';
/**
 * Tests for ScopeDetector: pattern-based detection of conventional-commit
 * scopes from changed file paths, including custom rule overrides,
 * priority resolution, and path-format edge cases.
 */
describe('ScopeDetector', () => {
	let scopeDetector: ScopeDetector;
	beforeEach(() => {
		// Fresh detector with default mappings/priorities for each test.
		scopeDetector = new ScopeDetector();
	});
	// Single-scope detection against the default pattern table.
	describe('detectScope', () => {
		it('should detect cli scope from CLI file changes', () => {
			const files = ['packages/cli/src/commands/start.ts'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('cli');
		});
		it('should detect core scope from core package changes', () => {
			const files = ['packages/tm-core/src/workflow/orchestrator.ts'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('core');
		});
		it('should detect test scope from test file changes', () => {
			const files = ['packages/tm-core/src/workflow/orchestrator.test.ts'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('test');
		});
		it('should detect docs scope from documentation changes', () => {
			const files = ['README.md', 'docs/guide.md'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('docs');
		});
		it('should detect config scope from configuration changes', () => {
			const files = ['tsconfig.json'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('config');
		});
		it('should detect workflow scope from workflow files', () => {
			const files = ['packages/tm-core/src/workflow/types.ts'];
			const scope = scopeDetector.detectScope(files);
			// Files within packages get the package scope (more specific than feature scope)
			expect(scope).toBe('core');
		});
		it('should detect git scope from git adapter files', () => {
			const files = ['packages/tm-core/src/git/git-adapter.ts'];
			const scope = scopeDetector.detectScope(files);
			// Files within packages get the package scope (more specific than feature scope)
			expect(scope).toBe('core');
		});
		it('should detect storage scope from storage files', () => {
			const files = ['packages/tm-core/src/storage/state-manager.ts'];
			const scope = scopeDetector.detectScope(files);
			// Files within packages get the package scope (more specific than feature scope)
			expect(scope).toBe('core');
		});
		it('should use most relevant scope when multiple files', () => {
			const files = [
				'packages/cli/src/commands/start.ts',
				'packages/cli/src/commands/stop.ts',
				'packages/tm-core/src/types.ts'
			];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('cli');
		});
		it('should handle mixed scopes by choosing highest priority', () => {
			const files = [
				'README.md',
				'packages/tm-core/src/workflow/orchestrator.ts'
			];
			const scope = scopeDetector.detectScope(files);
			// Core is higher priority than docs
			expect(scope).toBe('core');
		});
		it('should handle empty file list gracefully', () => {
			const files: string[] = [];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('repo');
		});
		it('should detect mcp scope from MCP server files', () => {
			const files = ['packages/mcp-server/src/tools.ts'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('mcp');
		});
		it('should detect auth scope from authentication files', () => {
			const files = ['packages/tm-core/src/auth/auth-manager.ts'];
			const scope = scopeDetector.detectScope(files);
			// Files within packages get the package scope (more specific than feature scope)
			expect(scope).toBe('core');
		});
		it('should detect deps scope from dependency changes', () => {
			const files = ['pnpm-lock.yaml'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('deps');
		});
	});
	// Constructor-supplied custom mappings and priority overrides.
	describe('detectScopeWithCustomRules', () => {
		it('should use custom scope mapping rules', () => {
			const customRules: Record<string, number> = {
				custom: 100
			};
			const customDetector = new ScopeDetector(
				{
					'custom/**': 'custom'
				},
				customRules
			);
			const files = ['custom/file.ts'];
			const scope = customDetector.detectScope(files);
			expect(scope).toBe('custom');
		});
		it('should override default priorities with custom priorities', () => {
			const customPriorities: Record<string, number> = {
				docs: 100, // Make docs highest priority
				core: 10
			};
			const customDetector = new ScopeDetector(undefined, customPriorities);
			const files = [
				'README.md',
				'packages/tm-core/src/workflow/orchestrator.ts'
			];
			const scope = customDetector.detectScope(files);
			expect(scope).toBe('docs');
		});
	});
	// Multi-scope enumeration (deduped, no priority resolution).
	describe('getAllMatchingScopes', () => {
		it('should return all matching scopes for files', () => {
			const files = [
				'packages/cli/src/commands/start.ts',
				'packages/tm-core/src/workflow/orchestrator.ts',
				'README.md'
			];
			const scopes = scopeDetector.getAllMatchingScopes(files);
			expect(scopes).toContain('cli');
			expect(scopes).toContain('core');
			expect(scopes).toContain('docs');
			expect(scopes).toHaveLength(3);
		});
		it('should return unique scopes only', () => {
			const files = [
				'packages/cli/src/commands/start.ts',
				'packages/cli/src/commands/stop.ts'
			];
			const scopes = scopeDetector.getAllMatchingScopes(files);
			expect(scopes).toEqual(['cli']);
		});
		it('should return empty array for files with no matches', () => {
			const files = ['unknown/path/file.ts'];
			const scopes = scopeDetector.getAllMatchingScopes(files);
			expect(scopes).toEqual([]);
		});
	});
	// Priority lookup, including the relative ordering of the defaults.
	describe('getScopePriority', () => {
		it('should return priority for known scope', () => {
			const priority = scopeDetector.getScopePriority('core');
			expect(priority).toBeGreaterThan(0);
		});
		it('should return 0 for unknown scope', () => {
			const priority = scopeDetector.getScopePriority('nonexistent');
			expect(priority).toBe(0);
		});
		it('should prioritize core > cli > test > docs', () => {
			const corePriority = scopeDetector.getScopePriority('core');
			const cliPriority = scopeDetector.getScopePriority('cli');
			const testPriority = scopeDetector.getScopePriority('test');
			const docsPriority = scopeDetector.getScopePriority('docs');
			expect(corePriority).toBeGreaterThan(cliPriority);
			expect(cliPriority).toBeGreaterThan(testPriority);
			expect(testPriority).toBeGreaterThan(docsPriority);
		});
	});
	// Unusual path formats: separators, absolute paths, depth, root files.
	describe('edge cases', () => {
		it('should handle Windows paths', () => {
			const files = ['packages\\cli\\src\\commands\\start.ts'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('cli');
		});
		it('should handle absolute paths', () => {
			const files = [
				'/home/user/project/packages/tm-core/src/workflow/orchestrator.ts'
			];
			const scope = scopeDetector.detectScope(files);
			// Absolute paths won't match package patterns
			expect(scope).toBe('workflow');
		});
		it('should handle paths with special characters', () => {
			const files = ['packages/tm-core/src/workflow/orchestrator@v2.ts'];
			const scope = scopeDetector.detectScope(files);
			// Files within packages get the package scope
			expect(scope).toBe('core');
		});
		it('should handle very long file paths', () => {
			const files = [
				'packages/tm-core/src/deeply/nested/directory/structure/with/many/levels/file.ts'
			];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('core');
		});
		it('should handle files in root directory', () => {
			const files = ['file.ts'];
			const scope = scopeDetector.detectScope(files);
			expect(scope).toBe('repo');
		});
	});
	// Single-file matching (first pattern wins, null when nothing matches).
	describe('getMatchingScope', () => {
		it('should return matching scope for single file', () => {
			const scope = scopeDetector.getMatchingScope('packages/cli/src/index.ts');
			expect(scope).toBe('cli');
		});
		it('should return null for non-matching file', () => {
			const scope = scopeDetector.getMatchingScope('unknown/file.ts');
			expect(scope).toBeNull();
		});
		it('should match test files', () => {
			const scope = scopeDetector.getMatchingScope(
				'src/components/button.test.tsx'
			);
			expect(scope).toBe('test');
		});
	});
});

View File

@@ -0,0 +1,204 @@
/**
* ScopeDetector - Intelligent scope detection from changed files
*
* Automatically determines conventional commit scopes based on file paths
 * using configurable pattern matching and priority-based resolution.
*/
/** Map of glob-like file pattern → scope name. */
export interface ScopeMapping {
	[pattern: string]: string;
}
/** Map of scope name → numeric priority (higher wins). */
export interface ScopePriority {
	[scope: string]: number;
}
// Pattern/scope pairs consulted in order: the FIRST matching pattern wins,
// so the table is ordered from most specific to least specific.
const DEFAULT_SCOPE_MAPPINGS: Array<[string, string]> = [
	// Special file types (checked first - most specific)
	['**/*.test.*', 'test'],
	['**/*.spec.*', 'test'],
	['**/test/**', 'test'],
	['**/tests/**', 'test'],
	['**/__tests__/**', 'test'],
	// Dependency lockfiles (specific filenames)
	['**/package-lock.json', 'deps'],
	['package-lock.json', 'deps'],
	['**/pnpm-lock.yaml', 'deps'],
	['pnpm-lock.yaml', 'deps'],
	['**/yarn.lock', 'deps'],
	['yarn.lock', 'deps'],
	// Configuration files (before packages so root configs don't match package patterns)
	['**/package.json', 'config'],
	['package.json', 'config'],
	['**/tsconfig*.json', 'config'],
	['tsconfig*.json', 'config'],
	['**/.eslintrc*', 'config'],
	['.eslintrc*', 'config'],
	['**/vite.config.*', 'config'],
	['vite.config.*', 'config'],
	['**/vitest.config.*', 'config'],
	['vitest.config.*', 'config'],
	// Package-level scopes (more specific than feature-level)
	['packages/cli/**', 'cli'],
	['packages/tm-core/**', 'core'],
	['packages/mcp-server/**', 'mcp'],
	// Feature-level scopes (within any package)
	['**/workflow/**', 'workflow'],
	['**/git/**', 'git'],
	['**/storage/**', 'storage'],
	['**/auth/**', 'auth'],
	['**/config/**', 'config'],
	// Documentation (least specific)
	['**/*.md', 'docs'],
	['**/docs/**', 'docs'],
	['README*', 'docs'],
	['CHANGELOG*', 'docs']
];
// Relative importance of each scope; used to break ties across files.
const DEFAULT_SCOPE_PRIORITIES: ScopePriority = {
	core: 100,
	cli: 90,
	mcp: 85,
	workflow: 80,
	git: 75,
	storage: 70,
	auth: 65,
	config: 60,
	test: 50,
	docs: 30,
	deps: 20,
	repo: 10
};
/**
 * Determines conventional-commit scopes from changed file paths using
 * ordered glob-style pattern matching and priority-weighted resolution.
 */
export class ScopeDetector {
	private readonly mappings: Array<[string, string]>;
	private readonly priorities: ScopePriority;

	constructor(customMappings?: ScopeMapping, customPriorities?: ScopePriority) {
		// Custom mappings are consulted before the defaults (highest priority).
		const custom: Array<[string, string]> = customMappings
			? Object.entries(customMappings)
			: [];
		this.mappings = [...custom, ...DEFAULT_SCOPE_MAPPINGS];
		// Custom priorities override defaults key-by-key.
		this.priorities = {
			...DEFAULT_SCOPE_PRIORITIES,
			...customPriorities
		};
	}

	/**
	 * Detect the most relevant scope for a set of changed files.
	 * Each file contributes one count to its matching scope; the winner is
	 * the scope with the highest (priority × file count) score.
	 * Returns 'repo' when no file matches any pattern (or the list is empty).
	 */
	detectScope(files: string[]): string {
		if (files.length === 0) {
			return 'repo';
		}
		// Tally how many files fall into each scope.
		const counts = new Map<string, number>();
		for (const file of files) {
			const scope = this.getMatchingScope(file);
			if (scope !== null) {
				counts.set(scope, (counts.get(scope) ?? 0) + 1);
			}
		}
		if (counts.size === 0) {
			return 'repo';
		}
		// Pick the scope with the strictly highest score; ties keep the
		// earlier entry.
		let winner = 'repo';
		let winningScore = 0;
		for (const [scope, count] of counts) {
			const score = this.getScopePriority(scope) * count;
			if (score > winningScore) {
				winningScore = score;
				winner = scope;
			}
		}
		return winner;
	}

	/**
	 * Collect every distinct scope matched by the given files.
	 */
	getAllMatchingScopes(files: string[]): string[] {
		const seen = new Set<string>();
		for (const file of files) {
			const scope = this.getMatchingScope(file);
			if (scope !== null) {
				seen.add(scope);
			}
		}
		return [...seen];
	}

	/**
	 * Resolve the scope for a single file, or null when nothing matches.
	 * The first matching pattern in the mapping table wins.
	 */
	getMatchingScope(file: string): string | null {
		// Normalize Windows separators so patterns only deal with '/'.
		const normalized = file.split('\\').join('/');
		const hit = this.mappings.find(([pattern]) =>
			this.matchesPattern(normalized, pattern)
		);
		return hit ? hit[1] : null;
	}

	/**
	 * Numeric priority for a scope; unknown scopes rank at 0.
	 */
	getScopePriority(scope: string): number {
		return this.priorities[scope] || 0;
	}

	/**
	 * Test a path against a glob-like pattern:
	 * `**` matches across directory levels, `*` matches within one level.
	 */
	private matchesPattern(filePath: string, pattern: string): boolean {
		// Stash '**' behind a NUL placeholder so the single-'*' rewrite below
		// cannot touch it; NUL never appears in the escape set.
		const GLOBSTAR = '\u0000';
		let source = pattern.replace(/\*\*/g, GLOBSTAR);
		// Escape regex metacharacters (but not '*' or the placeholder).
		source = source.replace(/[.+^${}()|[\]\\]/g, '\\$&');
		// '*' → anything except a path separator.
		source = source.replace(/\*/g, '[^/]*');
		// '**' → anything, including separators.
		source = source.replace(/\u0000/g, '.*');
		return new RegExp(`^${source}$`).test(filePath);
	}
}

View File

@@ -0,0 +1,277 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { TemplateEngine } from './template-engine.js';
/**
 * Tests for TemplateEngine: {{variable}} substitution, template storage and
 * lookup, required-variable validation, variable extraction, and the
 * built-in commitMessage template.
 */
describe('TemplateEngine', () => {
	let templateEngine: TemplateEngine;
	beforeEach(() => {
		// Fresh engine with only the default templates for each test.
		templateEngine = new TemplateEngine();
	});
	describe('constructor and initialization', () => {
		it('should initialize with default templates', () => {
			expect(templateEngine).toBeDefined();
		});
		it('should accept custom templates in constructor', () => {
			const customTemplate = '{{type}}({{scope}}): {{description}}';
			const engine = new TemplateEngine({ commitMessage: customTemplate });
			const result = engine.render('commitMessage', {
				type: 'feat',
				scope: 'core',
				description: 'add feature'
			});
			expect(result).toBe('feat(core): add feature');
		});
	});
	// Core substitution behavior, including missing/empty/non-string values.
	describe('render', () => {
		it('should render simple template with single variable', () => {
			const template = 'Hello {{name}}';
			const result = templateEngine.render('test', { name: 'World' }, template);
			expect(result).toBe('Hello World');
		});
		it('should render template with multiple variables', () => {
			const template = '{{type}}({{scope}}): {{description}}';
			const result = templateEngine.render(
				'test',
				{
					type: 'feat',
					scope: 'api',
					description: 'add endpoint'
				},
				template
			);
			expect(result).toBe('feat(api): add endpoint');
		});
		it('should handle missing variables by leaving placeholder', () => {
			const template = 'Hello {{name}} from {{location}}';
			const result = templateEngine.render('test', { name: 'Alice' }, template);
			expect(result).toBe('Hello Alice from {{location}}');
		});
		it('should handle empty variable values', () => {
			const template = '{{prefix}}{{message}}';
			const result = templateEngine.render(
				'test',
				{
					prefix: '',
					message: 'hello'
				},
				template
			);
			expect(result).toBe('hello');
		});
		it('should handle numeric values', () => {
			const template = 'Count: {{count}}';
			const result = templateEngine.render('test', { count: 42 }, template);
			expect(result).toBe('Count: 42');
		});
		it('should handle boolean values', () => {
			const template = 'Active: {{active}}';
			const result = templateEngine.render('test', { active: true }, template);
			expect(result).toBe('Active: true');
		});
	});
	describe('setTemplate', () => {
		it('should set and use custom template', () => {
			templateEngine.setTemplate('custom', 'Value: {{value}}');
			const result = templateEngine.render('custom', { value: '123' });
			expect(result).toBe('Value: 123');
		});
		it('should override existing template', () => {
			templateEngine.setTemplate('commitMessage', 'Custom: {{msg}}');
			const result = templateEngine.render('commitMessage', { msg: 'hello' });
			expect(result).toBe('Custom: hello');
		});
	});
	describe('getTemplate', () => {
		it('should return existing template', () => {
			templateEngine.setTemplate('test', 'Template: {{value}}');
			const template = templateEngine.getTemplate('test');
			expect(template).toBe('Template: {{value}}');
		});
		it('should return undefined for non-existent template', () => {
			const template = templateEngine.getTemplate('nonexistent');
			expect(template).toBeUndefined();
		});
	});
	describe('hasTemplate', () => {
		it('should return true for existing template', () => {
			templateEngine.setTemplate('test', 'Template');
			expect(templateEngine.hasTemplate('test')).toBe(true);
		});
		it('should return false for non-existent template', () => {
			expect(templateEngine.hasTemplate('nonexistent')).toBe(false);
		});
	});
	// Required-variable validation: templates may have extra (optional) vars.
	describe('validateTemplate', () => {
		it('should validate template with all required variables', () => {
			const template = '{{type}}({{scope}}): {{description}}';
			const requiredVars = ['type', 'scope', 'description'];
			const result = templateEngine.validateTemplate(template, requiredVars);
			expect(result.isValid).toBe(true);
			expect(result.missingVars).toEqual([]);
		});
		it('should detect missing required variables', () => {
			const template = '{{type}}: {{description}}';
			const requiredVars = ['type', 'scope', 'description'];
			const result = templateEngine.validateTemplate(template, requiredVars);
			expect(result.isValid).toBe(false);
			expect(result.missingVars).toEqual(['scope']);
		});
		it('should detect multiple missing variables', () => {
			const template = '{{type}}';
			const requiredVars = ['type', 'scope', 'description'];
			const result = templateEngine.validateTemplate(template, requiredVars);
			expect(result.isValid).toBe(false);
			expect(result.missingVars).toEqual(['scope', 'description']);
		});
		it('should handle optional variables in template', () => {
			const template = '{{type}}({{scope}}): {{description}} [{{taskId}}]';
			const requiredVars = ['type', 'scope', 'description'];
			const result = templateEngine.validateTemplate(template, requiredVars);
			expect(result.isValid).toBe(true);
			expect(result.missingVars).toEqual([]);
		});
	});
	// Placeholder extraction: ordered, deduped, whitespace-tolerant.
	describe('extractVariables', () => {
		it('should extract all variables from template', () => {
			const template = '{{type}}({{scope}}): {{description}}';
			const variables = templateEngine.extractVariables(template);
			expect(variables).toEqual(['type', 'scope', 'description']);
		});
		it('should extract unique variables only', () => {
			const template = '{{name}} and {{name}} with {{other}}';
			const variables = templateEngine.extractVariables(template);
			expect(variables).toEqual(['name', 'other']);
		});
		it('should return empty array for template without variables', () => {
			const template = 'Static text with no variables';
			const variables = templateEngine.extractVariables(template);
			expect(variables).toEqual([]);
		});
		it('should handle template with whitespace in placeholders', () => {
			const template = '{{ type }} and {{ scope }}';
			const variables = templateEngine.extractVariables(template);
			expect(variables).toEqual(['type', 'scope']);
		});
	});
	describe('edge cases', () => {
		it('should handle empty template', () => {
			const result = templateEngine.render('test', { name: 'value' }, '');
			expect(result).toBe('');
		});
		it('should handle template with no variables', () => {
			const template = 'Static text';
			const result = templateEngine.render('test', {}, template);
			expect(result).toBe('Static text');
		});
		it('should handle empty variables object', () => {
			const template = 'Hello {{name}}';
			const result = templateEngine.render('test', {}, template);
			expect(result).toBe('Hello {{name}}');
		});
		it('should handle special characters in values', () => {
			const template = 'Value: {{value}}';
			const result = templateEngine.render(
				'test',
				{
					value: 'hello$world{test}'
				},
				template
			);
			expect(result).toBe('Value: hello$world{test}');
		});
		it('should handle multiline templates', () => {
			const template = '{{type}}: {{description}}\n\n{{body}}';
			const result = templateEngine.render(
				'test',
				{
					type: 'feat',
					description: 'add feature',
					body: 'Details here'
				},
				template
			);
			expect(result).toBe('feat: add feature\n\nDetails here');
		});
	});
	// The built-in commitMessage template shipped with the engine.
	describe('default commit message template', () => {
		it('should have default commit message template', () => {
			const template = templateEngine.getTemplate('commitMessage');
			expect(template).toBeDefined();
			expect(template).toContain('{{type}}');
			expect(template).toContain('{{description}}');
		});
		it('should render default commit message template', () => {
			const result = templateEngine.render('commitMessage', {
				type: 'feat',
				scope: 'core',
				description: 'implement feature',
				body: 'Additional details',
				taskId: '5.1'
			});
			expect(result).toContain('feat');
			expect(result).toContain('core');
			expect(result).toContain('implement feature');
		});
	});
});

View File

@@ -0,0 +1,203 @@
/**
* TemplateEngine - Configurable template system for generating text from templates
*
* Supports:
* - Variable substitution using {{variableName}} syntax
* - Custom templates via constructor or setTemplate
* - Template validation with required variables
* - Variable extraction from templates
* - Multiple template storage and retrieval
*/
/** Outcome of checking a template against a list of required variables. */
export interface TemplateValidationResult {
  isValid: boolean;
  missingVars: string[];
}

/** Values available for substitution; undefined entries render as absent. */
export interface TemplateVariables {
  [key: string]: string | number | boolean | undefined;
}

/** Named template store: template name -> template text. */
export interface TemplateCollection {
  [templateName: string]: string;
}

/** Construction options for TemplateEngine. */
export interface TemplateEngineOptions {
  customTemplates?: TemplateCollection;
  preservePlaceholders?: boolean;
}

// Built-in templates. The commit-message template uses {{var}} placeholders
// and {{#var}}...{{/var}} conditional sections.
const DEFAULT_TEMPLATES: TemplateCollection = {
  commitMessage: `{{type}}{{#scope}}({{scope}}){{/scope}}{{#breaking}}!{{/breaking}}: {{description}}
{{#body}}{{body}}
{{/body}}{{#taskId}}Task: {{taskId}}{{/taskId}}{{#phase}}
Phase: {{phase}}{{/phase}}{{#testsPassing}}
Tests: {{testsPassing}} passing{{#testsFailing}}, {{testsFailing}} failing{{/testsFailing}}{{/testsPassing}}`
};

/**
 * Configurable template renderer.
 *
 * Supports {{variable}} substitution, {{#flag}}...{{/flag}} conditional
 * blocks, named template storage and lookup, validation against required
 * variables, and variable extraction.
 */
export class TemplateEngine {
  private templates: TemplateCollection;
  private preservePlaceholders: boolean;

  /**
   * @param optionsOrTemplates Either a TemplateEngineOptions object or, for
   *   backward compatibility, a bare TemplateCollection of custom templates.
   */
  constructor(
    optionsOrTemplates: TemplateEngineOptions | TemplateCollection = {}
  ) {
    // The argument is treated as "options" only when it carries one of the
    // known option keys; anything else is the legacy template-map form.
    const looksLikeOptions =
      'customTemplates' in optionsOrTemplates ||
      'preservePlaceholders' in optionsOrTemplates;

    let custom: TemplateCollection | undefined;
    let preserve: boolean | undefined;
    if (looksLikeOptions) {
      const opts = optionsOrTemplates as TemplateEngineOptions;
      custom = opts.customTemplates;
      preserve = opts.preservePlaceholders;
    } else {
      custom = optionsOrTemplates as TemplateCollection;
    }

    // Custom templates shadow same-named defaults.
    this.templates = Object.assign({}, DEFAULT_TEMPLATES, custom ?? {});
    this.preservePlaceholders = preserve ?? false;
  }

  /**
   * Render a named (or inline) template with the given variables.
   * When inlineTemplate is supplied it takes precedence over the store.
   * @throws Error when no inline template is given and the name is unknown.
   */
  render(
    templateName: string,
    variables: TemplateVariables,
    inlineTemplate?: string
  ): string {
    const template = inlineTemplate ?? this.templates[templateName];
    if (template === undefined) {
      throw new Error(`Template "${templateName}" not found`);
    }
    return this.substituteVariables(template, variables);
  }

  /** Register or overwrite a named template. */
  setTemplate(name: string, template: string): void {
    this.templates[name] = template;
  }

  /** Look up a template by name; undefined when absent. */
  getTemplate(name: string): string | undefined {
    return this.templates[name];
  }

  /** True when a template with this name is registered. */
  hasTemplate(name: string): boolean {
    return name in this.templates;
  }

  /**
   * Check that a template references every variable in requiredVars.
   */
  validateTemplate(
    template: string,
    requiredVars: string[]
  ): TemplateValidationResult {
    const present = new Set(this.extractVariables(template));
    const missingVars: string[] = [];
    for (const name of requiredVars) {
      if (!present.has(name)) {
        missingVars.push(name);
      }
    }
    return { isValid: missingVars.length === 0, missingVars };
  }

  /**
   * List the distinct {{variable}} names in a template, in order of first
   * appearance. Conditional markers ({{#x}} / {{/x}}) are excluded by the
   * character class, which rejects '#' and '/'.
   */
  extractVariables(template: string): string[] {
    const names: string[] = [];
    const pattern = /\{\{\s*([^}#/\s]+)\s*\}\}/g;
    let hit: RegExpExecArray | null;
    while ((hit = pattern.exec(template)) !== null) {
      if (!names.includes(hit[1])) {
        names.push(hit[1]);
      }
    }
    return names;
  }

  /**
   * Expand conditionals first, then replace simple placeholders. A replacer
   * function is used so '$' in values is never interpreted as a replacement
   * pattern. Unknown variables become '' unless preservePlaceholders keeps
   * them verbatim.
   */
  private substituteVariables(
    template: string,
    variables: TemplateVariables
  ): string {
    const expanded = this.processConditionalBlocks(template, variables);
    return expanded.replace(
      /\{\{\s*([^}#/\s]+)\s*\}\}/g,
      (_match, name: string) => {
        const value = variables[name];
        if (value === undefined || value === null) {
          return this.preservePlaceholders ? `{{${name}}}` : '';
        }
        return String(value);
      }
    );
  }

  /**
   * Resolve {{#var}}...{{/var}} blocks: content is kept only when the
   * variable is truthy (not undefined/null/false/''). The tempered dot
   * `(?:(?!\{\{#).)` forbids a nested opener inside the match, so innermost
   * blocks are replaced first; the loop repeats until a pass changes
   * nothing, which handles arbitrary nesting.
   */
  private processConditionalBlocks(
    template: string,
    variables: TemplateVariables
  ): string {
    let current = template;
    for (;;) {
      const next = current.replace(
        /\{\{#([^}]+)\}\}((?:(?!\{\{#).)*?)\{\{\/\1\}\}/gs,
        (_match, rawName: string, inner: string) => {
          const value = variables[rawName.trim()];
          const truthy =
            value !== undefined &&
            value !== null &&
            value !== false &&
            value !== '';
          return truthy ? inner : '';
        }
      );
      if (next === current) {
        return current;
      }
      current = next;
    }
  }
}

View File

@@ -72,3 +72,46 @@ export {
type ComplexityAnalysis,
type TaskComplexityData
} from './reports/index.js';
// Re-export services
export {
PreflightChecker,
TaskLoaderService,
type CheckResult,
type PreflightResult,
type TaskValidationResult,
type ValidationErrorType,
type DependencyIssue
} from './services/index.js';
// Re-export Git adapter
export { GitAdapter } from './git/git-adapter.js';
export {
CommitMessageGenerator,
type CommitMessageOptions
} from './git/commit-message-generator.js';
// Re-export workflow orchestrator, state manager, activity logger, and types
export { WorkflowOrchestrator } from './workflow/workflow-orchestrator.js';
export { WorkflowStateManager } from './workflow/workflow-state-manager.js';
export { WorkflowActivityLogger } from './workflow/workflow-activity-logger.js';
export type {
WorkflowPhase,
TDDPhase,
WorkflowContext,
WorkflowState,
WorkflowEvent,
WorkflowEventData,
WorkflowEventListener,
SubtaskInfo,
TestResult,
WorkflowError
} from './workflow/types.js';
// Re-export workflow service
export { WorkflowService } from './services/workflow.service.js';
export type {
StartWorkflowOptions,
WorkflowStatus,
NextAction
} from './services/workflow.service.js';

View File

@@ -9,6 +9,17 @@ import type {
StorageType
} from '../types/index.js';
/**
 * Conventional Commit types allowed in workflow.
 *
 * Mirrors the runtime list in DEFAULT_CONFIG_VALUES.WORKFLOW
 * .ALLOWED_COMMIT_TYPES (kept in sync via `satisfies readonly CommitType[]`).
 */
export type CommitType =
  | 'feat' // new user-facing feature
  | 'fix' // bug fix
  | 'refactor' // code change that neither fixes a bug nor adds a feature
  | 'test' // adding or correcting tests
  | 'docs' // documentation-only change
  | 'chore'; // maintenance work (build, tooling, dependencies)
/**
* Model configuration for different AI roles
*/
@@ -45,9 +56,15 @@ export interface TaskSettings {
defaultPriority: TaskPriority;
/** Default complexity for analysis */
defaultComplexity: TaskComplexity;
/** Maximum number of subtasks per task */
/**
* Maximum number of subtasks per task
* @minimum 1
*/
maxSubtasks: number;
/** Maximum number of concurrent tasks */
/**
* Maximum number of concurrent tasks
* @minimum 1
*/
maxConcurrentTasks: number;
/** Enable automatic task ID generation */
autoGenerateIds: boolean;
@@ -69,7 +86,10 @@ export interface TagSettings {
enableTags: boolean;
/** Default tag for new tasks */
defaultTag: string;
/** Maximum number of tags per task */
/**
* Maximum number of tags per task
* @minimum 1
*/
maxTagsPerTask: number;
/** Enable automatic tag creation from Git branches */
autoCreateFromBranch: boolean;
@@ -114,7 +134,10 @@ export interface StorageSettings
readonly apiConfigured?: boolean;
/** Enable automatic backups */
enableBackup: boolean;
/** Maximum number of backups to retain */
/**
* Maximum number of backups to retain
* @minimum 0
*/
maxBackups: number;
/** Enable compression for storage */
enableCompression: boolean;
@@ -128,15 +151,30 @@ export interface StorageSettings
* Retry and resilience settings
*/
export interface RetrySettings {
/** Number of retry attempts for failed operations */
/**
* Number of retry attempts for failed operations
* @minimum 0
*/
retryAttempts: number;
/** Base delay between retries in milliseconds */
/**
* Base delay between retries in milliseconds
* @minimum 0
*/
retryDelay: number;
/** Maximum delay between retries in milliseconds */
/**
* Maximum delay between retries in milliseconds
* @minimum 0
*/
maxRetryDelay: number;
/** Exponential backoff multiplier */
/**
* Exponential backoff multiplier
* @minimum 1
*/
backoffMultiplier: number;
/** Request timeout in milliseconds */
/**
* Request timeout in milliseconds
* @minimum 0
*/
requestTimeout: number;
/** Enable retry for network errors */
retryOnNetworkError: boolean;
@@ -160,9 +198,15 @@ export interface LoggingSettings {
logPerformance: boolean;
/** Enable error stack traces */
logStackTraces: boolean;
/** Maximum log file size in MB */
/**
* Maximum log file size in MB
* @minimum 1
*/
maxFileSize: number;
/** Maximum number of log files to retain */
/**
* Maximum number of log files to retain
* @minimum 1
*/
maxFiles: number;
}
@@ -174,11 +218,17 @@ export interface SecuritySettings {
validateApiKeys: boolean;
/** Enable request rate limiting */
enableRateLimit: boolean;
/** Maximum requests per minute */
/**
* Maximum requests per minute
* @minimum 1
*/
maxRequestsPerMinute: number;
/** Enable input sanitization */
sanitizeInputs: boolean;
/** Maximum prompt length in characters */
/**
* Maximum prompt length in characters
* @minimum 1
*/
maxPromptLength: number;
/** Allowed file extensions for imports */
allowedFileExtensions: string[];
@@ -186,6 +236,72 @@ export interface SecuritySettings {
enableCors: boolean;
}
/**
 * Workflow and autopilot TDD settings
 */
export interface WorkflowSettings {
  /** Enable autopilot/TDD workflow features */
  enableAutopilot: boolean;
  /**
   * Maximum retry attempts for phase validation
   * @minimum 1
   * @maximum 10
   */
  maxPhaseAttempts: number;
  /**
   * Branch naming pattern for workflow branches.
   * Uses single-brace placeholders (default: 'task-{taskId}').
   */
  branchPattern: string;
  /** Require clean working tree before starting workflow */
  requireCleanWorkingTree: boolean;
  /** Automatically stage all changes during commit phase */
  autoStageChanges: boolean;
  /** Include co-author attribution in commits */
  includeCoAuthor: boolean;
  /** Co-author name for commit messages */
  coAuthorName: string;
  /** Co-author email for commit messages (defaults to taskmaster@tryhamster.com) */
  coAuthorEmail: string;
  /** Test result thresholds for phase validation */
  testThresholds: {
    /**
     * Minimum test count for valid RED phase
     * @minimum 0
     */
    minTests: number;
    /**
     * Maximum allowed failing tests in GREEN phase
     * @minimum 0
     */
    maxFailuresInGreen: number;
  };
  /**
   * Commit message template pattern.
   * NOTE(review): the default uses single-brace placeholders
   * ('{type}({scope}): {description} ...'), not the {{var}} syntax of
   * TemplateEngine — confirm which renderer consumes this value before
   * changing the placeholder format.
   */
  commitMessageTemplate: string;
  /** Conventional commit types allowed */
  allowedCommitTypes: readonly CommitType[];
  /**
   * Default commit type for autopilot
   * @validation Must be present in allowedCommitTypes array
   */
  defaultCommitType: CommitType;
  /**
   * Timeout for workflow operations in milliseconds
   * @minimum 0
   */
  operationTimeout: number;
  /** Enable activity logging for workflow events */
  enableActivityLogging: boolean;
  /** Path to store workflow activity logs */
  activityLogPath: string;
  /** Enable automatic backup of workflow state */
  enableStateBackup: boolean;
  /**
   * Maximum workflow state backups to retain
   * @minimum 0
   */
  maxStateBackups: number;
  /** Abort workflow if validation fails after max attempts */
  abortOnMaxAttempts: boolean;
}
/**
* Main configuration interface for Task Master core
*/
@@ -211,6 +327,9 @@ export interface IConfiguration {
/** Tag and context settings */
tags: TagSettings;
/** Workflow and autopilot settings */
workflow: WorkflowSettings;
/** Storage configuration */
storage: StorageSettings;
@@ -414,6 +533,35 @@ export const DEFAULT_CONFIG_VALUES = {
MAX_TAGS_PER_TASK: 10,
NAMING_CONVENTION: 'kebab-case' as const
},
WORKFLOW: {
ENABLE_AUTOPILOT: true,
MAX_PHASE_ATTEMPTS: 3,
BRANCH_PATTERN: 'task-{taskId}',
REQUIRE_CLEAN_WORKING_TREE: true,
AUTO_STAGE_CHANGES: true,
INCLUDE_CO_AUTHOR: true,
CO_AUTHOR_NAME: 'TaskMaster AI',
CO_AUTHOR_EMAIL: 'taskmaster@tryhamster.com',
MIN_TESTS: 1,
MAX_FAILURES_IN_GREEN: 0,
COMMIT_MESSAGE_TEMPLATE:
'{type}({scope}): {description} (Task {taskId}.{subtaskIndex})',
ALLOWED_COMMIT_TYPES: [
'feat',
'fix',
'refactor',
'test',
'docs',
'chore'
] as const satisfies readonly CommitType[],
DEFAULT_COMMIT_TYPE: 'feat' as CommitType,
OPERATION_TIMEOUT: 60000,
ENABLE_ACTIVITY_LOGGING: true,
ACTIVITY_LOG_PATH: '.taskmaster/logs/workflow-activity.log',
ENABLE_STATE_BACKUP: true,
MAX_STATE_BACKUPS: 5,
ABORT_ON_MAX_ATTEMPTS: false
},
STORAGE: {
TYPE: 'auto' as const,
ENCODING: 'utf8' as BufferEncoding,

View File

@@ -6,8 +6,28 @@
export { TaskService } from './task-service.js';
export { OrganizationService } from './organization.service.js';
export { ExportService } from './export.service.js';
export { PreflightChecker } from './preflight-checker.service.js';
export { TaskLoaderService } from './task-loader.service.js';
export { TestResultValidator } from './test-result-validator.js';
export type { Organization, Brief } from './organization.service.js';
export type {
ExportTasksOptions,
ExportResult
} from './export.service.js';
export type {
CheckResult,
PreflightResult
} from './preflight-checker.service.js';
export type {
TaskValidationResult,
ValidationErrorType,
DependencyIssue
} from './task-loader.service.js';
export type {
TestResult,
TestPhase,
Coverage,
CoverageThresholds,
ValidationResult,
PhaseValidationOptions
} from './test-result-validator.types.js';

View File

@@ -0,0 +1,395 @@
/**
* @fileoverview Preflight Checker Service
* Validates environment and prerequisites for autopilot execution
*/
import { readFileSync, existsSync, readdirSync } from 'fs';
import { join } from 'path';
import { execSync } from 'child_process';
import { getLogger } from '../logger/factory.js';
import {
isGitRepository,
isGhCliAvailable,
getDefaultBranch
} from '../utils/git-utils.js';
// Module-scoped logger for all preflight checks.
const logger = getLogger('PreflightChecker');

/**
 * Result of a single preflight check
 */
export interface CheckResult {
  /** Whether the check passed */
  success: boolean;
  /**
   * The value detected/validated (the test command string, 'clean'/'dirty',
   * a ToolCheck[] list, or the default branch name, depending on the check).
   * NOTE(review): typed `any` because each check stores a different shape;
   * a discriminated union would be safer — consider in a follow-up.
   */
  value?: any;
  /** Error or warning message */
  message?: string;
}
/**
 * Complete preflight validation results
 */
export interface PreflightResult {
  /** Overall success - all checks passed */
  success: boolean;
  /** Test command detection result */
  testCommand: CheckResult;
  /** Git working tree status */
  gitWorkingTree: CheckResult;
  /** Required tools availability */
  requiredTools: CheckResult;
  /** Default branch detection */
  defaultBranch: CheckResult;
  /** Summary message */
  summary: string;
}
/**
 * Tool validation result (internal to this module)
 */
interface ToolCheck {
  /** Command name, e.g. 'git', 'gh', 'node' */
  name: string;
  /** Whether the command could be executed */
  available: boolean;
  /** First line of the tool's version output, when available */
  version?: string;
  /** Human-readable status line */
  message?: string;
}
/**
* PreflightChecker validates environment for autopilot execution
*/
/**
 * PreflightChecker validates environment for autopilot execution.
 *
 * Every check returns a CheckResult instead of throwing, so runAllChecks()
 * can aggregate all failures into one PreflightResult report for the user.
 */
export class PreflightChecker {
  private projectRoot: string;

  /**
   * @param projectRoot Path of the project to validate (must be non-empty).
   * @throws Error when projectRoot is falsy.
   */
  constructor(projectRoot: string) {
    if (!projectRoot) {
      throw new Error('projectRoot is required for PreflightChecker');
    }
    this.projectRoot = projectRoot;
  }

  /**
   * Detect test command from package.json ("scripts.test").
   */
  async detectTestCommand(): Promise<CheckResult> {
    try {
      const packageJsonPath = join(this.projectRoot, 'package.json');
      const packageJsonContent = readFileSync(packageJsonPath, 'utf-8');
      const packageJson = JSON.parse(packageJsonContent);
      if (!packageJson.scripts || !packageJson.scripts.test) {
        return {
          success: false,
          message:
            'No test script found in package.json. Please add a "test" script.'
        };
      }
      const testCommand = packageJson.scripts.test;
      return {
        success: true,
        value: testCommand,
        message: `Test command: ${testCommand}`
      };
    } catch (error: any) {
      // ENOENT comes from readFileSync when package.json is absent; any
      // other failure (unreadable file, invalid JSON) falls through to the
      // generic message carrying the underlying error text.
      if (error.code === 'ENOENT') {
        return {
          success: false,
          message: 'package.json not found in project root'
        };
      }
      return {
        success: false,
        message: `Failed to read package.json: ${error.message}`
      };
    }
  }

  /**
   * Check git working tree status: must be a repository with no staged,
   * unstaged, or untracked changes.
   */
  async checkGitWorkingTree(): Promise<CheckResult> {
    try {
      // Check if it's a git repository
      const isRepo = await isGitRepository(this.projectRoot);
      if (!isRepo) {
        return {
          success: false,
          message: 'Not a git repository. Initialize git first.'
        };
      }
      // Check for changes (staged/unstaged/untracked) without requiring HEAD
      // (so this also works in a fresh repo with no commits yet)
      const status = execSync('git status --porcelain', {
        cwd: this.projectRoot,
        encoding: 'utf-8',
        timeout: 5000
      });
      if (status.trim().length > 0) {
        return {
          success: false,
          value: 'dirty',
          message:
            'Working tree has uncommitted or untracked changes. Please commit or stash them.'
        };
      }
      return {
        success: true,
        value: 'clean',
        message: 'Working tree is clean'
      };
    } catch (error: any) {
      return {
        success: false,
        message: `Git check failed: ${error.message}`
      };
    }
  }

  /**
   * Detect project types based on common configuration files.
   * May return several types for polyglot repositories.
   */
  private detectProjectTypes(): string[] {
    const types: string[] = [];
    if (existsSync(join(this.projectRoot, 'package.json'))) types.push('node');
    if (
      existsSync(join(this.projectRoot, 'requirements.txt')) ||
      existsSync(join(this.projectRoot, 'setup.py')) ||
      existsSync(join(this.projectRoot, 'pyproject.toml'))
    )
      types.push('python');
    if (
      existsSync(join(this.projectRoot, 'pom.xml')) ||
      existsSync(join(this.projectRoot, 'build.gradle'))
    )
      types.push('java');
    if (existsSync(join(this.projectRoot, 'go.mod'))) types.push('go');
    if (existsSync(join(this.projectRoot, 'Cargo.toml'))) types.push('rust');
    if (existsSync(join(this.projectRoot, 'composer.json'))) types.push('php');
    if (existsSync(join(this.projectRoot, 'Gemfile'))) types.push('ruby');
    // FIX: readdirSync can throw (missing or unreadable projectRoot); that
    // exception previously escaped this method and turned
    // validateRequiredTools() into a rejected promise instead of a failed
    // CheckResult. Degrade gracefully and skip extension-based detection.
    let files: string[] = [];
    try {
      files = readdirSync(this.projectRoot);
    } catch (error: any) {
      logger.warn(`Could not read project directory: ${error.message}`);
    }
    if (files.some((f) => f.endsWith('.csproj') || f.endsWith('.sln')))
      types.push('dotnet');
    return types;
  }

  /**
   * Get required tools (command + version args) for a project type.
   * Unknown types map to an empty list.
   */
  private getToolsForProjectType(
    type: string
  ): Array<{ command: string; args: string[] }> {
    const toolMap: Record<
      string,
      Array<{ command: string; args: string[] }>
    > = {
      node: [
        { command: 'node', args: ['--version'] },
        { command: 'npm', args: ['--version'] }
      ],
      python: [
        { command: 'python3', args: ['--version'] },
        { command: 'pip3', args: ['--version'] }
      ],
      java: [{ command: 'java', args: ['--version'] }],
      go: [{ command: 'go', args: ['version'] }],
      rust: [{ command: 'cargo', args: ['--version'] }],
      php: [
        { command: 'php', args: ['--version'] },
        { command: 'composer', args: ['--version'] }
      ],
      ruby: [
        { command: 'ruby', args: ['--version'] },
        { command: 'bundle', args: ['--version'] }
      ],
      dotnet: [{ command: 'dotnet', args: ['--version'] }]
    };
    return toolMap[type] || [];
  }

  /**
   * Validate required tools availability: git and gh are always checked,
   * plus the toolchain for each detected project type. The ToolCheck list
   * is returned in `value` for both success and failure.
   */
  async validateRequiredTools(): Promise<CheckResult> {
    const tools: ToolCheck[] = [];
    // Always check git and gh CLI
    tools.push(this.checkTool('git', ['--version']));
    tools.push(await this.checkGhCli());
    // Detect project types and check their tools
    const projectTypes = this.detectProjectTypes();
    if (projectTypes.length === 0) {
      logger.warn('No recognized project type detected');
    } else {
      logger.info(`Detected project types: ${projectTypes.join(', ')}`);
    }
    for (const type of projectTypes) {
      const typeTools = this.getToolsForProjectType(type);
      for (const tool of typeTools) {
        tools.push(this.checkTool(tool.command, tool.args));
      }
    }
    // Determine overall success
    const allAvailable = tools.every((tool) => tool.available);
    const missingTools = tools
      .filter((tool) => !tool.available)
      .map((tool) => tool.name);
    if (!allAvailable) {
      return {
        success: false,
        value: tools,
        message: `Missing required tools: ${missingTools.join(', ')}`
      };
    }
    return {
      success: true,
      value: tools,
      message: 'All required tools are available'
    };
  }

  /**
   * Check if a command-line tool is available by running its version
   * command. Commands are fixed literals from this class (never user
   * input), so shell interpolation here is safe.
   */
  private checkTool(command: string, versionArgs: string[]): ToolCheck {
    try {
      const version = execSync(`${command} ${versionArgs.join(' ')}`, {
        cwd: this.projectRoot,
        encoding: 'utf-8',
        stdio: 'pipe',
        timeout: 5000
      })
        .trim()
        .split('\n')[0]; // keep only the first line of multi-line banners
      return {
        name: command,
        available: true,
        version,
        message: `${command} ${version}`
      };
    } catch (error) {
      return {
        name: command,
        available: false,
        message: `${command} not found`
      };
    }
  }

  /**
   * Check GitHub CLI installation and authentication status. A missing
   * auth session is only reported in the message; it does not mark the
   * tool as unavailable.
   */
  private async checkGhCli(): Promise<ToolCheck> {
    try {
      const version = execSync('gh --version', {
        cwd: this.projectRoot,
        encoding: 'utf-8',
        stdio: 'pipe',
        timeout: 5000
      })
        .trim()
        .split('\n')[0];
      const authed = await isGhCliAvailable(this.projectRoot);
      return {
        name: 'gh',
        available: true,
        version,
        message: authed
          ? 'GitHub CLI installed (authenticated)'
          : 'GitHub CLI installed (not authenticated)'
      };
    } catch {
      return { name: 'gh', available: false, message: 'GitHub CLI not found' };
    }
  }

  /**
   * Detect default branch via git-utils (requires a configured remote).
   */
  async detectDefaultBranch(): Promise<CheckResult> {
    try {
      const defaultBranch = await getDefaultBranch(this.projectRoot);
      if (!defaultBranch) {
        return {
          success: false,
          message:
            'Could not determine default branch. Make sure remote is configured.'
        };
      }
      return {
        success: true,
        value: defaultBranch,
        message: `Default branch: ${defaultBranch}`
      };
    } catch (error: any) {
      return {
        success: false,
        message: `Failed to detect default branch: ${error.message}`
      };
    }
  }

  /**
   * Run all preflight checks sequentially and aggregate the results into
   * a PreflightResult with a human-readable summary.
   */
  async runAllChecks(): Promise<PreflightResult> {
    logger.info('Running preflight checks...');
    const testCommand = await this.detectTestCommand();
    const gitWorkingTree = await this.checkGitWorkingTree();
    const requiredTools = await this.validateRequiredTools();
    const defaultBranch = await this.detectDefaultBranch();
    const allSuccess =
      testCommand.success &&
      gitWorkingTree.success &&
      requiredTools.success &&
      defaultBranch.success;
    // Build summary
    const passed: string[] = [];
    const failed: string[] = [];
    if (testCommand.success) passed.push('Test command');
    else failed.push('Test command');
    if (gitWorkingTree.success) passed.push('Git working tree');
    else failed.push('Git working tree');
    if (requiredTools.success) passed.push('Required tools');
    else failed.push('Required tools');
    if (defaultBranch.success) passed.push('Default branch');
    else failed.push('Default branch');
    const total = passed.length + failed.length;
    const summary = allSuccess
      ? `All preflight checks passed (${passed.length}/${total})`
      : `Preflight checks failed: ${failed.join(', ')} (${passed.length}/${total} passed)`;
    logger.info(summary);
    return {
      success: allSuccess,
      testCommand,
      gitWorkingTree,
      requiredTools,
      defaultBranch,
      summary
    };
  }
}

View File

@@ -0,0 +1,401 @@
/**
* @fileoverview Task Loader Service
* Loads and validates tasks for autopilot execution
*/
import type { Task, Subtask, TaskStatus } from '../types/index.js';
import { TaskService } from './task-service.js';
import { ConfigManager } from '../config/config-manager.js';
import { getLogger } from '../logger/factory.js';
// Module-scoped logger for task loading/validation.
const logger = getLogger('TaskLoader');

/**
 * Validation error types
 *
 * Discriminates why loadAndValidateTask() rejected a task; each value maps
 * to one validation step in TaskLoaderService.
 */
export type ValidationErrorType =
  | 'task_not_found' // no task with the given ID (or load failed)
  | 'task_completed' // task status is done/completed/cancelled
  | 'no_subtasks' // task was never expanded into subtasks
  | 'circular_dependencies' // subtask dependency graph contains a cycle
  | 'missing_dependencies' // a dependency references a non-existent subtask
  | 'invalid_structure'; // subtask missing required fields or malformed deps
/**
 * Validation result for task loading
 */
export interface TaskValidationResult {
  /** Whether validation passed */
  success: boolean;
  /** Loaded task (only present if validation succeeded) */
  task?: Task;
  /** Error type (only present on failure) */
  errorType?: ValidationErrorType;
  /** Human-readable error message */
  errorMessage?: string;
  /** Actionable suggestion for fixing the error */
  suggestion?: string;
  /** Dependency analysis (only for dependency errors) */
  dependencyIssues?: DependencyIssue[];
}
/**
 * Dependency issue details
 */
export interface DependencyIssue {
  /** Subtask ID with the issue, in dotted form "<taskId>.<subtaskId>" */
  subtaskId: string;
  /** Type of dependency issue */
  issueType: 'circular' | 'missing' | 'invalid';
  /** Description of the issue */
  message: string;
  /** The problematic dependency reference (missing-dependency issues only) */
  dependencyRef?: string;
}
/**
* TaskLoaderService loads and validates tasks for autopilot execution
*/
/**
 * TaskLoaderService loads and validates tasks for autopilot execution.
 *
 * TaskService construction is deferred until first use (ensureInitialized),
 * so creating a loader is cheap and performs no I/O.
 */
export class TaskLoaderService {
  // Lazily created on first load; null until ensureInitialized() runs.
  private taskService: TaskService | null = null;
  private projectRoot: string;

  /**
   * @param projectRoot Path to the project whose tasks are loaded.
   * @throws Error when projectRoot is falsy.
   */
  constructor(projectRoot: string) {
    if (!projectRoot) {
      throw new Error('projectRoot is required for TaskLoaderService');
    }
    this.projectRoot = projectRoot;
  }

  /**
   * Ensure TaskService is initialized (idempotent; safe to call repeatedly).
   */
  private async ensureInitialized(): Promise<void> {
    if (this.taskService) return;
    const configManager = await ConfigManager.create(this.projectRoot);
    this.taskService = new TaskService(configManager);
    await this.taskService.initialize();
  }

  /**
   * Load and validate a task for autopilot execution.
   *
   * Validation pipeline (first failure wins): existence -> status ->
   * has-subtasks -> subtask structure -> dependency graph.
   *
   * @param taskId ID of the task to load.
   * @returns success with the task attached, or a failure describing the
   *   first validation error plus an actionable suggestion.
   */
  async loadAndValidateTask(taskId: string): Promise<TaskValidationResult> {
    logger.info(`Loading task ${taskId}...`);
    // Step 1: Load task
    const task = await this.loadTask(taskId);
    if (!task) {
      return {
        success: false,
        errorType: 'task_not_found',
        errorMessage: `Task with ID "${taskId}" not found`,
        suggestion:
          'Use "task-master list" to see available tasks or verify the task ID is correct.'
      };
    }
    // Step 2: Validate task status
    const statusValidation = this.validateTaskStatus(task);
    if (!statusValidation.success) {
      return statusValidation;
    }
    // Step 3: Check for subtasks
    const subtaskValidation = this.validateSubtasksExist(task);
    if (!subtaskValidation.success) {
      return subtaskValidation;
    }
    // Step 4: Validate subtask structure
    const structureValidation = this.validateSubtaskStructure(task);
    if (!structureValidation.success) {
      return structureValidation;
    }
    // Step 5: Analyze dependencies
    const dependencyValidation = this.validateDependencies(task);
    if (!dependencyValidation.success) {
      return dependencyValidation;
    }
    logger.info(`Task ${taskId} validated successfully`);
    return {
      success: true,
      task
    };
  }

  /**
   * Load task using TaskService. Returns null (and logs) on any failure so
   * the caller can produce a uniform 'task_not_found' result.
   */
  private async loadTask(taskId: string): Promise<Task | null> {
    try {
      await this.ensureInitialized();
      if (!this.taskService) {
        throw new Error('TaskService initialization failed');
      }
      return await this.taskService.getTask(taskId);
    } catch (error) {
      logger.error(`Failed to load task ${taskId}:`, error);
      return null;
    }
  }

  /**
   * Validate task status is appropriate for autopilot: anything already
   * done/completed/cancelled is rejected.
   */
  private validateTaskStatus(task: Task): TaskValidationResult {
    const completedStatuses: TaskStatus[] = ['done', 'completed', 'cancelled'];
    if (completedStatuses.includes(task.status)) {
      return {
        success: false,
        errorType: 'task_completed',
        errorMessage: `Task "${task.title}" is already ${task.status}`,
        suggestion:
          'Autopilot can only execute tasks that are pending or in-progress. Use a different task.'
      };
    }
    return { success: true };
  }

  /**
   * Validate task has at least one subtask (autopilot works subtask by
   * subtask, so an unexpanded task cannot be executed).
   */
  private validateSubtasksExist(task: Task): TaskValidationResult {
    if (!task.subtasks || task.subtasks.length === 0) {
      return {
        success: false,
        errorType: 'no_subtasks',
        errorMessage: `Task "${task.title}" has no subtasks`,
        suggestion: this.buildExpansionSuggestion(task)
      };
    }
    return { success: true };
  }

  /**
   * Build a multi-line suggestion telling the user how to expand the task,
   * tailored to whether complexity analysis already exists for it.
   */
  private buildExpansionSuggestion(task: Task): string {
    const suggestions: string[] = [
      `Autopilot requires tasks to be broken down into subtasks for execution.`
    ];
    // Add expansion command suggestion
    suggestions.push(`\nExpand this task using:`);
    suggestions.push(`  task-master expand --id=${task.id}`);
    // If task has complexity analysis, mention it
    if (task.complexity || task.recommendedSubtasks) {
      suggestions.push(
        `\nThis task has complexity analysis available. Consider reviewing it first:`
      );
      suggestions.push(`  task-master show ${task.id}`);
    } else {
      suggestions.push(
        `\nOr analyze task complexity first to determine optimal subtask count:`
      );
      suggestions.push(`  task-master analyze-complexity --from=${task.id}`);
    }
    return suggestions.join('\n');
  }

  /**
   * Validate subtask structure: required fields present and dependencies
   * (when set) are arrays. Only runs after validateSubtasksExist, so
   * task.subtasks is known to be non-empty here.
   */
  private validateSubtaskStructure(task: Task): TaskValidationResult {
    for (const subtask of task.subtasks) {
      // Check required fields
      if (!subtask.title || !subtask.description) {
        return {
          success: false,
          errorType: 'invalid_structure',
          errorMessage: `Subtask ${task.id}.${subtask.id} is missing required fields`,
          suggestion:
            'Subtasks must have title and description. Re-expand the task or manually fix the subtask structure.'
        };
      }
      // Validate dependencies are arrays
      if (subtask.dependencies && !Array.isArray(subtask.dependencies)) {
        return {
          success: false,
          errorType: 'invalid_structure',
          errorMessage: `Subtask ${task.id}.${subtask.id} has invalid dependencies format`,
          suggestion:
            'Dependencies must be an array. Fix the task structure manually.'
        };
      }
    }
    return { success: true };
  }

  /**
   * Validate subtask dependencies: every reference must resolve to a
   * sibling subtask and the graph must be acyclic.
   *
   * NOTE(review): references are compared against *local* subtask ids
   * (String(st.id)); a dependency written in dotted form ("1.2") would be
   * reported as missing — confirm the id format emitted by task expansion.
   */
  private validateDependencies(task: Task): TaskValidationResult {
    const issues: DependencyIssue[] = [];
    const subtaskIds = new Set(task.subtasks.map((st) => String(st.id)));
    for (const subtask of task.subtasks) {
      const subtaskId = `${task.id}.${subtask.id}`;
      // Check for missing dependencies
      if (subtask.dependencies && subtask.dependencies.length > 0) {
        for (const depId of subtask.dependencies) {
          const depIdStr = String(depId);
          if (!subtaskIds.has(depIdStr)) {
            issues.push({
              subtaskId,
              issueType: 'missing',
              message: `References non-existent subtask ${depIdStr}`,
              dependencyRef: depIdStr
            });
          }
        }
      }
      // Check for circular dependencies (DFS from every subtask, so each
      // member of a cycle is reported once)
      const circularCheck = this.detectCircularDependency(
        subtask,
        task.subtasks,
        new Set()
      );
      if (circularCheck) {
        issues.push({
          subtaskId,
          issueType: 'circular',
          message: `Circular dependency detected: ${circularCheck.join(' -> ')}`
        });
      }
    }
    if (issues.length > 0) {
      // errorType reflects the first issue found; mixed issue sets are
      // still fully listed in dependencyIssues.
      const errorType =
        issues[0].issueType === 'circular'
          ? 'circular_dependencies'
          : 'missing_dependencies';
      return {
        success: false,
        errorType,
        errorMessage: `Task "${task.title}" has dependency issues`,
        suggestion:
          'Fix dependency issues manually or re-expand the task:\n' +
          issues
            .map((issue) => ` - ${issue.subtaskId}: ${issue.message}`)
            .join('\n'),
        dependencyIssues: issues
      };
    }
    return { success: true };
  }

  /**
   * Detect circular dependencies using depth-first search.
   *
   * `visited` tracks the current path only (a fresh copy is passed per
   * branch), so diamonds are not false positives; the trade-off is
   * worst-case exponential time, acceptable for small subtask counts.
   *
   * @returns the cycle path (ids from this subtask to the revisited node),
   *   or null when no cycle is reachable.
   */
  private detectCircularDependency(
    subtask: Subtask,
    allSubtasks: Subtask[],
    visited: Set<string>
  ): string[] | null {
    const subtaskId = String(subtask.id);
    if (visited.has(subtaskId)) {
      return [subtaskId];
    }
    visited.add(subtaskId);
    if (subtask.dependencies && subtask.dependencies.length > 0) {
      for (const depId of subtask.dependencies) {
        const depIdStr = String(depId);
        const dependency = allSubtasks.find((st) => String(st.id) === depIdStr);
        // Missing dependencies are skipped here; validateDependencies
        // reports them separately.
        if (dependency) {
          const circular = this.detectCircularDependency(
            dependency,
            allSubtasks,
            new Set(visited)
          );
          if (circular) {
            return [subtaskId, ...circular];
          }
        }
      }
    }
    return null;
  }

  /**
   * Get ordered subtask execution sequence.
   * Returns subtasks in dependency order (tasks with no deps first) —
   * effectively a Kahn-style topological sort, O(n^2) in subtask count.
   * If ordering stalls (cycle or unresolved deps), remaining subtasks are
   * appended in original order after a warning, so the result always
   * contains every subtask exactly once.
   */
  getExecutionOrder(task: Task): Subtask[] {
    const ordered: Subtask[] = [];
    const completed = new Set<string>();
    // Keep adding subtasks whose dependencies are all completed
    while (ordered.length < task.subtasks.length) {
      let added = false;
      for (const subtask of task.subtasks) {
        const subtaskId = String(subtask.id);
        if (completed.has(subtaskId)) {
          continue;
        }
        // Check if all dependencies are completed
        const allDepsCompleted =
          !subtask.dependencies ||
          subtask.dependencies.length === 0 ||
          subtask.dependencies.every((depId) => completed.has(String(depId)));
        if (allDepsCompleted) {
          ordered.push(subtask);
          completed.add(subtaskId);
          added = true;
          break;
        }
      }
      // Safety check to prevent infinite loop
      if (!added && ordered.length < task.subtasks.length) {
        logger.warn(
          `Could not determine complete execution order for task ${task.id}`
        );
        // Add remaining subtasks in original order
        for (const subtask of task.subtasks) {
          if (!completed.has(String(subtask.id))) {
            ordered.push(subtask);
          }
        }
        break;
      }
    }
    return ordered;
  }

  /**
   * Clean up resources (currently a no-op; kept for interface symmetry
   * with other services).
   */
  async cleanup(): Promise<void> {
    // TaskService doesn't require explicit cleanup
    // Resources are automatically released when instance is garbage collected
  }
}

View File

@@ -0,0 +1,456 @@
import { describe, it, expect } from 'vitest';
import { TestResultValidator } from './test-result-validator.js';
import type {
TestResult,
ValidationResult,
TestPhase
} from './test-result-validator.types.js';
// Input-validation suite: exercises TestResultValidator.validate() schema
// and invariant checks, independent of RED/GREEN phase-specific logic.
describe('TestResultValidator - Input Validation', () => {
  const validator = new TestResultValidator();
  describe('Schema Validation', () => {
    it('should validate a valid test result', () => {
      const validResult: TestResult = {
        total: 10,
        passed: 5,
        failed: 5,
        skipped: 0,
        phase: 'RED'
      };
      const result = validator.validate(validResult);
      expect(result.valid).toBe(true);
      expect(result.errors).toEqual([]);
    });
    it('should reject negative test counts', () => {
      // Built as an untyped literal + cast: the point is runtime schema
      // validation, not compile-time typing.
      const invalidResult = {
        total: -1,
        passed: 0,
        failed: 0,
        skipped: 0,
        phase: 'RED'
      };
      const result = validator.validate(invalidResult as TestResult);
      expect(result.valid).toBe(false);
      expect(result.errors.length).toBeGreaterThan(0);
    });
    it('should reject when totals do not match', () => {
      // Cross-field invariant: total must equal passed + failed + skipped.
      const invalidResult: TestResult = {
        total: 10,
        passed: 3,
        failed: 3,
        skipped: 3, // 3 + 3 + 3 = 9, not 10
        phase: 'RED'
      };
      const result = validator.validate(invalidResult);
      expect(result.valid).toBe(false);
      expect(result.errors).toContain(
        'Total tests must equal passed + failed + skipped'
      );
    });
    it('should reject missing required fields', () => {
      const invalidResult = {
        total: 10,
        passed: 5
        // missing failed, skipped, phase
      };
      const result = validator.validate(invalidResult as TestResult);
      expect(result.valid).toBe(false);
      expect(result.errors.length).toBeGreaterThan(0);
    });
    it('should accept optional coverage data', () => {
      const resultWithCoverage: TestResult = {
        total: 10,
        passed: 10,
        failed: 0,
        skipped: 0,
        phase: 'GREEN',
        coverage: {
          line: 85,
          branch: 75,
          function: 90,
          statement: 85
        }
      };
      const result = validator.validate(resultWithCoverage);
      expect(result.valid).toBe(true);
    });
    it('should reject invalid coverage percentages', () => {
      // Coverage values are percentages and must lie in [0, 100].
      const invalidResult: TestResult = {
        total: 10,
        passed: 10,
        failed: 0,
        skipped: 0,
        phase: 'GREEN',
        coverage: {
          line: 150, // Invalid: > 100
          branch: -10, // Invalid: < 0
          function: 90,
          statement: 85
        }
      };
      const result = validator.validate(invalidResult);
      expect(result.valid).toBe(false);
      expect(result.errors.length).toBeGreaterThan(0);
    });
    it('should reject invalid phase values', () => {
      // Phase must be one of the TestPhase literals; cast feeds an
      // out-of-enum string through the runtime validator.
      const invalidResult = {
        total: 10,
        passed: 5,
        failed: 5,
        skipped: 0,
        phase: 'INVALID_PHASE'
      };
      const result = validator.validate(invalidResult as TestResult);
      expect(result.valid).toBe(false);
      expect(result.errors.length).toBeGreaterThan(0);
    });
  });
});
describe('TestResultValidator - RED Phase Validation', () => {
const validator = new TestResultValidator();
it('should pass validation when RED phase has failures', () => {
const redResult: TestResult = {
total: 10,
passed: 5,
failed: 5,
skipped: 0,
phase: 'RED'
};
const result = validator.validateRedPhase(redResult);
expect(result.valid).toBe(true);
expect(result.errors).toEqual([]);
});
it('should fail validation when RED phase has zero failures', () => {
const redResult: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'RED'
};
const result = validator.validateRedPhase(redResult);
expect(result.valid).toBe(false);
expect(result.errors).toContain(
'RED phase must have at least one failing test'
);
expect(result.suggestions).toContain(
'Write failing tests first to follow TDD workflow'
);
});
it('should fail validation when RED phase has empty test suite', () => {
const emptyResult: TestResult = {
total: 0,
passed: 0,
failed: 0,
skipped: 0,
phase: 'RED'
};
const result = validator.validateRedPhase(emptyResult);
expect(result.valid).toBe(false);
expect(result.errors).toContain('Cannot validate empty test suite');
expect(result.suggestions).toContain(
'Add at least one test to begin TDD cycle'
);
});
it('should propagate base validation errors', () => {
const invalidResult: TestResult = {
total: 10,
passed: 3,
failed: 3,
skipped: 3, // Total mismatch
phase: 'RED'
};
const result = validator.validateRedPhase(invalidResult);
expect(result.valid).toBe(false);
expect(result.errors).toContain(
'Total tests must equal passed + failed + skipped'
);
});
});
describe('TestResultValidator - GREEN Phase Validation', () => {
const validator = new TestResultValidator();
it('should pass validation when GREEN phase has all tests passing', () => {
const greenResult: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'GREEN'
};
const result = validator.validateGreenPhase(greenResult);
expect(result.valid).toBe(true);
expect(result.errors).toEqual([]);
});
it('should fail validation when GREEN phase has failures', () => {
const greenResult: TestResult = {
total: 10,
passed: 5,
failed: 5,
skipped: 0,
phase: 'GREEN'
};
const result = validator.validateGreenPhase(greenResult);
expect(result.valid).toBe(false);
expect(result.errors).toContain('GREEN phase must have zero failures');
expect(result.suggestions).toContain(
'Fix implementation to make all tests pass'
);
});
it('should fail validation when GREEN phase has no passing tests', () => {
const greenResult: TestResult = {
total: 5,
passed: 0,
failed: 0,
skipped: 5,
phase: 'GREEN'
};
const result = validator.validateGreenPhase(greenResult);
expect(result.valid).toBe(false);
expect(result.errors).toContain(
'GREEN phase must have at least one passing test'
);
});
it('should warn when test count decreases', () => {
const greenResult: TestResult = {
total: 5,
passed: 5,
failed: 0,
skipped: 0,
phase: 'GREEN'
};
const result = validator.validateGreenPhase(greenResult, 10);
expect(result.valid).toBe(true);
expect(result.warnings).toContain('Test count decreased from 10 to 5');
expect(result.suggestions).toContain(
'Verify that no tests were accidentally removed'
);
});
it('should not warn when test count increases', () => {
const greenResult: TestResult = {
total: 15,
passed: 15,
failed: 0,
skipped: 0,
phase: 'GREEN'
};
const result = validator.validateGreenPhase(greenResult, 10);
expect(result.valid).toBe(true);
expect(result.warnings || []).toEqual([]);
});
it('should propagate base validation errors', () => {
const invalidResult: TestResult = {
total: 10,
passed: 3,
failed: 3,
skipped: 3, // Total mismatch
phase: 'GREEN'
};
const result = validator.validateGreenPhase(invalidResult);
expect(result.valid).toBe(false);
expect(result.errors).toContain(
'Total tests must equal passed + failed + skipped'
);
});
});
describe('TestResultValidator - Coverage Threshold Validation', () => {
const validator = new TestResultValidator();
it('should pass validation when coverage meets thresholds', () => {
const result: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'GREEN',
coverage: {
line: 85,
branch: 80,
function: 90,
statement: 85
}
};
const thresholds = {
line: 80,
branch: 75,
function: 85,
statement: 80
};
const validationResult = validator.validateCoverage(result, thresholds);
expect(validationResult.valid).toBe(true);
expect(validationResult.errors).toEqual([]);
});
it('should fail validation when line coverage is below threshold', () => {
const result: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'GREEN',
coverage: {
line: 70,
branch: 80,
function: 90,
statement: 85
}
};
const thresholds = {
line: 80
};
const validationResult = validator.validateCoverage(result, thresholds);
expect(validationResult.valid).toBe(false);
expect(validationResult.errors[0]).toContain('line coverage (70% < 80%)');
expect(validationResult.suggestions).toContain(
'Add more tests to improve code coverage'
);
});
it('should fail validation when multiple coverage types are below threshold', () => {
const result: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'GREEN',
coverage: {
line: 70,
branch: 60,
function: 75,
statement: 65
}
};
const thresholds = {
line: 80,
branch: 75,
function: 85,
statement: 80
};
const validationResult = validator.validateCoverage(result, thresholds);
expect(validationResult.valid).toBe(false);
expect(validationResult.errors[0]).toContain('line coverage (70% < 80%)');
expect(validationResult.errors[0]).toContain('branch coverage (60% < 75%)');
expect(validationResult.errors[0]).toContain(
'function coverage (75% < 85%)'
);
expect(validationResult.errors[0]).toContain(
'statement coverage (65% < 80%)'
);
});
it('should skip validation when no coverage data is provided', () => {
const result: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'GREEN'
};
const thresholds = {
line: 80,
branch: 75
};
const validationResult = validator.validateCoverage(result, thresholds);
expect(validationResult.valid).toBe(true);
expect(validationResult.errors).toEqual([]);
});
it('should only validate specified threshold types', () => {
const result: TestResult = {
total: 10,
passed: 10,
failed: 0,
skipped: 0,
phase: 'GREEN',
coverage: {
line: 70,
branch: 60,
function: 90,
statement: 85
}
};
const thresholds = {
line: 80
// Only checking line coverage
};
const validationResult = validator.validateCoverage(result, thresholds);
expect(validationResult.valid).toBe(false);
expect(validationResult.errors[0]).toContain('line coverage');
expect(validationResult.errors[0]).not.toContain('branch coverage');
});
it('should propagate base validation errors', () => {
const invalidResult: TestResult = {
total: 10,
passed: 3,
failed: 3,
skipped: 3, // Total mismatch
phase: 'GREEN',
coverage: {
line: 90,
branch: 90,
function: 90,
statement: 90
}
};
const thresholds = {
line: 80
};
const validationResult = validator.validateCoverage(
invalidResult,
thresholds
);
expect(validationResult.valid).toBe(false);
expect(validationResult.errors).toContain(
'Total tests must equal passed + failed + skipped'
);
});
});

View File

@@ -0,0 +1,268 @@
import { z } from 'zod';
import type {
TestResult,
ValidationResult,
CoverageThresholds,
PhaseValidationOptions
} from './test-result-validator.types.js';
/**
 * Schema for coverage metrics validation
 * Every metric is a percentage and must lie within [0, 100].
 */
const coverageSchema = z.object({
	line: z.number().min(0).max(100),
	branch: z.number().min(0).max(100),
	function: z.number().min(0).max(100),
	statement: z.number().min(0).max(100)
});
/**
 * Schema for test result validation
 * Counts must be non-negative integers; phase is one of the TDD phases.
 * Coverage data is optional and, when present, checked by coverageSchema.
 */
const testResultSchema = z.object({
	total: z.number().int().nonnegative(),
	passed: z.number().int().nonnegative(),
	failed: z.number().int().nonnegative(),
	skipped: z.number().int().nonnegative(),
	phase: z.enum(['RED', 'GREEN', 'REFACTOR']),
	coverage: coverageSchema.optional()
});
/**
 * Validates test results according to TDD phase semantics
 */
export class TestResultValidator {
	/**
	 * Validates a test result object against the schema and the
	 * invariant `total === passed + failed + skipped`.
	 *
	 * @param testResult - Raw test result to validate
	 * @returns Validation outcome; invalid when schema or invariant fails
	 */
	validate(testResult: TestResult): ValidationResult {
		const errors: string[] = [];
		const warnings: string[] = [];
		const suggestions: string[] = [];
		// Schema validation: malformed input short-circuits further checks
		const parseResult = testResultSchema.safeParse(testResult);
		if (!parseResult.success) {
			const zodIssues = parseResult.error.issues || [];
			errors.push(
				...zodIssues.map((e) => {
					// Prefix each issue with its dotted field path when present
					const path = e.path.length > 0 ? `${e.path.join('.')}: ` : '';
					return `${path}${e.message}`;
				})
			);
			return { valid: false, errors, warnings, suggestions };
		}
		// Invariant: reported counts must add up to the reported total
		const sum = testResult.passed + testResult.failed + testResult.skipped;
		if (sum !== testResult.total) {
			errors.push('Total tests must equal passed + failed + skipped');
		}
		return { valid: errors.length === 0, errors, warnings, suggestions };
	}
	/**
	 * Validates RED phase test results
	 * RED phase must have a non-empty suite with at least one failing test.
	 *
	 * @param testResult - Test result to validate
	 * @returns Validation outcome with TDD-specific suggestions
	 */
	validateRedPhase(testResult: TestResult): ValidationResult {
		const baseValidation = this.validate(testResult);
		if (!baseValidation.valid) {
			return baseValidation;
		}
		const errors: string[] = [];
		const suggestions: string[] = [];
		// RED phase must have failures
		if (testResult.failed === 0) {
			errors.push('RED phase must have at least one failing test');
			suggestions.push('Write failing tests first to follow TDD workflow');
		}
		// Must have at least one test
		if (testResult.total === 0) {
			errors.push('Cannot validate empty test suite');
			suggestions.push('Add at least one test to begin TDD cycle');
		}
		return {
			valid: errors.length === 0,
			errors,
			suggestions
		};
	}
	/**
	 * Validates GREEN phase test results
	 * GREEN phase must have zero failures and at least one passing test.
	 *
	 * @param testResult - Test result to validate
	 * @param previousTestCount - Optional total from a prior run; a decrease
	 *   produces a warning (not an error), since it may indicate removed tests
	 * @returns Validation outcome with warnings and suggestions
	 */
	validateGreenPhase(
		testResult: TestResult,
		previousTestCount?: number
	): ValidationResult {
		const baseValidation = this.validate(testResult);
		if (!baseValidation.valid) {
			return baseValidation;
		}
		const errors: string[] = [];
		const warnings: string[] = [];
		const suggestions: string[] = [];
		// GREEN phase must have zero failures
		if (testResult.failed > 0) {
			errors.push('GREEN phase must have zero failures');
			suggestions.push('Fix implementation to make all tests pass');
		}
		// Must have at least one passing test (an all-skipped suite proves nothing)
		if (testResult.passed === 0) {
			errors.push('GREEN phase must have at least one passing test');
			suggestions.push('Ensure tests exist and implementation makes them pass');
		}
		// Check for test count regression
		if (
			previousTestCount !== undefined &&
			testResult.total < previousTestCount
		) {
			warnings.push(
				`Test count decreased from ${previousTestCount} to ${testResult.total}`
			);
			suggestions.push('Verify that no tests were accidentally removed');
		}
		return {
			valid: errors.length === 0,
			errors,
			warnings,
			suggestions
		};
	}
	/**
	 * Validates coverage thresholds if provided
	 * Only metrics present in `thresholds` are enforced; when the result has
	 * no coverage data at all the check is skipped and passes.
	 *
	 * @param testResult - Test result (optionally carrying coverage data)
	 * @param thresholds - Minimum percentages per coverage metric
	 * @returns Validation outcome; all gaps aggregated into a single error
	 */
	validateCoverage(
		testResult: TestResult,
		thresholds: CoverageThresholds
	): ValidationResult {
		const baseValidation = this.validate(testResult);
		if (!baseValidation.valid) {
			return baseValidation;
		}
		const errors: string[] = [];
		const suggestions: string[] = [];
		// Skip validation if no coverage data
		if (!testResult.coverage) {
			return { valid: true, errors: [], suggestions: [] };
		}
		const coverage = testResult.coverage;
		// Check each coverage metric against its (optional) threshold.
		// Keys are shared by Coverage and CoverageThresholds, so a single
		// loop replaces four copy-pasted comparisons.
		const metrics = ['line', 'branch', 'function', 'statement'] as const;
		const gaps: string[] = [];
		for (const metric of metrics) {
			const threshold = thresholds[metric];
			if (threshold !== undefined && coverage[metric] < threshold) {
				gaps.push(`${metric} coverage (${coverage[metric]}% < ${threshold}%)`);
			}
		}
		if (gaps.length > 0) {
			errors.push(`Coverage thresholds not met: ${gaps.join(', ')}`);
			suggestions.push('Add more tests to improve code coverage');
		}
		return {
			valid: errors.length === 0,
			errors,
			suggestions
		};
	}
	/**
	 * Validates test results based on TDD phase
	 * Dispatches to the phase-specific validator, then applies coverage
	 * thresholds (when supplied) and merges the two outcomes.
	 *
	 * @param testResult - Test result to validate
	 * @param options - Optional phase override, coverage thresholds, and
	 *   previous test count for regression detection
	 * @returns Merged validation outcome
	 */
	validatePhase(
		testResult: TestResult,
		options?: PhaseValidationOptions
	): ValidationResult {
		const phase = options?.phase ?? testResult.phase;
		// Phase-specific validation. REFACTOR shares GREEN's rules (all tests
		// pass, no regressions), so both funnel into validateGreenPhase.
		const phaseResult =
			phase === 'RED'
				? this.validateRedPhase(testResult)
				: this.validateGreenPhase(testResult, options?.previousTestCount);
		if (!phaseResult.valid) {
			return phaseResult;
		}
		// Coverage validation if thresholds provided
		if (options?.coverageThresholds) {
			const coverageResult = this.validateCoverage(
				testResult,
				options.coverageThresholds
			);
			// Merge results
			return {
				valid: coverageResult.valid,
				errors: [...(phaseResult.errors || []), ...coverageResult.errors],
				warnings: phaseResult.warnings,
				suggestions: [
					...(phaseResult.suggestions || []),
					...(coverageResult.suggestions || [])
				]
			};
		}
		return phaseResult;
	}
}

View File

@@ -0,0 +1,55 @@
/**
 * Test phase in TDD workflow
 * RED = write a failing test, GREEN = make it pass,
 * REFACTOR = clean up while keeping tests green.
 */
export type TestPhase = 'RED' | 'GREEN' | 'REFACTOR';
/**
 * Code coverage metrics
 * All values are percentages in the range [0, 100].
 */
export interface Coverage {
	line: number;
	branch: number;
	function: number;
	statement: number;
}
/**
 * Test result data structure
 * Invariant (enforced by TestResultValidator):
 * total === passed + failed + skipped.
 */
export interface TestResult {
	total: number;
	passed: number;
	failed: number;
	skipped: number;
	phase: TestPhase;
	coverage?: Coverage;
}
/**
 * Coverage threshold configuration
 * Only the metrics that are set are enforced; omitted metrics are ignored.
 */
export interface CoverageThresholds {
	line?: number;
	branch?: number;
	function?: number;
	statement?: number;
}
/**
 * Validation result structure
 * `errors` make the result invalid; `warnings` and `suggestions` are advisory.
 */
export interface ValidationResult {
	valid: boolean;
	errors: string[];
	warnings?: string[];
	suggestions?: string[];
}
/**
 * Phase-specific validation options
 */
export interface PhaseValidationOptions {
	/**
	 * Phase to validate against. Optional: the validator falls back to the
	 * phase recorded on the TestResult itself (`options?.phase ?? testResult.phase`),
	 * so requiring it here contradicted actual usage. Existing callers that
	 * supply it are unaffected.
	 */
	phase?: TestPhase;
	/** Coverage thresholds to enforce after phase validation succeeds. */
	coverageThresholds?: CoverageThresholds;
	/** Test total from the previous run, used to warn on test-count regressions. */
	previousTestCount?: number;
}

View File

@@ -0,0 +1,494 @@
/**
* @fileoverview WorkflowService - High-level facade for TDD workflow operations
* Provides a simplified API for MCP tools while delegating to WorkflowOrchestrator
*/
import { WorkflowOrchestrator } from '../workflow/workflow-orchestrator.js';
import { WorkflowStateManager } from '../workflow/workflow-state-manager.js';
import { WorkflowActivityLogger } from '../workflow/workflow-activity-logger.js';
import type {
WorkflowContext,
SubtaskInfo,
TestResult,
WorkflowPhase,
TDDPhase,
WorkflowState
} from '../workflow/types.js';
import { GitAdapter } from '../git/git-adapter.js';
/**
 * Options for starting a new workflow
 */
export interface StartWorkflowOptions {
	taskId: string;
	taskTitle: string;
	// Subtasks as stored on the task; status 'done' maps to 'completed'
	// in the workflow context, everything else starts as 'pending'.
	subtasks: Array<{
		id: string;
		title: string;
		status: string;
		// Per-subtask override of the workflow-level maxAttempts
		maxAttempts?: number;
	}>;
	// Default attempt limit applied to subtasks without their own override
	maxAttempts?: number;
	// When true, start even if a workflow state file already exists
	force?: boolean;
	tag?: string; // Optional tag for branch naming
}
/**
 * Simplified workflow status for MCP responses
 */
export interface WorkflowStatus {
	taskId: string;
	phase: WorkflowPhase;
	// Only set while the workflow is inside the TDD subtask loop
	tddPhase?: TDDPhase;
	branchName?: string;
	currentSubtask?: {
		id: string;
		title: string;
		attempts: number;
		maxAttempts: number;
	};
	progress: {
		completed: number;
		total: number;
		current: number;
		percentage: number;
	};
}
/**
 * Next action recommendation for AI agent
 */
export interface NextAction {
	// Machine-readable action id, e.g. 'generate_test', 'implement_code'
	action: string;
	description: string;
	// Human-readable guidance for the agent's next step
	nextSteps: string;
	phase: WorkflowPhase;
	tddPhase?: TDDPhase;
	subtask?: {
		id: string;
		title: string;
	};
}
/**
* WorkflowService - Facade for workflow operations
* Manages WorkflowOrchestrator lifecycle and state persistence
*/
export class WorkflowService {
	private readonly projectRoot: string;
	private readonly stateManager: WorkflowStateManager;
	// Set while a workflow is active (started or resumed)
	private orchestrator?: WorkflowOrchestrator;
	// Subscribed to the current orchestrator; replaced on start/resume
	private activityLogger?: WorkflowActivityLogger;
	constructor(projectRoot: string) {
		this.projectRoot = projectRoot;
		this.stateManager = new WorkflowStateManager(projectRoot);
	}
	/**
	 * Check if workflow state exists
	 */
	async hasWorkflow(): Promise<boolean> {
		return await this.stateManager.exists();
	}
	/**
	 * Start a new TDD workflow
	 *
	 * @param options - Task info, subtasks, attempt limits, and flags
	 * @returns Status snapshot after branch setup
	 * @throws If a workflow already exists (without force), the working tree
	 *   is dirty, or all subtasks are already completed
	 */
	async startWorkflow(options: StartWorkflowOptions): Promise<WorkflowStatus> {
		const {
			taskId,
			taskTitle,
			subtasks,
			maxAttempts = 3,
			force,
			tag
		} = options;
		// Check for existing workflow
		if ((await this.hasWorkflow()) && !force) {
			throw new Error(
				'Workflow already exists. Use force=true to override or resume existing workflow.'
			);
		}
		// Initialize git adapter and ensure clean state
		const gitAdapter = new GitAdapter(this.projectRoot);
		await gitAdapter.ensureGitRepository();
		await gitAdapter.ensureCleanWorkingTree();
		// Parse subtasks to WorkflowContext format
		const workflowSubtasks: SubtaskInfo[] = subtasks.map((st) => ({
			id: st.id,
			title: st.title,
			status: st.status === 'done' ? 'completed' : 'pending',
			attempts: 0,
			maxAttempts: st.maxAttempts || maxAttempts
		}));
		// Find the first incomplete subtask to resume from
		const firstIncompleteIndex = workflowSubtasks.findIndex(
			(st) => st.status !== 'completed'
		);
		// If all subtasks are already completed, throw an error
		if (firstIncompleteIndex === -1) {
			throw new Error(
				`All subtasks for task ${taskId} are already completed. Nothing to do.`
			);
		}
		// Create workflow context, starting from first incomplete subtask
		const context: WorkflowContext = {
			taskId,
			subtasks: workflowSubtasks,
			currentSubtaskIndex: firstIncompleteIndex,
			errors: [],
			metadata: {
				startedAt: new Date().toISOString(),
				taskTitle,
				resumedFromSubtask:
					firstIncompleteIndex > 0
						? workflowSubtasks[firstIncompleteIndex].id
						: undefined
			}
		};
		// Create orchestrator with auto-persistence
		this.orchestrator = new WorkflowOrchestrator(context);
		this.orchestrator.enableAutoPersist(async (state: WorkflowState) => {
			await this.stateManager.save(state);
		});
		// Initialize activity logger to track all workflow events
		this.activityLogger = new WorkflowActivityLogger(
			this.orchestrator,
			this.stateManager.getActivityLogPath()
		);
		this.activityLogger.start();
		// Transition through PREFLIGHT and BRANCH_SETUP phases
		this.orchestrator.transition({ type: 'PREFLIGHT_COMPLETE' });
		// Create git branch with descriptive name
		const branchName = this.generateBranchName(taskId, taskTitle, tag);
		// Check if we're already on the target branch
		const currentBranch = await gitAdapter.getCurrentBranch();
		if (currentBranch !== branchName) {
			// Only create branch if we're not already on it
			await gitAdapter.createAndCheckoutBranch(branchName);
		}
		// Transition to SUBTASK_LOOP with RED phase
		this.orchestrator.transition({
			type: 'BRANCH_CREATED',
			branchName
		});
		return this.getStatus();
	}
	/**
	 * Resume an existing workflow
	 *
	 * @returns Status snapshot of the restored workflow
	 * @throws If the persisted state fails the orchestrator's resume check
	 */
	async resumeWorkflow(): Promise<WorkflowStatus> {
		// Load state
		const state = await this.stateManager.load();
		// Create new orchestrator with loaded context
		this.orchestrator = new WorkflowOrchestrator(state.context);
		// Validate and restore state
		if (!this.orchestrator.canResumeFromState(state)) {
			throw new Error(
				'Invalid workflow state. State may be corrupted. Consider starting a new workflow.'
			);
		}
		this.orchestrator.restoreState(state);
		// Re-enable auto-persistence
		this.orchestrator.enableAutoPersist(async (newState: WorkflowState) => {
			await this.stateManager.save(newState);
		});
		// Initialize activity logger to continue tracking events
		this.activityLogger = new WorkflowActivityLogger(
			this.orchestrator,
			this.stateManager.getActivityLogPath()
		);
		this.activityLogger.start();
		return this.getStatus();
	}
	/**
	 * Get current workflow status
	 */
	getStatus(): WorkflowStatus {
		if (!this.orchestrator) {
			throw new Error('No active workflow. Start or resume a workflow first.');
		}
		const context = this.orchestrator.getContext();
		const progress = this.orchestrator.getProgress();
		const currentSubtask = this.orchestrator.getCurrentSubtask();
		return {
			taskId: context.taskId,
			phase: this.orchestrator.getCurrentPhase(),
			tddPhase: this.orchestrator.getCurrentTDDPhase(),
			branchName: context.branchName,
			currentSubtask: currentSubtask
				? {
						id: currentSubtask.id,
						title: currentSubtask.title,
						attempts: currentSubtask.attempts,
						maxAttempts: currentSubtask.maxAttempts || 3
					}
				: undefined,
			progress
		};
	}
	/**
	 * Get workflow context (for accessing full state details)
	 */
	getContext(): WorkflowContext {
		if (!this.orchestrator) {
			throw new Error('No active workflow. Start or resume a workflow first.');
		}
		return this.orchestrator.getContext();
	}
	/**
	 * Get next recommended action for AI agent
	 * Maps the current workflow phase (and TDD sub-phase) to a concrete
	 * instruction the agent should carry out next.
	 */
	getNextAction(): NextAction {
		if (!this.orchestrator) {
			throw new Error('No active workflow. Start or resume a workflow first.');
		}
		const phase = this.orchestrator.getCurrentPhase();
		const tddPhase = this.orchestrator.getCurrentTDDPhase();
		const currentSubtask = this.orchestrator.getCurrentSubtask();
		// Determine action based on current phase
		if (phase === 'COMPLETE') {
			return {
				action: 'workflow_complete',
				description: 'All subtasks completed',
				nextSteps:
					'All subtasks completed! Review the entire implementation and merge your branch when ready.',
				phase
			};
		}
		if (phase === 'FINALIZE') {
			return {
				action: 'finalize_workflow',
				description: 'Finalize and complete the workflow',
				nextSteps:
					'All subtasks are complete! Use autopilot_finalize to verify no uncommitted changes remain and mark the workflow as complete.',
				phase
			};
		}
		if (phase !== 'SUBTASK_LOOP' || !tddPhase || !currentSubtask) {
			return {
				action: 'unknown',
				description: 'Workflow is not in active state',
				nextSteps: 'Use autopilot_status to check workflow state.',
				phase
			};
		}
		const baseAction = {
			phase,
			tddPhase,
			subtask: {
				id: currentSubtask.id,
				title: currentSubtask.title
			}
		};
		switch (tddPhase) {
			case 'RED':
				return {
					...baseAction,
					action: 'generate_test',
					description: 'Generate failing test for current subtask',
					nextSteps: `Write failing tests for subtask ${currentSubtask.id}: "${currentSubtask.title}". Create test file(s) that validate the expected behavior. Run tests and use autopilot_complete_phase with results. Note: If all tests pass (0 failures), the feature is already implemented and the subtask will be auto-completed.`
				};
			case 'GREEN':
				return {
					...baseAction,
					action: 'implement_code',
					description: 'Implement feature to make tests pass',
					nextSteps: `Implement code to make tests pass for subtask ${currentSubtask.id}: "${currentSubtask.title}". Write the minimal code needed to pass all tests (GREEN phase), then use autopilot_complete_phase with test results.`
				};
			case 'COMMIT':
				return {
					...baseAction,
					action: 'commit_changes',
					description: 'Commit RED-GREEN cycle changes',
					nextSteps: `Review and commit your changes for subtask ${currentSubtask.id}: "${currentSubtask.title}". Use autopilot_commit to create the commit and advance to the next subtask.`
				};
			default:
				return {
					...baseAction,
					action: 'unknown',
					description: 'Unknown TDD phase',
					nextSteps: 'Use autopilot_status to check workflow state.'
				};
		}
	}
	/**
	 * Complete current TDD phase with test results
	 *
	 * @param testResults - Results of the test run for the current phase
	 * @throws If not in an active RED/GREEN phase (COMMIT uses commit())
	 */
	async completePhase(testResults: TestResult): Promise<WorkflowStatus> {
		if (!this.orchestrator) {
			throw new Error('No active workflow. Start or resume a workflow first.');
		}
		const tddPhase = this.orchestrator.getCurrentTDDPhase();
		if (!tddPhase) {
			throw new Error('Not in active TDD phase');
		}
		// Transition based on current phase
		switch (tddPhase) {
			case 'RED':
				this.orchestrator.transition({
					type: 'RED_PHASE_COMPLETE',
					testResults
				});
				break;
			case 'GREEN':
				this.orchestrator.transition({
					type: 'GREEN_PHASE_COMPLETE',
					testResults
				});
				break;
			case 'COMMIT':
				throw new Error(
					'Cannot complete COMMIT phase with test results. Use commit() instead.'
				);
			default:
				throw new Error(`Unknown TDD phase: ${tddPhase}`);
		}
		return this.getStatus();
	}
	/**
	 * Commit current changes and advance workflow
	 * Advances to the next subtask, or to FINALIZE when all are done.
	 */
	async commit(): Promise<WorkflowStatus> {
		if (!this.orchestrator) {
			throw new Error('No active workflow. Start or resume a workflow first.');
		}
		const tddPhase = this.orchestrator.getCurrentTDDPhase();
		if (tddPhase !== 'COMMIT') {
			throw new Error(
				`Cannot commit in ${tddPhase} phase. Complete RED and GREEN phases first.`
			);
		}
		// Transition COMMIT phase complete
		this.orchestrator.transition({
			type: 'COMMIT_COMPLETE'
		});
		// Check if should advance to next subtask
		const progress = this.orchestrator.getProgress();
		if (progress.current < progress.total) {
			this.orchestrator.transition({ type: 'SUBTASK_COMPLETE' });
		} else {
			// All subtasks complete
			this.orchestrator.transition({ type: 'ALL_SUBTASKS_COMPLETE' });
		}
		return this.getStatus();
	}
	/**
	 * Finalize and complete the workflow
	 * Validates working tree is clean before marking complete
	 */
	async finalizeWorkflow(): Promise<WorkflowStatus> {
		if (!this.orchestrator) {
			throw new Error('No active workflow. Start or resume a workflow first.');
		}
		const phase = this.orchestrator.getCurrentPhase();
		if (phase !== 'FINALIZE') {
			throw new Error(
				`Cannot finalize workflow in ${phase} phase. Complete all subtasks first.`
			);
		}
		// Check working tree is clean
		const gitAdapter = new GitAdapter(this.projectRoot);
		const statusSummary = await gitAdapter.getStatusSummary();
		if (!statusSummary.isClean) {
			throw new Error(
				`Cannot finalize workflow: working tree has uncommitted changes.\n` +
					`Staged: ${statusSummary.staged}, Modified: ${statusSummary.modified}, ` +
					`Deleted: ${statusSummary.deleted}, Untracked: ${statusSummary.untracked}\n` +
					`Please commit all changes before finalizing the workflow.`
			);
		}
		// Transition to COMPLETE
		this.orchestrator.transition({ type: 'FINALIZE_COMPLETE' });
		return this.getStatus();
	}
	/**
	 * Abort current workflow
	 */
	async abortWorkflow(): Promise<void> {
		if (this.orchestrator) {
			this.orchestrator.transition({ type: 'ABORT' });
		}
		// Delete state file
		await this.stateManager.delete();
		this.orchestrator = undefined;
		// Fix: also drop the activity logger. It is subscribed to the
		// discarded orchestrator, so keeping it would retain that orchestrator
		// in memory and leave a stale listener alive after the abort.
		this.activityLogger = undefined;
	}
	/**
	 * Generate a descriptive git branch name
	 * Format: tag-name/task-id-task-title or task-id-task-title
	 */
	private generateBranchName(
		taskId: string,
		taskTitle: string,
		tag?: string
	): string {
		// Sanitize task title for branch name
		const sanitizedTitle = taskTitle
			.toLowerCase()
			.replace(/[^a-z0-9]+/g, '-') // Replace non-alphanumeric with dash
			.replace(/^-+|-+$/g, '') // Remove leading/trailing dashes
			.substring(0, 50); // Limit length
		// Format task ID for branch name
		const formattedTaskId = taskId.replace(/\./g, '-');
		// Add tag prefix if tag is provided
		const tagPrefix = tag ? `${tag}/` : '';
		return `${tagPrefix}task-${formattedTaskId}-${sanitizedTitle}`;
	}
}

View File

@@ -0,0 +1,182 @@
/**
* Activity.jsonl append-only logging system for workflow tracking.
* Uses newline-delimited JSON (JSONL) format for structured event logging.
*
* @module activity-logger
*/
import fs from 'fs-extra';
import path from 'path';
/**
 * Activity log entry structure
 * `timestamp` is an ISO-8601 string added automatically by logActivity;
 * the index signature allows arbitrary event-specific payload fields.
 */
export interface ActivityEvent {
	timestamp: string;
	type: string;
	[key: string]: any;
}
/**
 * Filter criteria for activity log queries
 * All criteria are optional and combined with AND semantics; extra fields
 * on the filter object are matched against events by strict equality.
 */
export interface ActivityFilter {
	type?: string;
	timestampFrom?: string;
	timestampTo?: string;
	predicate?: (event: ActivityEvent) => boolean;
}
/**
* Appends an activity event to the log file.
* Uses atomic append operations to ensure data integrity.
*
* @param {string} activityPath - Path to the activity.jsonl file
* @param {Omit<ActivityEvent, 'timestamp'>} event - Event data to log (timestamp added automatically)
* @returns {Promise<void>}
*
* @example
* await logActivity('/path/to/activity.jsonl', {
* type: 'phase-start',
* phase: 'red'
* });
*/
export async function logActivity(
	activityPath: string,
	event: Omit<ActivityEvent, 'timestamp'>
): Promise<void> {
	// Stamp the event with the current time
	const entry: ActivityEvent = {
		...event,
		timestamp: new Date().toISOString()
	};
	// Make sure the parent directory exists before writing
	await fs.ensureDir(path.dirname(activityPath));
	// One JSON document per line (JSONL). appendFile's append mode gives
	// atomic appends on most systems.
	await fs.appendFile(activityPath, `${JSON.stringify(entry)}\n`, 'utf-8');
}
/**
* Reads and parses all events from an activity log file.
* Returns events in chronological order.
*
* @param {string} activityPath - Path to the activity.jsonl file
* @returns {Promise<ActivityEvent[]>} Array of activity events
* @throws {Error} If file contains invalid JSON
*
* @example
* const events = await readActivityLog('/path/to/activity.jsonl');
* console.log(`Found ${events.length} events`);
*/
export async function readActivityLog(
	activityPath: string
): Promise<ActivityEvent[]> {
	// A missing log file simply means no events have been recorded yet
	if (!(await fs.pathExists(activityPath))) {
		return [];
	}
	const raw = await fs.readFile(activityPath, 'utf-8');
	// Each non-empty line is one JSON-encoded event, in write order
	const parsed: ActivityEvent[] = [];
	raw.trim()
		.split('\n')
		.forEach((rawLine, index) => {
			const line = rawLine.trim();
			if (!line) {
				return; // tolerate blank lines
			}
			try {
				parsed.push(JSON.parse(line));
			} catch (error) {
				const reason =
					error instanceof Error ? error.message : String(error);
				throw new Error(`Invalid JSON at line ${index + 1}: ${reason}`);
			}
		});
	return parsed;
}
/**
* Filters activity log events based on criteria.
* Supports filtering by event type, timestamp range, and custom predicates.
*
* @param {string} activityPath - Path to the activity.jsonl file
* @param {ActivityFilter} filter - Filter criteria
* @returns {Promise<ActivityEvent[]>} Filtered array of events
*
* @example
* // Filter by event type
* const phaseEvents = await filterActivityLog('/path/to/activity.jsonl', {
* type: 'phase-start'
* });
*
* // Filter by timestamp range
* const recentEvents = await filterActivityLog('/path/to/activity.jsonl', {
* timestampFrom: '2024-01-15T10:00:00.000Z'
* });
*
* // Filter with custom predicate
* const failedTests = await filterActivityLog('/path/to/activity.jsonl', {
* predicate: (event) => event.type === 'test-run' && event.result === 'fail'
* });
*/
export async function filterActivityLog(
	activityPath: string,
	filter: ActivityFilter & Record<string, any>
): Promise<ActivityEvent[]> {
	// Keys with dedicated filtering semantics; everything else on the
	// filter object is treated as an exact-match field.
	const reservedKeys = new Set([
		'type',
		'timestampFrom',
		'timestampTo',
		'predicate'
	]);
	const events = await readActivityLog(activityPath);
	const matches = (event: ActivityEvent): boolean => {
		if (filter.type && event.type !== filter.type) {
			return false;
		}
		// Timestamp range: ISO-8601 strings compare lexicographically
		if (filter.timestampFrom && event.timestamp < filter.timestampFrom) {
			return false;
		}
		if (filter.timestampTo && event.timestamp > filter.timestampTo) {
			return false;
		}
		if (filter.predicate && !filter.predicate(event)) {
			return false;
		}
		// Remaining filter fields must match the event exactly
		return Object.entries(filter).every(
			([key, value]) => reservedKeys.has(key) || event[key] === value
		);
	};
	return events.filter(matches);
}

View File

@@ -8,6 +8,15 @@ export { FileStorage } from './file-storage/index.js';
export { ApiStorage, type ApiStorageConfig } from './api-storage.js';
export { StorageFactory } from './storage-factory.js';
// Export activity logger
export {
logActivity,
readActivityLog,
filterActivityLog,
type ActivityEvent,
type ActivityFilter
} from './activity-logger.js';
// Export storage interface and types
export type {
IStorage,

View File

@@ -0,0 +1,421 @@
/**
* @fileoverview Git utilities for Task Master
* Git integration utilities using raw git commands and gh CLI
*/
import { exec, execSync } from 'child_process';
import { promisify } from 'util';
const execAsync = promisify(exec);
/**
 * GitHub repository information
 * (shape of the JSON returned by `gh repo view --json name,owner,defaultBranchRef`)
 */
export interface GitHubRepoInfo {
	// Repository name
	name: string;
	// Repository owner; `login` is the account handle
	owner: { login: string };
	// Default branch reference, e.g. { name: 'main' }
	defaultBranchRef: { name: string };
}
/**
 * Check if the specified directory is inside a git repository.
 * Resolves false (instead of rejecting) when git exits non-zero.
 */
export async function isGitRepository(projectRoot: string): Promise<boolean> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for isGitRepository');
	}
	// `git rev-parse --git-dir` succeeds only inside a repository
	return execAsync('git rev-parse --git-dir', { cwd: projectRoot }).then(
		() => true,
		() => false
	);
}
/**
 * Synchronous check if directory is in a git repository.
 * Returns false for a missing projectRoot instead of throwing.
 */
export function isGitRepositorySync(projectRoot: string): boolean {
	if (!projectRoot) {
		return false;
	}
	try {
		// Only the exit status matters; suppress all command output
		execSync('git rev-parse --git-dir', { cwd: projectRoot, stdio: 'ignore' });
		return true;
	} catch {
		return false;
	}
}
/**
 * Get the current git branch name.
 * Resolves null when git fails (e.g. not a repository).
 */
export async function getCurrentBranch(
	projectRoot: string
): Promise<string | null> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getCurrentBranch');
	}
	return execAsync('git rev-parse --abbrev-ref HEAD', {
		cwd: projectRoot
	}).then(
		({ stdout }) => stdout.trim(),
		() => null
	);
}
/**
 * Synchronous get current git branch name.
 * Returns null on any git failure or missing projectRoot.
 */
export function getCurrentBranchSync(projectRoot: string): string | null {
	if (!projectRoot) {
		return null;
	}
	try {
		const output = execSync('git rev-parse --abbrev-ref HEAD', {
			cwd: projectRoot,
			encoding: 'utf8'
		});
		return output.trim();
	} catch {
		return null;
	}
}
/**
 * Get list of all local git branches.
 * Returns [] when git fails.
 */
export async function getLocalBranches(projectRoot: string): Promise<string[]> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getLocalBranches');
	}
	try {
		const result = await execAsync('git branch --format="%(refname:short)"', {
			cwd: projectRoot,
			maxBuffer: 10 * 1024 * 1024
		});
		// One branch name per line; drop blanks and surrounding whitespace
		return result.stdout
			.split('\n')
			.map((name) => name.trim())
			.filter((name) => name.length > 0);
	} catch {
		return [];
	}
}
/**
 * Get list of all remote branches (deduplicated, without the remote prefix).
 * Returns [] when git fails.
 */
export async function getRemoteBranches(
	projectRoot: string
): Promise<string[]> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getRemoteBranches');
	}
	try {
		const { stdout } = await execAsync(
			'git branch -r --format="%(refname:short)"',
			{ cwd: projectRoot, maxBuffer: 10 * 1024 * 1024 }
		);
		const seen = new Set<string>();
		for (const raw of stdout.trim().split('\n')) {
			// Skip blank lines and the symbolic HEAD pointer entry
			if (raw.length === 0 || raw.includes('HEAD')) {
				continue;
			}
			// Strip the remote prefix, e.g. "origin/feature" -> "feature"
			seen.add(raw.replace(/^[^/]+\//, '').trim());
		}
		return [...seen];
	} catch {
		return [];
	}
}
/**
 * Check if gh CLI is available and authenticated.
 * Resolves false when `gh auth status` fails or gh is not installed.
 */
export async function isGhCliAvailable(projectRoot?: string): Promise<boolean> {
	try {
		await execAsync('gh auth status', projectRoot ? { cwd: projectRoot } : {});
		return true;
	} catch {
		return false;
	}
}
/**
 * Get GitHub repository information using gh CLI.
 * Resolves null when gh fails or its output is not valid JSON.
 */
export async function getGitHubRepoInfo(
	projectRoot: string
): Promise<GitHubRepoInfo | null> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getGitHubRepoInfo');
	}
	try {
		const result = await execAsync(
			'gh repo view --json name,owner,defaultBranchRef',
			{ cwd: projectRoot }
		);
		return JSON.parse(result.stdout) as GitHubRepoInfo;
	} catch {
		return null;
	}
}
/**
 * Get git repository root directory.
 * Resolves null when git fails (e.g. not a repository).
 */
export async function getGitRepositoryRoot(
	projectRoot: string
): Promise<string | null> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getGitRepositoryRoot');
	}
	return execAsync('git rev-parse --show-toplevel', {
		cwd: projectRoot
	}).then(
		({ stdout }) => stdout.trim(),
		() => null
	);
}
/**
 * Get the default branch name for the repository.
 *
 * Resolution order:
 *   1. GitHub metadata via `gh repo view` (when the gh CLI is authenticated)
 *   2. `git remote show <remote>` HEAD branch (origin preferred over others)
 *   3. `git symbolic-ref refs/remotes/<remote>/HEAD`
 *   4. First of 'main' / 'master' found among local or remote branches
 * Returns null when none of the above yields a branch name.
 */
export async function getDefaultBranch(
	projectRoot: string
): Promise<string | null> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getDefaultBranch');
	}
	try {
		// Try to get from GitHub first (if gh CLI is available)
		if (await isGhCliAvailable(projectRoot)) {
			const repoInfo = await getGitHubRepoInfo(projectRoot);
			if (repoInfo && repoInfo.defaultBranchRef) {
				return repoInfo.defaultBranchRef.name;
			}
		}
		// Fallback to git remote info (support non-origin remotes)
		const remotesRaw = await execAsync('git remote', { cwd: projectRoot });
		const remotes = remotesRaw.stdout.trim().split('\n').filter(Boolean);
		if (remotes.length > 0) {
			const primary = remotes.includes('origin') ? 'origin' : remotes[0];
			// Parse `git remote show` (preferred)
			try {
				const { stdout } = await execAsync(`git remote show ${primary}`, {
					cwd: projectRoot,
					maxBuffer: 10 * 1024 * 1024
				});
				// Matches the "HEAD branch: <name>" line of `git remote show`
				const m = stdout.match(/HEAD branch:\s+([^\s]+)/);
				if (m) return m[1].trim();
			} catch {}
			// Fallback to symbolic-ref of remote HEAD
			try {
				const { stdout } = await execAsync(
					`git symbolic-ref refs/remotes/${primary}/HEAD`,
					{ cwd: projectRoot }
				);
				// Output looks like "refs/remotes/<remote>/<branch>"; strip the prefix
				return stdout.replace(`refs/remotes/${primary}/`, '').trim();
			} catch {}
		}
		// If we couldn't determine, throw to trigger final fallbacks
		throw new Error('default-branch-not-found');
	} catch (error) {
		// Final fallback - common default branch names
		const commonDefaults = ['main', 'master'];
		const branches = await getLocalBranches(projectRoot);
		const remoteBranches = await getRemoteBranches(projectRoot);
		for (const defaultName of commonDefaults) {
			if (
				branches.includes(defaultName) ||
				remoteBranches.includes(defaultName)
			) {
				return defaultName;
			}
		}
		return null;
	}
}
/**
* Check if we're currently on the default branch
*/
export async function isOnDefaultBranch(projectRoot: string): Promise<boolean> {
if (!projectRoot) {
throw new Error('projectRoot is required for isOnDefaultBranch');
}
try {
const [currentBranch, defaultBranch] = await Promise.all([
getCurrentBranch(projectRoot),
getDefaultBranch(projectRoot)
]);
return (
currentBranch !== null &&
defaultBranch !== null &&
currentBranch === defaultBranch
);
} catch (error) {
return false;
}
}
/**
 * Check if the current working directory is inside a Git work-tree.
 */
export function insideGitWorkTree(): boolean {
	try {
		// Exit status alone answers the question; discard output
		execSync('git rev-parse --is-inside-work-tree', {
			cwd: process.cwd(),
			stdio: 'ignore'
		});
		return true;
	} catch {
		return false;
	}
}
/**
 * Sanitize branch name to be a valid tag name.
 *
 * Invalid characters become hyphens, hyphen runs are collapsed, leading and
 * trailing hyphens are stripped, the result is lowercased and capped at 50
 * characters. A final trim handles the case where truncation itself leaves a
 * trailing hyphen (previously the 50-char cut could reintroduce one after
 * the edge-trim had already run).
 *
 * @param branchName - Branch name to sanitize; non-strings/empty yield 'unknown-branch'
 * @returns Sanitized, lowercase, <=50 char tag-safe name
 */
export function sanitizeBranchNameForTag(branchName: string): string {
	if (!branchName || typeof branchName !== 'string') {
		return 'unknown-branch';
	}
	return branchName
		.replace(/[^a-zA-Z0-9_.-]/g, '-') // Replace invalid chars with hyphens (allow dots)
		.replace(/^-+|-+$/g, '') // Remove leading/trailing hyphens
		.replace(/-+/g, '-') // Collapse multiple hyphens
		.toLowerCase() // Convert to lowercase
		.substring(0, 50) // Limit length
		.replace(/-+$/, ''); // Truncation may re-expose a trailing hyphen; strip it
}
/**
 * Check if a branch name would create a valid tag name.
 * Reserved branch names (main/master/develop/dev/head) and names that
 * sanitize to nothing meaningful are rejected.
 */
export function isValidBranchForTag(branchName: string): boolean {
	if (typeof branchName !== 'string' || branchName.length === 0) {
		return false;
	}
	// Reserved branch names that shouldn't become tags
	const reserved = new Set(['main', 'master', 'develop', 'dev', 'head']);
	if (reserved.has(branchName.toLowerCase())) {
		return false;
	}
	// Check if sanitized name would be meaningful
	const tagName = sanitizeBranchNameForTag(branchName);
	return tagName !== '' && tagName !== 'unknown-branch';
}
/**
 * Git worktree information
 * (one entry per worktree reported by `git worktree list --porcelain`)
 */
export interface GitWorktree {
	// Filesystem path of the worktree
	path: string;
	// Branch name (refs/heads/ prefix stripped); null when no branch line was present
	branch: string | null;
	// Value of the worktree's HEAD line ('' when absent)
	head: string;
}
/**
 * Get list of all git worktrees.
 *
 * Parses `git worktree list --porcelain`, where each worktree appears as a
 * group of "worktree <path>" / "HEAD <sha>" / "branch <ref>" lines. An entry
 * is flushed when a new "worktree " line begins, when a blank separator line
 * is seen, and once more at end-of-input. Returns [] on any git failure.
 */
export async function getWorktrees(
	projectRoot: string
): Promise<GitWorktree[]> {
	if (!projectRoot) {
		throw new Error('projectRoot is required for getWorktrees');
	}
	try {
		const { stdout } = await execAsync('git worktree list --porcelain', {
			cwd: projectRoot
		});
		const worktrees: GitWorktree[] = [];
		const lines = stdout.trim().split('\n');
		// Accumulates fields for the entry currently being parsed
		let current: Partial<GitWorktree> = {};
		for (const line of lines) {
			if (line.startsWith('worktree ')) {
				// flush previous entry if present
				if (current.path) {
					worktrees.push({
						path: current.path,
						branch: current.branch || null,
						head: current.head || ''
					});
					current = {};
				}
				current.path = line.substring(9);
			} else if (line.startsWith('HEAD ')) {
				current.head = line.substring(5);
			} else if (line.startsWith('branch ')) {
				// "branch refs/heads/foo" -> "foo"
				current.branch = line.substring(7).replace('refs/heads/', '');
			} else if (line === '' && current.path) {
				// Blank separator line terminates the current entry
				worktrees.push({
					path: current.path,
					branch: current.branch || null,
					head: current.head || ''
				});
				current = {};
			}
		}
		// Handle last entry if no trailing newline
		if (current.path) {
			worktrees.push({
				path: current.path,
				branch: current.branch || null,
				head: current.head || ''
			});
		}
		return worktrees;
	} catch (error) {
		return [];
	}
}
/**
* Check if a branch is checked out in any worktree
* Returns the worktree path if found, null otherwise
*/
export async function isBranchCheckedOut(
projectRoot: string,
branchName: string
): Promise<string | null> {
if (!projectRoot) {
throw new Error('projectRoot is required for isBranchCheckedOut');
}
if (!branchName) {
throw new Error('branchName is required for isBranchCheckedOut');
}
const worktrees = await getWorktrees(projectRoot);
const worktree = worktrees.find((wt) => wt.branch === branchName);
return worktree ? worktree.path : null;
}

View File

@@ -13,6 +13,40 @@ export {
getParentTaskId
} from './id-generator.js';
// Export git utilities
export {
isGitRepository,
isGitRepositorySync,
getCurrentBranch,
getCurrentBranchSync,
getLocalBranches,
getRemoteBranches,
isGhCliAvailable,
getGitHubRepoInfo,
getGitRepositoryRoot,
getDefaultBranch,
isOnDefaultBranch,
insideGitWorkTree,
sanitizeBranchNameForTag,
isValidBranchForTag,
type GitHubRepoInfo
} from './git-utils.js';
// Export path normalization utilities
export {
normalizeProjectPath,
denormalizeProjectPath,
isValidNormalizedPath
} from './path-normalizer.js';
// Export run ID generation utilities
export {
generateRunId,
isValidRunId,
parseRunId,
compareRunIds
} from './run-id-generator.js';
// Additional utility exports
/**

View File

@@ -0,0 +1,282 @@
import { describe, it, expect } from 'vitest';
import {
normalizeProjectPath,
denormalizeProjectPath,
isValidNormalizedPath
} from './path-normalizer.js';
// Behavioral spec for the base64url path-normalization utilities: pins the
// encoded form's filesystem safety, perfect round-trip fidelity (hyphens,
// Windows drive letters, Unicode, repeated slashes), and validation rules.
describe('Path Normalizer (base64url encoding)', () => {
	describe('normalizeProjectPath', () => {
		it('should encode Unix paths to base64url', () => {
			const input = '/Users/test/projects/myapp';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url (only A-Z, a-z, 0-9, -, _)
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
			// Should not contain slashes
			expect(normalized).not.toContain('/');
			expect(normalized).not.toContain('\\');
		});
		it('should encode Windows paths to base64url', () => {
			const input = 'C:\\Users\\test\\projects\\myapp';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
			expect(normalized).not.toContain('/');
			expect(normalized).not.toContain('\\');
		});
		it('should encode paths with hyphens (preserving them for round-trip)', () => {
			const input = '/projects/my-app';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
			// Hyphens in base64url are from encoding, not original path
			expect(isValidNormalizedPath(normalized)).toBe(true);
		});
		it('should encode paths with special characters', () => {
			const input = '/projects/myapp (v2)';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
		});
		it('should encode relative paths', () => {
			const input = './projects/app';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
		});
		it('should handle empty string', () => {
			const input = '';
			const expected = '';
			expect(normalizeProjectPath(input)).toBe(expected);
		});
		it('should encode single directory', () => {
			const input = 'project';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
		});
		it('should encode paths with multiple consecutive slashes', () => {
			const input = '/Users//test///project';
			const normalized = normalizeProjectPath(input);
			// Should be valid base64url
			expect(/^[A-Za-z0-9_-]+$/.test(normalized)).toBe(true);
		});
	});
	describe('denormalizeProjectPath', () => {
		it('should decode base64url back to original path', () => {
			const original = '/Users/test/projects/myapp';
			const normalized = normalizeProjectPath(original);
			const denormalized = denormalizeProjectPath(normalized);
			expect(denormalized).toBe(original);
		});
		it('should decode base64url for Windows paths', () => {
			const original = 'C:\\Users\\test\\project';
			const normalized = normalizeProjectPath(original);
			const denormalized = denormalizeProjectPath(normalized);
			expect(denormalized).toBe(original);
		});
		it('should handle empty string', () => {
			const input = '';
			const expected = '';
			expect(denormalizeProjectPath(input)).toBe(expected);
		});
		it('should preserve hyphens in directory names (no longer a limitation!)', () => {
			const original = '/projects/my-app';
			const normalized = normalizeProjectPath(original);
			const denormalized = denormalizeProjectPath(normalized);
			// With base64url, hyphens are preserved correctly
			expect(denormalized).toBe(original);
		});
		it('should handle invalid base64url gracefully', () => {
			// Invalid base64url - should return the input as fallback
			const invalid = 'not@valid#base64url';
			const result = denormalizeProjectPath(invalid);
			// Should return input unchanged for backward compatibility
			expect(result).toBe(invalid);
		});
	});
	describe('isValidNormalizedPath', () => {
		it('should return true for valid base64url strings', () => {
			// Valid base64url characters: A-Z, a-z, 0-9, -, _
			expect(isValidNormalizedPath('VXNlcnMtdGVzdC1wcm9qZWN0')).toBe(true);
			expect(isValidNormalizedPath('abc123_-ABC')).toBe(true);
		});
		it('should return true for base64url with hyphens and underscores', () => {
			expect(isValidNormalizedPath('test-path_encoded')).toBe(true);
		});
		it('should return false for paths with slashes', () => {
			expect(isValidNormalizedPath('Users/test/project')).toBe(false);
		});
		it('should return false for paths with backslashes', () => {
			expect(isValidNormalizedPath('Users\\test\\project')).toBe(false);
		});
		it('should return true for empty string', () => {
			expect(isValidNormalizedPath('')).toBe(true);
		});
		it('should return false for strings with special characters not in base64url', () => {
			// Base64url only allows: A-Z, a-z, 0-9, -, _
			expect(isValidNormalizedPath('my-app (v2)')).toBe(false); // parentheses and spaces not allowed
			expect(isValidNormalizedPath('test@example')).toBe(false); // @ not allowed
			expect(isValidNormalizedPath('test+value')).toBe(false); // + not allowed
		});
		it('should validate normalized paths correctly', () => {
			const path = '/Users/test/my-app';
			const normalized = normalizeProjectPath(path);
			expect(isValidNormalizedPath(normalized)).toBe(true);
		});
	});
	describe('Round-trip conversion', () => {
		it('should perfectly preserve ALL Unix paths (including those with hyphens)', () => {
			const originalPaths = [
				'/Users/test/projects/myapp',
				'/root/deep/nested/path',
				'./relative/path',
				'/projects/my-app', // Now works correctly!
				'/path/with-multiple-hyphens/in-names'
			];
			for (const original of originalPaths) {
				const normalized = normalizeProjectPath(original);
				const denormalized = denormalizeProjectPath(normalized);
				// Perfect round-trip with base64url encoding
				expect(denormalized).toBe(original);
			}
		});
		it('should perfectly preserve Windows paths (including drive letters)', () => {
			const originalPaths = [
				'C:\\Users\\test\\project',
				'D:\\Projects\\my-app',
				'E:\\path\\with-hyphens\\test'
			];
			for (const original of originalPaths) {
				const normalized = normalizeProjectPath(original);
				const denormalized = denormalizeProjectPath(normalized);
				// Perfect round-trip - drive letters and colons preserved
				expect(denormalized).toBe(original);
			}
		});
		it('should preserve paths with special characters', () => {
			const originalPaths = [
				'/projects/my app (v2)',
				'/path/with spaces/test',
				'/path/with-dashes-and_underscores',
				'/path/with.dots.and-dashes'
			];
			for (const original of originalPaths) {
				const normalized = normalizeProjectPath(original);
				const denormalized = denormalizeProjectPath(normalized);
				// Perfect round-trip for all special characters
				expect(denormalized).toBe(original);
			}
		});
		it('should handle mixed slashes and preserve exact path structure', () => {
			const original = '/Users/test\\mixed/path';
			const normalized = normalizeProjectPath(original);
			const denormalized = denormalizeProjectPath(normalized);
			// Exact preservation of mixed slashes
			expect(denormalized).toBe(original);
		});
		it('should preserve multiple consecutive slashes', () => {
			const original = '/Users//test///project';
			const normalized = normalizeProjectPath(original);
			const denormalized = denormalizeProjectPath(normalized);
			// Exact preservation of all slashes
			expect(denormalized).toBe(original);
		});
	});
	describe('Cross-platform consistency', () => {
		it('should produce filesystem-safe normalized output for all platforms', () => {
			const unixPath = '/Users/test/project';
			const windowsPath = 'C:\\Users\\test\\project';
			const normalizedUnix = normalizeProjectPath(unixPath);
			const normalizedWindows = normalizeProjectPath(windowsPath);
			// Both should be valid base64url (no slashes or backslashes)
			expect(normalizedUnix).not.toContain('/');
			expect(normalizedUnix).not.toContain('\\');
			expect(normalizedWindows).not.toContain('/');
			expect(normalizedWindows).not.toContain('\\');
			// Both should be valid base64url format
			expect(isValidNormalizedPath(normalizedUnix)).toBe(true);
			expect(isValidNormalizedPath(normalizedWindows)).toBe(true);
		});
		it('should produce different normalized outputs for different paths', () => {
			// Unix and Windows paths are different, so should produce different encoded values
			const unixPath = '/Users/test/project';
			const windowsPath = 'C:\\Users\\test\\project';
			const normalizedUnix = normalizeProjectPath(unixPath);
			const normalizedWindows = normalizeProjectPath(windowsPath);
			// Different inputs should produce different outputs
			expect(normalizedUnix).not.toBe(normalizedWindows);
			// But both should denormalize back to their originals
			expect(denormalizeProjectPath(normalizedUnix)).toBe(unixPath);
			expect(denormalizeProjectPath(normalizedWindows)).toBe(windowsPath);
		});
		it('should handle Unicode characters in paths', () => {
			const unicodePaths = [
				'/Users/测试/project',
				'/Users/test/プロジェクト',
				'/Users/тест/project'
			];
			for (const original of unicodePaths) {
				const normalized = normalizeProjectPath(original);
				const denormalized = denormalizeProjectPath(normalized);
				// Perfect round-trip for Unicode
				expect(denormalized).toBe(original);
				expect(isValidNormalizedPath(normalized)).toBe(true);
			}
		});
	});
});

View File

@@ -0,0 +1,76 @@
/**
* Path normalization utilities for global storage system.
* Converts project paths to storage-safe directory names using base64url encoding.
*
* This provides a bijective (one-to-one) mapping that preserves all characters
* and supports perfect round-trip conversion between paths and storage names.
*
* @module path-normalizer
*/
/**
 * Normalizes a project path to a storage-safe directory name.
 *
 * The path's UTF-8 bytes are base64url-encoded, which is filesystem-safe
 * (no slashes, backslashes, or special characters) and fully reversible —
 * hyphens and every other character round-trip exactly.
 *
 * @param {string} projectPath - The project path to normalize
 * @returns {string} The base64url-encoded path safe for use as a directory name
 *
 * @example
 * normalizeProjectPath('/Users/test/project') // base64url encoded string
 * normalizeProjectPath('C:\\Users\\test')     // base64url encoded string
 * normalizeProjectPath('/projects/my-app')    // hyphens preserved on decode
 */
export function normalizeProjectPath(projectPath: string): string {
	if (!projectPath) {
		return '';
	}
	// base64url: filesystem-safe and fully reversible
	const bytes = Buffer.from(projectPath, 'utf-8');
	return bytes.toString('base64url');
}
/**
 * Denormalizes a storage directory name back to the original path.
 * Decodes base64url-encoded paths with perfect fidelity; inputs that are not
 * valid base64url are returned unchanged (backward compatibility).
 *
 * @param {string} normalizedPath - The base64url-encoded path to decode
 * @returns {string} The original path with all characters preserved
 *
 * @example
 * denormalizeProjectPath(normalizeProjectPath('/projects/my-app')) // '/projects/my-app'
 */
export function denormalizeProjectPath(normalizedPath: string): string {
	if (!normalizedPath) {
		return '';
	}
	if (!isValidNormalizedPath(normalizedPath)) {
		// Not base64url — pass through unchanged rather than throwing
		return normalizedPath;
	}
	const decoded = Buffer.from(normalizedPath, 'base64url');
	return decoded.toString('utf-8');
}
/**
 * Validates whether a path is in normalized (base64url) format.
 * Valid base64url strings contain only: A-Z, a-z, 0-9, -, _
 * The empty string is considered valid (it is the encoding of '').
 *
 * @param {string} path - The path to validate
 * @returns {boolean} True if the path is in normalized base64url format
 *
 * @example
 * isValidNormalizedPath('VXNlcnMvdGVzdC9wcm9qZWN0') // true
 * isValidNormalizedPath('Users/test/project')       // false (contains slashes)
 */
export function isValidNormalizedPath(path: string): boolean {
	// Empty string is valid; otherwise every character must be base64url
	return path === '' || /^[A-Za-z0-9_-]+$/.test(path);
}

View File

@@ -0,0 +1,266 @@
import { describe, it, expect } from 'vitest';
import {
generateRunId,
isValidRunId,
parseRunId,
compareRunIds
} from './run-id-generator.js';
// Behavioral spec for run ID generation: pins the ISO 8601 millisecond
// format, uniqueness under rapid generation, strict validation/parsing,
// and lexicographic == chronological ordering for compareRunIds.
describe('Run ID Generator', () => {
	describe('generateRunId', () => {
		it('should generate a valid ISO 8601 timestamp-based ID', () => {
			const runId = generateRunId();
			// Should be in ISO 8601 format with milliseconds
			expect(runId).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
		});
		it('should generate unique IDs when called multiple times', () => {
			const id1 = generateRunId();
			const id2 = generateRunId();
			const id3 = generateRunId();
			expect(id1).not.toBe(id2);
			expect(id2).not.toBe(id3);
			expect(id1).not.toBe(id3);
		});
		it('should generate chronologically ordered IDs', () => {
			const id1 = generateRunId();
			// Small delay to ensure different timestamp
			const id2 = generateRunId();
			expect(id2 > id1).toBe(true);
		});
		it('should use current time by default', () => {
			const before = new Date().toISOString();
			const runId = generateRunId();
			const after = new Date().toISOString();
			expect(runId >= before).toBe(true);
			expect(runId <= after).toBe(true);
		});
		it('should accept custom Date object', () => {
			const customDate = new Date('2024-01-15T10:30:45.123Z');
			const runId = generateRunId(customDate);
			expect(runId).toBe('2024-01-15T10:30:45.123Z');
		});
		it('should handle date at year boundary', () => {
			const newYear = new Date('2025-01-01T00:00:00.000Z');
			const runId = generateRunId(newYear);
			expect(runId).toBe('2025-01-01T00:00:00.000Z');
		});
		it('should handle millisecond precision correctly', () => {
			const dateWithMs = new Date('2024-03-15T14:22:33.999Z');
			const runId = generateRunId(dateWithMs);
			expect(runId).toBe('2024-03-15T14:22:33.999Z');
		});
	});
	describe('isValidRunId', () => {
		it('should return true for valid ISO 8601 timestamp', () => {
			expect(isValidRunId('2024-01-15T10:30:45.123Z')).toBe(true);
		});
		it('should return true for generated run IDs', () => {
			const runId = generateRunId();
			expect(isValidRunId(runId)).toBe(true);
		});
		it('should return false for invalid format', () => {
			expect(isValidRunId('not-a-timestamp')).toBe(false);
			expect(isValidRunId('2024-01-15')).toBe(false);
			expect(isValidRunId('2024-01-15T10:30:45')).toBe(false); // missing Z
			expect(isValidRunId('2024-01-15 10:30:45.123Z')).toBe(false); // space instead of T
		});
		it('should return false for empty string', () => {
			expect(isValidRunId('')).toBe(false);
		});
		it('should return false for null or undefined', () => {
			expect(isValidRunId(null)).toBe(false);
			expect(isValidRunId(undefined)).toBe(false);
		});
		it('should return false for invalid dates', () => {
			expect(isValidRunId('2024-13-01T10:30:45.123Z')).toBe(false); // invalid month
			expect(isValidRunId('2024-01-32T10:30:45.123Z')).toBe(false); // invalid day
			expect(isValidRunId('2024-01-15T25:30:45.123Z')).toBe(false); // invalid hour
		});
		it('should return true for edge case valid dates', () => {
			expect(isValidRunId('2024-02-29T23:59:59.999Z')).toBe(true); // leap year
			expect(isValidRunId('2025-01-01T00:00:00.000Z')).toBe(true); // year boundary
		});
		it('should return false for missing milliseconds', () => {
			expect(isValidRunId('2024-01-15T10:30:45Z')).toBe(false);
		});
		it('should return false for non-UTC timezone', () => {
			expect(isValidRunId('2024-01-15T10:30:45.123+01:00')).toBe(false);
		});
	});
	describe('parseRunId', () => {
		it('should parse valid run ID to Date object', () => {
			const runId = '2024-01-15T10:30:45.123Z';
			const date = parseRunId(runId);
			expect(date).toBeInstanceOf(Date);
			expect(date?.toISOString()).toBe(runId);
		});
		it('should parse generated run ID', () => {
			const originalDate = new Date('2024-03-20T15:45:30.500Z');
			const runId = generateRunId(originalDate);
			const parsedDate = parseRunId(runId);
			expect(parsedDate?.getTime()).toBe(originalDate.getTime());
		});
		it('should return null for invalid run ID', () => {
			expect(parseRunId('invalid')).toBe(null);
			expect(parseRunId('')).toBe(null);
			expect(parseRunId(null)).toBe(null);
			expect(parseRunId(undefined)).toBe(null);
		});
		it('should handle edge case dates correctly', () => {
			const leapYear = '2024-02-29T12:00:00.000Z';
			const parsed = parseRunId(leapYear);
			expect(parsed?.toISOString()).toBe(leapYear);
		});
	});
	describe('compareRunIds', () => {
		it('should return negative when first ID is earlier', () => {
			const earlier = '2024-01-15T10:00:00.000Z';
			const later = '2024-01-15T11:00:00.000Z';
			expect(compareRunIds(earlier, later)).toBeLessThan(0);
		});
		it('should return positive when first ID is later', () => {
			const earlier = '2024-01-15T10:00:00.000Z';
			const later = '2024-01-15T11:00:00.000Z';
			expect(compareRunIds(later, earlier)).toBeGreaterThan(0);
		});
		it('should return zero when IDs are equal', () => {
			const runId = '2024-01-15T10:00:00.000Z';
			expect(compareRunIds(runId, runId)).toBe(0);
		});
		it('should handle millisecond differences', () => {
			const id1 = '2024-01-15T10:00:00.100Z';
			const id2 = '2024-01-15T10:00:00.200Z';
			expect(compareRunIds(id1, id2)).toBeLessThan(0);
			expect(compareRunIds(id2, id1)).toBeGreaterThan(0);
		});
		it('should handle cross-day comparisons', () => {
			const yesterday = '2024-01-14T23:59:59.999Z';
			const today = '2024-01-15T00:00:00.000Z';
			expect(compareRunIds(yesterday, today)).toBeLessThan(0);
		});
		it('should handle cross-year comparisons', () => {
			const lastYear = '2023-12-31T23:59:59.999Z';
			const thisYear = '2024-01-01T00:00:00.000Z';
			expect(compareRunIds(lastYear, thisYear)).toBeLessThan(0);
		});
		it('should throw error for invalid run IDs', () => {
			const valid = '2024-01-15T10:00:00.000Z';
			expect(() => compareRunIds('invalid', valid)).toThrow();
			expect(() => compareRunIds(valid, 'invalid')).toThrow();
			expect(() => compareRunIds('invalid', 'invalid')).toThrow();
		});
	});
	describe('Collision detection', () => {
		it('should generate different IDs in rapid succession', () => {
			const ids = new Set();
			const count = 100;
			for (let i = 0; i < count; i++) {
				ids.add(generateRunId());
			}
			// All IDs should be unique
			expect(ids.size).toBe(count);
		});
		it('should handle high-frequency generation', () => {
			const ids = [];
			const iterations = 1000;
			for (let i = 0; i < iterations; i++) {
				ids.push(generateRunId());
			}
			// Check uniqueness
			const uniqueIds = new Set(ids);
			expect(uniqueIds.size).toBe(iterations);
			// Check chronological order
			for (let i = 1; i < ids.length; i++) {
				expect(compareRunIds(ids[i - 1], ids[i])).toBeLessThanOrEqual(0);
			}
		});
	});
	describe('Chronological ordering', () => {
		it('should allow sorting run IDs chronologically', () => {
			const ids = [
				'2024-01-15T14:00:00.000Z',
				'2024-01-15T10:00:00.000Z',
				'2024-01-15T12:00:00.000Z',
				'2024-01-14T23:00:00.000Z',
				'2024-01-16T08:00:00.000Z'
			];
			const sorted = [...ids].sort(compareRunIds);
			expect(sorted).toEqual([
				'2024-01-14T23:00:00.000Z',
				'2024-01-15T10:00:00.000Z',
				'2024-01-15T12:00:00.000Z',
				'2024-01-15T14:00:00.000Z',
				'2024-01-16T08:00:00.000Z'
			]);
		});
		it('should handle reverse chronological sorting', () => {
			const ids = [
				'2024-01-15T10:00:00.000Z',
				'2024-01-15T14:00:00.000Z',
				'2024-01-15T12:00:00.000Z'
			];
			const sorted = [...ids].sort((a, b) => compareRunIds(b, a));
			expect(sorted).toEqual([
				'2024-01-15T14:00:00.000Z',
				'2024-01-15T12:00:00.000Z',
				'2024-01-15T10:00:00.000Z'
			]);
		});
	});
});

View File

@@ -0,0 +1,129 @@
/**
* Run ID generation and validation utilities for the global storage system.
* Uses ISO 8601 timestamps with millisecond precision for unique, chronologically-ordered run IDs.
*
* @module run-id-generator
*/
// Collision detection state: last millisecond timestamp handed out for an
// auto-generated run ID (explicit dates bypass this entirely)
let lastTimestamp = 0;
/**
 * Generates a unique run ID using ISO 8601 timestamp format with millisecond precision.
 * The ID is guaranteed to be chronologically sortable and URL-safe.
 *
 * When no date is supplied, collision detection ensures uniqueness even for
 * calls within the same millisecond (by waiting for the clock to advance).
 * When an explicit date IS supplied, it is honored verbatim so the function
 * is deterministic for that input — previously a repeated explicit date
 * tripped the collision branch and silently returned the CURRENT time
 * instead of the requested one.
 *
 * @param {Date} [date] - Optional date to use for the run ID. Defaults to current time.
 * @returns {string} ISO 8601 formatted timestamp (e.g., '2024-01-15T10:30:45.123Z')
 *
 * @example
 * generateRunId() // returns '2024-01-15T10:30:45.123Z'
 * generateRunId(new Date('2024-01-15T10:00:00.000Z')) // returns '2024-01-15T10:00:00.000Z'
 */
export function generateRunId(date?: Date): string {
	// Explicit dates are deterministic: same input, same ID, no side effects
	if (date !== undefined) {
		return date.toISOString();
	}
	let now = Date.now();
	// If an ID was already issued for this millisecond, spin (at most ~1ms)
	// until the clock advances so consecutive auto-generated IDs are unique
	while (now === lastTimestamp) {
		now = Date.now();
	}
	lastTimestamp = now;
	return new Date(now).toISOString();
}
/**
 * Validates whether a string is a valid run ID.
 * A valid run ID must be:
 * - In ISO 8601 format with milliseconds
 * - In UTC timezone (ends with 'Z')
 * - A valid date when parsed
 *
 * @param {any} runId - The value to validate
 * @returns {boolean} True if the value is a valid run ID
 *
 * @example
 * isValidRunId('2024-01-15T10:30:45.123Z') // returns true
 * isValidRunId('invalid') // returns false
 * isValidRunId('2024-01-15T10:30:45Z') // returns false (missing milliseconds)
 */
export function isValidRunId(runId: any): boolean {
	if (typeof runId !== 'string') {
		return false;
	}
	// Shape check: YYYY-MM-DDTHH:mm:ss.sssZ (UTC with millisecond precision)
	if (!/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/.test(runId)) {
		return false;
	}
	const parsed = new Date(runId);
	if (Number.isNaN(parsed.getTime())) {
		return false;
	}
	// Round-trip guard rejects well-formed but impossible dates (e.g. month 13)
	return parsed.toISOString() === runId;
}
/**
* Parses a run ID string into a Date object.
*
* @param {any} runId - The run ID to parse
* @returns {Date | null} Date object if valid, null if invalid
*
* @example
* parseRunId('2024-01-15T10:30:45.123Z') // returns Date object
* parseRunId('invalid') // returns null
*/
export function parseRunId(runId: any): Date | null {
if (!isValidRunId(runId)) {
return null;
}
return new Date(runId);
}
/**
* Compares two run IDs chronologically.
* Returns a negative number if id1 is earlier, positive if id1 is later, or 0 if equal.
* Can be used as a comparator function for Array.sort().
*
* @param {string} id1 - First run ID to compare
* @param {string} id2 - Second run ID to compare
* @returns {number} Negative if id1 < id2, positive if id1 > id2, zero if equal
* @throws {Error} If either run ID is invalid
*
* @example
* compareRunIds('2024-01-15T10:00:00.000Z', '2024-01-15T11:00:00.000Z') // returns negative number
* ['2024-01-15T14:00:00.000Z', '2024-01-15T10:00:00.000Z'].sort(compareRunIds)
* // returns ['2024-01-15T10:00:00.000Z', '2024-01-15T14:00:00.000Z']
*/
export function compareRunIds(id1: string, id2: string): number {
if (!isValidRunId(id1)) {
throw new Error(`Invalid run ID: ${id1}`);
}
if (!isValidRunId(id2)) {
throw new Error(`Invalid run ID: ${id2}`);
}
// String comparison works for ISO 8601 timestamps
// because they are lexicographically sortable
if (id1 < id2) return -1;
if (id1 > id2) return 1;
return 0;
}

View File

@@ -0,0 +1,150 @@
/**
 * Top-level workflow phases, listed in execution order:
 * PREFLIGHT -> BRANCH_SETUP -> SUBTASK_LOOP -> FINALIZE -> COMPLETE.
 */
export type WorkflowPhase =
	| 'PREFLIGHT'
	| 'BRANCH_SETUP'
	| 'SUBTASK_LOOP'
	| 'FINALIZE'
	| 'COMPLETE';
/**
 * TDD cycle phases within the SUBTASK_LOOP phase (RED -> GREEN -> COMMIT).
 */
export type TDDPhase = 'RED' | 'GREEN' | 'COMMIT';
/**
 * Mutable context carried through a workflow run (serialized for persistence).
 */
export interface WorkflowContext {
	taskId: string;
	subtasks: SubtaskInfo[];
	// Index into `subtasks` of the subtask currently being worked on.
	currentSubtaskIndex: number;
	// Only meaningful while the workflow is in SUBTASK_LOOP.
	currentTDDPhase?: TDDPhase;
	branchName?: string;
	// Accumulated errors; never cleared during a run.
	errors: WorkflowError[];
	metadata: Record<string, unknown>;
	// Results of the most recent RED/GREEN test run, if any.
	lastTestResults?: TestResult;
}
/**
 * Aggregate counts from one test execution, tagged with the TDD phase it ran in.
 */
export interface TestResult {
	total: number;
	passed: number;
	failed: number;
	skipped: number;
	phase: 'RED' | 'GREEN';
}
/**
 * Per-subtask tracking information.
 */
export interface SubtaskInfo {
	id: string;
	title: string;
	status: 'pending' | 'in-progress' | 'completed' | 'failed';
	// Number of attempts made so far; compared against maxAttempts when set.
	attempts: number;
	maxAttempts?: number;
}
/**
 * Error recorded against a workflow run, tagged with the phase it occurred in.
 */
export interface WorkflowError {
	phase: WorkflowPhase;
	message: string;
	timestamp: Date;
	// Whether the workflow may continue (e.g. via RETRY) after this error.
	recoverable: boolean;
}
/**
 * Serializable snapshot of the state machine (phase + context).
 */
export interface WorkflowState {
	phase: WorkflowPhase;
	context: WorkflowContext;
}
/**
 * Events accepted by WorkflowOrchestrator.transition().
 * ERROR / RETRY / ABORT are handled in any phase; the rest drive
 * the main phase transitions and the TDD cycle.
 */
export type WorkflowEvent =
	| { type: 'PREFLIGHT_COMPLETE' }
	| { type: 'BRANCH_CREATED'; branchName: string }
	| { type: 'SUBTASK_START'; subtaskId: string }
	| { type: 'RED_PHASE_COMPLETE'; testResults?: TestResult }
	| { type: 'GREEN_PHASE_COMPLETE'; testResults?: TestResult }
	| { type: 'COMMIT_COMPLETE' }
	| { type: 'SUBTASK_COMPLETE' }
	| { type: 'ALL_SUBTASKS_COMPLETE' }
	| { type: 'FINALIZE_COMPLETE' }
	| { type: 'ERROR'; error: WorkflowError }
	| { type: 'RETRY' }
	| { type: 'ABORT' };
/**
 * One edge in the state machine: `event` moves the workflow from
 * `from` to `to`, optionally gated by `guard`.
 */
export interface StateTransition {
	from: WorkflowPhase;
	to: WorkflowPhase;
	event: WorkflowEvent['type'];
	guard?: (context: WorkflowContext) => boolean;
}
/**
 * State machine configuration.
 */
export interface StateMachineConfig {
	initialPhase: WorkflowPhase;
	transitions: StateTransition[];
}
/**
 * Callback invoked (synchronously) for each emitted workflow event.
 */
export type WorkflowEventListener = (event: WorkflowEventData) => void;
/**
 * Payload delivered to workflow event listeners.
 */
export interface WorkflowEventData {
	type: WorkflowEventType;
	timestamp: Date;
	phase: WorkflowPhase;
	tddPhase?: TDDPhase;
	subtaskId?: string;
	// Event-specific extra fields (e.g. branchName, testResults, progress).
	data?: Record<string, unknown>;
}
/**
 * All observable workflow event types.
 * NOTE: WORKFLOW_EVENT_TYPES in workflow-activity-logger mirrors this union
 * by hand; keep the two in sync when adding members.
 */
export type WorkflowEventType =
	| 'workflow:started'
	| 'workflow:completed'
	| 'workflow:error'
	| 'workflow:resumed'
	| 'phase:entered'
	| 'phase:exited'
	| 'tdd:feature-already-implemented'
	| 'tdd:red:started'
	| 'tdd:red:completed'
	| 'tdd:green:started'
	| 'tdd:green:completed'
	| 'tdd:commit:started'
	| 'tdd:commit:completed'
	| 'subtask:started'
	| 'subtask:completed'
	| 'subtask:failed'
	| 'test:run'
	| 'test:passed'
	| 'test:failed'
	| 'git:branch:created'
	| 'git:commit:created'
	| 'error:occurred'
	| 'state:persisted'
	| 'progress:updated'
	| 'adapter:configured';

View File

@@ -0,0 +1,152 @@
/**
* @fileoverview WorkflowActivityLogger - Logs all workflow events to activity.jsonl
*
* Subscribes to all WorkflowOrchestrator events and persists them to a JSONL file
* for debugging, auditing, and workflow analysis.
*/
import type { WorkflowOrchestrator } from './workflow-orchestrator.js';
import type { WorkflowEventData, WorkflowEventType } from './types.js';
import { logActivity, type ActivityEvent } from '../storage/activity-logger.js';
import { getLogger } from '../logger/index.js';
/**
 * All workflow event types that should be logged.
 *
 * NOTE(review): this runtime list mirrors the WorkflowEventType union by hand.
 * The annotation only guarantees each entry is a valid member — a member added
 * to the union but omitted here will NOT be flagged by the compiler, so keep
 * the two in sync manually.
 */
const WORKFLOW_EVENT_TYPES: WorkflowEventType[] = [
	'workflow:started',
	'workflow:completed',
	'workflow:error',
	'workflow:resumed',
	'phase:entered',
	'phase:exited',
	'tdd:feature-already-implemented',
	'tdd:red:started',
	'tdd:red:completed',
	'tdd:green:started',
	'tdd:green:completed',
	'tdd:commit:started',
	'tdd:commit:completed',
	'subtask:started',
	'subtask:completed',
	'subtask:failed',
	'test:run',
	'test:passed',
	'test:failed',
	'git:branch:created',
	'git:commit:created',
	'error:occurred',
	'state:persisted',
	'progress:updated',
	'adapter:configured'
];
/**
 * Logs all workflow events to an activity.jsonl file.
 *
 * Attach with start(); detach with stop(). Logging failures are swallowed
 * (logged via the internal logger) so activity logging can never break the
 * workflow itself.
 */
export class WorkflowActivityLogger {
	private readonly activityLogPath: string;
	private readonly orchestrator: WorkflowOrchestrator;
	private readonly logger = getLogger('WorkflowActivityLogger');
	// One listener per event type, retained so stop() can detach exactly
	// what start() attached.
	private readonly listenerMap: Map<
		WorkflowEventType,
		(event: WorkflowEventData) => void
	> = new Map();
	private isActive = false;

	constructor(orchestrator: WorkflowOrchestrator, activityLogPath: string) {
		this.orchestrator = orchestrator;
		this.activityLogPath = activityLogPath;
	}

	/**
	 * Start logging workflow events.
	 * Idempotent: a second call only logs a warning.
	 */
	start(): void {
		if (this.isActive) {
			this.logger.warn('Activity logger is already active');
			return;
		}
		// Subscribe to all workflow events, storing listener references for cleanup
		WORKFLOW_EVENT_TYPES.forEach((eventType) => {
			const listener = (event: WorkflowEventData) => {
				// Fire-and-forget: logEvent catches and reports its own errors.
				void this.logEvent(event);
			};
			this.listenerMap.set(eventType, listener);
			this.orchestrator.on(eventType, listener);
		});
		this.isActive = true;
		this.logger.debug(
			`Activity logger started, logging to: ${this.activityLogPath}`
		);
	}

	/**
	 * Stop logging workflow events and remove all listeners.
	 * Safe to call when not active (no-op).
	 */
	stop(): void {
		if (!this.isActive) {
			return;
		}
		// Remove all registered listeners
		this.listenerMap.forEach((listener, eventType) => {
			this.orchestrator.off(eventType, listener);
		});
		// Clear the listener map
		this.listenerMap.clear();
		this.isActive = false;
		this.logger.debug('Activity logger stopped and listeners removed');
	}

	/**
	 * Log a single workflow event to the activity log.
	 * Never throws: failures are reported through the internal logger only.
	 */
	private async logEvent(event: WorkflowEventData): Promise<void> {
		if (!this.isActive) {
			return;
		}
		try {
			// Timestamps arrive as Date objects from live events, but may be
			// strings/numbers when events come from deserialized state — handle both.
			const raw = event.timestamp as Date | string | number;
			const ts = raw instanceof Date ? raw.toISOString() : new Date(raw).toISOString();
			// Convert WorkflowEventData to ActivityEvent format
			const activityEvent: Omit<ActivityEvent, 'timestamp'> = {
				type: event.type,
				phase: event.phase,
				tddPhase: event.tddPhase,
				subtaskId: event.subtaskId,
				// Event timestamp kept as ISO for readability; storage layer adds its own "timestamp"
				eventTimestamp: ts,
				...(event.data || {})
			};
			await logActivity(this.activityLogPath, activityEvent);
		} catch (error: unknown) {
			// Log errors but don't throw - we don't want activity logging to break the workflow
			const message = error instanceof Error ? error.message : String(error);
			this.logger.error(
				`Failed to log activity event ${event.type}: ${message}`
			);
		}
	}

	/**
	 * Get the path to the activity log file.
	 */
	getActivityLogPath(): string {
		return this.activityLogPath;
	}

	/**
	 * Check if the logger is currently active.
	 */
	isLogging(): boolean {
		return this.isActive;
	}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,698 @@
import type {
WorkflowPhase,
TDDPhase,
WorkflowContext,
WorkflowEvent,
WorkflowState,
StateTransition,
WorkflowEventType,
WorkflowEventData,
WorkflowEventListener,
SubtaskInfo
} from './types.js';
import type { TestResultValidator } from '../services/test-result-validator.js';
/**
 * Lightweight state machine for TDD workflow orchestration.
 *
 * Main phases: PREFLIGHT -> BRANCH_SETUP -> SUBTASK_LOOP -> FINALIZE -> COMPLETE.
 * Within SUBTASK_LOOP each subtask cycles through the TDD phases
 * RED -> GREEN -> COMMIT. Listeners registered via on() are invoked
 * synchronously when events are emitted.
 */
export class WorkflowOrchestrator {
	private currentPhase: WorkflowPhase;
	private context: WorkflowContext;
	private readonly transitions: StateTransition[];
	private readonly eventListeners: Map<
		WorkflowEventType,
		Set<WorkflowEventListener>
	>;
	// Optional persistence callback; when autoPersistEnabled it runs after
	// every transition (fire-and-forget, see triggerAutoPersist).
	private persistCallback?: (state: WorkflowState) => void | Promise<void>;
	private autoPersistEnabled: boolean = false;
	// Per-phase entry guards, checked in addition to per-transition guards.
	private readonly phaseGuards: Map<
		WorkflowPhase,
		(context: WorkflowContext) => boolean
	>;
	private aborted: boolean = false;
	// Optional adapters/hooks; their presence is advertised on every emitted event.
	private testResultValidator?: TestResultValidator;
	private gitOperationHook?: (operation: string, data?: unknown) => void;
	private executeHook?: (command: string, context: WorkflowContext) => void;
	constructor(initialContext: WorkflowContext) {
		this.currentPhase = 'PREFLIGHT';
		this.context = { ...initialContext };
		this.transitions = this.defineTransitions();
		this.eventListeners = new Map();
		this.phaseGuards = new Map();
	}
	/**
	 * Define valid state transitions for the main (non-TDD) workflow phases.
	 * TDD-cycle transitions inside SUBTASK_LOOP are handled separately in
	 * handleTDDPhaseTransition().
	 */
	private defineTransitions(): StateTransition[] {
		return [
			{
				from: 'PREFLIGHT',
				to: 'BRANCH_SETUP',
				event: 'PREFLIGHT_COMPLETE'
			},
			{
				from: 'BRANCH_SETUP',
				to: 'SUBTASK_LOOP',
				event: 'BRANCH_CREATED'
			},
			{
				from: 'SUBTASK_LOOP',
				to: 'FINALIZE',
				event: 'ALL_SUBTASKS_COMPLETE'
			},
			{
				from: 'FINALIZE',
				to: 'COMPLETE',
				event: 'FINALIZE_COMPLETE'
			}
		];
	}
	/**
	 * Get current workflow phase
	 */
	getCurrentPhase(): WorkflowPhase {
		return this.currentPhase;
	}
	/**
	 * Get current TDD phase (only valid in SUBTASK_LOOP; defaults to RED
	 * when unset). Returns undefined in any other phase.
	 */
	getCurrentTDDPhase(): TDDPhase | undefined {
		if (this.currentPhase === 'SUBTASK_LOOP') {
			return this.context.currentTDDPhase || 'RED';
		}
		return undefined;
	}
	/**
	 * Get a shallow copy of the workflow context.
	 */
	getContext(): WorkflowContext {
		return { ...this.context };
	}
	/**
	 * Transition to next state based on event.
	 *
	 * Throws on invalid transitions and on any non-ABORT event after the
	 * workflow has been aborted. Auto-persistence (when enabled) runs
	 * fire-and-forget after each handled event.
	 */
	transition(event: WorkflowEvent): void {
		// Check if workflow is aborted
		if (this.aborted && event.type !== 'ABORT') {
			throw new Error('Workflow has been aborted');
		}
		// Handle special events that work across all phases
		if (event.type === 'ERROR') {
			this.handleError(event.error);
			void this.triggerAutoPersist();
			return;
		}
		if (event.type === 'ABORT') {
			this.aborted = true;
			void this.triggerAutoPersist();
			return;
		}
		if (event.type === 'RETRY') {
			this.handleRetry();
			void this.triggerAutoPersist();
			return;
		}
		// Handle TDD phase transitions within SUBTASK_LOOP
		if (this.currentPhase === 'SUBTASK_LOOP') {
			this.handleTDDPhaseTransition(event);
			void this.triggerAutoPersist();
			return;
		}
		// Handle main workflow phase transitions
		const validTransition = this.transitions.find(
			(t) => t.from === this.currentPhase && t.event === event.type
		);
		if (!validTransition) {
			throw new Error(
				`Invalid transition: ${event.type} from ${this.currentPhase}`
			);
		}
		// Execute transition
		this.executeTransition(validTransition, event);
		void this.triggerAutoPersist();
	}
	/**
	 * Handle TDD phase transitions (RED -> GREEN -> COMMIT).
	 *
	 * NOTE: the `const` declarations in the COMMIT_COMPLETE and
	 * SUBTASK_COMPLETE cases are scoped to the whole switch (no case braces);
	 * they don't clash only because each name is declared in a single case.
	 */
	private handleTDDPhaseTransition(event: WorkflowEvent): void {
		const currentTDD = this.context.currentTDDPhase || 'RED';
		switch (event.type) {
			case 'RED_PHASE_COMPLETE':
				if (currentTDD !== 'RED') {
					throw new Error(
						'Invalid transition: RED_PHASE_COMPLETE from non-RED phase'
					);
				}
				// Validate test results are provided
				if (!event.testResults) {
					throw new Error('Test results required for RED phase transition');
				}
				// Store test results in context
				this.context.lastTestResults = event.testResults;
				// Special case: All tests passing in RED phase means feature already implemented
				if (event.testResults.failed === 0) {
					this.emit('tdd:red:completed');
					this.emit('tdd:feature-already-implemented', {
						subtaskId: this.getCurrentSubtaskId(),
						testResults: event.testResults
					});
					// Mark subtask as complete and move to next one
					const subtask =
						this.context.subtasks[this.context.currentSubtaskIndex];
					if (subtask) {
						subtask.status = 'completed';
					}
					this.emit('subtask:completed');
					this.context.currentSubtaskIndex++;
					// Emit progress update
					const progress = this.getProgress();
					this.emit('progress:updated', {
						completed: progress.completed,
						total: progress.total,
						percentage: progress.percentage
					});
					// Start next subtask or complete workflow
					if (this.context.currentSubtaskIndex < this.context.subtasks.length) {
						this.context.currentTDDPhase = 'RED';
						this.emit('tdd:red:started');
						this.emit('subtask:started');
					} else {
						// All subtasks complete, transition to FINALIZE
						this.transition({ type: 'ALL_SUBTASKS_COMPLETE' });
					}
					break;
				}
				// Normal RED phase: has failing tests, proceed to GREEN
				this.emit('tdd:red:completed');
				this.context.currentTDDPhase = 'GREEN';
				this.emit('tdd:green:started');
				break;
			case 'GREEN_PHASE_COMPLETE':
				if (currentTDD !== 'GREEN') {
					throw new Error(
						'Invalid transition: GREEN_PHASE_COMPLETE from non-GREEN phase'
					);
				}
				// Validate test results are provided
				if (!event.testResults) {
					throw new Error('Test results required for GREEN phase transition');
				}
				// Validate GREEN phase has no failures
				if (event.testResults.failed !== 0) {
					throw new Error('GREEN phase must have zero failures');
				}
				// Store test results in context
				this.context.lastTestResults = event.testResults;
				this.emit('tdd:green:completed');
				this.context.currentTDDPhase = 'COMMIT';
				this.emit('tdd:commit:started');
				break;
			case 'COMMIT_COMPLETE':
				if (currentTDD !== 'COMMIT') {
					throw new Error(
						'Invalid transition: COMMIT_COMPLETE from non-COMMIT phase'
					);
				}
				this.emit('tdd:commit:completed');
				// Mark current subtask as completed
				const currentSubtask =
					this.context.subtasks[this.context.currentSubtaskIndex];
				if (currentSubtask) {
					currentSubtask.status = 'completed';
				}
				break;
			case 'SUBTASK_COMPLETE':
				this.emit('subtask:completed');
				// Move to next subtask
				this.context.currentSubtaskIndex++;
				// Emit progress update
				const progress = this.getProgress();
				this.emit('progress:updated', {
					completed: progress.completed,
					total: progress.total,
					percentage: progress.percentage
				});
				if (this.context.currentSubtaskIndex < this.context.subtasks.length) {
					// Start next subtask with RED phase
					this.context.currentTDDPhase = 'RED';
					this.emit('tdd:red:started');
					this.emit('subtask:started');
				} else {
					// All subtasks complete, transition to FINALIZE
					this.transition({ type: 'ALL_SUBTASKS_COMPLETE' });
				}
				break;
			case 'ALL_SUBTASKS_COMPLETE':
				// Transition to FINALIZE phase
				this.emit('phase:exited');
				this.currentPhase = 'FINALIZE';
				this.context.currentTDDPhase = undefined;
				this.emit('phase:entered');
				// Note: Don't auto-transition to COMPLETE - requires explicit finalize call
				break;
			default:
				throw new Error(`Invalid transition: ${event.type} in SUBTASK_LOOP`);
		}
	}
	/**
	 * Execute a state transition: check guards, emit exit/entry events,
	 * update context, and initialize the TDD cycle when entering SUBTASK_LOOP.
	 */
	private executeTransition(
		transition: StateTransition,
		event: WorkflowEvent
	): void {
		// Check guard condition if present
		if (transition.guard && !transition.guard(this.context)) {
			throw new Error(
				`Guard condition failed for transition to ${transition.to}`
			);
		}
		// Check phase-specific guard if present
		const phaseGuard = this.phaseGuards.get(transition.to);
		if (phaseGuard && !phaseGuard(this.context)) {
			throw new Error('Guard condition failed');
		}
		// Emit phase exit event
		this.emit('phase:exited');
		// Update context based on event
		this.updateContext(event);
		// Transition to new phase
		this.currentPhase = transition.to;
		// Emit phase entry event
		this.emit('phase:entered');
		// Initialize TDD phase if entering SUBTASK_LOOP
		if (this.currentPhase === 'SUBTASK_LOOP') {
			this.context.currentTDDPhase = 'RED';
			this.emit('tdd:red:started');
			this.emit('subtask:started');
		}
	}
	/**
	 * Update context based on event (branch names, recorded errors, hooks).
	 */
	private updateContext(event: WorkflowEvent): void {
		switch (event.type) {
			case 'BRANCH_CREATED':
				this.context.branchName = event.branchName;
				this.emit('git:branch:created', { branchName: event.branchName });
				// Trigger git operation hook
				if (this.gitOperationHook) {
					this.gitOperationHook('branch:created', {
						branchName: event.branchName
					});
				}
				break;
			case 'ERROR':
				this.context.errors.push(event.error);
				this.emit('error:occurred', { error: event.error });
				break;
			// Add more context updates as needed
		}
	}
	/**
	 * Get current state for serialization
	 */
	getState(): WorkflowState {
		return {
			phase: this.currentPhase,
			context: { ...this.context }
		};
	}
	/**
	 * Restore state from checkpoint.
	 * No validation is performed here — callers should check
	 * canResumeFromState() first.
	 */
	restoreState(state: WorkflowState): void {
		this.currentPhase = state.phase;
		this.context = { ...state.context };
		// Emit workflow:resumed event
		this.emit('workflow:resumed', {
			phase: this.currentPhase,
			progress: this.getProgress()
		});
	}
	/**
	 * Add event listener
	 */
	on(eventType: WorkflowEventType, listener: WorkflowEventListener): void {
		if (!this.eventListeners.has(eventType)) {
			this.eventListeners.set(eventType, new Set());
		}
		this.eventListeners.get(eventType)!.add(listener);
	}
	/**
	 * Remove event listener
	 */
	off(eventType: WorkflowEventType, listener: WorkflowEventListener): void {
		const listeners = this.eventListeners.get(eventType);
		if (listeners) {
			listeners.delete(listener);
		}
	}
	/**
	 * Emit workflow event to all registered listeners (synchronously).
	 * Every event carries a snapshot of which adapters/hooks are configured.
	 */
	private emit(
		eventType: WorkflowEventType,
		data?: Record<string, unknown>
	): void {
		const eventData: WorkflowEventData = {
			type: eventType,
			timestamp: new Date(),
			phase: this.currentPhase,
			tddPhase: this.context.currentTDDPhase,
			subtaskId: this.getCurrentSubtaskId(),
			data: {
				...data,
				adapters: {
					testValidator: !!this.testResultValidator,
					gitHook: !!this.gitOperationHook,
					executeHook: !!this.executeHook
				}
			}
		};
		const listeners = this.eventListeners.get(eventType);
		if (listeners) {
			listeners.forEach((listener) => listener(eventData));
		}
	}
	/**
	 * Get current subtask ID
	 */
	private getCurrentSubtaskId(): string | undefined {
		const currentSubtask =
			this.context.subtasks[this.context.currentSubtaskIndex];
		return currentSubtask?.id;
	}
	/**
	 * Register callback for state persistence
	 */
	onStatePersist(
		callback: (state: WorkflowState) => void | Promise<void>
	): void {
		this.persistCallback = callback;
	}
	/**
	 * Enable auto-persistence after each transition
	 */
	enableAutoPersist(
		callback: (state: WorkflowState) => void | Promise<void>
	): void {
		this.persistCallback = callback;
		this.autoPersistEnabled = true;
	}
	/**
	 * Disable auto-persistence
	 */
	disableAutoPersist(): void {
		this.autoPersistEnabled = false;
	}
	/**
	 * Manually persist current state
	 */
	async persistState(): Promise<void> {
		if (this.persistCallback) {
			await this.persistCallback(this.getState());
		}
		this.emit('state:persisted');
	}
	/**
	 * Trigger auto-persistence if enabled.
	 *
	 * NOTE: transition() invokes this with `void` (fire-and-forget), so a
	 * rejection from the persist callback surfaces as an unhandled rejection
	 * rather than failing the transition.
	 */
	private async triggerAutoPersist(): Promise<void> {
		if (this.autoPersistEnabled && this.persistCallback) {
			await this.persistCallback(this.getState());
		}
	}
	/**
	 * Add a guard condition for a specific phase
	 */
	addGuard(
		phase: WorkflowPhase,
		guard: (context: WorkflowContext) => boolean
	): void {
		this.phaseGuards.set(phase, guard);
	}
	/**
	 * Remove a guard condition for a specific phase
	 */
	removeGuard(phase: WorkflowPhase): void {
		this.phaseGuards.delete(phase);
	}
	/**
	 * Get current subtask being worked on
	 */
	getCurrentSubtask(): SubtaskInfo | undefined {
		return this.context.subtasks[this.context.currentSubtaskIndex];
	}
	/**
	 * Get workflow progress information (completed/total/current/percentage).
	 * `current` is the 1-based index of the subtask in progress.
	 */
	getProgress(): {
		completed: number;
		total: number;
		current: number;
		percentage: number;
	} {
		const completed = this.context.subtasks.filter(
			(st) => st.status === 'completed'
		).length;
		const total = this.context.subtasks.length;
		const current = this.context.currentSubtaskIndex + 1;
		const percentage = total > 0 ? Math.round((completed / total) * 100) : 0;
		return { completed, total, current, percentage };
	}
	/**
	 * Check if can proceed to next subtask or phase
	 */
	canProceed(): boolean {
		if (this.currentPhase !== 'SUBTASK_LOOP') {
			return false;
		}
		const currentSubtask = this.getCurrentSubtask();
		// Can proceed if current subtask is completed (after COMMIT phase)
		return currentSubtask?.status === 'completed';
	}
	/**
	 * Increment attempts for current subtask
	 */
	incrementAttempts(): void {
		const currentSubtask = this.getCurrentSubtask();
		if (currentSubtask) {
			currentSubtask.attempts++;
		}
	}
	/**
	 * Check if current subtask has exceeded max attempts.
	 * Always false when maxAttempts is unset.
	 */
	hasExceededMaxAttempts(): boolean {
		const currentSubtask = this.getCurrentSubtask();
		if (!currentSubtask || !currentSubtask.maxAttempts) {
			return false;
		}
		return currentSubtask.attempts > currentSubtask.maxAttempts;
	}
	/**
	 * Handle error event: record it in context and notify listeners.
	 */
	private handleError(error: import('./types.js').WorkflowError): void {
		this.context.errors.push(error);
		this.emit('error:occurred', { error });
	}
	/**
	 * Handle retry event (resets the TDD cycle to RED; no-op outside SUBTASK_LOOP).
	 */
	private handleRetry(): void {
		if (this.currentPhase === 'SUBTASK_LOOP') {
			// Reset to RED phase to retry current subtask
			this.context.currentTDDPhase = 'RED';
			this.emit('tdd:red:started');
		}
	}
	/**
	 * Retry current subtask (resets to RED phase)
	 */
	retryCurrentSubtask(): void {
		if (this.currentPhase === 'SUBTASK_LOOP') {
			this.context.currentTDDPhase = 'RED';
			this.emit('tdd:red:started');
		}
	}
	/**
	 * Handle max attempts exceeded for current subtask
	 */
	handleMaxAttemptsExceeded(): void {
		const currentSubtask = this.getCurrentSubtask();
		if (currentSubtask) {
			currentSubtask.status = 'failed';
			this.emit('subtask:failed', {
				subtaskId: currentSubtask.id,
				attempts: currentSubtask.attempts,
				maxAttempts: currentSubtask.maxAttempts
			});
		}
	}
	/**
	 * Check if workflow has been aborted
	 */
	isAborted(): boolean {
		return this.aborted;
	}
	/**
	 * Validate if a state can be resumed from (structural check of phase
	 * and required context fields; no deep validation of subtask entries).
	 */
	canResumeFromState(state: WorkflowState): boolean {
		// Validate phase is valid
		const validPhases: WorkflowPhase[] = [
			'PREFLIGHT',
			'BRANCH_SETUP',
			'SUBTASK_LOOP',
			'FINALIZE',
			'COMPLETE'
		];
		if (!validPhases.includes(state.phase)) {
			return false;
		}
		// Validate context structure
		if (!state.context || typeof state.context !== 'object') {
			return false;
		}
		// Validate required context fields
		if (!state.context.taskId || !Array.isArray(state.context.subtasks)) {
			return false;
		}
		if (typeof state.context.currentSubtaskIndex !== 'number') {
			return false;
		}
		if (!Array.isArray(state.context.errors)) {
			return false;
		}
		// All validations passed
		return true;
	}
	/**
	 * Set TestResultValidator adapter
	 */
	setTestResultValidator(validator: TestResultValidator): void {
		this.testResultValidator = validator;
		this.emit('adapter:configured', { adapterType: 'test-validator' });
	}
	/**
	 * Check if TestResultValidator is configured
	 */
	hasTestResultValidator(): boolean {
		return !!this.testResultValidator;
	}
	/**
	 * Remove TestResultValidator adapter
	 */
	removeTestResultValidator(): void {
		this.testResultValidator = undefined;
	}
	/**
	 * Register git operation hook
	 */
	onGitOperation(hook: (operation: string, data?: unknown) => void): void {
		this.gitOperationHook = hook;
	}
	/**
	 * Register execute command hook
	 */
	onExecute(hook: (command: string, context: WorkflowContext) => void): void {
		this.executeHook = hook;
	}
	/**
	 * Execute a command (triggers execute hook; no-op when no hook is set).
	 */
	executeCommand(command: string): void {
		if (this.executeHook) {
			this.executeHook(command, this.context);
		}
	}
}

View File

@@ -0,0 +1,146 @@
/**
 * @fileoverview Tests for WorkflowStateManager path sanitization
 */
import { describe, it, expect } from 'vitest';
import { WorkflowStateManager } from './workflow-state-manager.js';
import os from 'node:os';
import path from 'node:path';
describe('WorkflowStateManager', () => {
	describe('getProjectIdentifier', () => {
		it('should sanitize paths like Claude Code', () => {
			const projectRoot =
				'/Volumes/Workspace/workspace/contrib/task-master/demos/nextjs-todo-tdd';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			const homeDir = os.homedir();
			// Expected structure: ~/.taskmaster/{project-id}/sessions/
			const expectedPath = path.join(
				homeDir,
				'.taskmaster',
				'-Volumes-Workspace-workspace-contrib-task-master-demos-nextjs-todo-tdd',
				'sessions'
			);
			expect(sessionDir).toBe(expectedPath);
		});
		it('should preserve case in paths', () => {
			const projectRoot = '/Users/Alice/Projects/MyApp';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			// Extract project ID from: ~/.taskmaster/{project-id}/sessions/
			const projectId = sessionDir.split(path.sep).slice(-2, -1)[0];
			// Case should be preserved
			expect(projectId).toContain('Users');
			expect(projectId).toContain('Alice');
			expect(projectId).toContain('Projects');
			expect(projectId).toContain('MyApp');
		});
		it('should handle paths with special characters', () => {
			const projectRoot = '/tmp/my-project_v2.0/test';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			// Extract project ID from: ~/.taskmaster/{project-id}/sessions/
			const projectId = sessionDir.split(path.sep).slice(-2, -1)[0];
			// Special chars (underscore, dot) should be replaced with dashes
			expect(projectId).toBe('-tmp-my-project-v2-0-test');
		});
		it('should create unique identifiers for different paths', () => {
			const project1 = '/Users/alice/task-master';
			const project2 = '/Users/bob/task-master';
			const manager1 = new WorkflowStateManager(project1);
			const manager2 = new WorkflowStateManager(project2);
			// Extract project IDs from: ~/.taskmaster/{project-id}/sessions/
			const id1 = manager1.getSessionDir().split(path.sep).slice(-2, -1)[0];
			const id2 = manager2.getSessionDir().split(path.sep).slice(-2, -1)[0];
			// Same basename but different full paths should be unique
			expect(id1).not.toBe(id2);
			expect(id1).toContain('alice');
			expect(id2).toContain('bob');
		});
		it('should collapse multiple dashes', () => {
			// NOTE(review): path.resolve normalizes repeated separators, so this
			// mostly exercises normalization rather than the dash-collapse regex.
			const projectRoot = '/path//with///multiple////slashes';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			// Extract project ID from: ~/.taskmaster/{project-id}/sessions/
			const projectId = sessionDir.split(path.sep).slice(-2, -1)[0];
			// Multiple dashes should be collapsed to single dash
			expect(projectId).not.toContain('--');
			expect(projectId).toBe('-path-with-multiple-slashes');
		});
		it('should not have trailing dashes', () => {
			const projectRoot = '/path/to/project';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			// Extract project ID from: ~/.taskmaster/{project-id}/sessions/
			const projectId = sessionDir.split(path.sep).slice(-2, -1)[0];
			// Should not end with dash
			expect(projectId).not.toMatch(/-$/);
		});
		it('should start with a dash like Claude Code', () => {
			const projectRoot = '/any/path';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			// Extract project ID from: ~/.taskmaster/{project-id}/sessions/
			const projectId = sessionDir.split(path.sep).slice(-2, -1)[0];
			// Should start with dash like Claude Code's pattern
			expect(projectId).toMatch(/^-/);
		});
	});
	describe('session paths', () => {
		it('should place sessions in global ~/.taskmaster/{project-id}/sessions/', () => {
			const projectRoot = '/some/project';
			const manager = new WorkflowStateManager(projectRoot);
			const sessionDir = manager.getSessionDir();
			const homeDir = os.homedir();
			// Should be: ~/.taskmaster/{project-id}/sessions/
			expect(sessionDir).toContain(path.join(homeDir, '.taskmaster'));
			// NOTE(review): this regex assumes POSIX '/' separators; on Windows
			// sessionDir uses '\\' and this assertion would fail — confirm the
			// suite only runs on POSIX runners or make the pattern sep-agnostic.
			expect(sessionDir).toMatch(/\.taskmaster\/.*\/sessions$/);
		});
		it('should include workflow-state.json in session dir', () => {
			const projectRoot = '/some/project';
			const manager = new WorkflowStateManager(projectRoot);
			const statePath = manager.getStatePath();
			const sessionDir = manager.getSessionDir();
			expect(statePath).toBe(path.join(sessionDir, 'workflow-state.json'));
		});
		it('should include backups dir in session dir', () => {
			const projectRoot = '/some/project';
			const manager = new WorkflowStateManager(projectRoot);
			const backupDir = manager.getBackupDir();
			const sessionDir = manager.getSessionDir();
			expect(backupDir).toBe(path.join(sessionDir, 'backups'));
		});
	});
});

View File

@@ -0,0 +1,263 @@
/**
* @fileoverview WorkflowStateManager - Manages persistence of TDD workflow state
*
* Stores workflow state in global user directory (~/.taskmaster/{project-id}/sessions/)
* to avoid git conflicts and support multiple worktrees.
* Each project gets its own directory for organizing workflow-related data.
*/
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import type { WorkflowState } from './types.js';
import { getLogger } from '../logger/index.js';
/**
 * On-disk backup record: the saved workflow state plus the ISO 8601
 * timestamp at which the backup was taken.
 */
export interface WorkflowStateBackup {
	timestamp: string;
	state: WorkflowState;
}
/**
 * Manages workflow state persistence with backup support.
 * Stores state in a global per-project directory under the user's home
 * (~/.taskmaster/{project-id}/sessions/) to avoid git noise and to keep
 * multiple worktrees of the same repository separate.
 */
export class WorkflowStateManager {
	private readonly projectRoot: string;
	// Full path to sessions/workflow-state.json for this project.
	private readonly statePath: string;
	// Directory holding timestamped backup files (sessions/backups).
	private readonly backupDir: string;
	private readonly sessionDir: string;
	// Maximum number of backups retained by pruning after each createBackup().
	private maxBackups: number;
	private readonly logger = getLogger('WorkflowStateManager');
	constructor(projectRoot: string, maxBackups = 5) {
		this.projectRoot = path.resolve(projectRoot);
		this.maxBackups = maxBackups;
		// Create project-specific directory in global .taskmaster
		// Structure: ~/.taskmaster/{project-id}/sessions/
		const projectId = this.getProjectIdentifier(this.projectRoot);
		const homeDir = os.homedir();
		const projectDir = path.join(homeDir, '.taskmaster', projectId);
		this.sessionDir = path.join(projectDir, 'sessions');
		this.statePath = path.join(this.sessionDir, 'workflow-state.json');
		this.backupDir = path.join(this.sessionDir, 'backups');
	}
	/**
	 * Generate a unique identifier for the project using the full sanitized path.
	 * Uses Claude Code's pattern: leading dash + full path with case preserved.
	 * Example: /Volumes/Workspace/... -> -Volumes-Workspace-...
	 *
	 * @param projectRoot - project directory (resolved to an absolute path)
	 * @returns sanitized identifier safe for use as a directory name
	 */
	private getProjectIdentifier(projectRoot: string): string {
		// Resolve to absolute path
		const absolutePath = path.resolve(projectRoot);
		// Sanitize path like Claude Code does:
		// - Add leading dash
		// - Replace path separators and non-alphanumeric chars with dashes
		// - Preserve case for readability
		// - Collapse multiple dashes
		const sanitized =
			'-' +
			absolutePath
				.replace(/^\//, '') // Remove leading slash before adding dash
				.replace(/[^a-zA-Z0-9]+/g, '-') // Replace sequences of non-alphanumeric with single dash
				// Not redundant: when the sanitized path itself starts with '-'
				// (e.g. a root segment beginning with '_'), the prepended dash
				// would otherwise produce '--'.
				.replace(/-+/g, '-') // Collapse multiple dashes
				.replace(/-+$/, ''); // Remove trailing dashes
		return sanitized;
	}
/**
* Check if workflow state exists
*/
async exists(): Promise<boolean> {
try {
await fs.access(this.statePath);
return true;
} catch {
return false;
}
}
/**
* Load workflow state from disk
*/
async load(): Promise<WorkflowState> {
try {
const content = await fs.readFile(this.statePath, 'utf-8');
return JSON.parse(content) as WorkflowState;
} catch (error: any) {
if (error.code === 'ENOENT') {
throw new Error(`Workflow state file not found at ${this.statePath}`);
}
throw new Error(`Failed to load workflow state: ${error.message}`);
}
}
/**
 * Save workflow state to disk.
 *
 * The write is atomic: content goes to a `.tmp` sibling first and is then
 * renamed over the real path, so readers never observe a half-written file.
 * On any failure the temp file is cleaned up best-effort so failed saves
 * do not accumulate stale `.tmp` files in the session directory.
 *
 * @param state - Workflow state to persist
 * @throws Error when the state cannot be serialized or written
 */
async save(state: WorkflowState): Promise<void> {
  const tempPath = `${this.statePath}.tmp`;
  try {
    // Ensure session directory exists
    await fs.mkdir(this.sessionDir, { recursive: true });
    // Serialize and validate JSON
    const jsonContent = JSON.stringify(state, null, 2);
    // Validate that the JSON is well-formed by parsing it back
    // (guards against a non-serializable state producing bad output)
    try {
      JSON.parse(jsonContent);
    } catch (parseError) {
      this.logger.error('Generated invalid JSON:', jsonContent);
      throw new Error('Failed to generate valid JSON from workflow state');
    }
    // Write state atomically with newline at end
    await fs.writeFile(tempPath, jsonContent + '\n', 'utf-8');
    await fs.rename(tempPath, this.statePath);
    this.logger.debug(`Saved workflow state (${jsonContent.length} bytes)`);
  } catch (error: any) {
    // Best-effort cleanup: remove a leftover temp file from a failed
    // write/rename; ignore cleanup errors (it may not exist yet)
    await fs.unlink(tempPath).catch(() => {});
    throw new Error(`Failed to save workflow state: ${error.message}`);
  }
}
/**
* Create a backup of current state
*/
async createBackup(): Promise<void> {
try {
const exists = await this.exists();
if (!exists) {
return;
}
const state = await this.load();
await fs.mkdir(this.backupDir, { recursive: true });
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const backupPath = path.join(
this.backupDir,
`workflow-state-${timestamp}.json`
);
const backup: WorkflowStateBackup = {
timestamp: new Date().toISOString(),
state
};
await fs.writeFile(backupPath, JSON.stringify(backup, null, 2), 'utf-8');
// Clean up old backups
await this.pruneBackups();
} catch (error: any) {
throw new Error(`Failed to create backup: ${error.message}`);
}
}
/**
 * Delete the workflow state file.
 *
 * A missing file is treated as success; any other failure is wrapped.
 *
 * @throws Error for unlink failures other than ENOENT
 */
async delete(): Promise<void> {
  try {
    await fs.unlink(this.statePath);
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      return; // already gone — nothing to do
    }
    throw new Error(`Failed to delete workflow state: ${error.message}`);
  }
}
/**
 * List available backup file names, newest first.
 *
 * @returns Backup file names (not full paths); empty when the backup
 *   directory does not exist yet
 * @throws Error for directory-read failures other than ENOENT
 */
async listBackups(): Promise<string[]> {
  let entries: string[];
  try {
    entries = await fs.readdir(this.backupDir);
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      return []; // no backup dir yet means no backups
    }
    throw new Error(`Failed to list backups: ${error.message}`);
  }
  const isBackup = (name: string) =>
    name.startsWith('workflow-state-') && name.endsWith('.json');
  // Names embed an ISO-derived timestamp, so a lexicographic sort
  // reversed yields newest-first order
  return entries.filter(isBackup).sort().reverse();
}
/**
* Restore from a backup
*/
async restoreBackup(backupFileName: string): Promise<void> {
try {
const backupPath = path.join(this.backupDir, backupFileName);
const content = await fs.readFile(backupPath, 'utf-8');
const backup: WorkflowStateBackup = JSON.parse(content);
await this.save(backup.state);
} catch (error: any) {
throw new Error(`Failed to restore backup: ${error.message}`);
}
}
/**
* Prune old backups to maintain max backup count
*/
private async pruneBackups(): Promise<void> {
try {
const backups = await this.listBackups();
if (backups.length > this.maxBackups) {
const toDelete = backups.slice(this.maxBackups);
for (const backup of toDelete) {
await fs.unlink(path.join(this.backupDir, backup));
}
}
} catch (error: any) {
// Non-critical error, log but don't throw
this.logger.warn(`Failed to prune backups: ${error.message}`);
}
}
/**
 * Absolute path of the workflow-state JSON file (for debugging/testing).
 */
getStatePath(): string {
  return this.statePath;
}
/**
 * Absolute path of the backup directory (for debugging/testing).
 */
getBackupDir(): string {
  return this.backupDir;
}
/**
 * Absolute path of the per-project session directory (for debugging/testing).
 */
getSessionDir(): string {
  return this.sessionDir;
}
/**
 * Project root this manager was constructed for.
 */
getProjectRoot(): string {
  return this.projectRoot;
}
/**
 * Path to the activity log (JSONL) file.
 * Stored next to workflow-state.json so events correlate with state.
 */
getActivityLogPath(): string {
  return path.join(this.sessionDir, 'activity.jsonl');
}
}

View File

@@ -0,0 +1,401 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs-extra';
import path from 'path';
import os from 'os';
import {
logActivity,
readActivityLog,
filterActivityLog
} from '../../../src/storage/activity-logger.js';
describe('Activity Logger', () => {
// Shared fixture state: a fresh temp directory (and activity.jsonl path
// inside it) per test, so tests never observe each other's writes.
let testDir: string;
let activityPath: string;
beforeEach(async () => {
  // Create a unique temporary test directory
  const prefix = path.join(os.tmpdir(), 'activity-test-');
  testDir = await fs.mkdtemp(prefix);
  activityPath = path.join(testDir, 'activity.jsonl');
});
afterEach(async () => {
  // Clean up test directory (fs-extra remove is recursive and
  // tolerates a missing path)
  await fs.remove(testDir);
});
// Write path: file creation, appending, JSONL encoding, timestamps,
// and payload fidelity across the JSON round-trip.
describe('logActivity', () => {
  it('should create activity log file on first write', async () => {
    await logActivity(activityPath, {
      type: 'phase-start',
      phase: 'red',
      data: {}
    });
    const exists = await fs.pathExists(activityPath);
    expect(exists).toBe(true);
  });
  it('should append event to log file', async () => {
    await logActivity(activityPath, {
      type: 'phase-start',
      phase: 'red'
    });
    const content = await fs.readFile(activityPath, 'utf-8');
    const lines = content.trim().split(/\r?\n/);
    expect(lines.length).toBe(1);
  });
  it('should write valid JSONL format', async () => {
    await logActivity(activityPath, {
      type: 'test-run',
      result: 'pass'
    });
    const content = await fs.readFile(activityPath, 'utf-8');
    const line = content.trim();
    const parsed = JSON.parse(line);
    expect(parsed).toBeDefined();
    expect(parsed.type).toBe('test-run');
  });
  it('should include timestamp in log entry', async () => {
    // ISO-8601 strings order correctly under plain string comparison,
    // so before <= timestamp <= after brackets the write
    const before = new Date().toISOString();
    await logActivity(activityPath, {
      type: 'phase-start',
      phase: 'red'
    });
    const after = new Date().toISOString();
    const logs = await readActivityLog(activityPath);
    expect(logs[0].timestamp).toBeDefined();
    expect(logs[0].timestamp >= before).toBe(true);
    expect(logs[0].timestamp <= after).toBe(true);
  });
  it('should append multiple events', async () => {
    await logActivity(activityPath, { type: 'event1' });
    await logActivity(activityPath, { type: 'event2' });
    await logActivity(activityPath, { type: 'event3' });
    const logs = await readActivityLog(activityPath);
    expect(logs.length).toBe(3);
    expect(logs[0].type).toBe('event1');
    expect(logs[1].type).toBe('event2');
    expect(logs[2].type).toBe('event3');
  });
  it('should preserve event data', async () => {
    const eventData = {
      type: 'git-commit',
      hash: 'abc123',
      message: 'test commit',
      files: ['file1.ts', 'file2.ts']
    };
    await logActivity(activityPath, eventData);
    const logs = await readActivityLog(activityPath);
    expect(logs[0].type).toBe('git-commit');
    expect(logs[0].hash).toBe('abc123');
    expect(logs[0].message).toBe('test commit');
    expect(logs[0].files).toEqual(['file1.ts', 'file2.ts']);
  });
  it('should handle nested objects in event data', async () => {
    await logActivity(activityPath, {
      type: 'test-results',
      results: {
        passed: 10,
        failed: 2,
        details: { coverage: 85 }
      }
    });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].results.details.coverage).toBe(85);
  });
  it('should handle special characters in event data', async () => {
    // Quotes and embedded newlines must survive JSON escaping in JSONL
    await logActivity(activityPath, {
      type: 'error',
      message: 'Error: "Something went wrong"\nLine 2'
    });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].message).toBe('Error: "Something went wrong"\nLine 2');
  });
  it('should create parent directory if it does not exist', async () => {
    const nestedPath = path.join(testDir, 'nested', 'dir', 'activity.jsonl');
    await logActivity(nestedPath, { type: 'test' });
    const exists = await fs.pathExists(nestedPath);
    expect(exists).toBe(true);
  });
});
// Read path: JSONL parsing, missing-file handling, blank lines,
// malformed input, and chronological ordering.
describe('readActivityLog', () => {
  it('should read all events from log', async () => {
    await logActivity(activityPath, { type: 'event1' });
    await logActivity(activityPath, { type: 'event2' });
    const logs = await readActivityLog(activityPath);
    expect(logs.length).toBe(2);
    expect(logs[0].type).toBe('event1');
    expect(logs[1].type).toBe('event2');
  });
  it('should return empty array for non-existent file', async () => {
    // A log that was never written to reads as empty, not as an error
    const logs = await readActivityLog(activityPath);
    expect(logs).toEqual([]);
  });
  it('should parse JSONL correctly', async () => {
    await logActivity(activityPath, { type: 'event1', data: 'test1' });
    await logActivity(activityPath, { type: 'event2', data: 'test2' });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].data).toBe('test1');
    expect(logs[1].data).toBe('test2');
  });
  it('should handle empty lines', async () => {
    // Blank lines between records are tolerated and skipped
    await fs.writeFile(
      activityPath,
      '{"type":"event1"}\n\n{"type":"event2"}\n'
    );
    const logs = await readActivityLog(activityPath);
    expect(logs.length).toBe(2);
    expect(logs[0].type).toBe('event1');
    expect(logs[1].type).toBe('event2');
  });
  it('should throw error for invalid JSON line', async () => {
    await fs.writeFile(activityPath, '{"type":"event1"}\ninvalid json\n');
    await expect(readActivityLog(activityPath)).rejects.toThrow(
      /Invalid JSON/i
    );
  });
  it('should preserve chronological order', async () => {
    for (let i = 0; i < 10; i++) {
      await logActivity(activityPath, { type: 'event', index: i });
    }
    const logs = await readActivityLog(activityPath);
    for (let i = 0; i < 10; i++) {
      expect(logs[i].index).toBe(i);
    }
  });
});
// Query side: filtering by type, by multiple fields, by timestamp range,
// and by arbitrary predicate. Counts below depend on the five fixture
// entries seeded in beforeEach.
describe('filterActivityLog', () => {
  beforeEach(async () => {
    // Create sample log entries spanning the event types under test
    await logActivity(activityPath, { type: 'phase-start', phase: 'red' });
    await logActivity(activityPath, { type: 'test-run', result: 'fail' });
    await logActivity(activityPath, { type: 'phase-start', phase: 'green' });
    await logActivity(activityPath, { type: 'test-run', result: 'pass' });
    await logActivity(activityPath, { type: 'git-commit', hash: 'abc123' });
  });
  it('should filter by event type', async () => {
    const filtered = await filterActivityLog(activityPath, {
      type: 'phase-start'
    });
    expect(filtered.length).toBe(2);
    expect(filtered[0].type).toBe('phase-start');
    expect(filtered[1].type).toBe('phase-start');
  });
  it('should filter by multiple criteria', async () => {
    // Criteria combine with AND semantics
    const filtered = await filterActivityLog(activityPath, {
      type: 'test-run',
      result: 'pass'
    });
    expect(filtered.length).toBe(1);
    expect(filtered[0].result).toBe('pass');
  });
  it('should return all events when no filter provided', async () => {
    const filtered = await filterActivityLog(activityPath, {});
    expect(filtered.length).toBe(5);
  });
  it('should filter by timestamp range', async () => {
    const logs = await readActivityLog(activityPath);
    const midpoint = logs[2].timestamp;
    const filtered = await filterActivityLog(activityPath, {
      timestampFrom: midpoint
    });
    // Should get events from midpoint onwards (inclusive)
    // Expect at least 3 events, may be more due to timestamp collisions
    expect(filtered.length).toBeGreaterThanOrEqual(3);
    expect(filtered.length).toBeLessThanOrEqual(5);
  });
  it('should filter by custom predicate', async () => {
    const filtered = await filterActivityLog(activityPath, {
      predicate: (event: any) => event.phase === 'red'
    });
    expect(filtered.length).toBe(1);
    expect(filtered[0].phase).toBe('red');
  });
  it('should return empty array for non-matching filter', async () => {
    const filtered = await filterActivityLog(activityPath, {
      type: 'non-existent'
    });
    expect(filtered).toEqual([]);
  });
  it('should handle nested property filters', async () => {
    await logActivity(activityPath, {
      type: 'test-results',
      results: { coverage: 85 }
    });
    const filtered = await filterActivityLog(activityPath, {
      predicate: (event: any) => event.results?.coverage > 80
    });
    expect(filtered.length).toBe(1);
    expect(filtered[0].results.coverage).toBe(85);
  });
});
// The logger is schemaless: these cases document the event shapes the
// workflow emits (phase transitions, test runs, git operations, errors)
// and confirm each round-trips intact.
describe('Event types', () => {
  it('should support phase-transition events', async () => {
    await logActivity(activityPath, {
      type: 'phase-transition',
      from: 'red',
      to: 'green'
    });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].type).toBe('phase-transition');
    expect(logs[0].from).toBe('red');
    expect(logs[0].to).toBe('green');
  });
  it('should support test-run events', async () => {
    await logActivity(activityPath, {
      type: 'test-run',
      result: 'pass',
      testsRun: 50,
      testsPassed: 50,
      testsFailed: 0,
      coverage: 85.5
    });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].testsRun).toBe(50);
    expect(logs[0].coverage).toBe(85.5);
  });
  it('should support git-operation events', async () => {
    await logActivity(activityPath, {
      type: 'git-commit',
      hash: 'abc123def456',
      message: 'feat: add new feature',
      files: ['file1.ts', 'file2.ts']
    });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].hash).toBe('abc123def456');
    expect(logs[0].files.length).toBe(2);
  });
  it('should support error events', async () => {
    await logActivity(activityPath, {
      type: 'error',
      phase: 'red',
      error: 'Test failed',
      stack: 'Error stack trace...'
    });
    const logs = await readActivityLog(activityPath);
    expect(logs[0].type).toBe('error');
    expect(logs[0].error).toBe('Test failed');
  });
});
// Interleaved writers: fire many logActivity calls without awaiting in
// between, then check nothing was lost, duplicated, or corrupted.
describe('Concurrency handling', () => {
  it('should handle rapid concurrent writes', async () => {
    const pending = Array.from({ length: 50 }, (_, i) =>
      logActivity(activityPath, { type: 'event', index: i })
    );
    await Promise.all(pending);
    const logs = await readActivityLog(activityPath);
    expect(logs.length).toBe(50);
  });
  it('should maintain data integrity with concurrent writes', async () => {
    const pending = Array.from({ length: 20 }, (_, i) =>
      logActivity(activityPath, {
        type: 'concurrent-test',
        id: i,
        data: `data-${i}`
      })
    );
    await Promise.all(pending);
    const logs = await readActivityLog(activityPath);
    // All events should be present
    expect(logs.length).toBe(20);
    // Every id 0..19 must appear exactly once
    const seenIds = new Set(logs.map((entry) => entry.id));
    expect([...seenIds].sort((a, b) => a - b)).toEqual([...Array(20).keys()]);
    // Each entry keeps its full shape — no torn/interleaved lines
    for (const entry of logs) {
      expect(entry.type).toBe('concurrent-test');
      expect(typeof entry.id).toBe('number');
      expect(entry.data).toMatch(/^data-\d+$/);
    }
  });
});
// Longevity check: the file must remain line-by-line parseable after
// many sequential appends.
describe('File integrity', () => {
  it('should maintain valid JSONL after many operations', async () => {
    for (let i = 0; i < 100; i++) {
      await logActivity(activityPath, { type: 'test', iteration: i });
    }
    const raw = await fs.readFile(activityPath, 'utf-8');
    const lines = raw.trim().split(/\r?\n/);
    expect(lines.length).toBe(100);
    // Every line must parse as standalone JSON
    lines.forEach((line) => {
      expect(() => JSON.parse(line)).not.toThrow();
    });
  });
});
});

View File

@@ -22,12 +22,15 @@ export default defineConfig({
provider: 'v8',
reporter: ['text', 'json', 'html', 'lcov'],
exclude: [
'node_modules',
'dist',
'tests',
'node_modules/',
'dist/',
'tests/',
'**/*.test.ts',
'**/*.spec.ts',
'**/*.d.ts',
'**/mocks/**',
'**/fixtures/**',
'vitest.config.ts',
'src/index.ts'
],
thresholds: {