Compare commits

4 Commits

Author          SHA1          Message                                        Date
Ralph Khreish   d4826e0258    wip                                            2025-09-08 12:33:43 -07:00
Ralph Khreish   b9e3eecafe    chore: address oauth PR concerns (#1184)       2025-09-08 01:15:19 +02:00
Ralph Khreish   6dd910fc52    feat: add oauth with remote server (#1178)     2025-09-04 20:45:41 +02:00
Ralph Khreish   19ec52181d    feat: create tm-core and apps/cli (#1093)      2025-09-01 21:44:43 +02:00
                              - add typescript
                              - add npm workspaces
71 changed files with 7047 additions and 7200 deletions

View File

@@ -24,11 +24,13 @@
},
"dependencies": {
"@tm/core": "*",
"@tm/workflow-engine": "*",
"boxen": "^7.1.1",
"chalk": "^5.3.0",
"cli-table3": "^0.6.5",
"commander": "^12.1.0",
"inquirer": "^9.2.10",
"open": "^10.2.0",
"ora": "^8.1.0"
},
"devDependencies": {

View File

@@ -0,0 +1,503 @@
/**
* @fileoverview Auth command using Commander's native class pattern
* Extends Commander.Command for better integration with the framework
*/
import { Command } from 'commander';
import chalk from 'chalk';
import inquirer from 'inquirer';
import ora, { type Ora } from 'ora';
import open from 'open';
import {
AuthManager,
AuthenticationError,
type AuthCredentials
} from '@tm/core/auth';
import * as ui from '../utils/ui.js';
/**
* Result type from auth command
*/
export interface AuthResult {
success: boolean;
action: 'login' | 'logout' | 'status' | 'refresh';
credentials?: AuthCredentials;
message?: string;
}
/**
* AuthCommand extending Commander's Command class
* This is a thin presentation layer over @tm/core's AuthManager
*/
export class AuthCommand extends Command {
private authManager: AuthManager;
private lastResult?: AuthResult;
constructor(name?: string) {
super(name || 'auth');
// Initialize auth manager
this.authManager = AuthManager.getInstance();
// Configure the command with subcommands
this.description('Manage authentication with tryhamster.com');
// Add subcommands
this.addLoginCommand();
this.addLogoutCommand();
this.addStatusCommand();
this.addRefreshCommand();
// Default action shows help
this.action(() => {
this.help();
});
}
/**
* Add login subcommand
*/
private addLoginCommand(): void {
this.command('login')
.description('Authenticate with tryhamster.com')
.action(async () => {
await this.executeLogin();
});
}
/**
* Add logout subcommand
*/
private addLogoutCommand(): void {
this.command('logout')
.description('Logout and clear credentials')
.action(async () => {
await this.executeLogout();
});
}
/**
* Add status subcommand
*/
private addStatusCommand(): void {
this.command('status')
.description('Display authentication status')
.action(async () => {
await this.executeStatus();
});
}
/**
* Add refresh subcommand
*/
private addRefreshCommand(): void {
this.command('refresh')
.description('Refresh authentication token')
.action(async () => {
await this.executeRefresh();
});
}
/**
* Execute login command
*/
private async executeLogin(): Promise<void> {
try {
const result = await this.performInteractiveAuth();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
// Exit cleanly after successful authentication
// Small delay to ensure all output is flushed
setTimeout(() => {
process.exit(0);
}, 100);
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Execute logout command
*/
private async executeLogout(): Promise<void> {
try {
const result = await this.performLogout();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Execute status command
*/
private async executeStatus(): Promise<void> {
try {
const result = this.displayStatus();
this.setLastResult(result);
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Execute refresh command
*/
private async executeRefresh(): Promise<void> {
try {
const result = await this.refreshToken();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Display authentication status
*/
private displayStatus(): AuthResult {
const credentials = this.authManager.getCredentials();
console.log(chalk.cyan('\n🔐 Authentication Status\n'));
if (credentials) {
console.log(chalk.green('✓ Authenticated'));
console.log(chalk.gray(` Email: ${credentials.email || 'N/A'}`));
console.log(chalk.gray(` User ID: ${credentials.userId}`));
console.log(
chalk.gray(` Token Type: ${credentials.tokenType || 'standard'}`)
);
if (credentials.expiresAt) {
const expiresAt = new Date(credentials.expiresAt);
const now = new Date();
const hoursRemaining = Math.floor(
(expiresAt.getTime() - now.getTime()) / (1000 * 60 * 60)
);
if (hoursRemaining > 0) {
console.log(
chalk.gray(
` Expires: ${expiresAt.toLocaleString()} (${hoursRemaining} hours remaining)`
)
);
} else {
console.log(
chalk.yellow(` Token expired at: ${expiresAt.toLocaleString()}`)
);
}
} else {
console.log(chalk.gray(' Expires: Never (API key)'));
}
console.log(
chalk.gray(` Saved: ${new Date(credentials.savedAt).toLocaleString()}`)
);
return {
success: true,
action: 'status',
credentials,
message: 'Authenticated'
};
} else {
console.log(chalk.yellow('✗ Not authenticated'));
console.log(
chalk.gray('\n Run "task-master auth login" to authenticate')
);
return {
success: false,
action: 'status',
message: 'Not authenticated'
};
}
}
/**
* Perform logout
*/
private async performLogout(): Promise<AuthResult> {
try {
await this.authManager.logout();
ui.displaySuccess('Successfully logged out');
return {
success: true,
action: 'logout',
message: 'Successfully logged out'
};
} catch (error) {
const message = `Failed to logout: ${(error as Error).message}`;
ui.displayError(message);
return {
success: false,
action: 'logout',
message
};
}
}
/**
* Refresh authentication token
*/
private async refreshToken(): Promise<AuthResult> {
const spinner = ora('Refreshing authentication token...').start();
try {
const credentials = await this.authManager.refreshToken();
spinner.succeed('Token refreshed successfully');
console.log(
chalk.gray(
` New expiration: ${credentials.expiresAt ? new Date(credentials.expiresAt).toLocaleString() : 'Never'}`
)
);
return {
success: true,
action: 'refresh',
credentials,
message: 'Token refreshed successfully'
};
} catch (error) {
spinner.fail('Failed to refresh token');
if ((error as AuthenticationError).code === 'NO_REFRESH_TOKEN') {
ui.displayWarning(
'No refresh token available. Please re-authenticate.'
);
} else {
ui.displayError(`Refresh failed: ${(error as Error).message}`);
}
return {
success: false,
action: 'refresh',
message: `Failed to refresh: ${(error as Error).message}`
};
}
}
/**
* Perform interactive authentication
*/
private async performInteractiveAuth(): Promise<AuthResult> {
ui.displayBanner('Task Master Authentication');
// Check if already authenticated
if (this.authManager.isAuthenticated()) {
const { continueAuth } = await inquirer.prompt([
{
type: 'confirm',
name: 'continueAuth',
message:
'You are already authenticated. Do you want to re-authenticate?',
default: false
}
]);
if (!continueAuth) {
const credentials = this.authManager.getCredentials();
ui.displaySuccess('Using existing authentication');
if (credentials) {
console.log(chalk.gray(` Email: ${credentials.email || 'N/A'}`));
console.log(chalk.gray(` User ID: ${credentials.userId}`));
}
return {
success: true,
action: 'login',
credentials: credentials || undefined,
message: 'Using existing authentication'
};
}
}
try {
// Direct browser authentication - no menu needed
const credentials = await this.authenticateWithBrowser();
ui.displaySuccess('Authentication successful!');
console.log(
chalk.gray(` Logged in as: ${credentials.email || credentials.userId}`)
);
return {
success: true,
action: 'login',
credentials,
message: 'Authentication successful'
};
} catch (error) {
this.handleAuthError(error as AuthenticationError);
return {
success: false,
action: 'login',
message: `Authentication failed: ${(error as Error).message}`
};
}
}
/**
* Authenticate with browser using OAuth 2.0 with PKCE
*/
private async authenticateWithBrowser(): Promise<AuthCredentials> {
let authSpinner: Ora | null = null;
try {
// Use AuthManager's new unified OAuth flow method with callbacks
const credentials = await this.authManager.authenticateWithOAuth({
// Callback to handle browser opening
openBrowser: async (authUrl) => {
await open(authUrl);
},
timeout: 5 * 60 * 1000, // 5 minutes
// Callback when auth URL is ready
onAuthUrl: (authUrl) => {
// Display authentication instructions
console.log(chalk.blue.bold('\n🔐 Browser Authentication\n'));
console.log(chalk.white(' Opening your browser to authenticate...'));
console.log(chalk.gray(" If the browser doesn't open, visit:"));
console.log(chalk.cyan.underline(` ${authUrl}\n`));
},
// Callback when waiting for authentication
onWaitingForAuth: () => {
authSpinner = ora({
text: 'Waiting for authentication...',
spinner: 'dots'
}).start();
},
// Callback on success
onSuccess: () => {
if (authSpinner) {
authSpinner.succeed('Authentication successful!');
}
},
// Callback on error
onError: () => {
if (authSpinner) {
authSpinner.fail('Authentication failed');
}
}
});
return credentials;
} catch (error) {
throw error;
}
}
/**
* Handle authentication errors
*/
private handleAuthError(error: AuthenticationError): void {
console.error(chalk.red(`\n✗ ${error.message}`));
switch (error.code) {
case 'NETWORK_ERROR':
ui.displayWarning(
'Please check your internet connection and try again.'
);
break;
case 'INVALID_CREDENTIALS':
ui.displayWarning('Please check your credentials and try again.');
break;
case 'AUTH_EXPIRED':
ui.displayWarning(
'Your session has expired. Please authenticate again.'
);
break;
default:
if (process.env.DEBUG) {
console.error(chalk.gray(error.stack || ''));
}
}
}
/**
* Handle general errors
*/
private handleError(error: any): void {
if (error instanceof AuthenticationError) {
this.handleAuthError(error);
} else {
const msg = error?.getSanitizedDetails?.() ?? {
message: error?.message ?? String(error)
};
console.error(chalk.red(`Error: ${msg.message || 'Unexpected error'}`));
if (error.stack && process.env.DEBUG) {
console.error(chalk.gray(error.stack));
}
}
}
/**
* Set the last result for programmatic access
*/
private setLastResult(result: AuthResult): void {
this.lastResult = result;
}
/**
* Get the last result (for programmatic usage)
*/
getLastResult(): AuthResult | undefined {
return this.lastResult;
}
/**
* Get current authentication status (for programmatic usage)
*/
isAuthenticated(): boolean {
return this.authManager.isAuthenticated();
}
/**
* Get current credentials (for programmatic usage)
*/
getCredentials(): AuthCredentials | null {
return this.authManager.getCredentials();
}
/**
* Clean up resources
*/
async cleanup(): Promise<void> {
// No resources to clean up for auth command
// But keeping method for consistency with other commands
}
/**
* Static method to register this command on an existing program
*/
static register(program: Command, name?: string): AuthCommand {
const authCommand = new AuthCommand(name);
program.addCommand(authCommand);
return authCommand;
}
}
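
For programmatic use outside the CLI, the static register() helper and the result accessors above can be wired into a Commander program roughly as follows. This is a minimal sketch: the entry-point file, program name, and ESM top-level await are assumptions, while the AuthCommand methods are the ones defined above.

import { Command } from 'commander';
import { AuthCommand } from './commands/auth.command.js';

// Hypothetical root program; the name is a placeholder.
const program = new Command('task-master');

// register() constructs the command and attaches it as a subcommand.
const auth = AuthCommand.register(program);

await program.parseAsync(process.argv);

// Subcommands that return control (e.g. auth status) expose their outcome
// through the accessors defined on the class.
const result = auth.getLastResult();
if (result) {
  console.log(result.action, result.success, auth.isAuthenticated());
}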

View File

@@ -0,0 +1,38 @@
/**
* Command registry - exports all CLI commands for central registration
*/
import type { Command } from 'commander';
import { ListTasksCommand } from './list.command.js';
import { AuthCommand } from './auth.command.js';
import WorkflowCommand from './workflow.command.js';
// Define interface for command classes that can register themselves
export interface CommandRegistrar {
register(program: Command, name?: string): any;
}
// Future commands can be added here as they're created
// The pattern is: each command exports a class with a static register(program: Command, name?: string) method
/**
* Auto-register all exported commands that implement the CommandRegistrar interface
*/
export function registerAllCommands(program: Command): void {
// Get all exports from this module
const commands = [
ListTasksCommand,
AuthCommand,
WorkflowCommand
// Add new commands here as they're imported above
];
commands.forEach((CommandClass) => {
if (
'register' in CommandClass &&
typeof CommandClass.register === 'function'
) {
CommandClass.register(program);
}
});
}
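
A minimal sketch of how this registry could be consumed by a CLI entry point; the file location, program name, and description string are placeholders rather than values taken from this diff.

import { Command } from 'commander';
import { registerAllCommands } from './commands/index.js';

const program = new Command('tm').description('Task Master CLI');

// Each command class attaches itself via its static register() method.
registerAllCommands(program);

program.parseAsync(process.argv).catch((error) => {
  console.error(error);
  process.exit(1);
});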

View File

@@ -307,17 +307,6 @@ export class ListTasksCommand extends Command {
/**
* Static method to register this command on an existing program
* This is for gradual migration - allows commands.js to use this
*/
static registerOn(program: Command): Command {
const listCommand = new ListTasksCommand();
program.addCommand(listCommand);
return listCommand;
}
/**
* Alternative registration that returns the command for chaining
* Can also configure the command name if needed
*/
static register(program: Command, name?: string): ListTasksCommand {
const listCommand = new ListTasksCommand(name);

View File

@@ -0,0 +1,58 @@
/**
* @fileoverview Workflow Command
* Main workflow command with subcommands
*/
import { Command } from 'commander';
import {
WorkflowStartCommand,
WorkflowListCommand,
WorkflowStopCommand,
WorkflowStatusCommand
} from './workflow/index.js';
/**
* WorkflowCommand - Main workflow command with subcommands
*/
export class WorkflowCommand extends Command {
constructor(name?: string) {
super(name || 'workflow');
this.description('Manage task execution workflows with git worktrees and Claude Code')
.alias('wf');
// Register subcommands
this.addSubcommands();
}
private addSubcommands(): void {
// Start workflow
WorkflowStartCommand.register(this);
// List workflows
WorkflowListCommand.register(this);
// Stop workflow
WorkflowStopCommand.register(this);
// Show workflow status
WorkflowStatusCommand.register(this);
// Alias commands for convenience
this.addCommand(new WorkflowStartCommand('run')); // tm workflow run <task-id>
this.addCommand(new WorkflowStopCommand('kill')); // tm workflow kill <workflow-id>
this.addCommand(new WorkflowStatusCommand('info')); // tm workflow info <workflow-id>
}
/**
* Static method to register this command on an existing program
*/
static register(program: Command, name?: string): WorkflowCommand {
const workflowCommand = new WorkflowCommand(name);
program.addCommand(workflowCommand);
return workflowCommand;
}
}
export default WorkflowCommand;

View File

@@ -0,0 +1,9 @@
/**
* @fileoverview Workflow Commands
* Exports for all workflow-related CLI commands
*/
export * from './workflow-start.command.js';
export * from './workflow-list.command.js';
export * from './workflow-stop.command.js';
export * from './workflow-status.command.js';

View File

@@ -0,0 +1,253 @@
/**
* @fileoverview Workflow List Command
* List active and recent workflow executions
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig,
type WorkflowExecutionContext
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowListOptions {
project?: string;
status?: string;
format?: 'text' | 'json' | 'compact';
worktreeBase?: string;
claude?: string;
all?: boolean;
}
/**
* WorkflowListCommand - List workflow executions
*/
export class WorkflowListCommand extends Command {
private workflowManager?: TaskExecutionManager;
constructor(name?: string) {
super(name || 'list');
this.description('List active and recent workflow executions')
.alias('ls')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.option('-s, --status <status>', 'Filter by status (running, completed, failed, etc.)')
.option('-f, --format <format>', 'Output format (text, json, compact)', 'text')
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
.option('--claude <path>', 'Claude Code executable path', 'claude')
.option('--all', 'Show all workflows including completed ones')
.action(async (options: WorkflowListOptions) => {
await this.executeCommand(options);
});
}
private async executeCommand(options: WorkflowListOptions): Promise<void> {
try {
// Initialize workflow manager
await this.initializeWorkflowManager(options);
// Get workflows
let workflows = this.workflowManager!.listWorkflows();
// Apply status filter
if (options.status) {
workflows = workflows.filter(w => w.status === options.status);
}
// Apply active filter (default behavior)
if (!options.all) {
workflows = workflows.filter(w =>
['pending', 'initializing', 'running', 'paused'].includes(w.status)
);
}
// Display results
this.displayResults(workflows, options);
} catch (error: any) {
ui.displayError(error.message || 'Failed to list workflows');
process.exit(1);
}
}
private async initializeWorkflowManager(options: WorkflowListOptions): Promise<void> {
if (!this.workflowManager) {
const projectRoot = options.project || process.cwd();
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
const config: TaskExecutionManagerConfig = {
projectRoot,
maxConcurrent: 5,
defaultTimeout: 60,
worktreeBase,
claudeExecutable: options.claude || 'claude',
debug: false
};
this.workflowManager = new TaskExecutionManager(config);
await this.workflowManager.initialize();
}
}
private displayResults(workflows: WorkflowExecutionContext[], options: WorkflowListOptions): void {
switch (options.format) {
case 'json':
this.displayJson(workflows);
break;
case 'compact':
this.displayCompact(workflows);
break;
case 'text':
default:
this.displayText(workflows);
break;
}
}
private displayJson(workflows: WorkflowExecutionContext[]): void {
console.log(JSON.stringify({
workflows: workflows.map(w => ({
workflowId: `workflow-${w.taskId}`,
taskId: w.taskId,
taskTitle: w.taskTitle,
status: w.status,
worktreePath: w.worktreePath,
branchName: w.branchName,
processId: w.processId,
startedAt: w.startedAt,
lastActivity: w.lastActivity,
metadata: w.metadata
})),
total: workflows.length,
timestamp: new Date().toISOString()
}, null, 2));
}
private displayCompact(workflows: WorkflowExecutionContext[]): void {
if (workflows.length === 0) {
console.log(chalk.gray('No workflows found'));
return;
}
workflows.forEach(workflow => {
const workflowId = `workflow-${workflow.taskId}`;
const statusDisplay = this.getStatusDisplay(workflow.status);
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
console.log(
`${chalk.cyan(workflowId)} ${statusDisplay} ${workflow.taskTitle} ${chalk.gray(`(${duration})`)}`
);
});
}
private displayText(workflows: WorkflowExecutionContext[]): void {
ui.displayBanner('Active Workflows');
if (workflows.length === 0) {
ui.displayWarning('No workflows found');
console.log();
console.log(chalk.blue('💡 Start a new workflow with:'));
console.log(` ${chalk.cyan('tm workflow start <task-id>')}`);
return;
}
// Statistics
console.log(chalk.blue.bold('\n📊 Statistics:\n'));
const statusCounts = this.getStatusCounts(workflows);
Object.entries(statusCounts).forEach(([status, count]) => {
console.log(` ${this.getStatusDisplay(status)}: ${chalk.cyan(count)}`);
});
// Workflows table
console.log(chalk.blue.bold(`\n🔄 Workflows (${workflows.length}):\n`));
const tableData = workflows.map(workflow => {
const workflowId = `workflow-${workflow.taskId}`;
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
return [
chalk.cyan(workflowId),
chalk.yellow(workflow.taskId),
workflow.taskTitle.substring(0, 30) + (workflow.taskTitle.length > 30 ? '...' : ''),
this.getStatusDisplay(workflow.status),
workflow.processId ? chalk.green(workflow.processId.toString()) : chalk.gray('N/A'),
chalk.gray(duration),
chalk.gray(path.basename(workflow.worktreePath))
];
});
console.log(ui.createTable(
['Workflow ID', 'Task ID', 'Task Title', 'Status', 'PID', 'Duration', 'Worktree'],
tableData
));
// Running workflows actions
const runningWorkflows = workflows.filter(w => w.status === 'running');
if (runningWorkflows.length > 0) {
console.log(chalk.blue.bold('\n🚀 Quick Actions:\n'));
runningWorkflows.slice(0, 3).forEach(workflow => {
const workflowId = `workflow-${workflow.taskId}`;
console.log(` • Attach to ${chalk.cyan(workflowId)}: ${chalk.gray(`tm workflow attach ${workflowId}`)}`);
});
if (runningWorkflows.length > 3) {
console.log(` ${chalk.gray(`... and ${runningWorkflows.length - 3} more`)}`);
}
}
}
private getStatusDisplay(status: string): string {
const statusMap = {
pending: { icon: '⏳', color: chalk.yellow },
initializing: { icon: '🔄', color: chalk.blue },
running: { icon: '🚀', color: chalk.green },
paused: { icon: '⏸️', color: chalk.hex('#FFA500') }, // chalk has no built-in orange
completed: { icon: '✅', color: chalk.green },
failed: { icon: '❌', color: chalk.red },
cancelled: { icon: '🛑', color: chalk.gray },
timeout: { icon: '⏰', color: chalk.red }
};
const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
return `${statusInfo.icon} ${statusInfo.color(status)}`;
}
private getStatusCounts(workflows: WorkflowExecutionContext[]): Record<string, number> {
const counts: Record<string, number> = {};
workflows.forEach(workflow => {
counts[workflow.status] = (counts[workflow.status] || 0) + 1;
});
return counts;
}
private formatDuration(start: Date, end: Date): string {
const diff = end.getTime() - start.getTime();
const minutes = Math.floor(diff / (1000 * 60));
const hours = Math.floor(minutes / 60);
if (hours > 0) {
return `${hours}h ${minutes % 60}m`;
} else if (minutes > 0) {
return `${minutes}m`;
} else {
return '<1m';
}
}
async cleanup(): Promise<void> {
if (this.workflowManager) {
this.workflowManager.removeAllListeners();
}
}
static register(program: Command, name?: string): WorkflowListCommand {
const command = new WorkflowListCommand(name);
program.addCommand(command);
return command;
}
}
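
For reference, --format json prints an object with the shape sketched below; every value here is an illustrative placeholder, not real workflow data (Date fields are serialized to ISO strings by JSON.stringify).

const exampleJsonOutput = {
  workflows: [
    {
      workflowId: 'workflow-42',
      taskId: '42',
      taskTitle: 'Add OAuth flow',
      status: 'running',
      worktreePath: '../task-worktrees/task-42',
      branchName: 'task-42-add-oauth-flow',
      processId: 12345,
      startedAt: '2025-09-08T19:00:00.000Z',
      lastActivity: '2025-09-08T19:12:00.000Z',
      metadata: {}
    }
  ],
  total: 1,
  timestamp: '2025-09-08T19:12:30.000Z'
};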

View File

@@ -0,0 +1,239 @@
/**
* @fileoverview Workflow Start Command
* Start task execution in isolated worktree with Claude Code process
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
createTaskMasterCore,
type TaskMasterCore
} from '@tm/core';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowStartOptions {
project?: string;
branch?: string;
timeout?: string; // commander supplies option values as strings; parsed with parseInt below
worktreeBase?: string;
claude?: string;
debug?: boolean;
env?: string;
}
/**
* WorkflowStartCommand - Start task execution workflow
*/
export class WorkflowStartCommand extends Command {
private tmCore?: TaskMasterCore;
private workflowManager?: TaskExecutionManager;
constructor(name?: string) {
super(name || 'start');
this.description('Start task execution in isolated worktree')
.argument('<task-id>', 'Task ID to execute')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.option('-b, --branch <name>', 'Custom branch name for worktree')
.option('-t, --timeout <minutes>', 'Execution timeout in minutes', '60')
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
.option('--claude <path>', 'Claude Code executable path', 'claude')
.option('--debug', 'Enable debug logging')
.option('--env <vars>', 'Environment variables (KEY=VALUE,KEY2=VALUE2)')
.action(async (taskId: string, options: WorkflowStartOptions) => {
await this.executeCommand(taskId, options);
});
}
private async executeCommand(taskId: string, options: WorkflowStartOptions): Promise<void> {
try {
// Initialize components
await this.initializeCore(options.project || process.cwd());
await this.initializeWorkflowManager(options);
// Get task details
const task = await this.getTask(taskId);
if (!task) {
throw new Error(`Task ${taskId} not found`);
}
// Check if task already has active workflow
const existingWorkflow = this.workflowManager!.getWorkflowByTaskId(taskId);
if (existingWorkflow) {
ui.displayWarning(`Task ${taskId} already has an active workflow`);
console.log(`Workflow ID: ${chalk.cyan('workflow-' + taskId)}`);
console.log(`Status: ${this.getStatusDisplay(existingWorkflow.status)}`);
console.log(`Worktree: ${chalk.gray(existingWorkflow.worktreePath)}`);
return;
}
// Parse environment variables
const env = this.parseEnvironmentVariables(options.env);
// Display task info
ui.displayBanner(`Starting Workflow for Task ${taskId}`);
console.log(`${chalk.blue('Task:')} ${task.title}`);
console.log(`${chalk.blue('Description:')} ${task.description}`);
if (task.dependencies?.length) {
console.log(`${chalk.blue('Dependencies:')} ${task.dependencies.join(', ')}`);
}
console.log(`${chalk.blue('Priority:')} ${task.priority || 'normal'}`);
console.log();
// Start workflow
ui.displaySpinner('Creating worktree and starting Claude Code process...');
const workflowId = await this.workflowManager!.startTaskExecution(task, {
branchName: options.branch,
timeout: parseInt(options.timeout || '60'),
env
});
const workflow = this.workflowManager!.getWorkflowStatus(workflowId);
ui.displaySuccess('Workflow started successfully!');
console.log();
console.log(`${chalk.green('✓')} Workflow ID: ${chalk.cyan(workflowId)}`);
console.log(`${chalk.green('✓')} Worktree: ${chalk.gray(workflow?.worktreePath)}`);
console.log(`${chalk.green('✓')} Branch: ${chalk.gray(workflow?.branchName)}`);
console.log(`${chalk.green('✓')} Process ID: ${chalk.gray(workflow?.processId)}`);
console.log();
// Display next steps
console.log(chalk.blue.bold('📋 Next Steps:'));
console.log(` • Monitor: ${chalk.cyan(`tm workflow status ${workflowId}`)}`);
console.log(` • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
console.log();
// Setup event listeners for real-time updates
this.setupEventListeners();
} catch (error: any) {
ui.displayError(error.message || 'Failed to start workflow');
if (options.debug && error.stack) {
console.error(chalk.gray(error.stack));
}
process.exit(1);
}
}
private async initializeCore(projectRoot: string): Promise<void> {
if (!this.tmCore) {
this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
}
}
private async initializeWorkflowManager(options: WorkflowStartOptions): Promise<void> {
if (!this.workflowManager) {
const projectRoot = options.project || process.cwd();
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
const config: TaskExecutionManagerConfig = {
projectRoot,
maxConcurrent: 5,
defaultTimeout: parseInt(options.timeout || '60'),
worktreeBase,
claudeExecutable: options.claude || 'claude',
debug: options.debug || false
};
this.workflowManager = new TaskExecutionManager(config);
await this.workflowManager.initialize();
}
}
private async getTask(taskId: string) {
if (!this.tmCore) {
throw new Error('TaskMasterCore not initialized');
}
const result = await this.tmCore.getTaskList({});
return result.tasks.find(task => task.id === taskId);
}
private parseEnvironmentVariables(envString?: string): Record<string, string> | undefined {
if (!envString) return undefined;
const env: Record<string, string> = {};
for (const pair of envString.split(',')) {
const [key, ...valueParts] = pair.trim().split('=');
if (key && valueParts.length > 0) {
env[key] = valueParts.join('=');
}
}
return Object.keys(env).length > 0 ? env : undefined;
}
private getStatusDisplay(status: string): string {
const colors = {
pending: chalk.yellow,
initializing: chalk.blue,
running: chalk.green,
paused: chalk.hex('#FFA500'), // chalk has no built-in orange
completed: chalk.green,
failed: chalk.red,
cancelled: chalk.gray,
timeout: chalk.red
};
const color = colors[status as keyof typeof colors] || chalk.white;
return color(status);
}
private setupEventListeners(): void {
if (!this.workflowManager) return;
this.workflowManager.on('workflow.started', (event) => {
console.log(`${chalk.green('🚀')} Workflow started: ${event.workflowId}`);
});
this.workflowManager.on('process.output', (event) => {
if (event.data?.stream === 'stdout') {
console.log(`${chalk.blue('[OUT]')} ${event.data.data.trim()}`);
} else if (event.data?.stream === 'stderr') {
console.log(`${chalk.red('[ERR]')} ${event.data.data.trim()}`);
}
});
this.workflowManager.on('workflow.completed', (event) => {
console.log(`${chalk.green('✅')} Workflow completed: ${event.workflowId}`);
});
this.workflowManager.on('workflow.failed', (event) => {
console.log(`${chalk.red('❌')} Workflow failed: ${event.workflowId}`);
if (event.error) {
console.log(`${chalk.red('Error:')} ${event.error.message}`);
}
});
}
async cleanup(): Promise<void> {
if (this.workflowManager) {
// Don't cleanup workflows, just disconnect
this.workflowManager.removeAllListeners();
}
if (this.tmCore) {
await this.tmCore.close();
this.tmCore = undefined;
}
}
static register(program: Command, name?: string): WorkflowStartCommand {
const command = new WorkflowStartCommand(name);
program.addCommand(command);
return command;
}
}
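
The --env option above accepts comma-separated KEY=VALUE pairs; the standalone sketch below mirrors the parseEnvironmentVariables helper to show the expected output shape (the function name here is only for illustration).

// Standalone copy of the --env parsing rule, for illustration only.
function parseEnv(envString?: string): Record<string, string> | undefined {
  if (!envString) return undefined;
  const env: Record<string, string> = {};
  for (const pair of envString.split(',')) {
    const [key, ...valueParts] = pair.trim().split('=');
    // Values may themselves contain '=', so re-join everything after the first one.
    if (key && valueParts.length > 0) env[key] = valueParts.join('=');
  }
  return Object.keys(env).length > 0 ? env : undefined;
}

// parseEnv('NODE_ENV=test,TOKEN=abc=123')
// => { NODE_ENV: 'test', TOKEN: 'abc=123' }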

View File

@@ -0,0 +1,339 @@
/**
* @fileoverview Workflow Status Command
* Show detailed status of a specific workflow
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowStatusOptions {
project?: string;
worktreeBase?: string;
claude?: string;
watch?: boolean;
format?: 'text' | 'json';
}
/**
* WorkflowStatusCommand - Show workflow execution status
*/
export class WorkflowStatusCommand extends Command {
private workflowManager?: TaskExecutionManager;
constructor(name?: string) {
super(name || 'status');
this.description('Show detailed status of a workflow execution')
.argument('<workflow-id>', 'Workflow ID or task ID to check')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
.option('--claude <path>', 'Claude Code executable path', 'claude')
.option('-w, --watch', 'Watch for status changes (refresh every 2 seconds)')
.option('-f, --format <format>', 'Output format (text, json)', 'text')
.action(async (workflowId: string, options: WorkflowStatusOptions) => {
await this.executeCommand(workflowId, options);
});
}
private async executeCommand(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
try {
// Initialize workflow manager
await this.initializeWorkflowManager(options);
if (options.watch) {
await this.watchWorkflowStatus(workflowId, options);
} else {
await this.showWorkflowStatus(workflowId, options);
}
} catch (error: any) {
ui.displayError(error.message || 'Failed to get workflow status');
process.exit(1);
}
}
private async initializeWorkflowManager(options: WorkflowStatusOptions): Promise<void> {
if (!this.workflowManager) {
const projectRoot = options.project || process.cwd();
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
const config: TaskExecutionManagerConfig = {
projectRoot,
maxConcurrent: 5,
defaultTimeout: 60,
worktreeBase,
claudeExecutable: options.claude || 'claude',
debug: false
};
this.workflowManager = new TaskExecutionManager(config);
await this.workflowManager.initialize();
}
}
private async showWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
// Try to find workflow by ID or task ID
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
if (!workflow) {
// Try as task ID
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
}
if (!workflow) {
throw new Error(`Workflow not found: ${workflowId}`);
}
if (options.format === 'json') {
this.displayJsonStatus(workflow);
} else {
this.displayTextStatus(workflow);
}
}
private async watchWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
console.log(chalk.blue.bold('👀 Watching workflow status (Press Ctrl+C to exit)\n'));
let lastStatus = '';
let updateCount = 0;
const updateStatus = async () => {
try {
// Clear screen and move cursor to top
if (updateCount > 0) {
process.stdout.write('\x1b[2J\x1b[0f');
}
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
if (!workflow) {
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
}
if (!workflow) {
console.log(chalk.red(`Workflow not found: ${workflowId}`));
return;
}
// Display header with timestamp
console.log(chalk.blue.bold('👀 Watching Workflow Status'));
console.log(chalk.gray(`Last updated: ${new Date().toLocaleTimeString()}\n`));
this.displayTextStatus(workflow);
// Check if workflow has ended
if (['completed', 'failed', 'cancelled', 'timeout'].includes(workflow.status)) {
console.log(chalk.yellow('\n⚠ Workflow has ended. Stopping watch mode.'));
return;
}
updateCount++;
} catch (error) {
console.error(chalk.red('Error updating status:'), error);
}
};
// Initial display
await updateStatus();
// Setup interval for updates
const interval = setInterval(updateStatus, 2000);
// Handle Ctrl+C
process.on('SIGINT', () => {
clearInterval(interval);
console.log(chalk.yellow('\n👋 Stopped watching workflow status'));
process.exit(0);
});
// Keep the process alive
await new Promise(() => {});
}
private displayJsonStatus(workflow: any): void {
const status = {
workflowId: `workflow-${workflow.taskId}`,
taskId: workflow.taskId,
taskTitle: workflow.taskTitle,
taskDescription: workflow.taskDescription,
status: workflow.status,
worktreePath: workflow.worktreePath,
branchName: workflow.branchName,
processId: workflow.processId,
startedAt: workflow.startedAt,
lastActivity: workflow.lastActivity,
duration: this.calculateDuration(workflow.startedAt, workflow.lastActivity),
metadata: workflow.metadata
};
console.log(JSON.stringify(status, null, 2));
}
private displayTextStatus(workflow: any): void {
const workflowId = `workflow-${workflow.taskId}`;
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
ui.displayBanner(`Workflow Status: ${workflowId}`);
// Basic information
console.log(chalk.blue.bold('\n📋 Basic Information:\n'));
console.log(` Workflow ID: ${chalk.cyan(workflowId)}`);
console.log(` Task ID: ${chalk.cyan(workflow.taskId)}`);
console.log(` Task Title: ${workflow.taskTitle}`);
console.log(` Status: ${this.getStatusDisplay(workflow.status)}`);
console.log(` Duration: ${chalk.gray(duration)}`);
// Task details
if (workflow.taskDescription) {
console.log(chalk.blue.bold('\n📝 Task Details:\n'));
console.log(` ${workflow.taskDescription}`);
}
// Process information
console.log(chalk.blue.bold('\n⚙ Process Information:\n'));
console.log(` Process ID: ${workflow.processId ? chalk.green(workflow.processId) : chalk.gray('N/A')}`);
console.log(` Worktree: ${chalk.gray(workflow.worktreePath)}`);
console.log(` Branch: ${chalk.gray(workflow.branchName)}`);
// Timing information
console.log(chalk.blue.bold('\n⏰ Timing:\n'));
console.log(` Started: ${chalk.gray(workflow.startedAt.toLocaleString())}`);
console.log(` Last Activity: ${chalk.gray(workflow.lastActivity.toLocaleString())}`);
// Metadata
if (workflow.metadata && Object.keys(workflow.metadata).length > 0) {
console.log(chalk.blue.bold('\n🔖 Metadata:\n'));
Object.entries(workflow.metadata).forEach(([key, value]) => {
console.log(` ${key}: ${chalk.gray(String(value))}`);
});
}
// Status-specific information
this.displayStatusSpecificInfo(workflow);
// Actions
this.displayAvailableActions(workflow);
}
private displayStatusSpecificInfo(workflow: any): void {
const workflowId = `workflow-${workflow.taskId}`;
switch (workflow.status) {
case 'running':
console.log(chalk.blue.bold('\n🚀 Running Status:\n'));
console.log(` ${chalk.green('●')} Process is actively executing`);
console.log(` ${chalk.blue('')} Monitor output with: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
break;
case 'paused':
console.log(chalk.blue.bold('\n⏸ Paused Status:\n'));
console.log(` ${chalk.yellow('●')} Workflow is paused`);
console.log(` ${chalk.blue('')} Resume with: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
break;
case 'completed':
console.log(chalk.blue.bold('\n✅ Completed Status:\n'));
console.log(` ${chalk.green('●')} Workflow completed successfully`);
console.log(` ${chalk.blue('')} Resources have been cleaned up`);
break;
case 'failed':
console.log(chalk.blue.bold('\n❌ Failed Status:\n'));
console.log(` ${chalk.red('●')} Workflow execution failed`);
console.log(` ${chalk.blue('')} Check logs for error details`);
break;
case 'initializing':
console.log(chalk.blue.bold('\n🔄 Initializing Status:\n'));
console.log(` ${chalk.blue('●')} Setting up worktree and process`);
console.log(` ${chalk.blue('')} This should complete shortly`);
break;
}
}
private displayAvailableActions(workflow: any): void {
const workflowId = `workflow-${workflow.taskId}`;
console.log(chalk.blue.bold('\n🎯 Available Actions:\n'));
switch (workflow.status) {
case 'running':
console.log(` • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
console.log(` • Pause: ${chalk.cyan(`tm workflow pause ${workflowId}`)}`);
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
break;
case 'paused':
console.log(` • Resume: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
break;
case 'pending':
case 'initializing':
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
break;
case 'completed':
case 'failed':
case 'cancelled':
console.log(` • View logs: ${chalk.cyan(`tm workflow logs ${workflowId}`)}`);
console.log(` • Start new: ${chalk.cyan(`tm workflow start ${workflow.taskId}`)}`);
break;
}
console.log(` • List all: ${chalk.cyan('tm workflow list')}`);
}
private getStatusDisplay(status: string): string {
const statusMap = {
pending: { icon: '⏳', color: chalk.yellow },
initializing: { icon: '🔄', color: chalk.blue },
running: { icon: '🚀', color: chalk.green },
paused: { icon: '⏸️', color: chalk.hex('#FFA500') }, // chalk has no built-in orange
completed: { icon: '✅', color: chalk.green },
failed: { icon: '❌', color: chalk.red },
cancelled: { icon: '🛑', color: chalk.gray },
timeout: { icon: '⏰', color: chalk.red }
};
const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
return `${statusInfo.icon} ${statusInfo.color(status)}`;
}
private formatDuration(start: Date, end: Date): string {
const diff = end.getTime() - start.getTime();
const minutes = Math.floor(diff / (1000 * 60));
const hours = Math.floor(minutes / 60);
const seconds = Math.floor((diff % (1000 * 60)) / 1000);
if (hours > 0) {
return `${hours}h ${minutes % 60}m ${seconds}s`;
} else if (minutes > 0) {
return `${minutes}m ${seconds}s`;
} else {
return `${seconds}s`;
}
}
private calculateDuration(start: Date, end: Date): number {
return Math.floor((end.getTime() - start.getTime()) / 1000);
}
async cleanup(): Promise<void> {
if (this.workflowManager) {
this.workflowManager.removeAllListeners();
}
}
static register(program: Command, name?: string): WorkflowStatusCommand {
const command = new WorkflowStatusCommand(name);
program.addCommand(command);
return command;
}
}

View File

@@ -0,0 +1,260 @@
/**
* @fileoverview Workflow Stop Command
* Stop and clean up workflow execution
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowStopOptions {
project?: string;
worktreeBase?: string;
claude?: string;
force?: boolean;
all?: boolean;
}
/**
* WorkflowStopCommand - Stop workflow execution
*/
export class WorkflowStopCommand extends Command {
private workflowManager?: TaskExecutionManager;
constructor(name?: string) {
super(name || 'stop');
this.description('Stop workflow execution and clean up resources')
.argument('[workflow-id]', 'Workflow ID to stop (or task ID)')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.option(
'--worktree-base <path>',
'Base directory for worktrees',
'../task-worktrees'
)
.option('--claude <path>', 'Claude Code executable path', 'claude')
.option('-f, --force', 'Force stop (kill process immediately)')
.option('--all', 'Stop all running workflows')
.action(
async (
workflowId: string | undefined,
options: WorkflowStopOptions
) => {
await this.executeCommand(workflowId, options);
}
);
}
private async executeCommand(
workflowId: string | undefined,
options: WorkflowStopOptions
): Promise<void> {
try {
// Initialize workflow manager
await this.initializeWorkflowManager(options);
if (options.all) {
await this.stopAllWorkflows(options);
} else if (workflowId) {
await this.stopSingleWorkflow(workflowId, options);
} else {
ui.displayError('Please specify a workflow ID or use --all flag');
process.exit(1);
}
} catch (error: any) {
ui.displayError(error.message || 'Failed to stop workflow');
process.exit(1);
}
}
private async initializeWorkflowManager(
options: WorkflowStopOptions
): Promise<void> {
if (!this.workflowManager) {
const projectRoot = options.project || process.cwd();
const worktreeBase = path.resolve(
projectRoot,
options.worktreeBase || '../task-worktrees'
);
const config: TaskExecutionManagerConfig = {
projectRoot,
maxConcurrent: 5,
defaultTimeout: 60,
worktreeBase,
claudeExecutable: options.claude || 'claude',
debug: false
};
this.workflowManager = new TaskExecutionManager(config);
await this.workflowManager.initialize();
}
}
private async stopSingleWorkflow(
workflowId: string,
options: WorkflowStopOptions
): Promise<void> {
// Try to find workflow by ID or task ID
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
if (!workflow) {
// Try as task ID
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
}
if (!workflow) {
throw new Error(`Workflow not found: ${workflowId}`);
}
const actualWorkflowId = `workflow-${workflow.taskId}`;
// Display workflow info
console.log(chalk.blue.bold(`🛑 Stopping Workflow: ${actualWorkflowId}`));
console.log(`${chalk.blue('Task:')} ${workflow.taskTitle}`);
console.log(
`${chalk.blue('Status:')} ${this.getStatusDisplay(workflow.status)}`
);
console.log(
`${chalk.blue('Worktree:')} ${chalk.gray(workflow.worktreePath)}`
);
if (workflow.processId) {
console.log(
`${chalk.blue('Process ID:')} ${chalk.gray(workflow.processId)}`
);
}
console.log();
// Confirm if not forced
if (!options.force && ['running', 'paused'].includes(workflow.status)) {
const shouldProceed = await ui.confirm(
`Are you sure you want to stop this ${workflow.status} workflow?`
);
if (!shouldProceed) {
console.log(chalk.gray('Operation cancelled'));
return;
}
}
// Stop the workflow
ui.displaySpinner('Stopping workflow and cleaning up resources...');
await this.workflowManager!.stopTaskExecution(
actualWorkflowId,
options.force
);
ui.displaySuccess('Workflow stopped successfully!');
console.log();
console.log(`${chalk.green('✓')} Process terminated`);
console.log(`${chalk.green('✓')} Worktree cleaned up`);
console.log(`${chalk.green('✓')} State updated`);
}
private async stopAllWorkflows(options: WorkflowStopOptions): Promise<void> {
const workflows = this.workflowManager!.listWorkflows();
const activeWorkflows = workflows.filter((w) =>
['pending', 'initializing', 'running', 'paused'].includes(w.status)
);
if (activeWorkflows.length === 0) {
ui.displayWarning('No active workflows to stop');
return;
}
console.log(
chalk.blue.bold(`🛑 Stopping ${activeWorkflows.length} Active Workflows`)
);
console.log();
// List workflows to be stopped
activeWorkflows.forEach((workflow) => {
console.log(
`${chalk.cyan(`workflow-${workflow.taskId}`)} - ${workflow.taskTitle} ${this.getStatusDisplay(workflow.status)}`
);
});
console.log();
// Confirm if not forced
if (!options.force) {
const shouldProceed = await ui.confirm(
`Are you sure you want to stop all ${activeWorkflows.length} active workflows?`
);
if (!shouldProceed) {
console.log(chalk.gray('Operation cancelled'));
return;
}
}
// Stop all workflows
ui.displaySpinner('Stopping all workflows...');
let stopped = 0;
let failed = 0;
for (const workflow of activeWorkflows) {
try {
const workflowId = `workflow-${workflow.taskId}`;
await this.workflowManager!.stopTaskExecution(
workflowId,
options.force
);
stopped++;
} catch (error) {
console.error(
`${chalk.red('✗')} Failed to stop workflow ${workflow.taskId}: ${error}`
);
failed++;
}
}
console.log();
if (stopped > 0) {
ui.displaySuccess(`Successfully stopped ${stopped} workflows`);
}
if (failed > 0) {
ui.displayWarning(`Failed to stop ${failed} workflows`);
}
}
private getStatusDisplay(status: string): string {
const statusMap = {
pending: { icon: '⏳', color: chalk.yellow },
initializing: { icon: '🔄', color: chalk.blue },
running: { icon: '🚀', color: chalk.green },
paused: { icon: '⏸️', color: chalk.hex('#FFA500') },
completed: { icon: '✅', color: chalk.green },
failed: { icon: '❌', color: chalk.red },
cancelled: { icon: '🛑', color: chalk.gray },
timeout: { icon: '⏰', color: chalk.red }
};
const statusInfo = statusMap[status as keyof typeof statusMap] || {
icon: '❓',
color: chalk.white
};
return `${statusInfo.icon} ${statusInfo.color(status)}`;
}
async cleanup(): Promise<void> {
if (this.workflowManager) {
this.workflowManager.removeAllListeners();
}
}
static register(program: Command, name?: string): WorkflowStopCommand {
const command = new WorkflowStopCommand(name);
program.addCommand(command);
return command;
}
}

View File

@@ -5,6 +5,11 @@
// Commands
export { ListTasksCommand } from './commands/list.command.js';
export { AuthCommand } from './commands/auth.command.js';
export { WorkflowCommand } from './commands/workflow.command.js';
// Command registry
export { registerAllCommands } from './commands/index.js';
// UI utilities (for other commands to use)
export * as ui from './utils/ui.js';

View File

@@ -324,3 +324,61 @@ export function createTaskTable(
return table.toString();
}
/**
* Display a spinner with message (mock implementation)
*/
export function displaySpinner(message: string): void {
console.log(chalk.blue('◐'), chalk.gray(message));
}
/**
* Simple confirmation prompt
*/
export async function confirm(message: string): Promise<boolean> {
// Minimal stdin-based confirmation; a full implementation would use inquirer
console.log(chalk.yellow('?'), chalk.white(message), chalk.gray('(y/n)'));
return new Promise((resolve) => {
process.stdin.once('data', (data) => {
const answer = data.toString().trim().toLowerCase();
resolve(answer === 'y' || answer === 'yes');
});
process.stdin.resume();
});
}
/**
* Create a generic table
*/
export function createTable(headers: string[], rows: string[][]): string {
const table = new Table({
head: headers.map(h => chalk.blue.bold(h)),
style: {
head: [],
border: ['gray']
},
chars: {
'top': '─',
'top-mid': '┬',
'top-left': '┌',
'top-right': '┐',
'bottom': '─',
'bottom-mid': '┴',
'bottom-left': '└',
'bottom-right': '┘',
'left': '│',
'left-mid': '├',
'mid': '─',
'mid-mid': '┼',
'right': '│',
'right-mid': '┤',
'middle': '│'
}
});
rows.forEach(row => table.push(row));
return table.toString();
}
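
The list command above consumes this helper for its workflow table; here is a usage sketch with illustrative headers and rows (the relative import path assumes a file under src/commands/workflow/, as in the commands above):

import * as ui from '../../utils/ui.js';

// Rows are plain string arrays; values here are placeholders.
const rows = [
  ['workflow-42', '42', 'Add OAuth flow', '🚀 running', '12345', '12m', 'task-42'],
  ['workflow-43', '43', 'Fix token refresh', '⏸️ paused', 'N/A', '3m', 'task-43']
];

console.log(
  ui.createTable(
    ['Workflow ID', 'Task ID', 'Task Title', 'Status', 'PID', 'Duration', 'Worktree'],
    rows
  )
);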

package-lock.json (generated, 203 lines changed)

View File

@@ -69,6 +69,7 @@
"@changesets/changelog-github": "^0.5.1",
"@changesets/cli": "^2.28.1",
"@types/jest": "^29.5.14",
"dotenv-mono": "^1.5.1",
"execa": "^8.0.1",
"ink": "^5.0.1",
"jest": "^29.7.0",
@@ -95,11 +96,13 @@
"license": "MIT",
"dependencies": {
"@tm/core": "*",
"@tm/workflow-engine": "*",
"boxen": "^7.1.1",
"chalk": "^5.3.0",
"cli-table3": "^0.6.5",
"commander": "^12.1.0",
"inquirer": "^9.2.10",
"open": "^10.2.0",
"ora": "^8.1.0"
},
"devDependencies": {
@@ -551,6 +554,7 @@
"apps/extension": {
"version": "0.24.2-rc.1",
"dependencies": {
"@tm/workflow-engine": "*",
"task-master-ai": "0.26.0-rc.1"
},
"devDependencies": {
@@ -8947,6 +8951,80 @@
"version": "0.0.22",
"license": "MIT"
},
"node_modules/@supabase/auth-js": {
"version": "2.71.1",
"resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.71.1.tgz",
"integrity": "sha512-mMIQHBRc+SKpZFRB2qtupuzulaUhFYupNyxqDj5Jp/LyPvcWvjaJzZzObv6URtL/O6lPxkanASnotGtNpS3H2Q==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/functions-js": {
"version": "2.4.5",
"resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.4.5.tgz",
"integrity": "sha512-v5GSqb9zbosquTo6gBwIiq7W9eQ7rE5QazsK/ezNiQXdCbY+bH8D9qEaBIkhVvX4ZRW5rP03gEfw5yw9tiq4EQ==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/node-fetch": {
"version": "2.6.15",
"resolved": "https://registry.npmjs.org/@supabase/node-fetch/-/node-fetch-2.6.15.tgz",
"integrity": "sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
}
},
"node_modules/@supabase/postgrest-js": {
"version": "1.21.3",
"resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.21.3.tgz",
"integrity": "sha512-rg3DmmZQKEVCreXq6Am29hMVe1CzemXyIWVYyyua69y6XubfP+DzGfLxME/1uvdgwqdoaPbtjBDpEBhqxq1ZwA==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/realtime-js": {
"version": "2.15.4",
"resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.15.4.tgz",
"integrity": "sha512-e/FYIWjvQJHOCNACWehnKvg26zosju3694k0NMUNb+JGLdvHJzEa29ZVVLmawd2kvx4hdbv8mxSqfttRnH3+DA==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.13",
"@types/phoenix": "^1.6.6",
"@types/ws": "^8.18.1",
"ws": "^8.18.2"
}
},
"node_modules/@supabase/storage-js": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.11.0.tgz",
"integrity": "sha512-Y+kx/wDgd4oasAgoAq0bsbQojwQ+ejIif8uczZ9qufRHWFLMU5cODT+ApHsSrDufqUcVKt+eyxtOXSkeh2v9ww==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/supabase-js": {
"version": "2.57.0",
"resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.57.0.tgz",
"integrity": "sha512-h9ttcL0MY4h+cGqZl95F/RuqccuRBjHU9B7Qqvw0Da+pPK2sUlU1/UdvyqUGj37UsnSphr9pdGfeXjesYkBcyA==",
"license": "MIT",
"dependencies": {
"@supabase/auth-js": "2.71.1",
"@supabase/functions-js": "2.4.5",
"@supabase/node-fetch": "2.6.15",
"@supabase/postgrest-js": "1.21.3",
"@supabase/realtime-js": "2.15.4",
"@supabase/storage-js": "^2.10.4"
}
},
"node_modules/@szmarczak/http-timer": {
"version": "5.0.1",
"dev": true,
@@ -9058,6 +9136,10 @@
"resolved": "packages/tm-core",
"link": true
},
"node_modules/@tm/workflow-engine": {
"resolved": "packages/workflow-engine",
"link": true
},
"node_modules/@tokenizer/inflate": {
"version": "0.2.7",
"license": "MIT",
@@ -9343,6 +9425,12 @@
"form-data": "^4.0.0"
}
},
"node_modules/@types/phoenix": {
"version": "1.6.6",
"resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.6.tgz",
"integrity": "sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==",
"license": "MIT"
},
"node_modules/@types/react": {
"version": "19.1.8",
"dev": true,
@@ -9400,6 +9488,15 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/ws": {
"version": "8.18.1",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/yargs": {
"version": "17.0.33",
"dev": true,
@@ -11087,7 +11184,6 @@
},
"node_modules/bundle-name": {
"version": "4.1.0",
"devOptional": true,
"license": "MIT",
"dependencies": {
"run-applescript": "^7.0.0"
@@ -12480,7 +12576,6 @@
},
"node_modules/default-browser": {
"version": "5.2.1",
"devOptional": true,
"license": "MIT",
"dependencies": {
"bundle-name": "^4.1.0",
@@ -12495,7 +12590,6 @@
},
"node_modules/default-browser-id": {
"version": "5.0.0",
"devOptional": true,
"license": "MIT",
"engines": {
"node": ">=18"
@@ -12542,7 +12636,6 @@
},
"node_modules/define-lazy-prop": {
"version": "3.0.0",
"devOptional": true,
"license": "MIT",
"engines": {
"node": ">=12"
@@ -12837,6 +12930,52 @@
"url": "https://dotenvx.com"
}
},
"node_modules/dotenv-expand": {
"version": "12.0.2",
"resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-12.0.2.tgz",
"integrity": "sha512-lXpXz2ZE1cea1gL4sz2Ipj8y4PiVjytYr3Ij0SWoms1PGxIv7m2CRKuRuCRtHdVuvM/hNJPMxt5PbhboNC4dPQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"dotenv": "^16.4.5"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/dotenv-mono": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/dotenv-mono/-/dotenv-mono-1.5.1.tgz",
"integrity": "sha512-dt7bK/WKQvL0gcdTxjI7wD4MhVR5F4bCk70XMAgnrbWN3fdhpyhWCypYbZalr/vjLURLA7Ib9/VCzazRLJnp1Q==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"cross-spawn": "^7.0.6",
"dotenv": "^17.2.0",
"dotenv-expand": "^12.0.2",
"minimist": "^1.2.8"
},
"bin": {
"dotenv": "dist/cli.js",
"dotenv-mono": "dist/cli.js"
}
},
"node_modules/dotenv-mono/node_modules/dotenv": {
"version": "17.2.1",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.1.tgz",
"integrity": "sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==",
"dev": true,
"license": "BSD-2-Clause",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"license": "MIT",
@@ -15897,7 +16036,6 @@
},
"node_modules/is-docker": {
"version": "3.0.0",
"devOptional": true,
"license": "MIT",
"bin": {
"is-docker": "cli.js"
@@ -16011,7 +16149,6 @@
},
"node_modules/is-inside-container": {
"version": "1.0.0",
"devOptional": true,
"license": "MIT",
"dependencies": {
"is-docker": "^3.0.0"
@@ -16335,7 +16472,6 @@
},
"node_modules/is-wsl": {
"version": "3.1.0",
"devOptional": true,
"license": "MIT",
"dependencies": {
"is-inside-container": "^1.0.0"
@@ -19601,7 +19737,6 @@
"version": "1.2.8",
"dev": true,
"license": "MIT",
"optional": true,
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -20629,7 +20764,8 @@
},
"node_modules/open": {
"version": "10.2.0",
"devOptional": true,
"resolved": "https://registry.npmjs.org/open/-/open-10.2.0.tgz",
"integrity": "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==",
"license": "MIT",
"dependencies": {
"default-browser": "^5.2.1",
@@ -22702,7 +22838,6 @@
},
"node_modules/run-applescript": {
"version": "7.0.0",
"devOptional": true,
"license": "MIT",
"engines": {
"node": ">=18"
@@ -25886,7 +26021,6 @@
},
"node_modules/ws": {
"version": "8.18.2",
"devOptional": true,
"license": "MIT",
"engines": {
"node": ">=10.0.0"
@@ -25906,7 +26040,6 @@
},
"node_modules/wsl-utils": {
"version": "0.1.0",
"devOptional": true,
"license": "MIT",
"dependencies": {
"is-wsl": "^3.1.0"
@@ -26153,17 +26286,32 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"packages/logger": {
"name": "@tm/logger",
"version": "1.0.0",
"extraneous": true,
"dependencies": {
"chalk": "^5.3.0"
},
"devDependencies": {
"@types/node": "^20.11.5",
"typescript": "^5.3.3"
}
},
"packages/tm-core": {
"name": "@tm/core",
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"@supabase/supabase-js": "^2.57.0",
"chalk": "^5.3.0",
"zod": "^3.22.4"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/node": "^20.11.30",
"@vitest/coverage-v8": "^2.0.5",
"dotenv-mono": "^1.5.1",
"ts-node": "^10.9.2",
"tsup": "^8.0.2",
"typescript": "^5.4.3",
@@ -26185,6 +26333,37 @@
"version": "6.21.0",
"dev": true,
"license": "MIT"
},
"packages/workflow-engine": {
"name": "@tm/workflow-engine",
"version": "0.1.0",
"license": "MIT",
"dependencies": {
"@tm/core": "*"
},
"devDependencies": {
"@types/node": "^22.0.0",
"tsup": "^8.0.0",
"typescript": "^5.5.0",
"vitest": "^2.0.0"
}
},
"packages/workflow-engine/node_modules/@types/node": {
"version": "22.18.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.1.tgz",
"integrity": "sha512-rzSDyhn4cYznVG+PCzGe1lwuMYJrcBS1fc3JqSa2PvtABwWo+dZ1ij5OVok3tqfpEBCBoaR4d7upFJk73HRJDw==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"packages/workflow-engine/node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
}
}
}

View File

@@ -13,11 +13,13 @@
"scripts": {
"build": "npm run build:packages && tsup",
"dev": "npm run build:packages && npm link && (npm run dev:packages & tsup --watch --onSuccess 'echo Build complete && npm link')",
"dev:packages": "(cd packages/tm-core && npm run dev) & (cd apps/cli && npm run dev) & wait",
"dev:packages": "(cd packages/tm-core && npm run dev) & (cd packages/workflow-engine && npm run dev) & (cd apps/cli && npm run dev) & wait",
"dev:core": "cd packages/tm-core && npm run dev",
"dev:workflow": "cd packages/workflow-engine && npm run dev",
"dev:cli": "cd apps/cli && npm run dev",
"build:packages": "npm run build:core && npm run build:cli",
"build:packages": "npm run build:core && npm run build:workflow && npm run build:cli",
"build:core": "cd packages/tm-core && npm run build",
"build:workflow": "cd packages/workflow-engine && npm run build",
"build:cli": "cd apps/cli && npm run build",
"test": "node --experimental-vm-modules node_modules/.bin/jest",
"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
@@ -116,8 +118,11 @@
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@changesets/changelog-github": "^0.5.1",
"@changesets/cli": "^2.28.1",
"dotenv-mono": "^1.5.1",
"@types/jest": "^29.5.14",
"execa": "^8.0.1",
"ink": "^5.0.1",

File diff suppressed because it is too large

View File

@@ -10,7 +10,63 @@
"types": "./src/index.ts",
"import": "./dist/index.js",
"require": "./dist/index.js"
}
},
"./auth": {
"types": "./src/auth/index.ts",
"import": "./dist/auth/index.js",
"require": "./dist/auth/index.js"
},
"./storage": {
"types": "./src/storage/index.ts",
"import": "./dist/storage/index.js",
"require": "./dist/storage/index.js"
},
"./config": {
"types": "./src/config/index.ts",
"import": "./dist/config/index.js",
"require": "./dist/config/index.js"
},
"./providers": {
"types": "./src/providers/index.ts",
"import": "./dist/providers/index.js",
"require": "./dist/providers/index.js"
},
"./services": {
"types": "./src/services/index.ts",
"import": "./dist/services/index.js",
"require": "./dist/services/index.js"
},
"./errors": {
"types": "./src/errors/index.ts",
"import": "./dist/errors/index.js",
"require": "./dist/errors/index.js"
},
"./logger": {
"types": "./src/logger/index.ts",
"import": "./dist/logger/index.js",
"require": "./dist/logger/index.js"
},
"./types": {
"types": "./src/types/index.ts",
"import": "./dist/types/index.js",
"require": "./dist/types/index.js"
},
"./interfaces": {
"types": "./src/interfaces/index.ts",
"import": "./dist/interfaces/index.js",
"require": "./dist/interfaces/index.js"
},
"./utils": {
"types": "./src/utils/index.ts",
"import": "./dist/utils/index.js",
"require": "./dist/utils/index.js"
},
"./workflow": {
"types": "./src/workflow/index.ts",
"import": "./dist/workflow/index.js",
"require": "./dist/workflow/index.js"
},
"./package.json": "./package.json"
},
"scripts": {
"build": "tsup",
@@ -26,12 +82,16 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@supabase/supabase-js": "^2.57.0",
"@tm/workflow-engine": "*",
"chalk": "^5.3.0",
"zod": "^3.22.4"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/node": "^20.11.30",
"@vitest/coverage-v8": "^2.0.5",
"dotenv-mono": "^1.5.1",
"ts-node": "^10.9.2",
"tsup": "^8.0.2",
"typescript": "^5.4.3",

View File

@@ -0,0 +1,150 @@
/**
* Tests for AuthManager singleton behavior
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
// Mock the logger to verify warnings (must be hoisted before SUT import)
const mockLogger = {
warn: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
error: vi.fn()
};
vi.mock('../logger/index.js', () => ({
getLogger: () => mockLogger
}));
// Spy on CredentialStore constructor to verify config propagation
const CredentialStoreSpy = vi.fn();
vi.mock('./credential-store.js', () => {
return {
CredentialStore: class {
constructor(config: any) {
CredentialStoreSpy(config);
this.getCredentials = vi.fn(() => null);
}
getCredentials() {
return null;
}
saveCredentials() {}
clearCredentials() {}
hasValidCredentials() {
return false;
}
}
};
});
// Mock OAuthService to avoid side effects
vi.mock('./oauth-service.js', () => {
return {
OAuthService: class {
constructor() {}
authenticate() {
return Promise.resolve({});
}
getAuthorizationUrl() {
return null;
}
}
};
});
// Mock SupabaseAuthClient to avoid side effects
vi.mock('../clients/supabase-client.js', () => {
return {
SupabaseAuthClient: class {
constructor() {}
refreshSession() {
return Promise.resolve({});
}
signOut() {
return Promise.resolve();
}
}
};
});
// Import SUT after mocks
import { AuthManager } from './auth-manager.js';
describe('AuthManager Singleton', () => {
beforeEach(() => {
// Reset singleton before each test
AuthManager.resetInstance();
vi.clearAllMocks();
CredentialStoreSpy.mockClear();
});
it('should return the same instance on multiple calls', () => {
const instance1 = AuthManager.getInstance();
const instance2 = AuthManager.getInstance();
expect(instance1).toBe(instance2);
});
it('should use config on first call', () => {
const config = {
baseUrl: 'https://test.auth.com',
configDir: '/test/config',
configFile: '/test/config/auth.json'
};
const instance = AuthManager.getInstance(config);
expect(instance).toBeDefined();
// Assert that CredentialStore was constructed with the provided config
expect(CredentialStoreSpy).toHaveBeenCalledTimes(1);
expect(CredentialStoreSpy).toHaveBeenCalledWith(config);
// The CredentialStoreSpy assertion above is the observable proof that the
// config reached the internal store; the mocked getCredentials simply returns null
const credentials = instance.getCredentials();
expect(credentials).toBeNull(); // mocked store returns null; config propagation is verified via the spy
});
it('should warn when config is provided after initialization', () => {
// Clear previous calls
mockLogger.warn.mockClear();
// First call with config
AuthManager.getInstance({ baseUrl: 'https://first.auth.com' });
// Second call with different config
AuthManager.getInstance({ baseUrl: 'https://second.auth.com' });
// Verify warning was logged
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.stringMatching(/config.*after initialization.*ignored/i)
);
});
it('should not warn when no config is provided after initialization', () => {
// Clear previous calls
mockLogger.warn.mockClear();
// First call with config
AuthManager.getInstance({ configDir: '/test/config' });
// Second call without config
AuthManager.getInstance();
// Verify no warning was logged
expect(mockLogger.warn).not.toHaveBeenCalled();
});
it('should allow resetting the instance', () => {
const instance1 = AuthManager.getInstance();
// Reset the instance
AuthManager.resetInstance();
// Get new instance
const instance2 = AuthManager.getInstance();
// They should be different instances
expect(instance1).not.toBe(instance2);
});
});

View File

@@ -0,0 +1,136 @@
/**
* Authentication manager for Task Master CLI
*/
import {
AuthCredentials,
OAuthFlowOptions,
AuthenticationError,
AuthConfig
} from './types.js';
import { CredentialStore } from './credential-store.js';
import { OAuthService } from './oauth-service.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
import { getLogger } from '../logger/index.js';
/**
* Authentication manager class
*/
export class AuthManager {
private static instance: AuthManager | null = null;
private credentialStore: CredentialStore;
private oauthService: OAuthService;
private supabaseClient: SupabaseAuthClient;
private constructor(config?: Partial<AuthConfig>) {
this.credentialStore = new CredentialStore(config);
this.supabaseClient = new SupabaseAuthClient();
this.oauthService = new OAuthService(this.credentialStore, config);
}
/**
* Get singleton instance
*/
static getInstance(config?: Partial<AuthConfig>): AuthManager {
if (!AuthManager.instance) {
AuthManager.instance = new AuthManager(config);
} else if (config) {
// Warn if config is provided after initialization
const logger = getLogger('AuthManager');
logger.warn(
'getInstance called with config after initialization; config is ignored.'
);
}
return AuthManager.instance;
}
/**
* Reset the singleton instance (useful for testing)
*/
static resetInstance(): void {
AuthManager.instance = null;
}
/**
* Get stored authentication credentials
*/
getCredentials(): AuthCredentials | null {
return this.credentialStore.getCredentials();
}
/**
* Start OAuth 2.0 Authorization Code Flow with browser handling
*/
async authenticateWithOAuth(
options: OAuthFlowOptions = {}
): Promise<AuthCredentials> {
return this.oauthService.authenticate(options);
}
/**
* Get the authorization URL (for browser opening)
*/
getAuthorizationUrl(): string | null {
return this.oauthService.getAuthorizationUrl();
}
/**
* Refresh authentication token
*/
async refreshToken(): Promise<AuthCredentials> {
const authData = this.credentialStore.getCredentials({
allowExpired: true
});
if (!authData || !authData.refreshToken) {
throw new AuthenticationError(
'No refresh token available',
'NO_REFRESH_TOKEN'
);
}
// Use Supabase client to refresh the token
const response = await this.supabaseClient.refreshSession(
authData.refreshToken
);
// Update authentication data
const newAuthData: AuthCredentials = {
...authData,
token: response.token,
refreshToken: response.refreshToken,
expiresAt: response.expiresAt,
savedAt: new Date().toISOString()
};
this.credentialStore.saveCredentials(newAuthData);
return newAuthData;
}
/**
* Logout and clear credentials
*/
async logout(): Promise<void> {
try {
// First try to sign out from Supabase to revoke tokens
await this.supabaseClient.signOut();
} catch (error) {
// Log but don't throw - we still want to clear local credentials
getLogger('AuthManager').warn('Failed to sign out from Supabase:', error);
}
// Always clear local credentials (removes auth.json file)
this.credentialStore.clearCredentials();
}
/**
* Check if authenticated
*/
isAuthenticated(): boolean {
return this.credentialStore.hasValidCredentials();
}
}
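
A hedged sketch of how a CLI layer could drive this manager's OAuth flow; the `open` package and the console messages are illustrative and not part of tm-core:

import open from 'open';
import { AuthManager, AuthenticationError } from '@tm/core/auth';

async function login(): Promise<void> {
  const authManager = AuthManager.getInstance();
  try {
    const credentials = await authManager.authenticateWithOAuth({
      openBrowser: async (url) => {
        await open(url);
      },
      onAuthUrl: (url) => console.log(`If the browser did not open, visit: ${url}`),
      onWaitingForAuth: () => console.log('Waiting for you to finish signing in...'),
      timeout: 300000 // 5 minutes, matching the default
    });
    console.log(`Logged in as ${credentials.email ?? credentials.userId}`);
  } catch (error) {
    if (error instanceof AuthenticationError) {
      console.error(`Authentication failed (${error.code}): ${error.message}`);
    }
    throw error;
  }
}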

View File

@@ -0,0 +1,37 @@
/**
* Centralized authentication configuration
*/
import os from 'os';
import path from 'path';
import { AuthConfig } from './types.js';
// Single base domain for all URLs
// Build-time: process.env.TM_PUBLIC_BASE_DOMAIN gets replaced by tsup's env option
// Default: https://tryhamster.com for production
const BASE_DOMAIN =
process.env.TM_PUBLIC_BASE_DOMAIN || // This gets replaced at build time by tsup
'https://tryhamster.com';
/**
* Default authentication configuration
* All URL configuration is derived from the single BASE_DOMAIN
*/
export const DEFAULT_AUTH_CONFIG: AuthConfig = {
// Base domain for all services
baseUrl: BASE_DOMAIN,
// Configuration directory and file paths
configDir: path.join(os.homedir(), '.taskmaster'),
configFile: path.join(os.homedir(), '.taskmaster', 'auth.json')
};
/**
* Get merged configuration with optional overrides
*/
export function getAuthConfig(overrides?: Partial<AuthConfig>): AuthConfig {
return {
...DEFAULT_AUTH_CONFIG,
...overrides
};
}
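
Overrides are shallow-merged over the defaults, so a test or alternate environment can redirect credential storage without touching the base domain. The paths below are illustrative:

import os from 'os';
import path from 'path';
import { getAuthConfig } from '@tm/core/auth';

const tmpDir = path.join(os.tmpdir(), 'taskmaster-test');
const testConfig = getAuthConfig({
  configDir: tmpDir,
  configFile: path.join(tmpDir, 'auth.json')
});
// testConfig.baseUrl still resolves to TM_PUBLIC_BASE_DOMAIN or https://tryhamster.com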

View File

@@ -0,0 +1,575 @@
/**
* Tests for CredentialStore with numeric and string timestamp handling
*/
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
import { CredentialStore } from './credential-store.js';
import { AuthenticationError } from './types.js';
import type { AuthCredentials } from './types.js';
import fs from 'fs';
import path from 'path';
import os from 'os';
// Mock fs module
vi.mock('fs');
// Mock logger
const mockLogger = {
warn: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
error: vi.fn()
};
vi.mock('../logger/index.js', () => ({
getLogger: () => mockLogger
}));
describe('CredentialStore', () => {
let store: CredentialStore;
const testDir = '/test/config';
const configFile = '/test/config/auth.json';
beforeEach(() => {
vi.clearAllMocks();
store = new CredentialStore({
configDir: testDir,
configFile: configFile,
baseUrl: 'https://api.test.com'
});
});
afterEach(() => {
vi.restoreAllMocks();
});
describe('getCredentials with timestamp migration', () => {
it('should handle string ISO timestamp correctly', () => {
const futureDate = new Date(Date.now() + 3600000); // 1 hour from now
const mockCredentials: AuthCredentials = {
token: 'test-token',
userId: 'user-123',
email: 'test@example.com',
expiresAt: futureDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).not.toBeNull();
expect(result?.token).toBe('test-token');
// The timestamp should be normalized to numeric milliseconds
expect(typeof result?.expiresAt).toBe('number');
expect(result?.expiresAt).toBe(futureDate.getTime());
});
it('should handle numeric timestamp correctly', () => {
const futureTimestamp = Date.now() + 7200000; // 2 hours from now
const mockCredentials = {
token: 'test-token',
userId: 'user-456',
email: 'test2@example.com',
expiresAt: futureTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).not.toBeNull();
expect(result?.token).toBe('test-token');
// Numeric timestamp should remain as-is
expect(typeof result?.expiresAt).toBe('number');
expect(result?.expiresAt).toBe(futureTimestamp);
});
it('should reject invalid string timestamp', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-789',
expiresAt: 'invalid-date-string',
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should reject NaN timestamp', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-nan',
expiresAt: NaN,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should reject Infinity timestamp', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-inf',
expiresAt: Infinity,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should handle missing expiresAt field', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-no-expiry',
tokenType: 'standard',
savedAt: new Date().toISOString()
// No expiresAt field
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should check token expiration correctly', () => {
const expiredTimestamp = Date.now() - 3600000; // 1 hour ago
const mockCredentials = {
token: 'expired-token',
userId: 'user-expired',
expiresAt: expiredTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('Authentication token has expired'),
expect.any(Object)
);
});
it('should allow expired tokens when requested', () => {
const expiredTimestamp = Date.now() - 3600000; // 1 hour ago
const mockCredentials = {
token: 'expired-token',
userId: 'user-expired',
expiresAt: expiredTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials({ allowExpired: true });
expect(result).not.toBeNull();
expect(result?.token).toBe('expired-token');
});
});
describe('saveCredentials with timestamp normalization', () => {
beforeEach(() => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.mkdirSync).mockImplementation(() => undefined);
vi.mocked(fs.writeFileSync).mockImplementation(() => undefined);
vi.mocked(fs.renameSync).mockImplementation(() => undefined);
});
it('should normalize string timestamp to ISO string when saving', () => {
const futureDate = new Date(Date.now() + 3600000);
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-123',
expiresAt: futureDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
store.saveCredentials(credentials);
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('.tmp'),
expect.stringContaining('"expiresAt":'),
expect.any(Object)
);
// Check that the written data contains a valid ISO string
const writtenData = vi.mocked(fs.writeFileSync).mock
.calls[0][1] as string;
const parsed = JSON.parse(writtenData);
expect(typeof parsed.expiresAt).toBe('string');
expect(new Date(parsed.expiresAt).toISOString()).toBe(parsed.expiresAt);
});
it('should convert numeric timestamp to ISO string when saving', () => {
const futureTimestamp = Date.now() + 7200000;
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-456',
expiresAt: futureTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
store.saveCredentials(credentials);
const writtenData = vi.mocked(fs.writeFileSync).mock
.calls[0][1] as string;
const parsed = JSON.parse(writtenData);
expect(typeof parsed.expiresAt).toBe('string');
expect(new Date(parsed.expiresAt).getTime()).toBe(futureTimestamp);
});
it('should reject invalid string timestamp when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-789',
expiresAt: 'invalid-date' as any,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
let err: unknown;
try {
store.saveCredentials(credentials);
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Invalid expiresAt format');
});
it('should reject NaN timestamp when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-nan',
expiresAt: NaN as any,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
let err: unknown;
try {
store.saveCredentials(credentials);
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Invalid expiresAt format');
});
it('should reject Infinity timestamp when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-inf',
expiresAt: Infinity as any,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
let err: unknown;
try {
store.saveCredentials(credentials);
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Invalid expiresAt format');
});
it('should handle missing expiresAt when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-no-expiry',
tokenType: 'standard',
savedAt: new Date().toISOString()
// No expiresAt
};
store.saveCredentials(credentials);
const writtenData = vi.mocked(fs.writeFileSync).mock
.calls[0][1] as string;
const parsed = JSON.parse(writtenData);
expect(parsed.expiresAt).toBeUndefined();
});
it('should not mutate the original credentials object', () => {
const originalTimestamp = Date.now() + 3600000;
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-123',
expiresAt: originalTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
const originalCredentialsCopy = { ...credentials };
store.saveCredentials(credentials);
// Original object should not be modified
expect(credentials).toEqual(originalCredentialsCopy);
expect(credentials.expiresAt).toBe(originalTimestamp);
});
});
describe('corrupt file handling', () => {
it('should quarantine corrupt file on JSON parse error', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('invalid json {');
vi.mocked(fs.renameSync).mockImplementation(() => undefined);
const result = store.getCredentials();
expect(result).toBeNull();
expect(fs.renameSync).toHaveBeenCalledWith(
configFile,
expect.stringContaining('.corrupt-')
);
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('Quarantined corrupt auth file')
);
});
it('should handle quarantine failure gracefully', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('invalid json {');
vi.mocked(fs.renameSync).mockImplementation(() => {
throw new Error('Permission denied');
});
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.debug).toHaveBeenCalledWith(
expect.stringContaining('Could not quarantine corrupt file')
);
});
});
describe('clearCredentials', () => {
it('should delete the auth file when it exists', () => {
// Mock file exists
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.unlinkSync).mockImplementation(() => undefined);
store.clearCredentials();
expect(fs.existsSync).toHaveBeenCalledWith('/test/config/auth.json');
expect(fs.unlinkSync).toHaveBeenCalledWith('/test/config/auth.json');
});
it('should not throw when auth file does not exist', () => {
// Mock file does not exist
vi.mocked(fs.existsSync).mockReturnValue(false);
// Should not throw
expect(() => store.clearCredentials()).not.toThrow();
// Should not try to unlink non-existent file
expect(fs.unlinkSync).not.toHaveBeenCalled();
});
it('should throw AuthenticationError when unlink fails', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.unlinkSync).mockImplementation(() => {
throw new Error('Permission denied');
});
let err: unknown;
try {
store.clearCredentials();
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Failed to clear credentials');
expect((err as Error).message).toContain('Permission denied');
});
});
describe('hasValidCredentials', () => {
it('should return true when valid unexpired credentials exist', () => {
const futureDate = new Date(Date.now() + 3600000); // 1 hour from now
const credentials = {
token: 'valid-token',
userId: 'user-123',
expiresAt: futureDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(credentials));
expect(store.hasValidCredentials()).toBe(true);
});
it('should return false when credentials are expired', () => {
const pastDate = new Date(Date.now() - 3600000); // 1 hour ago
const credentials = {
token: 'expired-token',
userId: 'user-123',
expiresAt: pastDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(credentials));
expect(store.hasValidCredentials()).toBe(false);
});
it('should return false when no credentials exist', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
expect(store.hasValidCredentials()).toBe(false);
});
it('should return false when file contains invalid JSON', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('invalid json {');
vi.mocked(fs.renameSync).mockImplementation(() => undefined);
expect(store.hasValidCredentials()).toBe(false);
});
it('should return false for credentials without expiry', () => {
const credentials = {
token: 'no-expiry-token',
userId: 'user-123',
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(credentials));
// Credentials without expiry are considered invalid
expect(store.hasValidCredentials()).toBe(false);
// Should log warning about missing expiration
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should use allowExpired=false by default', () => {
// Spy on getCredentials to verify it's called with correct params
const getCredentialsSpy = vi.spyOn(store, 'getCredentials');
vi.mocked(fs.existsSync).mockReturnValue(false);
store.hasValidCredentials();
expect(getCredentialsSpy).toHaveBeenCalledWith({ allowExpired: false });
});
});
describe('cleanupCorruptFiles', () => {
it('should remove old corrupt files', () => {
const now = Date.now();
const oldFile = 'auth.json.corrupt-' + (now - 8 * 24 * 60 * 60 * 1000); // 8 days old
const newFile = 'auth.json.corrupt-' + (now - 1000); // 1 second old
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readdirSync).mockReturnValue([
{ name: oldFile, isFile: () => true },
{ name: newFile, isFile: () => true },
{ name: 'auth.json', isFile: () => true }
] as any);
vi.mocked(fs.statSync).mockImplementation((filePath) => {
if (filePath.includes(oldFile)) {
return { mtimeMs: now - 8 * 24 * 60 * 60 * 1000 } as any;
}
return { mtimeMs: now - 1000 } as any;
});
vi.mocked(fs.unlinkSync).mockImplementation(() => undefined);
store.cleanupCorruptFiles();
expect(fs.unlinkSync).toHaveBeenCalledWith(
expect.stringContaining(oldFile)
);
expect(fs.unlinkSync).not.toHaveBeenCalledWith(
expect.stringContaining(newFile)
);
});
it('should handle cleanup errors gracefully', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readdirSync).mockImplementation(() => {
throw new Error('Permission denied');
});
// Should not throw
expect(() => store.cleanupCorruptFiles()).not.toThrow();
expect(mockLogger.debug).toHaveBeenCalledWith(
expect.stringContaining('Error during corrupt file cleanup')
);
});
});
});

View File

@@ -0,0 +1,241 @@
/**
* Credential storage and management
*/
import fs from 'fs';
import path from 'path';
import { AuthCredentials, AuthenticationError, AuthConfig } from './types.js';
import { getAuthConfig } from './config.js';
import { getLogger } from '../logger/index.js';
/**
* CredentialStore manages the persistence and retrieval of authentication credentials.
*
* Runtime vs Persisted Shape:
* - When retrieved (getCredentials): expiresAt is normalized to number (milliseconds since epoch)
* - When persisted (saveCredentials): expiresAt is stored as ISO string for readability
*
* This normalization ensures consistent runtime behavior while keeping the
* persisted auth.json file human-readable.
*/
export class CredentialStore {
private logger = getLogger('CredentialStore');
private config: AuthConfig;
// Clock skew tolerance for expiry checks (30 seconds)
private readonly CLOCK_SKEW_MS = 30_000;
constructor(config?: Partial<AuthConfig>) {
this.config = getAuthConfig(config);
}
/**
* Get stored authentication credentials
* @returns AuthCredentials with expiresAt as number (milliseconds) for runtime use
*/
getCredentials(options?: { allowExpired?: boolean }): AuthCredentials | null {
try {
if (!fs.existsSync(this.config.configFile)) {
return null;
}
const authData = JSON.parse(
fs.readFileSync(this.config.configFile, 'utf-8')
) as AuthCredentials;
// Normalize/migrate timestamps to numeric (handles both number and ISO string)
let expiresAtMs: number | undefined;
if (typeof authData.expiresAt === 'number') {
expiresAtMs = Number.isFinite(authData.expiresAt)
? authData.expiresAt
: undefined;
} else if (typeof authData.expiresAt === 'string') {
const parsed = Date.parse(authData.expiresAt);
expiresAtMs = Number.isNaN(parsed) ? undefined : parsed;
} else {
expiresAtMs = undefined;
}
// Validate expiration time for tokens
if (expiresAtMs === undefined) {
this.logger.warn('No valid expiration time provided for token');
return null;
}
// Update the authData with normalized timestamp
authData.expiresAt = expiresAtMs;
// Check if the token has expired (with clock skew tolerance)
const now = Date.now();
const allowExpired = options?.allowExpired ?? false;
if (now >= expiresAtMs - this.CLOCK_SKEW_MS && !allowExpired) {
this.logger.warn(
'Authentication token has expired or is about to expire',
{
expiresAt: authData.expiresAt,
currentTime: new Date(now).toISOString(),
skewWindow: `${this.CLOCK_SKEW_MS / 1000}s`
}
);
return null;
}
// Return valid token
return authData;
} catch (error) {
this.logger.error(
`Failed to read auth credentials: ${(error as Error).message}`
);
// Quarantine corrupt file to prevent repeated errors
try {
if (fs.existsSync(this.config.configFile)) {
const corruptFile = `${this.config.configFile}.corrupt-${Date.now()}-${process.pid}-${Math.random().toString(36).slice(2, 8)}`;
fs.renameSync(this.config.configFile, corruptFile);
this.logger.warn(`Quarantined corrupt auth file to: ${corruptFile}`);
}
} catch (quarantineError) {
// If we can't quarantine, log but don't throw
this.logger.debug(
`Could not quarantine corrupt file: ${(quarantineError as Error).message}`
);
}
return null;
}
}
/**
* Save authentication credentials
* @param authData - Credentials with expiresAt as number or string (will be persisted as ISO string)
*/
saveCredentials(authData: AuthCredentials): void {
try {
// Ensure directory exists
if (!fs.existsSync(this.config.configDir)) {
fs.mkdirSync(this.config.configDir, { recursive: true, mode: 0o700 });
}
// Add timestamp without mutating caller's object
authData = { ...authData, savedAt: new Date().toISOString() };
// Validate and normalize expiresAt timestamp
if (authData.expiresAt !== undefined) {
let validTimestamp: number | undefined;
if (typeof authData.expiresAt === 'number') {
validTimestamp = Number.isFinite(authData.expiresAt)
? authData.expiresAt
: undefined;
} else if (typeof authData.expiresAt === 'string') {
const parsed = Date.parse(authData.expiresAt);
validTimestamp = Number.isNaN(parsed) ? undefined : parsed;
}
if (validTimestamp === undefined) {
throw new AuthenticationError(
`Invalid expiresAt format: ${authData.expiresAt}`,
'SAVE_FAILED'
);
}
// Store as ISO string for consistency
authData.expiresAt = new Date(validTimestamp).toISOString();
}
// Save credentials atomically with secure permissions
const tempFile = `${this.config.configFile}.tmp`;
fs.writeFileSync(tempFile, JSON.stringify(authData, null, 2), {
mode: 0o600
});
fs.renameSync(tempFile, this.config.configFile);
} catch (error) {
throw new AuthenticationError(
`Failed to save auth credentials: ${(error as Error).message}`,
'SAVE_FAILED',
error
);
}
}
/**
* Clear stored credentials
*/
clearCredentials(): void {
try {
if (fs.existsSync(this.config.configFile)) {
fs.unlinkSync(this.config.configFile);
}
} catch (error) {
throw new AuthenticationError(
`Failed to clear credentials: ${(error as Error).message}`,
'CLEAR_FAILED',
error
);
}
}
/**
* Check if credentials exist and are valid
*/
hasValidCredentials(): boolean {
const credentials = this.getCredentials({ allowExpired: false });
return credentials !== null;
}
/**
* Get configuration
*/
getConfig(): AuthConfig {
return { ...this.config };
}
/**
* Clean up old corrupt auth files
* Removes corrupt files older than the specified age
*/
cleanupCorruptFiles(maxAgeMs: number = 7 * 24 * 60 * 60 * 1000): void {
try {
const dir = path.dirname(this.config.configFile);
const baseName = path.basename(this.config.configFile);
const prefix = `${baseName}.corrupt-`;
if (!fs.existsSync(dir)) {
return;
}
const entries = fs.readdirSync(dir, { withFileTypes: true });
const now = Date.now();
for (const entry of entries) {
if (!entry.isFile()) continue;
const file = entry.name;
// Check if file matches pattern: baseName.corrupt-{timestamp}
if (!file.startsWith(prefix)) continue;
const suffix = file.slice(prefix.length);
if (!/^\d+$/.test(suffix)) continue; // constant pattern, not built from input
const filePath = path.join(dir, file);
try {
const stats = fs.statSync(filePath);
const age = now - stats.mtimeMs;
if (age > maxAgeMs) {
fs.unlinkSync(filePath);
this.logger.debug(`Cleaned up old corrupt file: ${file}`);
}
} catch (error) {
// Ignore errors for individual file cleanup
this.logger.debug(
`Could not clean up corrupt file ${file}: ${(error as Error).message}`
);
}
}
} catch (error) {
// Log but don't throw - this is a cleanup operation
this.logger.debug(
`Error during corrupt file cleanup: ${(error as Error).message}`
);
}
}
}
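
To make the runtime-versus-persisted shape concrete, here is a short sketch of the round trip; the expiry value is an example and the file path in the comment is the default:

import { CredentialStore } from '@tm/core/auth';

const store = new CredentialStore();

// expiresAt may be supplied as epoch milliseconds or an ISO string...
store.saveCredentials({
  token: 'access-token',
  userId: 'user-123',
  expiresAt: Date.now() + 3_600_000, // 1 hour from now
  savedAt: new Date().toISOString()
});
// ...but ~/.taskmaster/auth.json persists it as an ISO string.

// On read it is normalized back to a number, and a 30-second clock-skew
// window is applied before the token is treated as expired.
const creds = store.getCredentials();
console.log(creds ? typeof creds.expiresAt : 'expired or missing'); // 'number'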

View File

@@ -0,0 +1,21 @@
/**
* Authentication module exports
*/
export { AuthManager } from './auth-manager.js';
export { CredentialStore } from './credential-store.js';
export { OAuthService } from './oauth-service.js';
export type {
AuthCredentials,
OAuthFlowOptions,
AuthConfig,
CliData
} from './types.js';
export { AuthenticationError } from './types.js';
export {
DEFAULT_AUTH_CONFIG,
getAuthConfig
} from './config.js';

View File

@@ -0,0 +1,346 @@
/**
* OAuth 2.0 Authorization Code Flow service
*/
import http from 'http';
import { URL } from 'url';
import crypto from 'crypto';
import os from 'os';
import {
AuthCredentials,
AuthenticationError,
OAuthFlowOptions,
AuthConfig,
CliData
} from './types.js';
import { CredentialStore } from './credential-store.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
import { getAuthConfig } from './config.js';
import { getLogger } from '../logger/index.js';
import packageJson from '../../../../package.json' with { type: 'json' };
export class OAuthService {
private logger = getLogger('OAuthService');
private credentialStore: CredentialStore;
private supabaseClient: SupabaseAuthClient;
private baseUrl: string;
private authorizationUrl: string | null = null;
private originalState: string | null = null;
private authorizationReady: Promise<void> | null = null;
private resolveAuthorizationReady: (() => void) | null = null;
constructor(
credentialStore: CredentialStore,
config: Partial<AuthConfig> = {}
) {
this.credentialStore = credentialStore;
this.supabaseClient = new SupabaseAuthClient();
const authConfig = getAuthConfig(config);
this.baseUrl = authConfig.baseUrl;
}
/**
* Start OAuth 2.0 Authorization Code Flow with browser handling
*/
async authenticate(options: OAuthFlowOptions = {}): Promise<AuthCredentials> {
const {
openBrowser,
timeout = 300000, // 5 minutes default
onAuthUrl,
onWaitingForAuth,
onSuccess,
onError
} = options;
try {
// Start the OAuth flow (starts local server)
const authPromise = this.startFlow(timeout);
// Wait for server to be ready and URL to be generated
if (this.authorizationReady) {
await this.authorizationReady;
}
// Get the authorization URL
const authUrl = this.getAuthorizationUrl();
if (!authUrl) {
throw new AuthenticationError(
'Failed to generate authorization URL',
'URL_GENERATION_FAILED'
);
}
// Notify about the auth URL
if (onAuthUrl) {
onAuthUrl(authUrl);
}
// Open browser if callback provided
if (openBrowser) {
try {
await openBrowser(authUrl);
this.logger.debug('Browser opened successfully with URL:', authUrl);
} catch (error) {
// Log the error but don't throw - user can still manually open the URL
this.logger.warn('Failed to open browser automatically:', error);
}
}
// Notify that we're waiting for authentication
if (onWaitingForAuth) {
onWaitingForAuth();
}
// Wait for authentication to complete
const credentials = await authPromise;
// Notify success
if (onSuccess) {
onSuccess(credentials);
}
return credentials;
} catch (error) {
const authError =
error instanceof AuthenticationError
? error
: new AuthenticationError(
`OAuth authentication failed: ${(error as Error).message}`,
'OAUTH_FAILED',
error
);
// Notify error
if (onError) {
onError(authError);
}
throw authError;
}
}
/**
* Start the OAuth flow (internal implementation)
*/
private async startFlow(timeout: number = 300000): Promise<AuthCredentials> {
const state = this.generateState();
// Store the original state for verification
this.originalState = state;
// Create a promise that will resolve when the server is ready
this.authorizationReady = new Promise<void>((resolve) => {
this.resolveAuthorizationReady = resolve;
});
return new Promise((resolve, reject) => {
let timeoutId: NodeJS.Timeout;
// Create local HTTP server for OAuth callback
const server = http.createServer();
// Start server on localhost only, bind to port 0 for automatic port assignment
server.listen(0, '127.0.0.1', () => {
const address = server.address();
if (!address || typeof address === 'string') {
reject(new Error('Failed to get server address'));
return;
}
const port = address.port;
const callbackUrl = `http://localhost:${port}/callback`;
// Set up request handler after we know the port
server.on('request', async (req, res) => {
const url = new URL(req.url!, `http://127.0.0.1:${port}`);
if (url.pathname === '/callback') {
await this.handleCallback(
url,
res,
server,
resolve,
reject,
timeoutId
);
} else {
// Handle other paths (favicon, etc.)
res.writeHead(404);
res.end();
}
});
// Prepare CLI data object (server handles OAuth/PKCE)
const cliData: CliData = {
callback: callbackUrl,
state: state,
name: 'Task Master CLI',
version: this.getCliVersion(),
device: os.hostname(),
user: os.userInfo().username,
platform: os.platform(),
timestamp: Date.now()
};
// Build authorization URL for web app sign-in page
const authUrl = new URL(`${this.baseUrl}/auth/sign-in`);
// Encode CLI data as base64
const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
'base64'
);
// Set the single CLI parameter with all encoded data
authUrl.searchParams.append('cli', cliParam);
// Store auth URL for browser opening
this.authorizationUrl = authUrl.toString();
this.logger.info(
`OAuth session started - ${cliData.name} v${cliData.version} on port ${port}`
);
this.logger.debug('CLI data:', cliData);
// Signal that the server is ready and URL is available
if (this.resolveAuthorizationReady) {
this.resolveAuthorizationReady();
this.resolveAuthorizationReady = null;
}
});
// Set timeout for authentication
timeoutId = setTimeout(() => {
if (server.listening) {
server.close();
// Clean up the readiness promise if still pending
if (this.resolveAuthorizationReady) {
this.resolveAuthorizationReady();
this.resolveAuthorizationReady = null;
}
reject(
new AuthenticationError('Authentication timeout', 'AUTH_TIMEOUT')
);
}
}, timeout);
});
}
/**
* Handle OAuth callback
*/
private async handleCallback(
url: URL,
res: http.ServerResponse,
server: http.Server,
resolve: (value: AuthCredentials) => void,
reject: (error: any) => void,
timeoutId?: NodeJS.Timeout
): Promise<void> {
// Server now returns tokens directly instead of code
const type = url.searchParams.get('type');
const returnedState = url.searchParams.get('state');
const accessToken = url.searchParams.get('access_token');
const refreshToken = url.searchParams.get('refresh_token');
const expiresIn = url.searchParams.get('expires_in');
const error = url.searchParams.get('error');
const errorDescription = url.searchParams.get('error_description');
// The web app displays success/failure to the user; just acknowledge and close the connection here
res.writeHead(200);
res.end();
if (error) {
if (server.listening) {
server.close();
}
reject(
new AuthenticationError(
errorDescription || error || 'Authentication failed',
'OAUTH_ERROR'
)
);
return;
}
// Verify state parameter for CSRF protection
if (returnedState !== this.originalState) {
if (server.listening) {
server.close();
}
reject(
new AuthenticationError('Invalid state parameter', 'INVALID_STATE')
);
return;
}
// Handle direct token response from server
if (
accessToken &&
(type === 'oauth_success' || type === 'session_transfer')
) {
try {
this.logger.info(`Received tokens via ${type}`);
// Get user info using the access token if possible
const user = await this.supabaseClient.getUser(accessToken);
// Calculate expiration time
const expiresAt = expiresIn
? new Date(Date.now() + parseInt(expiresIn, 10) * 1000).toISOString()
: undefined;
// Save authentication data
const authData: AuthCredentials = {
token: accessToken,
refreshToken: refreshToken || undefined,
userId: user?.id || 'unknown',
email: user?.email,
expiresAt: expiresAt,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
this.credentialStore.saveCredentials(authData);
if (server.listening) {
server.close();
}
// Clear timeout since authentication succeeded
if (timeoutId) {
clearTimeout(timeoutId);
}
resolve(authData);
} catch (error) {
if (server.listening) {
server.close();
}
reject(error);
}
} else {
if (server.listening) {
server.close();
}
reject(new AuthenticationError('No access token received', 'NO_TOKEN'));
}
}
/**
* Generate state for OAuth flow
*/
private generateState(): string {
return crypto.randomBytes(32).toString('base64url');
}
/**
* Get CLI version from package.json if available
*/
private getCliVersion(): string {
return packageJson.version || 'unknown';
}
/**
* Get the authorization URL (for browser opening)
*/
getAuthorizationUrl(): string | null {
return this.authorizationUrl;
}
}
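
For orientation, a sketch of the hand-off: the CLI packs CliData into a single base64-encoded `cli` query parameter on the web app's sign-in URL, and the web app is expected to redirect back to the local callback with the tokens, the original state, and type=oauth_success (or session_transfer). The web-side decode below is hypothetical and lives outside this package:

import type { CliData } from '@tm/core/auth';

// CLI side (mirrors startFlow above): encode CliData into the sign-in URL.
const cliData: CliData = {
  callback: 'http://localhost:53682/callback', // example; the real port is assigned by the OS
  state: 'random-base64url-state',
  name: 'Task Master CLI',
  version: '1.0.0'
};
const cliParam = Buffer.from(JSON.stringify(cliData)).toString('base64');
const signInUrl = new URL('https://tryhamster.com/auth/sign-in');
signInUrl.searchParams.append('cli', cliParam);

// Web side (hypothetical): decode the parameter, sign the user in, then redirect to
// `${decoded.callback}?type=oauth_success&state=...&access_token=...&refresh_token=...&expires_in=...`
const decoded: CliData = JSON.parse(
  Buffer.from(cliParam, 'base64').toString('utf-8')
);
console.log(decoded.state === cliData.state); // true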

View File

@@ -0,0 +1,87 @@
/**
* Authentication types and interfaces
*/
export interface AuthCredentials {
token: string;
refreshToken?: string;
userId: string;
email?: string;
expiresAt?: string | number;
tokenType?: 'standard';
savedAt: string;
}
export interface OAuthFlowOptions {
/** Callback to open the browser with the auth URL. If not provided, the browser will not be opened */
openBrowser?: (url: string) => Promise<void>;
/** Timeout for the OAuth flow in milliseconds. Default: 300000 (5 minutes) */
timeout?: number;
/** Callback to be invoked with the authorization URL */
onAuthUrl?: (url: string) => void;
/** Callback to be invoked when waiting for authentication */
onWaitingForAuth?: () => void;
/** Callback to be invoked on successful authentication */
onSuccess?: (credentials: AuthCredentials) => void;
/** Callback to be invoked on authentication error */
onError?: (error: AuthenticationError) => void;
}
export interface AuthConfig {
baseUrl: string;
configDir: string;
configFile: string;
}
export interface CliData {
callback: string;
state: string;
name: string;
version: string;
device?: string;
user?: string;
platform?: string;
timestamp?: number;
}
/**
* Authentication error codes
*/
export type AuthErrorCode =
| 'AUTH_TIMEOUT'
| 'AUTH_EXPIRED'
| 'OAUTH_FAILED'
| 'OAUTH_ERROR'
| 'OAUTH_CANCELED'
| 'URL_GENERATION_FAILED'
| 'INVALID_STATE'
| 'NO_TOKEN'
| 'TOKEN_EXCHANGE_FAILED'
| 'INVALID_CREDENTIALS'
| 'NO_REFRESH_TOKEN'
| 'NOT_AUTHENTICATED'
| 'NETWORK_ERROR'
| 'CONFIG_MISSING'
| 'SAVE_FAILED'
| 'CLEAR_FAILED'
| 'STORAGE_ERROR'
| 'NOT_SUPPORTED'
| 'REFRESH_FAILED'
| 'INVALID_RESPONSE';
/**
* Authentication error class
*/
export class AuthenticationError extends Error {
constructor(
message: string,
public code: AuthErrorCode,
public cause?: unknown
) {
super(message);
this.name = 'AuthenticationError';
if (cause && cause instanceof Error) {
this.stack = `${this.stack}\nCaused by: ${cause.stack}`;
}
}
}
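
A small sketch of how a caller might branch on AuthErrorCode for user-facing messages; the wording is illustrative:

import { AuthenticationError } from '@tm/core/auth';

function describeAuthError(error: unknown): string {
  if (!(error instanceof AuthenticationError)) {
    return 'Unexpected error';
  }
  switch (error.code) {
    case 'AUTH_TIMEOUT':
      return 'Timed out waiting for the browser sign-in to complete';
    case 'NO_REFRESH_TOKEN':
      return 'No refresh token stored - please log in again';
    case 'CONFIG_MISSING':
      return 'Supabase environment variables are not set';
    default:
      return `${error.code}: ${error.message}`;
  }
}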

View File

@@ -0,0 +1,5 @@
/**
* Client exports
*/
export { SupabaseAuthClient } from './supabase-client.js';

View File

@@ -0,0 +1,154 @@
/**
* Supabase client for authentication
*/
import { createClient, SupabaseClient, User } from '@supabase/supabase-js';
import { AuthenticationError } from '../auth/types.js';
import { getLogger } from '../logger/index.js';
export class SupabaseAuthClient {
private client: SupabaseClient | null = null;
private logger = getLogger('SupabaseAuthClient');
/**
* Initialize Supabase client
*/
private getClient(): SupabaseClient {
if (!this.client) {
// Get Supabase configuration from environment - using TM_PUBLIC prefix
const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
const supabaseAnonKey = process.env.TM_PUBLIC_SUPABASE_ANON_KEY;
if (!supabaseUrl || !supabaseAnonKey) {
throw new AuthenticationError(
'Supabase configuration missing. Please set TM_PUBLIC_SUPABASE_URL and TM_PUBLIC_SUPABASE_ANON_KEY environment variables.',
'CONFIG_MISSING'
);
}
this.client = createClient(supabaseUrl, supabaseAnonKey, {
auth: {
autoRefreshToken: true,
persistSession: false, // We handle persistence ourselves
detectSessionInUrl: false
}
});
}
return this.client;
}
/**
* Note: Code exchange is now handled server-side
* The server returns tokens directly to avoid PKCE issues
* This method is kept for potential future use
*/
async exchangeCodeForSession(_code: string): Promise<{
token: string;
refreshToken?: string;
userId: string;
email?: string;
expiresAt?: string;
}> {
throw new AuthenticationError(
'Code exchange is handled server-side. CLI receives tokens directly.',
'NOT_SUPPORTED'
);
}
/**
* Refresh an access token
*/
async refreshSession(refreshToken: string): Promise<{
token: string;
refreshToken?: string;
expiresAt?: string;
}> {
try {
const client = this.getClient();
this.logger.info('Refreshing session...');
// Set the session with refresh token
const { data, error } = await client.auth.refreshSession({
refresh_token: refreshToken
});
if (error) {
this.logger.error('Failed to refresh session:', error);
throw new AuthenticationError(
`Failed to refresh session: ${error.message}`,
'REFRESH_FAILED'
);
}
if (!data.session) {
throw new AuthenticationError(
'No session data returned',
'INVALID_RESPONSE'
);
}
this.logger.info('Successfully refreshed session');
return {
token: data.session.access_token,
refreshToken: data.session.refresh_token,
expiresAt: data.session.expires_at
? new Date(data.session.expires_at * 1000).toISOString()
: undefined
};
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Failed to refresh session: ${(error as Error).message}`,
'REFRESH_FAILED'
);
}
}
/**
* Get user details from token
*/
async getUser(token: string): Promise<User | null> {
try {
const client = this.getClient();
// Get user with the token
const { data, error } = await client.auth.getUser(token);
if (error) {
this.logger.warn('Failed to get user:', error);
return null;
}
return data.user;
} catch (error) {
this.logger.error('Error getting user:', error);
return null;
}
}
/**
* Sign out (revoke tokens)
* Note: This requires the user to be authenticated with the current session.
* For remote token revocation, a server-side admin API with a service_role key would be needed.
*/
async signOut(): Promise<void> {
try {
const client = this.getClient();
// Sign out the current session with global scope to revoke all refresh tokens
const { error } = await client.auth.signOut({ scope: 'global' });
if (error) {
this.logger.warn('Failed to sign out:', error);
}
} catch (error) {
this.logger.error('Error during sign out:', error);
}
}
}
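
A hedged usage sketch: the client is internal to tm-core (it is not part of the public subpath exports) and requires TM_PUBLIC_SUPABASE_URL and TM_PUBLIC_SUPABASE_ANON_KEY to be set, either at runtime or baked in at build time. The values and refresh token below are placeholders:

import { SupabaseAuthClient } from '../clients/index.js';

process.env.TM_PUBLIC_SUPABASE_URL ??= 'https://your-project.supabase.co';
process.env.TM_PUBLIC_SUPABASE_ANON_KEY ??= 'public-anon-key';

const client = new SupabaseAuthClient();
const refreshed = await client.refreshSession('stored-refresh-token');
// refreshed.token      -> new access token
// refreshed.expiresAt  -> ISO string derived from Supabase's epoch-second expires_at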

View File

@@ -177,7 +177,7 @@ describe('ConfigManager', () => {
it('should return storage configuration', () => {
const storage = manager.getStorageConfig();
expect(storage).toEqual({ type: 'file' });
expect(storage).toEqual({ type: 'auto', apiConfigured: false });
});
it('should return API storage configuration when configured', async () => {
@@ -206,7 +206,65 @@ describe('ConfigManager', () => {
expect(storage).toEqual({
type: 'api',
apiEndpoint: 'https://api.example.com',
apiAccessToken: 'token123'
apiAccessToken: 'token123',
apiConfigured: true
});
});
it('should return auto storage configuration with apiConfigured flag', async () => {
// Create a new instance with auto storage config and partial API settings
vi.mocked(ConfigMerger).mockImplementationOnce(
() =>
({
addSource: vi.fn(),
clearSources: vi.fn(),
merge: vi.fn().mockReturnValue({
storage: {
type: 'auto',
apiEndpoint: 'https://api.example.com'
// No apiAccessToken - partial config
}
}),
getSources: vi.fn().mockReturnValue([])
}) as any
);
const autoManager = await ConfigManager.create(testProjectRoot);
const storage = autoManager.getStorageConfig();
expect(storage).toEqual({
type: 'auto',
apiEndpoint: 'https://api.example.com',
apiAccessToken: undefined,
apiConfigured: true // true because apiEndpoint is provided
});
});
it('should return auto storage with apiConfigured false when no API settings', async () => {
// Create a new instance with auto storage but no API settings
vi.mocked(ConfigMerger).mockImplementationOnce(
() =>
({
addSource: vi.fn(),
clearSources: vi.fn(),
merge: vi.fn().mockReturnValue({
storage: {
type: 'auto'
// No API settings at all
}
}),
getSources: vi.fn().mockReturnValue([])
}) as any
);
const autoManager = await ConfigManager.create(testProjectRoot);
const storage = autoManager.getStorageConfig();
expect(storage).toEqual({
type: 'auto',
apiEndpoint: undefined,
apiAccessToken: undefined,
apiConfigured: false // false because no API settings
});
});
@@ -251,11 +309,11 @@ describe('ConfigManager', () => {
expect(manager.getProjectRoot()).toBe(testProjectRoot);
});
it('should check if using API storage', () => {
expect(manager.isUsingApiStorage()).toBe(false);
it('should check if API is explicitly configured', () => {
expect(manager.isApiExplicitlyConfigured()).toBe(false);
});
it('should detect API storage', () => {
it('should detect when API is explicitly configured', () => {
// Update config for current instance
(manager as any).config = {
storage: {
@@ -265,7 +323,7 @@ describe('ConfigManager', () => {
}
};
expect(manager.isUsingApiStorage()).toBe(true);
expect(manager.isApiExplicitlyConfigured()).toBe(true);
});
});

View File

@@ -6,7 +6,10 @@
* maintainability, testability, and separation of concerns.
*/
import type { PartialConfiguration } from '../interfaces/configuration.interface.js';
import type {
PartialConfiguration,
RuntimeStorageConfig
} from '../interfaces/configuration.interface.js';
import { ConfigLoader } from './services/config-loader.service.js';
import {
ConfigMerger,
@@ -134,26 +137,28 @@ export class ConfigManager {
/**
* Get storage configuration
*/
getStorageConfig(): {
type: 'file' | 'api';
apiEndpoint?: string;
apiAccessToken?: string;
} {
getStorageConfig(): RuntimeStorageConfig {
const storage = this.config.storage;
if (
storage?.type === 'api' &&
storage.apiEndpoint &&
storage.apiAccessToken
) {
// Return the configured type (including 'auto')
const storageType = storage?.type || 'auto';
const basePath = storage?.basePath ?? this.projectRoot;
if (storageType === 'api' || storageType === 'auto') {
return {
type: 'api',
apiEndpoint: storage.apiEndpoint,
apiAccessToken: storage.apiAccessToken
type: storageType,
basePath,
apiEndpoint: storage?.apiEndpoint,
apiAccessToken: storage?.apiAccessToken,
apiConfigured: Boolean(storage?.apiEndpoint || storage?.apiAccessToken)
};
}
return { type: 'file' };
return {
type: storageType,
basePath,
apiConfigured: false
};
}
/**
@@ -184,9 +189,10 @@ export class ConfigManager {
}
/**
* Check if using API storage
* Check if explicitly configured to use API storage
* Excludes 'auto' type
*/
isUsingApiStorage(): boolean {
isApiExplicitlyConfigured(): boolean {
return this.getStorageConfig().type === 'api';
}
@@ -219,6 +225,7 @@ export class ConfigManager {
await this.persistence.saveConfig(this.config);
// Re-initialize to respect precedence
this.initialized = false;
await this.initialize();
}
@@ -269,12 +276,4 @@ export class ConfigManager {
getConfigSources() {
return this.merger.getSources();
}
/**
* Watch for configuration changes (placeholder for future)
*/
watch(_callback: (config: PartialConfiguration) => void): () => void {
console.warn('Configuration watching not yet implemented');
return () => {}; // Return no-op unsubscribe function
}
}
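
With 'auto' now the default storage type, a caller such as a storage factory has to resolve it at runtime. A hedged sketch of one plausible rule, treating 'auto' as API when API settings are present or the user is authenticated; this is an assumption for illustration, not necessarily what StorageFactory implements:

import type { RuntimeStorageConfig } from '@tm/core/interfaces';

function resolveStorageBackend(
  config: RuntimeStorageConfig,
  isAuthenticated: boolean
): 'file' | 'api' {
  if (config.type === 'api') return 'api';
  if (config.type === 'file') return 'file';
  // 'auto': prefer API storage when it is configured or the user is logged in
  return config.apiConfigured || isAuthenticated ? 'api' : 'file';
}

// e.g. resolveStorageBackend(configManager.getStorageConfig(), authManager.isAuthenticated())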

View File

@@ -85,6 +85,11 @@ describe('EnvironmentConfigProvider', () => {
provider = new EnvironmentConfigProvider(); // Reset provider
config = provider.loadConfig();
expect(config.storage?.type).toBe('api');
process.env.TASKMASTER_STORAGE_TYPE = 'auto';
provider = new EnvironmentConfigProvider(); // Reset provider
config = provider.loadConfig();
expect(config.storage?.type).toBe('auto');
});
it('should handle nested configuration paths', () => {

View File

@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
{
env: 'TASKMASTER_STORAGE_TYPE',
path: ['storage', 'type'],
validate: (v: string) => ['file', 'api'].includes(v)
validate: (v: string) => ['file', 'api', 'auto'].includes(v)
},
{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },
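
As the updated test above shows, 'auto' is now an accepted value from the environment. A short sketch of the override path, assuming EnvironmentConfigProvider is imported from its module; the endpoint is a placeholder:

process.env.TASKMASTER_STORAGE_TYPE = 'auto'; // accepted: 'file' | 'api' | 'auto'
process.env.TASKMASTER_API_ENDPOINT = 'https://api.example.com';

const provider = new EnvironmentConfigProvider();
const config = provider.loadConfig();
// config.storage?.type === 'auto'
// config.storage?.apiEndpoint === 'https://api.example.com'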

View File

@@ -9,19 +9,19 @@ export {
createTaskMasterCore,
type TaskMasterCoreOptions,
type ListTasksResult
} from './task-master-core';
} from './task-master-core.js';
// Re-export types
export type * from './types';
export type * from './types/index.js';
// Re-export interfaces (types only to avoid conflicts)
export type * from './interfaces';
export type * from './interfaces/index.js';
// Re-export constants
export * from './constants';
export * from './constants/index.js';
// Re-export providers
export * from './providers';
export * from './providers/index.js';
// Re-export storage (selectively to avoid conflicts)
export {
@@ -29,17 +29,33 @@ export {
ApiStorage,
StorageFactory,
type ApiStorageConfig
} from './storage';
export { PlaceholderStorage, type StorageAdapter } from './storage';
} from './storage/index.js';
export { PlaceholderStorage, type StorageAdapter } from './storage/index.js';
// Re-export parser
export * from './parser';
export * from './parser/index.js';
// Re-export utilities
export * from './utils';
export * from './utils/index.js';
// Re-export errors
export * from './errors';
export * from './errors/index.js';
// Re-export entities
export { TaskEntity } from './entities/task.entity';
export { TaskEntity } from './entities/task.entity.js';
// Re-export authentication
export {
AuthManager,
AuthenticationError,
type AuthCredentials,
type OAuthFlowOptions,
type AuthConfig
} from './auth/index.js';
// Re-export logger
export { getLogger, createLogger, setGlobalLogger } from './logger/index.js';
// Re-export workflow
export { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
export type * from './workflow/index.js';

View File

@@ -3,7 +3,7 @@
* This file defines the contract for configuration management
*/
import type { TaskComplexity, TaskPriority } from '../types/index';
import type { TaskComplexity, TaskPriority } from '../types/index.js';
/**
* Model configuration for different AI roles
@@ -74,17 +74,48 @@ export interface TagSettings {
}
/**
* Storage and persistence settings
* Storage type options
* - 'file': Local file system storage
* - 'api': Remote API storage (Hamster integration)
* - 'auto': Automatically detect based on auth status
*/
export interface StorageSettings {
export type StorageType = 'file' | 'api' | 'auto';
/**
* Runtime storage configuration used for storage backend selection
* This is what getStorageConfig() returns and what StorageFactory expects
*/
export interface RuntimeStorageConfig {
/** Storage backend type */
type: 'file' | 'api';
/** Base path for file storage */
type: StorageType;
/** Base path for file storage (if configured) */
basePath?: string;
/** API endpoint for API storage (Hamster integration) */
apiEndpoint?: string;
/** Access token for API authentication */
apiAccessToken?: string;
/**
* Indicates whether API is configured (has endpoint or token)
* @computed Derived automatically from presence of apiEndpoint or apiAccessToken
* @internal Should not be set manually - computed by ConfigManager
*/
readonly apiConfigured: boolean;
}
/**
* Storage and persistence settings
* Extended storage settings including file operation preferences
*/
export interface StorageSettings
extends Omit<RuntimeStorageConfig, 'apiConfigured'> {
/** Base path for file storage */
basePath?: string;
/**
* Indicates whether API is configured
* @computed Derived automatically from presence of apiEndpoint or apiAccessToken
* @internal Should not be set manually in user config - computed by ConfigManager
*/
readonly apiConfigured?: boolean;
/** Enable automatic backups */
enableBackup: boolean;
/** Maximum number of backups to retain */
@@ -388,7 +419,7 @@ export const DEFAULT_CONFIG_VALUES = {
NAMING_CONVENTION: 'kebab-case' as const
},
STORAGE: {
TYPE: 'file' as const,
TYPE: 'auto' as const,
ENCODING: 'utf8' as BufferEncoding,
MAX_BACKUPS: 5
},

View File

@@ -4,13 +4,13 @@
*/
// Storage interfaces
export type * from './storage.interface';
export * from './storage.interface';
export type * from './storage.interface.js';
export * from './storage.interface.js';
// AI Provider interfaces
export type * from './ai-provider.interface';
export * from './ai-provider.interface';
export type * from './ai-provider.interface.js';
export * from './ai-provider.interface.js';
// Configuration interfaces
export type * from './configuration.interface';
export * from './configuration.interface';
export type * from './configuration.interface.js';
export * from './configuration.interface.js';

View File

@@ -3,7 +3,7 @@
* This file defines the contract for all storage implementations
*/
import type { Task, TaskMetadata } from '../types/index';
import type { Task, TaskMetadata } from '../types/index.js';
/**
* Interface for storage operations on tasks

View File

@@ -0,0 +1,59 @@
/**
* @fileoverview Logger factory and singleton management
*/
import { Logger, type LoggerConfig } from './logger.js';
// Global logger instance
let globalLogger: Logger | null = null;
// Named logger instances
const loggers = new Map<string, Logger>();
/**
* Create a new logger instance
*/
export function createLogger(config?: LoggerConfig): Logger {
return new Logger(config);
}
/**
* Get or create a named logger instance
*/
export function getLogger(name?: string, config?: LoggerConfig): Logger {
// If no name provided, return global logger
if (!name) {
if (!globalLogger) {
globalLogger = createLogger(config);
}
return globalLogger;
}
// Check if named logger exists
if (!loggers.has(name)) {
loggers.set(
name,
createLogger({
prefix: name,
...config
})
);
}
return loggers.get(name)!;
}
/**
* Set the global logger instance
*/
export function setGlobalLogger(logger: Logger): void {
globalLogger = logger;
}
/**
* Clear all logger instances (useful for testing)
*/
export function clearLoggers(): void {
globalLogger = null;
loggers.clear();
}

View File

@@ -0,0 +1,8 @@
/**
* @fileoverview Logger package for Task Master
* Provides centralized logging with support for different modes and levels
*/
export { Logger, LogLevel } from './logger.js';
export type { LoggerConfig } from './logger.js';
export { createLogger, getLogger, setGlobalLogger } from './factory.js';

View File

@@ -0,0 +1,242 @@
/**
* @fileoverview Core logger implementation
*/
import chalk from 'chalk';
export enum LogLevel {
SILENT = 0,
ERROR = 1,
WARN = 2,
INFO = 3,
DEBUG = 4
}
export interface LoggerConfig {
level?: LogLevel;
silent?: boolean;
prefix?: string;
timestamp?: boolean;
colors?: boolean;
// MCP mode silences all output
mcpMode?: boolean;
}
export class Logger {
private config: Required<LoggerConfig>;
private static readonly DEFAULT_CONFIG: Required<LoggerConfig> = {
level: LogLevel.WARN,
silent: false,
prefix: '',
timestamp: false,
colors: true,
mcpMode: false
};
constructor(config: LoggerConfig = {}) {
// Check environment variables
const envConfig: LoggerConfig = {};
// Check for MCP mode
if (
process.env.MCP_MODE === 'true' ||
process.env.TASK_MASTER_MCP === 'true'
) {
envConfig.mcpMode = true;
}
// Check for silent mode
if (
process.env.TASK_MASTER_SILENT === 'true' ||
process.env.TM_SILENT === 'true'
) {
envConfig.silent = true;
}
// Check for log level
if (process.env.TASK_MASTER_LOG_LEVEL || process.env.TM_LOG_LEVEL) {
const levelStr = (
process.env.TASK_MASTER_LOG_LEVEL ||
process.env.TM_LOG_LEVEL ||
''
).toUpperCase();
if (levelStr in LogLevel) {
envConfig.level = LogLevel[levelStr as keyof typeof LogLevel];
}
}
// Check for no colors
if (
process.env.NO_COLOR === 'true' ||
process.env.TASK_MASTER_NO_COLOR === 'true'
) {
envConfig.colors = false;
}
// Merge configs: defaults < constructor < environment
this.config = {
...Logger.DEFAULT_CONFIG,
...config,
...envConfig
};
// MCP mode overrides everything to be silent
if (this.config.mcpMode) {
this.config.silent = true;
}
}
/**
* Check if logging is enabled for a given level
*/
private shouldLog(level: LogLevel): boolean {
if (this.config.silent || this.config.mcpMode) {
return false;
}
return level <= this.config.level;
}
/**
* Format a log message
*/
private formatMessage(
level: LogLevel,
message: string,
...args: any[]
): string {
let formatted = '';
// Add timestamp if enabled
if (this.config.timestamp) {
const timestamp = new Date().toISOString();
formatted += this.config.colors
? chalk.gray(`[${timestamp}] `)
: `[${timestamp}] `;
}
// Add prefix if configured
if (this.config.prefix) {
formatted += this.config.colors
? chalk.cyan(`[${this.config.prefix}] `)
: `[${this.config.prefix}] `;
}
// Skip level indicator for cleaner output
// We can still color the message based on level
if (this.config.colors) {
switch (level) {
case LogLevel.ERROR:
message = chalk.red(message);
break;
case LogLevel.WARN:
message = chalk.yellow(message);
break;
case LogLevel.INFO:
// Info stays default color
break;
case LogLevel.DEBUG:
message = chalk.gray(message);
break;
}
}
// Add the message
formatted += message;
// Add any additional arguments
if (args.length > 0) {
formatted +=
' ' +
args
.map((arg) =>
typeof arg === 'object' ? JSON.stringify(arg, null, 2) : String(arg)
)
.join(' ');
}
return formatted;
}
/**
* Log an error message
*/
error(message: string, ...args: any[]): void {
if (!this.shouldLog(LogLevel.ERROR)) return;
console.error(this.formatMessage(LogLevel.ERROR, message, ...args));
}
/**
* Log a warning message
*/
warn(message: string, ...args: any[]): void {
if (!this.shouldLog(LogLevel.WARN)) return;
console.warn(this.formatMessage(LogLevel.WARN, message, ...args));
}
/**
* Log an info message
*/
info(message: string, ...args: any[]): void {
if (!this.shouldLog(LogLevel.INFO)) return;
console.log(this.formatMessage(LogLevel.INFO, message, ...args));
}
/**
* Log a debug message
*/
debug(message: string, ...args: any[]): void {
if (!this.shouldLog(LogLevel.DEBUG)) return;
console.log(this.formatMessage(LogLevel.DEBUG, message, ...args));
}
/**
* Log a message without any formatting (raw output)
* Useful for CLI output that should appear as-is
*/
log(message: string, ...args: any[]): void {
if (this.config.silent || this.config.mcpMode) return;
if (args.length > 0) {
console.log(message, ...args);
} else {
console.log(message);
}
}
/**
* Update logger configuration
*/
setConfig(config: Partial<LoggerConfig>): void {
this.config = {
...this.config,
...config
};
// MCP mode always overrides to silent
if (this.config.mcpMode) {
this.config.silent = true;
}
}
/**
* Get current configuration
*/
getConfig(): Readonly<Required<LoggerConfig>> {
return { ...this.config };
}
/**
* Create a child logger with a prefix
*/
child(prefix: string, config?: Partial<LoggerConfig>): Logger {
const childPrefix = this.config.prefix
? `${this.config.prefix}:${prefix}`
: prefix;
return new Logger({
...this.config,
...config,
prefix: childPrefix
});
}
}
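A short usage sketch for the logger above. The `@tm/core/logger` subpath matches the tsup entry and subpath-export tests added in this PR; behaviour follows the class as written.

```typescript
import { createLogger, getLogger, LogLevel } from '@tm/core/logger';

// Named loggers are cached by the factory and prefixed in output: [StorageFactory] ...
const log = getLogger('StorageFactory');
log.warn('Falling back to file storage');

// Explicit config; env vars (TASK_MASTER_LOG_LEVEL, MCP_MODE, ...) still take precedence.
const verbose = createLogger({ level: LogLevel.DEBUG, timestamp: true });
verbose.debug('Resolved storage config', { type: 'auto' });

// Child loggers chain prefixes: [auth:oauth] ...
const authLog = getLogger('auth');
const oauthLog = authLog.child('oauth');
oauthLog.info('Opening browser for login');
```

Note that `MCP_MODE=true` (or `TASK_MASTER_MCP=true`) silences all of this output, which keeps the MCP transport clean.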

View File

@@ -3,7 +3,7 @@
* This file exports all parsing-related classes and functions
*/
import type { PlaceholderTask } from '../types/index';
import type { PlaceholderTask } from '../types/index.js';
// Parser implementations will be defined here
// export * from './prd-parser.js';

View File

@@ -0,0 +1,6 @@
/**
* Services module exports
* Provides business logic and service layer functionality
*/
export { TaskService } from './task-service.js';

View File

@@ -22,8 +22,8 @@ export interface TaskListResult {
filtered: number;
/** The tag these tasks belong to (only present if explicitly provided) */
tag?: string;
/** Storage type being used */
storageType: 'file' | 'api';
/** Storage type being used - includes 'auto' for automatic detection */
storageType: 'file' | 'api' | 'auto';
}
/**
@@ -64,8 +64,8 @@ export class TaskService {
const storageConfig = this.configManager.getStorageConfig();
const projectRoot = this.configManager.getProjectRoot();
this.storage = StorageFactory.create(
{ storage: storageConfig } as any,
this.storage = StorageFactory.createFromStorageConfig(
storageConfig,
projectRoot
);
@@ -166,7 +166,7 @@ export class TaskService {
byStatus: Record<TaskStatus, number>;
withSubtasks: number;
blocked: number;
storageType: 'file' | 'api';
storageType: 'file' | 'api' | 'auto';
}> {
const result = await this.getTaskList({
tag,
@@ -334,7 +334,7 @@ export class TaskService {
/**
* Get current storage type
*/
getStorageType(): 'file' | 'api' {
getStorageType(): 'file' | 'api' | 'auto' {
return this.configManager.getStorageConfig().type;
}

View File

@@ -3,15 +3,40 @@
*/
import type { IStorage } from '../interfaces/storage.interface.js';
import type { IConfiguration } from '../interfaces/configuration.interface.js';
import { FileStorage } from './file-storage';
import type {
IConfiguration,
RuntimeStorageConfig,
StorageSettings
} from '../interfaces/configuration.interface.js';
import { FileStorage } from './file-storage/index.js';
import { ApiStorage } from './api-storage.js';
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
import { AuthManager } from '../auth/auth-manager.js';
import { getLogger } from '../logger/index.js';
/**
* Factory for creating storage implementations based on configuration
*/
export class StorageFactory {
/**
* Create a storage implementation from runtime storage config
* This is the preferred method when you have a RuntimeStorageConfig
* @param storageConfig - Runtime storage configuration
* @param projectPath - Project root path (for file storage)
* @returns Storage implementation
*/
static createFromStorageConfig(
storageConfig: RuntimeStorageConfig,
projectPath: string
): IStorage {
// Wrap the storage config in the expected format, including projectPath
// This ensures ApiStorage receives the projectPath for projectId
return StorageFactory.create(
{ storage: storageConfig, projectPath } as Partial<IConfiguration>,
projectPath
);
}
/**
* Create a storage implementation based on configuration
* @param config - Configuration object
@@ -22,15 +47,83 @@ export class StorageFactory {
config: Partial<IConfiguration>,
projectPath: string
): IStorage {
const storageType = config.storage?.type || 'file';
const storageType = config.storage?.type || 'auto';
const logger = getLogger('StorageFactory');
switch (storageType) {
case 'file':
logger.debug('📁 Using local file storage');
return StorageFactory.createFileStorage(projectPath, config);
case 'api':
if (!StorageFactory.isHamsterAvailable(config)) {
const missing: string[] = [];
if (!config.storage?.apiEndpoint) missing.push('apiEndpoint');
if (!config.storage?.apiAccessToken) missing.push('apiAccessToken');
// Check if authenticated via AuthManager
const authManager = AuthManager.getInstance();
if (!authManager.isAuthenticated()) {
throw new TaskMasterError(
`API storage not fully configured (${missing.join(', ') || 'credentials missing'}). Run: tm auth login, or set the missing field(s).`,
ERROR_CODES.MISSING_CONFIGURATION,
{ storageType: 'api', missing }
);
}
// Use auth token from AuthManager
const credentials = authManager.getCredentials();
if (credentials) {
// Merge with existing storage config, ensuring required fields
const nextStorage: StorageSettings = {
...(config.storage as StorageSettings),
type: 'api',
apiAccessToken: credentials.token,
apiEndpoint:
config.storage?.apiEndpoint ||
process.env.HAMSTER_API_URL ||
'https://tryhamster.com/api'
};
config.storage = nextStorage;
}
}
logger.info('☁️ Using API storage');
return StorageFactory.createApiStorage(config);
case 'auto':
// Auto-detect based on authentication status
const authManager = AuthManager.getInstance();
// First check if API credentials are explicitly configured
if (StorageFactory.isHamsterAvailable(config)) {
logger.info('☁️ Using API storage (configured)');
return StorageFactory.createApiStorage(config);
}
// Then check if authenticated via AuthManager
if (authManager.isAuthenticated()) {
const credentials = authManager.getCredentials();
if (credentials) {
// Configure API storage with auth credentials
const nextStorage: StorageSettings = {
...(config.storage as StorageSettings),
type: 'api',
apiAccessToken: credentials.token,
apiEndpoint:
config.storage?.apiEndpoint ||
process.env.HAMSTER_API_URL ||
'https://tryhamster.com/api'
};
config.storage = nextStorage;
logger.info('☁️ Using API storage (authenticated)');
return StorageFactory.createApiStorage(config);
}
}
// Default to file storage
logger.debug('📁 Using local file storage');
return StorageFactory.createFileStorage(projectPath, config);
default:
throw new TaskMasterError(
`Unknown storage type: ${storageType}`,
@@ -125,6 +218,11 @@ export class StorageFactory {
// File storage doesn't require additional config
break;
case 'auto':
// Auto storage is valid - it will determine the actual type at runtime
// No specific validation needed as it will fall back to file if API not configured
break;
default:
errors.push(`Unknown storage type: ${storageType}`);
}
@@ -157,7 +255,8 @@ export class StorageFactory {
await apiStorage.initialize();
return apiStorage;
} catch (error) {
console.warn(
const logger = getLogger('StorageFactory');
logger.warn(
'Failed to initialize API storage, falling back to file storage:',
error
);
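A hedged sketch of driving the new `'auto'` path from calling code, using the `createFromStorageConfig` helper above. The `@tm/core/storage` subpath follows the package's documented subpath exports; the `RuntimeStorageConfig` import path is an assumption.

```typescript
import { StorageFactory } from '@tm/core/storage';
// Type import path assumed; see interfaces/configuration.interface.ts
import type { RuntimeStorageConfig } from '@tm/core';

// With type 'auto' the factory prefers API storage when Hamster credentials
// are configured or an AuthManager session exists, and otherwise falls back
// to local file storage.
const storageConfig: RuntimeStorageConfig = {
  type: 'auto',
  apiConfigured: false
};

const storage = StorageFactory.createFromStorageConfig(
  storageConfig,
  '/path/to/project'
);
```

Validation also accepts `'auto'`, since the concrete backend is only resolved when the storage instance is created.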

View File

@@ -0,0 +1,99 @@
/**
* Test file documenting subpath export usage
* This demonstrates how consumers can use granular imports for better tree-shaking
*/
import { describe, it, expect } from 'vitest';
describe('Subpath Exports', () => {
it('should allow importing from auth subpath', async () => {
// Instead of: import { AuthManager } from '@tm/core';
// Use: import { AuthManager } from '@tm/core/auth';
const authModule = await import('./auth');
expect(authModule.AuthManager).toBeDefined();
expect(authModule.AuthenticationError).toBeDefined();
});
it('should allow importing from storage subpath', async () => {
// Instead of: import { FileStorage } from '@tm/core';
// Use: import { FileStorage } from '@tm/core/storage';
const storageModule = await import('./storage');
expect(storageModule.FileStorage).toBeDefined();
expect(storageModule.ApiStorage).toBeDefined();
expect(storageModule.StorageFactory).toBeDefined();
});
it('should allow importing from config subpath', async () => {
// Instead of: import { ConfigManager } from '@tm/core';
// Use: import { ConfigManager } from '@tm/core/config';
const configModule = await import('./config');
expect(configModule.ConfigManager).toBeDefined();
});
it('should allow importing from errors subpath', async () => {
// Instead of: import { TaskMasterError } from '@tm/core';
// Use: import { TaskMasterError } from '@tm/core/errors';
const errorsModule = await import('./errors');
expect(errorsModule.TaskMasterError).toBeDefined();
expect(errorsModule.ERROR_CODES).toBeDefined();
});
it('should allow importing from logger subpath', async () => {
// Instead of: import { getLogger } from '@tm/core';
// Use: import { getLogger } from '@tm/core/logger';
const loggerModule = await import('./logger');
expect(loggerModule.getLogger).toBeDefined();
expect(loggerModule.createLogger).toBeDefined();
});
it('should allow importing from providers subpath', async () => {
// Instead of: import { BaseProvider } from '@tm/core';
// Use: import { BaseProvider } from '@tm/core/providers';
const providersModule = await import('./providers');
expect(providersModule.BaseProvider).toBeDefined();
});
it('should allow importing from services subpath', async () => {
// Instead of: import { TaskService } from '@tm/core';
// Use: import { TaskService } from '@tm/core/services';
const servicesModule = await import('./services');
expect(servicesModule.TaskService).toBeDefined();
});
it('should allow importing from utils subpath', async () => {
// Instead of: import { generateId } from '@tm/core';
// Use: import { generateId } from '@tm/core/utils';
const utilsModule = await import('./utils');
expect(utilsModule.generateId).toBeDefined();
});
});
/**
* Usage Examples for Consumers:
*
* 1. Import only authentication (smaller bundle):
* ```typescript
* import { AuthManager, AuthenticationError } from '@tm/core/auth';
* ```
*
* 2. Import only storage (no auth code bundled):
* ```typescript
* import { FileStorage, StorageFactory } from '@tm/core/storage';
* ```
*
* 3. Import only errors (minimal bundle):
* ```typescript
* import { TaskMasterError, ERROR_CODES } from '@tm/core/errors';
* ```
*
* 4. Still support convenience imports (larger bundle but better DX):
* ```typescript
* import { AuthManager, FileStorage, TaskMasterError } from '@tm/core';
* ```
*
* Benefits:
* - Better tree-shaking: unused modules are not bundled
* - Clearer dependencies: explicit about what parts of the library you use
* - Faster builds: bundlers can optimize better with granular imports
* - Smaller bundles: especially important for browser/edge deployments
*/

View File

@@ -11,6 +11,7 @@ import {
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
import type { IConfiguration } from './interfaces/configuration.interface.js';
import type { Task, TaskStatus, TaskFilter } from './types/index.js';
import { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
/**
* Options for creating TaskMasterCore instance
@@ -18,6 +19,7 @@ import type { Task, TaskStatus, TaskFilter } from './types/index.js';
export interface TaskMasterCoreOptions {
projectPath: string;
configuration?: Partial<IConfiguration>;
workflow?: Partial<WorkflowServiceConfig>;
}
/**
@@ -33,6 +35,7 @@ export type { GetTaskListOptions } from './services/task-service.js';
export class TaskMasterCore {
private configManager: ConfigManager;
private taskService: TaskService;
private workflowService: WorkflowService;
/**
* Create and initialize a new TaskMasterCore instance
@@ -55,6 +58,7 @@ export class TaskMasterCore {
// Services will be initialized in the initialize() method
this.configManager = null as any;
this.taskService = null as any;
this.workflowService = null as any;
}
/**
@@ -81,6 +85,28 @@ export class TaskMasterCore {
// Create task service
this.taskService = new TaskService(this.configManager);
await this.taskService.initialize();
// Create workflow service
const workflowConfig: WorkflowServiceConfig = {
projectRoot: options.projectPath,
...options.workflow
};
// Pass task retrieval function to workflow service
this.workflowService = new WorkflowService(
workflowConfig,
async (taskId: string) => {
const task = await this.getTask(taskId);
if (!task) {
throw new TaskMasterError(
`Task ${taskId} not found`,
ERROR_CODES.TASK_NOT_FOUND
);
}
return task;
}
);
await this.workflowService.initialize();
} catch (error) {
throw new TaskMasterError(
'Failed to initialize TaskMasterCore',
@@ -152,7 +178,7 @@ export class TaskMasterCore {
/**
* Get current storage type
*/
getStorageType(): 'file' | 'api' {
getStorageType(): 'file' | 'api' | 'auto' {
return this.taskService.getStorageType();
}
@@ -170,11 +196,21 @@ export class TaskMasterCore {
await this.configManager.setActiveTag(tag);
}
/**
* Get workflow service for workflow operations
*/
get workflow(): WorkflowService {
return this.workflowService;
}
/**
* Close and cleanup resources
*/
async close(): Promise<void> {
// TaskService handles storage cleanup internally
if (this.workflowService) {
await this.workflowService.dispose();
}
}
}
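A sketch of what the new `workflow` surface looks like from application code, reusing the `createTaskMasterCore` factory shown in the workflow-engine README later in this diff; option values here are illustrative.

```typescript
import { createTaskMasterCore } from '@tm/core';

const tmCore = await createTaskMasterCore({
  projectPath: process.cwd(),
  workflow: { maxConcurrent: 2, debug: true }
});

// Start an isolated workflow; the service resolves the task through the
// same getTask() callback wired up in initialize() above.
const workflowId = await tmCore.workflow.start('1.2', { timeout: 30 });

// The underlying engine re-emits component events on the 'event' channel.
tmCore.workflow.on('event', (event) => {
  if (event.type === 'process.output') {
    process.stdout.write(event.data?.data ?? '');
  }
});

// When everything is finished, close() also disposes the workflow service.
await tmCore.close();
```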

View File

@@ -3,29 +3,17 @@
* This file exports all utility functions and helper classes
*/
// Utility implementations will be defined here
// export * from './validation.js';
// export * from './formatting.js';
// export * from './file-utils.js';
// export * from './async-utils.js';
// Export ID generation utilities
export {
generateTaskId as generateId, // Alias for backward compatibility
generateTaskId,
generateSubtaskId,
isValidTaskId,
isValidSubtaskId,
getParentTaskId
} from './id-generator.js';
// Placeholder exports - these will be implemented in later tasks
/**
* Generates a unique ID for tasks
* @deprecated This is a placeholder function that will be properly implemented in later tasks
*/
export function generateTaskId(): string {
return `task-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
}
/**
* Validates a task ID format
* @deprecated This is a placeholder function that will be properly implemented in later tasks
*/
export function isValidTaskId(id: string): boolean {
return typeof id === 'string' && id.length > 0;
}
// Additional utility exports
/**
* Formats a date for task timestamps

View File

@@ -0,0 +1,17 @@
/**
* @fileoverview Workflow Module
* Public exports for workflow functionality
*/
export { WorkflowService, type WorkflowServiceConfig } from './workflow-service.js';
// Re-export workflow engine types for convenience
export type {
WorkflowExecutionContext,
WorkflowStatus,
WorkflowEvent,
WorkflowEventType,
WorkflowProcess,
ProcessStatus,
WorktreeInfo
} from '@tm/workflow-engine';

View File

@@ -0,0 +1,218 @@
/**
* @fileoverview Workflow Service
* Integrates workflow engine into Task Master Core
*/
import {
TaskExecutionManager,
type TaskExecutionManagerConfig,
type WorkflowExecutionContext
} from '@tm/workflow-engine';
import path from 'node:path';
import type { Task } from '../types/index.js';
import { TaskMasterError } from '../errors/index.js';
export interface WorkflowServiceConfig {
/** Project root directory */
projectRoot: string;
/** Maximum number of concurrent workflows */
maxConcurrent?: number;
/** Default timeout for workflow execution (minutes) */
defaultTimeout?: number;
/** Base directory for worktrees */
worktreeBase?: string;
/** Claude Code executable path */
claudeExecutable?: string;
/** Enable debug logging */
debug?: boolean;
}
/**
* WorkflowService provides Task Master workflow capabilities through core
*/
export class WorkflowService {
private workflowEngine: TaskExecutionManager;
constructor(
config: WorkflowServiceConfig,
private getTask: (taskId: string) => Promise<Task>
) {
const engineConfig: TaskExecutionManagerConfig = {
projectRoot: config.projectRoot,
maxConcurrent: config.maxConcurrent || 5,
defaultTimeout: config.defaultTimeout || 60,
worktreeBase:
config.worktreeBase ||
path.join(config.projectRoot, '..', 'task-worktrees'),
claudeExecutable: config.claudeExecutable || 'claude',
debug: config.debug || false
};
this.workflowEngine = new TaskExecutionManager(engineConfig);
}
/**
* Initialize the workflow service
*/
async initialize(): Promise<void> {
await this.workflowEngine.initialize();
}
/**
* Start a workflow for a task
*/
async start(
taskId: string,
options?: {
branchName?: string;
timeout?: number;
env?: Record<string, string>;
}
): Promise<string> {
try {
// Get task from core
const task = await this.getTask(taskId);
// Start workflow using engine
return await this.workflowEngine.startTaskExecution(task, options);
} catch (error) {
throw new TaskMasterError(
`Failed to start workflow for task ${taskId}`,
'WORKFLOW_START_FAILED',
error instanceof Error ? error : undefined
);
}
}
/**
* Stop a workflow
*/
async stop(workflowId: string, force = false): Promise<void> {
try {
await this.workflowEngine.stopTaskExecution(workflowId, force);
} catch (error) {
throw new TaskMasterError(
`Failed to stop workflow ${workflowId}`,
'WORKFLOW_STOP_FAILED',
error instanceof Error ? error : undefined
);
}
}
/**
* Pause a workflow
*/
async pause(workflowId: string): Promise<void> {
try {
await this.workflowEngine.pauseTaskExecution(workflowId);
} catch (error) {
throw new TaskMasterError(
`Failed to pause workflow ${workflowId}`,
'WORKFLOW_PAUSE_FAILED',
error instanceof Error ? error : undefined
);
}
}
/**
* Resume a paused workflow
*/
async resume(workflowId: string): Promise<void> {
try {
await this.workflowEngine.resumeTaskExecution(workflowId);
} catch (error) {
throw new TaskMasterError(
`Failed to resume workflow ${workflowId}`,
'WORKFLOW_RESUME_FAILED',
error instanceof Error ? error : undefined
);
}
}
/**
* Get workflow status
*/
getStatus(workflowId: string): WorkflowExecutionContext | undefined {
return this.workflowEngine.getWorkflowStatus(workflowId);
}
/**
* Get workflow by task ID
*/
getByTaskId(taskId: string): WorkflowExecutionContext | undefined {
return this.workflowEngine.getWorkflowByTaskId(taskId);
}
/**
* List all workflows
*/
list(): WorkflowExecutionContext[] {
return this.workflowEngine.listWorkflows();
}
/**
* List active workflows
*/
listActive(): WorkflowExecutionContext[] {
return this.workflowEngine.listActiveWorkflows();
}
/**
* Send input to a running workflow
*/
async sendInput(workflowId: string, input: string): Promise<void> {
try {
await this.workflowEngine.sendInputToWorkflow(workflowId, input);
} catch (error) {
throw new TaskMasterError(
`Failed to send input to workflow ${workflowId}`,
'WORKFLOW_INPUT_FAILED',
error instanceof Error ? error : undefined
);
}
}
/**
* Clean up all workflows
*/
async cleanup(force = false): Promise<void> {
try {
await this.workflowEngine.cleanup(force);
} catch (error) {
throw new TaskMasterError(
'Failed to cleanup workflows',
'WORKFLOW_CLEANUP_FAILED',
error instanceof Error ? error : undefined
);
}
}
/**
* Subscribe to workflow events
*/
on(event: string, listener: (...args: any[]) => void): void {
this.workflowEngine.on(event, listener);
}
/**
* Unsubscribe from workflow events
*/
off(event: string, listener: (...args: any[]) => void): void {
this.workflowEngine.off(event, listener);
}
/**
* Get workflow engine instance (for advanced usage)
*/
getEngine(): TaskExecutionManager {
return this.workflowEngine;
}
/**
* Dispose of the workflow service
*/
async dispose(): Promise<void> {
await this.cleanup(true);
this.workflowEngine.removeAllListeners();
}
}
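WorkflowService can also be constructed directly when you are not going through TaskMasterCore. A minimal sketch, assuming the `@tm/core/workflow` subpath implied by the new tsup entry and a caller-supplied task lookup (the lookup body here is hypothetical):

```typescript
import { WorkflowService } from '@tm/core/workflow'; // subpath assumed
import type { Task } from '@tm/core';

// Any async lookup works; TaskMasterCore wires in its own getTask().
const getTask = async (taskId: string): Promise<Task> => {
  // Hypothetical lookup; replace with a real data source.
  return { id: taskId, title: `Task ${taskId}`, status: 'pending' } as Task;
};

const workflows = new WorkflowService(
  { projectRoot: process.cwd(), maxConcurrent: 2 },
  getTask
);

await workflows.initialize();
const id = await workflows.start('1.2', { branchName: 'task/1.2-auth' });
console.log(workflows.getStatus(id)?.status);
await workflows.dispose();
```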

View File

@@ -23,17 +23,24 @@
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"moduleResolution": "node",
"moduleResolution": "bundler",
"moduleDetection": "force",
"types": ["node"],
"resolveJsonModule": true,
"isolatedModules": true,
"paths": {
"@/*": ["./src/*"],
"@/types": ["./src/types"],
"@/providers": ["./src/providers"],
"@/storage": ["./src/storage"],
"@/auth": ["./src/auth"],
"@/config": ["./src/config"],
"@/errors": ["./src/errors"],
"@/interfaces": ["./src/interfaces"],
"@/logger": ["./src/logger"],
"@/parser": ["./src/parser"],
"@/utils": ["./src/utils"],
"@/errors": ["./src/errors"]
"@/providers": ["./src/providers"],
"@/services": ["./src/services"],
"@/storage": ["./src/storage"],
"@/types": ["./src/types"],
"@/utils": ["./src/utils"]
}
},
"include": ["src/**/*"],

View File

@@ -1,14 +1,34 @@
import { defineConfig } from 'tsup';
import { dotenvLoad } from 'dotenv-mono';
dotenvLoad();
// Get all TM_PUBLIC_* env variables for build-time injection
const getBuildTimeEnvs = () => {
const envs: Record<string, string> = {};
for (const [key, value] of Object.entries(process.env)) {
if (key.startsWith('TM_PUBLIC_')) {
// Return the actual value, not JSON.stringify'd
envs[key] = value || '';
}
}
return envs;
};
export default defineConfig({
entry: {
index: 'src/index.ts',
'types/index': 'src/types/index.ts',
'providers/index': 'src/providers/index.ts',
'storage/index': 'src/storage/index.ts',
'auth/index': 'src/auth/index.ts',
'config/index': 'src/config/index.ts',
'errors/index': 'src/errors/index.ts',
'interfaces/index': 'src/interfaces/index.ts',
'logger/index': 'src/logger/index.ts',
'parser/index': 'src/parser/index.ts',
'providers/index': 'src/providers/index.ts',
'services/index': 'src/services/index.ts',
'storage/index': 'src/storage/index.ts',
'types/index': 'src/types/index.ts',
'utils/index': 'src/utils/index.ts',
'errors/index': 'src/errors/index.ts'
'workflow/index': 'src/workflow/index.ts'
},
format: ['cjs', 'esm'],
dts: true,
@@ -20,7 +40,13 @@ export default defineConfig({
target: 'es2022',
tsconfig: './tsconfig.json',
outDir: 'dist',
external: ['zod'],
// Replace process.env.TM_PUBLIC_* with actual values at build time
env: getBuildTimeEnvs(),
// Auto-external all dependencies from package.json
external: [
// Externalize all node_modules packages - anything not starting with . or /
/^[^./]/
],
esbuildOptions(options) {
options.conditions = ['module'];
}

View File

@@ -0,0 +1,371 @@
# @tm/workflow-engine
Enhanced Task Master workflow execution engine with git worktree isolation and Claude Code process management.
## Overview
The Workflow Engine extends Task Master with advanced execution capabilities:
- **Git Worktree Isolation**: Each task runs in its own isolated worktree
- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
- **Real-time Monitoring**: Track workflow progress and process output
- **State Management**: Persistent workflow state across sessions
- **Parallel Execution**: Run multiple tasks concurrently with resource limits
## Architecture
```
TaskExecutionManager
├── WorktreeManager # Git worktree lifecycle
├── ProcessSandbox # Claude Code process management
└── WorkflowStateManager # Persistent state tracking
```
## Quick Start
```typescript
import { TaskExecutionManager } from '@tm/workflow-engine';
const manager = new TaskExecutionManager({
projectRoot: '/path/to/project',
worktreeBase: '/path/to/worktrees',
claudeExecutable: 'claude',
maxConcurrent: 3,
defaultTimeout: 60,
debug: true
});
await manager.initialize();
// Start task execution
const workflowId = await manager.startTaskExecution({
id: '1.2',
title: 'Implement authentication',
description: 'Add JWT-based auth system',
status: 'pending',
priority: 'high'
});
// Monitor workflow
const workflow = manager.getWorkflowStatus(workflowId);
console.log(`Status: ${workflow?.status}`);
// Stop when complete
await manager.stopTaskExecution(workflowId);
```
## CLI Integration
```bash
# Start workflow
tm workflow start 1.2
# List active workflows
tm workflow list
# Check status
tm workflow status workflow-1.2-1234567890-abc123
# Stop workflow
tm workflow stop workflow-1.2-1234567890-abc123
```
## VS Code Extension
The workflow engine integrates with the Task Master VS Code extension to provide:
- **Workflow Tree View**: Visual workflow management
- **Process Monitoring**: Real-time output streaming
- **Worktree Navigation**: Quick access to isolated workspaces
- **Status Indicators**: Visual workflow state tracking
## Core Components
### TaskExecutionManager
Orchestrates the complete workflow lifecycle:
```typescript
// Event-driven workflow management
manager.on('workflow.started', (event) => {
console.log(`Started: ${event.workflowId}`);
});
manager.on('process.output', (event) => {
console.log(`[${event.data.stream}]: ${event.data.data}`);
});
```
### WorktreeManager
Manages git worktree operations:
```typescript
import { WorktreeManager } from '@tm/workflow-engine';
const manager = new WorktreeManager({
worktreeBase: './worktrees',
projectRoot: process.cwd(),
autoCleanup: true
});
// Create isolated workspace
const worktree = await manager.createWorktree('task-1.2');
console.log(`Created: ${worktree.path}`);
// List all worktrees
const worktrees = await manager.listWorktrees();
// Cleanup
await manager.removeWorktree('task-1.2');
```
### ProcessSandbox
Spawns and manages Claude Code processes:
```typescript
import { ProcessSandbox } from '@tm/workflow-engine';
const sandbox = new ProcessSandbox({
claudeExecutable: 'claude',
defaultTimeout: 30,
debug: true
});
// Start isolated process
const process = await sandbox.startProcess(
'workflow-123',
'task-1.2',
'Implement user authentication with JWT tokens',
{ cwd: '/path/to/worktree' }
);
// Send input
await sandbox.sendInput('workflow-123', 'npm test');
// Monitor output
sandbox.on('process.output', (event) => {
console.log(event.data.data);
});
```
### WorkflowStateManager
Persistent workflow state management:
```typescript
import { WorkflowStateManager } from '@tm/workflow-engine';
const stateManager = new WorkflowStateManager({
projectRoot: process.cwd()
});
await stateManager.loadState();
// Register workflow
const workflowId = await stateManager.registerWorkflow({
taskId: '1.2',
taskTitle: 'Authentication',
// ... other context
});
// Update status
await stateManager.updateWorkflowStatus(workflowId, 'running');
// Query workflows
const running = stateManager.listWorkflowsByStatus('running');
```
## Configuration
### Environment Variables
- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
### Config Object
```typescript
interface TaskExecutionManagerConfig {
projectRoot: string; // Project root directory
worktreeBase: string; // Worktree base path
claudeExecutable: string; // Claude executable
maxConcurrent: number; // Concurrent limit
defaultTimeout: number; // Timeout (minutes)
debug: boolean; // Debug logging
}
```
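One way to wire the environment variables above into a config object (a sketch; the engine may also read some of these internally):

```typescript
import path from 'node:path';
import {
  TaskExecutionManager,
  type TaskExecutionManagerConfig
} from '@tm/workflow-engine';

const projectRoot = process.cwd();

const config: TaskExecutionManagerConfig = {
  projectRoot,
  worktreeBase:
    process.env.TASKMASTER_WORKTREE_BASE ??
    path.join(projectRoot, '..', 'task-worktrees'),
  claudeExecutable: process.env.TASKMASTER_CLAUDE_PATH ?? 'claude',
  maxConcurrent: Number(process.env.TASKMASTER_MAX_CONCURRENT ?? 3),
  defaultTimeout: 60,
  debug: process.env.TASKMASTER_WORKFLOW_DEBUG === 'true'
};

const manager = new TaskExecutionManager(config);
```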
## Workflow States
| State | Description |
|-------|-------------|
| `pending` | Created but not started |
| `initializing` | Setting up worktree/process |
| `running` | Active execution |
| `paused` | Temporarily stopped |
| `completed` | Successfully finished |
| `failed` | Error occurred |
| `cancelled` | User cancelled |
| `timeout` | Exceeded time limit |
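States can be inspected at runtime through the manager (reusing `manager` from the Quick Start):

```typescript
// Narrow the full workflow list by state.
const all = manager.listWorkflows();
const running = all.filter((w) => w.status === 'running');
const needsAttention = all.filter((w) => ['failed', 'timeout'].includes(w.status));

console.log(`${running.length} running, ${needsAttention.length} need attention`);
```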
## Events
The workflow engine emits events for real-time monitoring:
```typescript
// Workflow lifecycle
manager.on('workflow.started', (event) => {});
manager.on('workflow.completed', (event) => {});
manager.on('workflow.failed', (event) => {});
// Process events
manager.on('process.started', (event) => {});
manager.on('process.output', (event) => {});
manager.on('process.stopped', (event) => {});
// Worktree events
manager.on('worktree.created', (event) => {});
manager.on('worktree.deleted', (event) => {});
```
## Error Handling
The workflow engine provides specialized error types:
```typescript
import {
WorkflowError,
WorktreeError,
ProcessError,
MaxConcurrentWorkflowsError
} from '@tm/workflow-engine';
try {
await manager.startTaskExecution(task);
} catch (error) {
if (error instanceof MaxConcurrentWorkflowsError) {
console.log('Too many concurrent workflows');
} else if (error instanceof WorktreeError) {
console.log('Worktree operation failed');
}
}
```
## Development
```bash
# Install dependencies
npm install
# Build package
npm run build
# Run tests
npm test
# Development mode
npm run dev
```
## Integration Examples
### With Task Master Core
```typescript
import { createTaskMasterCore } from '@tm/core';
import { TaskExecutionManager } from '@tm/workflow-engine';
const core = await createTaskMasterCore({ projectPath: '.' });
const workflows = new TaskExecutionManager({ /*...*/ });
// Get task from core
const tasks = await core.getTaskList({});
const task = tasks.tasks.find(t => t.id === '1.2');
// Execute with workflow engine
if (task) {
const workflowId = await workflows.startTaskExecution(task);
}
```
### With VS Code Extension
```typescript
import { WorkflowProvider } from './workflow-provider';
// Register tree view
const provider = new WorkflowProvider(context);
vscode.window.createTreeView('taskmaster.workflows', {
treeDataProvider: provider
});
// Register commands
vscode.commands.registerCommand('taskmaster.workflow.start',
async (taskId) => {
await provider.startWorkflow(taskId);
}
);
```
## Troubleshooting
### Common Issues
1. **Worktree Creation Fails**
```bash
# Check git version (requires 2.5+)
git --version
# Verify the project is a git repository
git status
```
2. **Claude Code Not Found**
```bash
# Check Claude installation
which claude
# Set custom path
export TASKMASTER_CLAUDE_PATH=/path/to/claude
```
3. **Permission Errors**
```bash
# Check worktree directory permissions
chmod -R 755 ./worktrees
```
### Debug Mode
Enable debug logging for troubleshooting:
```typescript
const manager = new TaskExecutionManager({
// ... other config
debug: true
});
```
Or via environment:
```bash
export TASKMASTER_WORKFLOW_DEBUG=true
tm workflow start 1.2
```
## Roadmap
- [ ] Process resource monitoring (CPU, memory)
- [ ] Workflow templates and presets
- [ ] Integration with CI/CD pipelines
- [ ] Workflow scheduling and queueing
- [ ] Multi-machine workflow distribution
- [ ] Advanced debugging and profiling tools
## License
MIT WITH Commons-Clause

View File

@@ -0,0 +1,56 @@
{
"name": "@tm/workflow-engine",
"version": "0.1.0",
"description": "Task Master workflow execution engine with git worktree and process management",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"exports": {
".": {
"import": "./dist/index.js",
"types": "./dist/index.d.ts"
},
"./task-execution": {
"import": "./dist/task-execution/index.js",
"types": "./dist/task-execution/index.d.ts"
},
"./worktree": {
"import": "./dist/worktree/index.js",
"types": "./dist/worktree/index.d.ts"
},
"./process": {
"import": "./dist/process/index.js",
"types": "./dist/process/index.d.ts"
},
"./state": {
"import": "./dist/state/index.js",
"types": "./dist/state/index.d.ts"
}
},
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"test": "vitest",
"test:watch": "vitest --watch",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@tm/core": "*"
},
"devDependencies": {
"@types/node": "^22.0.0",
"tsup": "^8.0.0",
"typescript": "^5.5.0",
"vitest": "^2.0.0"
},
"files": ["dist"],
"keywords": [
"task-master",
"workflow",
"git-worktree",
"process-management",
"claude-code"
],
"author": "Task Master AI Team",
"license": "MIT"
}

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview Workflow Engine Errors
* Public error exports
*/
export * from './workflow.errors.js';

View File

@@ -0,0 +1,59 @@
/**
* @fileoverview Workflow Engine Errors
* Custom error classes for workflow operations
*/
export class WorkflowError extends Error {
constructor(
message: string,
public code: string,
public workflowId?: string,
public taskId?: string,
public cause?: Error
) {
super(message);
this.name = 'WorkflowError';
}
}
export class WorktreeError extends WorkflowError {
constructor(message: string, public path?: string, cause?: Error) {
super(message, 'WORKTREE_ERROR', undefined, undefined, cause);
this.name = 'WorktreeError';
}
}
export class ProcessError extends WorkflowError {
constructor(message: string, public pid?: number, cause?: Error) {
super(message, 'PROCESS_ERROR', undefined, undefined, cause);
this.name = 'ProcessError';
}
}
export class WorkflowTimeoutError extends WorkflowError {
constructor(workflowId: string, timeoutMinutes: number) {
super(
`Workflow ${workflowId} timed out after ${timeoutMinutes} minutes`,
'WORKFLOW_TIMEOUT',
workflowId
);
this.name = 'WorkflowTimeoutError';
}
}
export class WorkflowNotFoundError extends WorkflowError {
constructor(workflowId: string) {
super(`Workflow ${workflowId} not found`, 'WORKFLOW_NOT_FOUND', workflowId);
this.name = 'WorkflowNotFoundError';
}
}
export class MaxConcurrentWorkflowsError extends WorkflowError {
constructor(maxConcurrent: number) {
super(
`Maximum concurrent workflows (${maxConcurrent}) reached`,
'MAX_CONCURRENT_WORKFLOWS'
);
this.name = 'MaxConcurrentWorkflowsError';
}
}

View File

@@ -0,0 +1,19 @@
/**
* @fileoverview Workflow Engine
* Main entry point for the Task Master workflow execution engine
*/
// Core task execution
export * from './task-execution/index.js';
// Component managers
export * from './worktree/index.js';
export * from './process/index.js';
export * from './state/index.js';
// Types and errors
export * from './types/index.js';
export * from './errors/index.js';
// Convenience exports
export { TaskExecutionManager as WorkflowEngine } from './task-execution/index.js';

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview Process Management
* Public exports for process operations
*/
export * from './process-sandbox.js';

View File

@@ -0,0 +1,378 @@
/**
* @fileoverview Process Sandbox
* Manages Claude Code process execution in isolated environments
*/
import { spawn, ChildProcess } from 'node:child_process';
import { EventEmitter } from 'node:events';
import type {
WorkflowProcess,
WorkflowEvent,
WorkflowEventType
} from '../types/workflow.types.js';
import { ProcessError } from '../errors/workflow.errors.js';
export interface ProcessSandboxConfig {
/** Claude Code executable path */
claudeExecutable: string;
/** Default timeout for processes (minutes) */
defaultTimeout: number;
/** Environment variables to pass to processes */
environment?: Record<string, string>;
/** Enable debug output */
debug: boolean;
}
export interface ProcessOptions {
/** Working directory for the process */
cwd: string;
/** Environment variables (merged with config) */
env?: Record<string, string>;
/** Timeout in minutes (overrides default) */
timeout?: number;
/** Additional Claude Code arguments */
args?: string[];
}
/**
* ProcessSandbox manages Claude Code process lifecycle
* Single responsibility: Process spawning, monitoring, and cleanup
*/
export class ProcessSandbox extends EventEmitter {
private config: ProcessSandboxConfig;
private activeProcesses = new Map<string, WorkflowProcess>();
private childProcesses = new Map<string, ChildProcess>();
private timeouts = new Map<string, NodeJS.Timeout>();
constructor(config: ProcessSandboxConfig) {
super();
this.config = config;
this.setupCleanupHandlers();
}
/**
* Start a Claude Code process for task execution
*/
async startProcess(
workflowId: string,
taskId: string,
taskPrompt: string,
options: ProcessOptions
): Promise<WorkflowProcess> {
if (this.activeProcesses.has(workflowId)) {
throw new ProcessError(
`Process already running for workflow ${workflowId}`
);
}
// Prepare command and arguments
const args = [
'-p', // Print mode for non-interactive execution
taskPrompt,
...(options.args || [])
];
// Prepare environment
const env = {
...process.env,
...this.config.environment,
...options.env,
// Ensure task context is available
TASKMASTER_WORKFLOW_ID: workflowId,
TASKMASTER_TASK_ID: taskId
};
try {
// Spawn Claude Code process
const childProcess = spawn(this.config.claudeExecutable, args, {
cwd: options.cwd,
env,
stdio: ['pipe', 'pipe', 'pipe']
});
const workflowProcess: WorkflowProcess = {
pid: childProcess.pid!,
command: this.config.claudeExecutable,
args,
cwd: options.cwd,
env,
startedAt: new Date(),
status: 'starting'
};
// Store process references
this.activeProcesses.set(workflowId, workflowProcess);
this.childProcesses.set(workflowId, childProcess);
// Setup process event handlers
this.setupProcessHandlers(workflowId, taskId, childProcess);
// Setup timeout if specified
const timeoutMinutes = options.timeout || this.config.defaultTimeout;
if (timeoutMinutes > 0) {
this.setupProcessTimeout(workflowId, timeoutMinutes);
}
// Emit process started event
this.emitEvent('process.started', workflowId, taskId, {
pid: workflowProcess.pid,
command: workflowProcess.command
});
workflowProcess.status = 'running';
return workflowProcess;
} catch (error) {
throw new ProcessError(
`Failed to start process for workflow ${workflowId}`,
undefined,
error as Error
);
}
}
/**
* Stop a running process
*/
async stopProcess(workflowId: string, force = false): Promise<void> {
const process = this.activeProcesses.get(workflowId);
const childProcess = this.childProcesses.get(workflowId);
if (!process || !childProcess) {
throw new ProcessError(
`No running process found for workflow ${workflowId}`
);
}
try {
// Clear timeout
const timeout = this.timeouts.get(workflowId);
if (timeout) {
clearTimeout(timeout);
this.timeouts.delete(workflowId);
}
// Kill the process
if (force) {
childProcess.kill('SIGKILL');
} else {
childProcess.kill('SIGTERM');
// Give it 5 seconds to gracefully exit, then force kill
setTimeout(() => {
if (!childProcess.killed) {
childProcess.kill('SIGKILL');
}
}, 5000);
}
process.status = 'stopped';
// Emit process stopped event
this.emitEvent('process.stopped', workflowId, process.pid.toString(), {
pid: process.pid,
forced: force
});
} catch (error) {
throw new ProcessError(
`Failed to stop process for workflow ${workflowId}`,
process.pid,
error as Error
);
}
}
/**
* Send input to a running process
*/
async sendInput(workflowId: string, input: string): Promise<void> {
const childProcess = this.childProcesses.get(workflowId);
if (!childProcess) {
throw new ProcessError(
`No running process found for workflow ${workflowId}`
);
}
try {
childProcess.stdin?.write(input);
childProcess.stdin?.write('\n');
} catch (error) {
throw new ProcessError(
`Failed to send input to process for workflow ${workflowId}`,
childProcess.pid,
error as Error
);
}
}
/**
* Get process information
*/
getProcess(workflowId: string): WorkflowProcess | undefined {
return this.activeProcesses.get(workflowId);
}
/**
* List all active processes
*/
listProcesses(): WorkflowProcess[] {
return Array.from(this.activeProcesses.values());
}
/**
* Check if a process is running
*/
isProcessRunning(workflowId: string): boolean {
const process = this.activeProcesses.get(workflowId);
return process?.status === 'running' || process?.status === 'starting';
}
/**
* Clean up all processes
*/
async cleanupAll(force = false): Promise<void> {
const workflowIds = Array.from(this.activeProcesses.keys());
await Promise.all(
workflowIds.map(async (workflowId) => {
try {
await this.stopProcess(workflowId, force);
} catch (error) {
console.error(
`Failed to cleanup process for workflow ${workflowId}:`,
error
);
}
})
);
}
/**
* Setup process event handlers
*/
private setupProcessHandlers(
workflowId: string,
taskId: string,
childProcess: ChildProcess
): void {
const process = this.activeProcesses.get(workflowId);
if (!process) return;
// Handle stdout
childProcess.stdout?.on('data', (data) => {
const output = data.toString();
if (this.config.debug) {
console.log(`[${workflowId}] STDOUT:`, output);
}
this.emitEvent('process.output', workflowId, taskId, {
stream: 'stdout',
data: output
});
});
// Handle stderr
childProcess.stderr?.on('data', (data) => {
const output = data.toString();
if (this.config.debug) {
console.error(`[${workflowId}] STDERR:`, output);
}
this.emitEvent('process.output', workflowId, taskId, {
stream: 'stderr',
data: output
});
});
// Handle process exit
childProcess.on('exit', (code, signal) => {
process.status = code === 0 ? 'stopped' : 'crashed';
this.emitEvent('process.stopped', workflowId, taskId, {
pid: process.pid,
exitCode: code,
signal
});
// Cleanup
this.activeProcesses.delete(workflowId);
this.childProcesses.delete(workflowId);
const timeout = this.timeouts.get(workflowId);
if (timeout) {
clearTimeout(timeout);
this.timeouts.delete(workflowId);
}
});
// Handle process errors
childProcess.on('error', (error) => {
process.status = 'crashed';
this.emitEvent('process.error', workflowId, taskId, undefined, error);
// Cleanup
this.activeProcesses.delete(workflowId);
this.childProcesses.delete(workflowId);
});
}
/**
* Setup process timeout
*/
private setupProcessTimeout(
workflowId: string,
timeoutMinutes: number
): void {
const timeout = setTimeout(
async () => {
console.warn(`Process timeout reached for workflow ${workflowId}`);
try {
await this.stopProcess(workflowId, true);
} catch (error) {
console.error('Failed to stop timed out process:', error);
}
},
timeoutMinutes * 60 * 1000
);
this.timeouts.set(workflowId, timeout);
}
/**
* Emit workflow event
*/
private emitEvent(
type: WorkflowEventType,
workflowId: string,
taskId: string,
data?: any,
error?: Error
): void {
const event: WorkflowEvent = {
type,
workflowId,
taskId,
timestamp: new Date(),
data,
error
};
this.emit('event', event);
this.emit(type, event);
}
/**
* Setup cleanup handlers for graceful shutdown
*/
private setupCleanupHandlers(): void {
const cleanup = () => {
console.log('Cleaning up processes...');
this.cleanupAll(true).catch(console.error);
};
process.on('SIGINT', cleanup);
process.on('SIGTERM', cleanup);
process.on('exit', cleanup);
}
}

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview State Management
* Public exports for workflow state operations
*/
export * from './workflow-state-manager.js';

View File

@@ -0,0 +1,320 @@
/**
* @fileoverview Workflow State Manager
* Extends the concept of tm-core's RuntimeStateManager with workflow tracking capabilities
*/
import { promises as fs } from 'node:fs';
import path from 'node:path';
import type {
WorkflowExecutionContext,
WorkflowStatus,
WorkflowEvent
} from '../types/workflow.types.js';
import { WorkflowError } from '../errors/workflow.errors.js';
export interface WorkflowStateConfig {
/** Project root directory */
projectRoot: string;
/** Custom state directory (defaults to .taskmaster) */
stateDir?: string;
}
export interface WorkflowRegistryEntry {
/** Workflow ID */
workflowId: string;
/** Task ID being executed */
taskId: string;
/** Workflow status */
status: WorkflowStatus;
/** Worktree path */
worktreePath: string;
/** Process ID if running */
processId?: number;
/** Start timestamp */
startedAt: string;
/** Last activity timestamp */
lastActivity: string;
/** Branch name */
branchName: string;
/** Additional metadata */
metadata?: Record<string, any>;
}
/**
* WorkflowStateManager manages workflow execution state
* Extends the concept of RuntimeStateManager to track active workflows globally
*/
export class WorkflowStateManager {
private config: WorkflowStateConfig;
private stateFilePath: string;
private activeWorkflows = new Map<string, WorkflowExecutionContext>();
constructor(config: WorkflowStateConfig) {
this.config = config;
const stateDir = config.stateDir || '.taskmaster';
this.stateFilePath = path.join(config.projectRoot, stateDir, 'workflows.json');
}
/**
* Load workflow state from disk
*/
async loadState(): Promise<void> {
try {
const stateData = await fs.readFile(this.stateFilePath, 'utf-8');
const registry = JSON.parse(stateData) as Record<string, WorkflowRegistryEntry>;
// Convert registry entries to WorkflowExecutionContext
for (const [workflowId, entry] of Object.entries(registry)) {
const context: WorkflowExecutionContext = {
taskId: entry.taskId,
taskTitle: `Task ${entry.taskId}`, // Will be updated when task details are loaded
taskDescription: '',
projectRoot: this.config.projectRoot,
worktreePath: entry.worktreePath,
branchName: entry.branchName,
processId: entry.processId,
startedAt: new Date(entry.startedAt),
status: entry.status,
lastActivity: new Date(entry.lastActivity),
metadata: entry.metadata
};
this.activeWorkflows.set(workflowId, context);
}
} catch (error: any) {
if (error.code === 'ENOENT') {
// Workflows file doesn't exist, start with empty state
console.debug('No workflows.json found, starting with empty state');
return;
}
console.warn('Failed to load workflow state:', error.message);
}
}
/**
* Save workflow state to disk
*/
async saveState(): Promise<void> {
const stateDir = path.dirname(this.stateFilePath);
try {
await fs.mkdir(stateDir, { recursive: true });
// Convert contexts to registry entries
const registry: Record<string, WorkflowRegistryEntry> = {};
for (const [workflowId, context] of this.activeWorkflows.entries()) {
registry[workflowId] = {
workflowId,
taskId: context.taskId,
status: context.status,
worktreePath: context.worktreePath,
processId: context.processId,
startedAt: context.startedAt.toISOString(),
lastActivity: context.lastActivity.toISOString(),
branchName: context.branchName,
metadata: context.metadata
};
}
await fs.writeFile(
this.stateFilePath,
JSON.stringify(registry, null, 2),
'utf-8'
);
} catch (error) {
throw new WorkflowError(
'Failed to save workflow state',
'WORKFLOW_STATE_SAVE_ERROR',
undefined,
undefined,
error as Error
);
}
}
/**
* Register a new workflow
*/
async registerWorkflow(context: WorkflowExecutionContext): Promise<string> {
const workflowId = this.generateWorkflowId(context.taskId);
this.activeWorkflows.set(workflowId, {
...context,
lastActivity: new Date()
});
await this.saveState();
return workflowId;
}
/**
* Update workflow context
*/
async updateWorkflow(
workflowId: string,
updates: Partial<WorkflowExecutionContext>
): Promise<void> {
const existing = this.activeWorkflows.get(workflowId);
if (!existing) {
throw new WorkflowError(
`Workflow ${workflowId} not found`,
'WORKFLOW_NOT_FOUND',
workflowId
);
}
const updated = {
...existing,
...updates,
lastActivity: new Date()
};
this.activeWorkflows.set(workflowId, updated);
await this.saveState();
}
/**
* Update workflow status
*/
async updateWorkflowStatus(workflowId: string, status: WorkflowStatus): Promise<void> {
await this.updateWorkflow(workflowId, { status });
}
/**
* Unregister a workflow (remove from state)
*/
async unregisterWorkflow(workflowId: string): Promise<void> {
if (!this.activeWorkflows.has(workflowId)) {
throw new WorkflowError(
`Workflow ${workflowId} not found`,
'WORKFLOW_NOT_FOUND',
workflowId
);
}
this.activeWorkflows.delete(workflowId);
await this.saveState();
}
/**
* Get workflow context by ID
*/
getWorkflow(workflowId: string): WorkflowExecutionContext | undefined {
return this.activeWorkflows.get(workflowId);
}
/**
* Get workflow by task ID
*/
getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
for (const context of this.activeWorkflows.values()) {
if (context.taskId === taskId) {
return context;
}
}
return undefined;
}
/**
* List all active workflows
*/
listWorkflows(): WorkflowExecutionContext[] {
return Array.from(this.activeWorkflows.values());
}
/**
* List workflows by status
*/
listWorkflowsByStatus(status: WorkflowStatus): WorkflowExecutionContext[] {
return this.listWorkflows().filter(w => w.status === status);
}
/**
* Get running workflows count
*/
getRunningCount(): number {
return this.listWorkflowsByStatus('running').length;
}
/**
* Check if a task has an active workflow
*/
hasActiveWorkflow(taskId: string): boolean {
return this.getWorkflowByTaskId(taskId) !== undefined;
}
/**
* Clean up completed/failed workflows older than specified time
*/
async cleanupOldWorkflows(olderThanHours = 24): Promise<number> {
const cutoffTime = new Date(Date.now() - (olderThanHours * 60 * 60 * 1000));
let cleaned = 0;
for (const [workflowId, context] of this.activeWorkflows.entries()) {
const isOld = context.lastActivity < cutoffTime;
const isFinished = ['completed', 'failed', 'cancelled', 'timeout'].includes(context.status);
if (isOld && isFinished) {
this.activeWorkflows.delete(workflowId);
cleaned++;
}
}
if (cleaned > 0) {
await this.saveState();
}
return cleaned;
}
/**
* Clear all workflow state
*/
async clearState(): Promise<void> {
try {
await fs.unlink(this.stateFilePath);
} catch (error: any) {
if (error.code !== 'ENOENT') {
throw error;
}
}
this.activeWorkflows.clear();
}
/**
* Record workflow event (for audit trail)
*/
async recordEvent(event: WorkflowEvent): Promise<void> {
// Update workflow last activity
const workflow = this.activeWorkflows.get(event.workflowId);
if (workflow) {
workflow.lastActivity = event.timestamp;
await this.saveState();
}
// Optional: Could extend to maintain event log file
if (process.env.TASKMASTER_DEBUG) {
console.log('Workflow Event:', {
type: event.type,
workflowId: event.workflowId,
taskId: event.taskId,
timestamp: event.timestamp.toISOString(),
data: event.data
});
}
}
/**
* Generate unique workflow ID
*/
private generateWorkflowId(taskId: string): string {
const timestamp = Date.now();
const random = Math.random().toString(36).substring(2, 8);
return `workflow-${taskId}-${timestamp}-${random}`;
}
}

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview Task Execution Management
* Public exports for task execution operations
*/
export * from './task-execution-manager.js';

View File

@@ -0,0 +1,433 @@
/**
* @fileoverview Task Execution Manager
* Orchestrates the complete task execution workflow using worktrees and processes
*/
import { EventEmitter } from 'node:events';
import path from 'node:path';
import type { Task } from '@tm/core';
import {
WorktreeManager,
type WorktreeManagerConfig
} from '../worktree/worktree-manager.js';
import {
ProcessSandbox,
type ProcessSandboxConfig
} from '../process/process-sandbox.js';
import {
WorkflowStateManager,
type WorkflowStateConfig
} from '../state/workflow-state-manager.js';
import type {
WorkflowConfig,
WorkflowExecutionContext,
WorkflowStatus,
WorkflowEvent
} from '../types/workflow.types.js';
import {
WorkflowError,
WorkflowNotFoundError,
MaxConcurrentWorkflowsError,
WorkflowTimeoutError
} from '../errors/workflow.errors.js';
export interface TaskExecutionManagerConfig extends WorkflowConfig {
/** Project root directory */
projectRoot: string;
}
/**
* TaskExecutionManager orchestrates the complete task execution workflow
* Coordinates worktree creation, process spawning, and state management
*/
export class TaskExecutionManager extends EventEmitter {
private config: TaskExecutionManagerConfig;
private worktreeManager: WorktreeManager;
private processSandbox: ProcessSandbox;
private stateManager: WorkflowStateManager;
private initialized = false;
constructor(config: TaskExecutionManagerConfig) {
super();
this.config = config;
// Initialize component managers
const worktreeConfig: WorktreeManagerConfig = {
worktreeBase: config.worktreeBase,
projectRoot: config.projectRoot,
autoCleanup: true
};
const processConfig: ProcessSandboxConfig = {
claudeExecutable: config.claudeExecutable,
defaultTimeout: config.defaultTimeout,
debug: config.debug
};
const stateConfig: WorkflowStateConfig = {
projectRoot: config.projectRoot
};
this.worktreeManager = new WorktreeManager(worktreeConfig);
this.processSandbox = new ProcessSandbox(processConfig);
this.stateManager = new WorkflowStateManager(stateConfig);
// Forward events from components
this.processSandbox.on('event', (event: WorkflowEvent) => {
this.stateManager.recordEvent(event);
this.emit('event', event);
});
}
/**
* Initialize the task execution manager
*/
async initialize(): Promise<void> {
if (this.initialized) return;
await this.stateManager.loadState();
// Clean up any stale workflows
await this.cleanupStaleWorkflows();
this.initialized = true;
}
/**
* Start task execution workflow
*/
async startTaskExecution(
task: Task,
options?: {
branchName?: string;
timeout?: number;
env?: Record<string, string>;
}
): Promise<string> {
if (!this.initialized) {
await this.initialize();
}
// Check concurrent workflow limit
const runningCount = this.stateManager.getRunningCount();
if (runningCount >= this.config.maxConcurrent) {
throw new MaxConcurrentWorkflowsError(this.config.maxConcurrent);
}
// Check if task already has an active workflow
if (this.stateManager.hasActiveWorkflow(task.id)) {
throw new WorkflowError(
`Task ${task.id} already has an active workflow`,
'TASK_ALREADY_EXECUTING',
undefined,
task.id
);
}
try {
// Create worktree
const worktreeInfo = await this.worktreeManager.createWorktree(
task.id,
options?.branchName
);
// Prepare task context
const context: WorkflowExecutionContext = {
taskId: task.id,
taskTitle: task.title,
taskDescription: task.description,
taskDetails: task.details,
projectRoot: this.config.projectRoot,
worktreePath: worktreeInfo.path,
branchName: worktreeInfo.branch,
startedAt: new Date(),
status: 'initializing',
lastActivity: new Date(),
metadata: {
priority: task.priority,
dependencies: task.dependencies
}
};
// Register workflow
const workflowId = await this.stateManager.registerWorkflow(context);
try {
// Prepare task prompt for Claude Code
const taskPrompt = this.generateTaskPrompt(task);
// Start Claude Code process
const process = await this.processSandbox.startProcess(
workflowId,
task.id,
taskPrompt,
{
cwd: worktreeInfo.path,
timeout: options?.timeout,
env: options?.env
}
);
// Update workflow with process information
await this.stateManager.updateWorkflow(workflowId, {
processId: process.pid,
status: 'running'
});
// Emit workflow started event
this.emitEvent('workflow.started', workflowId, task.id, {
worktreePath: worktreeInfo.path,
processId: process.pid
});
return workflowId;
} catch (error) {
// Clean up worktree if process failed to start
await this.worktreeManager.removeWorktree(task.id, true);
await this.stateManager.unregisterWorkflow(workflowId);
throw error;
}
} catch (error) {
throw new WorkflowError(
`Failed to start task execution for ${task.id}`,
'TASK_EXECUTION_START_ERROR',
undefined,
task.id,
error as Error
);
}
}
/**
* Stop task execution workflow
*/
async stopTaskExecution(workflowId: string, force = false): Promise<void> {
const workflow = this.stateManager.getWorkflow(workflowId);
if (!workflow) {
throw new WorkflowNotFoundError(workflowId);
}
try {
// Stop the process if running
if (this.processSandbox.isProcessRunning(workflowId)) {
await this.processSandbox.stopProcess(workflowId, force);
}
// Update workflow status
const status: WorkflowStatus = force ? 'cancelled' : 'completed';
await this.stateManager.updateWorkflowStatus(workflowId, status);
// Clean up worktree
await this.worktreeManager.removeWorktree(workflow.taskId, force);
// Emit workflow stopped event
this.emitEvent('workflow.completed', workflowId, workflow.taskId, {
status,
forced: force
});
// Unregister workflow
await this.stateManager.unregisterWorkflow(workflowId);
} catch (error) {
throw new WorkflowError(
`Failed to stop workflow ${workflowId}`,
'WORKFLOW_STOP_ERROR',
workflowId,
workflow.taskId,
error as Error
);
}
}
/**
* Pause task execution
*/
async pauseTaskExecution(workflowId: string): Promise<void> {
const workflow = this.stateManager.getWorkflow(workflowId);
if (!workflow) {
throw new WorkflowNotFoundError(workflowId);
}
if (workflow.status !== 'running') {
throw new WorkflowError(
`Cannot pause workflow ${workflowId} - not currently running`,
'WORKFLOW_NOT_RUNNING',
workflowId,
workflow.taskId
);
}
// For now, the workflow is only marked as paused; process suspension or
// other pause mechanisms could be implemented in the future.
await this.stateManager.updateWorkflowStatus(workflowId, 'paused');
this.emitEvent('workflow.paused', workflowId, workflow.taskId);
}
/**
* Resume paused task execution
*/
async resumeTaskExecution(workflowId: string): Promise<void> {
const workflow = this.stateManager.getWorkflow(workflowId);
if (!workflow) {
throw new WorkflowNotFoundError(workflowId);
}
if (workflow.status !== 'paused') {
throw new WorkflowError(
`Cannot resume workflow ${workflowId} - not currently paused`,
'WORKFLOW_NOT_PAUSED',
workflowId,
workflow.taskId
);
}
await this.stateManager.updateWorkflowStatus(workflowId, 'running');
this.emitEvent('workflow.resumed', workflowId, workflow.taskId);
}
/**
* Get workflow status
*/
getWorkflowStatus(workflowId: string): WorkflowExecutionContext | undefined {
return this.stateManager.getWorkflow(workflowId);
}
/**
* Get workflow by task ID
*/
getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
return this.stateManager.getWorkflowByTaskId(taskId);
}
/**
* List all workflows
*/
listWorkflows(): WorkflowExecutionContext[] {
return this.stateManager.listWorkflows();
}
/**
* List active workflows
*/
listActiveWorkflows(): WorkflowExecutionContext[] {
return this.stateManager.listWorkflowsByStatus('running');
}
/**
* Send input to a running workflow
*/
async sendInputToWorkflow(workflowId: string, input: string): Promise<void> {
const workflow = this.stateManager.getWorkflow(workflowId);
if (!workflow) {
throw new WorkflowNotFoundError(workflowId);
}
if (!this.processSandbox.isProcessRunning(workflowId)) {
throw new WorkflowError(
`Cannot send input to workflow ${workflowId} - process not running`,
'PROCESS_NOT_RUNNING',
workflowId,
workflow.taskId
);
}
await this.processSandbox.sendInput(workflowId, input);
}
/**
* Clean up all workflows
*/
async cleanup(force = false): Promise<void> {
// Stop all processes
await this.processSandbox.cleanupAll(force);
// Clean up all worktrees
await this.worktreeManager.cleanupAll(force);
// Clear workflow state
await this.stateManager.clearState();
}
/**
* Generate task prompt for Claude Code
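*
* Example output for a task { id: '42', title: 'Add OAuth flow', description: 'Support login' }
* with no details, test strategy, or dependencies:
*
*   Work on Task 42: Add OAuth flow
*
*   Description: Support login
*
*   Please implement this task following the project conventions and best practices.
*   When complete, update the task status appropriately using the available Task Master commands.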
*/
private generateTaskPrompt(task: Task): string {
const prompt = [
`Work on Task ${task.id}: ${task.title}`,
'',
`Description: ${task.description}`
];
if (task.details) {
prompt.push('', `Details: ${task.details}`);
}
if (task.testStrategy) {
prompt.push('', `Test Strategy: ${task.testStrategy}`);
}
if (task.dependencies?.length) {
prompt.push('', `Dependencies: ${task.dependencies.join(', ')}`);
}
prompt.push(
'',
'Please implement this task following the project conventions and best practices.',
'When complete, update the task status appropriately using the available Task Master commands.'
);
return prompt.join('\n');
}
/**
* Clean up stale workflows from previous sessions
*/
private async cleanupStaleWorkflows(): Promise<void> {
const workflows = this.stateManager.listWorkflows();
for (const workflow of workflows) {
const isStale =
workflow.status === 'running' &&
!this.processSandbox.isProcessRunning(`workflow-${workflow.taskId}`);
if (isStale) {
console.log(`Cleaning up stale workflow for task ${workflow.taskId}`);
try {
await this.stateManager.updateWorkflowStatus(
`workflow-${workflow.taskId}`,
'failed'
);
// Try to clean up worktree
await this.worktreeManager.removeWorktree(workflow.taskId, true);
} catch (error) {
console.error(`Failed to cleanup stale workflow:`, error);
}
}
}
}
/**
* Emit workflow event
*/
private emitEvent(
type: string,
workflowId: string,
taskId: string,
data?: any
): void {
const event: WorkflowEvent = {
type: type as any,
workflowId,
taskId,
timestamp: new Date(),
data
};
this.emit('event', event);
this.emit(type, event);
}
}
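
A minimal usage sketch (assuming the package root re-exports TaskExecutionManager; the configuration values and the `task` object are illustrative, not defaults shipped by this package):

import { TaskExecutionManager } from '@tm/workflow-engine';
import type { Task } from '@tm/core';

const manager = new TaskExecutionManager({
	projectRoot: process.cwd(),
	maxConcurrent: 2,
	defaultTimeout: 30, // minutes
	worktreeBase: '.taskmaster/worktrees',
	claudeExecutable: 'claude',
	debug: false
});

await manager.initialize();
manager.on('event', (event) => console.log(event.type, event.taskId));

// `task` is assumed to be a Task loaded elsewhere via @tm/core.
const workflowId = await manager.startTaskExecution(task, { timeout: 60 });
// ...later, when the work is done or needs to be aborted:
await manager.stopTaskExecution(workflowId);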

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview Workflow Engine Types
* Public type exports
*/
export * from './workflow.types.js';

View File

@@ -0,0 +1,119 @@
/**
* @fileoverview Workflow Engine Types
* Core types for workflow execution system
*/
export interface WorkflowConfig {
/** Maximum number of concurrent workflows */
maxConcurrent: number;
/** Default timeout for workflow execution (minutes) */
defaultTimeout: number;
/** Base directory for worktrees */
worktreeBase: string;
/** Claude Code executable path */
claudeExecutable: string;
/** Enable debug logging */
debug: boolean;
}
export interface WorkflowExecutionContext {
/** Task ID being executed */
taskId: string;
/** Task title for display */
taskTitle: string;
/** Full task description */
taskDescription: string;
/** Task implementation details */
taskDetails?: string;
/** Project root path */
projectRoot: string;
/** Worktree path */
worktreePath: string;
/** Branch name for this workflow */
branchName: string;
/** Process ID of running Claude Code */
processId?: number;
/** Workflow start time */
startedAt: Date;
/** Workflow status */
status: WorkflowStatus;
/** Last activity timestamp */
lastActivity: Date;
/** Execution metadata */
metadata?: Record<string, any>;
}
export type WorkflowStatus =
| 'pending' // Created but not started
| 'initializing' // Setting up worktree/process
| 'running' // Active execution
| 'paused' // Temporarily stopped
| 'completed' // Successfully finished
| 'failed' // Error occurred
| 'cancelled' // User cancelled
| 'timeout'; // Exceeded time limit
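// Typical progression: pending -> initializing (worktree being set up) -> running.
// A running workflow may be paused and resumed; completed, failed, cancelled and
// timeout are terminal states.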
export interface WorkflowEvent {
type: WorkflowEventType;
workflowId: string;
taskId: string;
timestamp: Date;
data?: any;
error?: Error;
}
export type WorkflowEventType =
| 'workflow.created'
| 'workflow.started'
| 'workflow.paused'
| 'workflow.resumed'
| 'workflow.completed'
| 'workflow.failed'
| 'workflow.cancelled'
| 'worktree.created'
| 'worktree.deleted'
| 'process.started'
| 'process.stopped'
| 'process.output'
| 'process.error';
export interface WorkflowProcess {
/** Process ID */
pid: number;
/** Command that was executed */
command: string;
/** Command arguments */
args: string[];
/** Working directory */
cwd: string;
/** Environment variables */
env?: Record<string, string>;
/** Process start time */
startedAt: Date;
/** Process status */
status: ProcessStatus;
}
export type ProcessStatus =
| 'starting'
| 'running'
| 'stopped'
| 'crashed'
| 'killed';
export interface WorktreeInfo {
/** Worktree path */
path: string;
/** Branch name */
branch: string;
/** Creation timestamp */
createdAt: Date;
/** Associated task ID */
taskId: string;
/** Git commit hash */
commit?: string;
/** Worktree lock status */
locked: boolean;
/** Lock reason if applicable */
lockReason?: string;
}

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview Worktree Management
* Public exports for worktree operations
*/
export * from './worktree-manager.js';

View File

@@ -0,0 +1,351 @@
/**
* @fileoverview Worktree Manager
* Manages git worktree lifecycle for task execution
*/
import { spawn } from 'node:child_process';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import type { WorktreeInfo } from '../types/workflow.types.js';
import { WorktreeError } from '../errors/workflow.errors.js';
export interface WorktreeManagerConfig {
/** Base directory for all worktrees */
worktreeBase: string;
/** Project root directory */
projectRoot: string;
/** Auto-cleanup on process exit */
autoCleanup: boolean;
}
/**
* WorktreeManager handles git worktree operations
* Single responsibility: Git worktree lifecycle management
*/
export class WorktreeManager {
private config: WorktreeManagerConfig;
private activeWorktrees = new Map<string, WorktreeInfo>();
constructor(config: WorktreeManagerConfig) {
this.config = config;
if (config.autoCleanup) {
this.setupCleanupHandlers();
}
}
/**
* Create a new worktree for task execution
*/
async createWorktree(taskId: string, branchName?: string): Promise<WorktreeInfo> {
const sanitizedTaskId = this.sanitizeTaskId(taskId);
const worktreePath = path.join(this.config.worktreeBase, `task-${sanitizedTaskId}`);
// Ensure base directory exists
await fs.mkdir(this.config.worktreeBase, { recursive: true });
// Generate unique branch name if not provided
const branch = branchName || `task/${sanitizedTaskId}-${Date.now()}`;
try {
// Check if worktree path already exists
if (await this.pathExists(worktreePath)) {
throw new WorktreeError(`Worktree path already exists: ${worktreePath}`);
}
// Create the worktree
await this.executeGitCommand(['worktree', 'add', '-b', branch, worktreePath], {
cwd: this.config.projectRoot
});
const worktreeInfo: WorktreeInfo = {
path: worktreePath,
branch,
createdAt: new Date(),
taskId,
locked: false
};
// Get commit hash
try {
const commit = await this.executeGitCommand(['rev-parse', 'HEAD'], {
cwd: worktreePath
});
worktreeInfo.commit = commit.trim();
} catch (error) {
console.warn('Failed to get commit hash for worktree:', error);
}
this.activeWorktrees.set(taskId, worktreeInfo);
return worktreeInfo;
} catch (error) {
throw new WorktreeError(
`Failed to create worktree for task ${taskId}`,
worktreePath,
error as Error
);
}
}
/**
* Remove a worktree and clean up
*/
async removeWorktree(taskId: string, force = false): Promise<void> {
const worktreeInfo = this.activeWorktrees.get(taskId);
if (!worktreeInfo) {
throw new WorktreeError(`No active worktree found for task ${taskId}`);
}
try {
// Remove the worktree
const args = ['worktree', 'remove', worktreeInfo.path];
if (force) {
args.push('--force');
}
await this.executeGitCommand(args, {
cwd: this.config.projectRoot
});
// Remove branch if it's a task-specific branch
if (worktreeInfo.branch.startsWith('task/')) {
try {
await this.executeGitCommand(['branch', '-D', worktreeInfo.branch], {
cwd: this.config.projectRoot
});
} catch (error) {
console.warn(`Failed to delete branch ${worktreeInfo.branch}:`, error);
}
}
this.activeWorktrees.delete(taskId);
} catch (error) {
throw new WorktreeError(
`Failed to remove worktree for task ${taskId}`,
worktreeInfo.path,
error as Error
);
}
}
/**
* List all active worktrees for this project
*/
async listWorktrees(): Promise<WorktreeInfo[]> {
try {
const output = await this.executeGitCommand(['worktree', 'list', '--porcelain'], {
cwd: this.config.projectRoot
});
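// `--porcelain` emits one block per worktree, separated by blank lines, e.g.:
//   worktree /repo/.taskmaster/worktrees/task-42
//   HEAD 1a2b3c4d5e6f...
//   branch refs/heads/task/42-1700000000000
//   locked <optional reason>   (only present when the worktree is locked)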
const worktrees: WorktreeInfo[] = [];
const lines = output.trim().split('\n');
let currentWorktree: Partial<WorktreeInfo> = {};
for (const line of lines) {
if (line.startsWith('worktree ')) {
if (currentWorktree.path) {
// Complete previous worktree
worktrees.push(this.completeWorktreeInfo(currentWorktree));
}
currentWorktree = { path: line.substring(9) };
} else if (line.startsWith('HEAD ')) {
currentWorktree.commit = line.substring(5);
} else if (line.startsWith('branch ')) {
currentWorktree.branch = line.substring(7).replace('refs/heads/', '');
} else if (line === 'locked') {
currentWorktree.locked = true;
} else if (line.startsWith('locked ')) {
currentWorktree.locked = true;
currentWorktree.lockReason = line.substring(7);
}
}
// Add the last worktree
if (currentWorktree.path) {
worktrees.push(this.completeWorktreeInfo(currentWorktree));
}
// Filter to only our task worktrees
return worktrees.filter(wt =>
wt.path.startsWith(this.config.worktreeBase) &&
wt.branch?.startsWith('task/')
);
} catch (error) {
throw new WorktreeError('Failed to list worktrees', undefined, error as Error);
}
}
/**
* Get worktree info for a specific task
*/
getWorktreeInfo(taskId: string): WorktreeInfo | undefined {
return this.activeWorktrees.get(taskId);
}
/**
* Lock a worktree to prevent cleanup
*/
async lockWorktree(taskId: string, reason?: string): Promise<void> {
const worktreeInfo = this.activeWorktrees.get(taskId);
if (!worktreeInfo) {
throw new WorktreeError(`No active worktree found for task ${taskId}`);
}
try {
const args = ['worktree', 'lock', worktreeInfo.path];
if (reason) {
args.push('--reason', reason);
}
await this.executeGitCommand(args, {
cwd: this.config.projectRoot
});
worktreeInfo.locked = true;
worktreeInfo.lockReason = reason;
} catch (error) {
throw new WorktreeError(
`Failed to lock worktree for task ${taskId}`,
worktreeInfo.path,
error as Error
);
}
}
/**
* Unlock a worktree
*/
async unlockWorktree(taskId: string): Promise<void> {
const worktreeInfo = this.activeWorktrees.get(taskId);
if (!worktreeInfo) {
throw new WorktreeError(`No active worktree found for task ${taskId}`);
}
try {
await this.executeGitCommand(['worktree', 'unlock', worktreeInfo.path], {
cwd: this.config.projectRoot
});
worktreeInfo.locked = false;
delete worktreeInfo.lockReason;
} catch (error) {
throw new WorktreeError(
`Failed to unlock worktree for task ${taskId}`,
worktreeInfo.path,
error as Error
);
}
}
/**
* Clean up all task-related worktrees
*/
async cleanupAll(force = false): Promise<void> {
const worktrees = await this.listWorktrees();
for (const worktree of worktrees) {
if (worktree.taskId) {
try {
await this.removeWorktree(worktree.taskId, force);
} catch (error) {
console.error(`Failed to cleanup worktree for task ${worktree.taskId}:`, error);
}
}
}
}
/**
* Execute git command and return output
*/
private async executeGitCommand(
args: string[],
options: { cwd: string }
): Promise<string> {
return new Promise((resolve, reject) => {
const git = spawn('git', args, {
cwd: options.cwd,
stdio: ['ignore', 'pipe', 'pipe']
});
let stdout = '';
let stderr = '';
git.stdout.on('data', (data) => {
stdout += data.toString();
});
git.stderr.on('data', (data) => {
stderr += data.toString();
});
git.on('close', (code) => {
if (code === 0) {
resolve(stdout);
} else {
reject(new Error(`Git command failed (${code}): ${stderr || stdout}`));
}
});
git.on('error', (error) => {
reject(error);
});
});
}
/**
* Sanitize task ID for use in filesystem paths
*/
private sanitizeTaskId(taskId: string): string {
return taskId.replace(/[^a-zA-Z0-9.-]/g, '-');
}
/**
* Check if path exists
*/
private async pathExists(targetPath: string): Promise<boolean> {
try {
await fs.access(targetPath);
return true;
} catch {
return false;
}
}
/**
* Complete worktree info with defaults
*/
private completeWorktreeInfo(partial: Partial<WorktreeInfo>): WorktreeInfo {
const branch = partial.branch || 'unknown';
const taskIdMatch = branch.match(/^task\/(.+?)-/);
return {
path: partial.path || '',
branch,
createdAt: partial.createdAt || new Date(),
taskId: taskIdMatch?.[1] || partial.taskId || 'unknown',
commit: partial.commit,
locked: partial.locked || false,
lockReason: partial.lockReason
};
}
/**
* Set up cleanup handlers for graceful shutdown
*/
private setupCleanupHandlers(): void {
const cleanup = () => {
console.log('Cleaning up worktrees...');
this.cleanupAll(true).catch(console.error);
};
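// Note: 'exit' handlers cannot await asynchronous work, so cleanup on plain
// process exit is best-effort; the signal handlers give it a better chance to finish.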
process.on('SIGINT', cleanup);
process.on('SIGTERM', cleanup);
process.on('exit', cleanup);
}
}
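
A short sketch of the lifecycle this class wraps (paths, task ID and timestamps are illustrative):

// Within the package; external consumers would import through the package's exports.
import { WorktreeManager } from './worktree-manager.js';

const worktrees = new WorktreeManager({
	worktreeBase: '.taskmaster/worktrees',
	projectRoot: process.cwd(),
	autoCleanup: true
});

// Runs: git worktree add -b task/42-<timestamp> .taskmaster/worktrees/task-42
const info = await worktrees.createWorktree('42');
console.log(info.branch, info.path, info.commit);

// Runs: git worktree remove <path> --force, then git branch -D task/42-<timestamp>
await worktrees.removeWorktree('42', true);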

View File

@@ -0,0 +1,19 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": [
"src/**/*"
],
"exclude": [
"dist",
"node_modules",
"**/*.test.ts",
"**/*.spec.ts"
]
}

View File

@@ -0,0 +1,17 @@
import { defineConfig } from 'tsup';
export default defineConfig({
entry: [
'src/index.ts',
'src/task-execution/index.ts',
'src/worktree/index.ts',
'src/process/index.ts',
'src/state/index.ts'
],
format: ['esm'],
dts: true,
sourcemap: true,
clean: true,
splitting: false,
treeshake: true
});

View File

@@ -0,0 +1,19 @@
import { defineConfig } from 'vitest/config';
export default defineConfig({
test: {
environment: 'node',
globals: true,
coverage: {
provider: 'v8',
reporter: ['text', 'json', 'html'],
exclude: [
'node_modules/',
'dist/',
'**/*.d.ts',
'**/*.test.ts',
'**/*.spec.ts'
]
}
}
});

View File

@@ -15,8 +15,8 @@ import search from '@inquirer/search';
import ora from 'ora'; // Import ora
import { log, readJSON } from './utils.js';
// Import new ListTasksCommand from @tm/cli
import { ListTasksCommand } from '@tm/cli';
// Import command registry from @tm/cli
import { registerAllCommands } from '@tm/cli';
import {
parsePRD,
@@ -1737,9 +1737,10 @@ function registerCommands(programInstance) {
});
});
// NEW: Register the new list command from @tm/cli
// This command handles all its own configuration and logic
ListTasksCommand.registerOn(programInstance);
// Register all commands from @tm/cli using the command registry
// This automatically registers ListTasksCommand, AuthCommand, and any future commands
registerAllCommands(programInstance);
// expand command
programInstance
.command('expand')
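
The registry itself is not shown in this diff; a minimal sketch of one possible shape inside @tm/cli, assuming each command class keeps the same static registerOn(program) pattern that ListTasksCommand used (hypothetical, not the actual implementation):

import type { Command } from 'commander';
// File paths below are hypothetical.
import { ListTasksCommand } from './commands/list.command.js';
import { AuthCommand } from './commands/auth.command.js';

// Add new command classes here to have them picked up automatically.
const commandClasses: Array<{ registerOn(program: Command): void }> = [
	ListTasksCommand,
	AuthCommand
];

export function registerAllCommands(program: Command): void {
	for (const CommandClass of commandClasses) {
		CommandClass.registerOn(program);
	}
}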

View File

@@ -103,10 +103,14 @@ describe('Roo Files Inclusion in Package', () => {
test('source Roo files exist in public/assets directory', () => {
// Verify that the source files for Roo integration exist
expect(
fs.existsSync(path.join(process.cwd(), 'public', 'assets', 'roocode', '.roo'))
fs.existsSync(
path.join(process.cwd(), 'public', 'assets', 'roocode', '.roo')
)
).toBe(true);
expect(
fs.existsSync(path.join(process.cwd(), 'public', 'assets', 'roocode', '.roomodes'))
fs.existsSync(
path.join(process.cwd(), 'public', 'assets', 'roocode', '.roomodes')
)
).toBe(true);
});
});

View File

@@ -89,10 +89,14 @@ describe('Rules Files Inclusion in Package', () => {
test('source Roo files exist in public/assets directory', () => {
// Verify that the source files for Roo integration exist
expect(
fs.existsSync(path.join(process.cwd(), 'public', 'assets', 'roocode', '.roo'))
fs.existsSync(
path.join(process.cwd(), 'public', 'assets', 'roocode', '.roo')
)
).toBe(true);
expect(
fs.existsSync(path.join(process.cwd(), 'public', 'assets', 'roocode', '.roomodes'))
fs.existsSync(
path.join(process.cwd(), 'public', 'assets', 'roocode', '.roomodes')
)
).toBe(true);
});
});

View File

@@ -62,11 +62,11 @@ describe('PromptManager', () => {
describe('loadPrompt', () => {
it('should load and render a prompt from actual files', () => {
// Test with an actual prompt that exists
const result = promptManager.loadPrompt('research', {
const result = promptManager.loadPrompt('research', {
query: 'test query',
projectContext: 'test context'
});
expect(result.systemPrompt).toBeDefined();
expect(result.userPrompt).toBeDefined();
expect(result.userPrompt).toContain('test query');
@@ -87,7 +87,7 @@ describe('PromptManager', () => {
});
const result = promptManager.loadPrompt('test-prompt', { name: 'John' });
expect(result.userPrompt).toBe('Hello John, your age is ');
});
@@ -100,13 +100,13 @@ describe('PromptManager', () => {
it('should use cache for repeated calls', () => {
// First call with a real prompt
const result1 = promptManager.loadPrompt('research', { query: 'test' });
// Mark the result to verify cache is used
result1._cached = true;
// Second call with same parameters should return cached result
const result2 = promptManager.loadPrompt('research', { query: 'test' });
expect(result2._cached).toBe(true);
expect(result1).toBe(result2); // Same object reference
});
@@ -127,7 +127,7 @@ describe('PromptManager', () => {
const result = promptManager.loadPrompt('array-prompt', {
items: ['one', 'two', 'three']
});
// The actual implementation doesn't handle {{this}} properly; just check what it does produce
expect(result.userPrompt).toContain('Item:');
});
@@ -145,10 +145,14 @@ describe('PromptManager', () => {
}
});
const withData = promptManager.loadPrompt('conditional-prompt', { hasData: true });
const withData = promptManager.loadPrompt('conditional-prompt', {
hasData: true
});
expect(withData.userPrompt).toBe('Data exists');
const withoutData = promptManager.loadPrompt('conditional-prompt', { hasData: false });
const withoutData = promptManager.loadPrompt('conditional-prompt', {
hasData: false
});
expect(withoutData.userPrompt).toBe('No data');
});
});
@@ -162,7 +166,7 @@ describe('PromptManager', () => {
age: 30
}
};
const result = promptManager.renderTemplate(template, variables);
expect(result).toBe('User: John, Age: 30');
});
@@ -172,7 +176,7 @@ describe('PromptManager', () => {
const variables = {
special: '<>&"\''
};
const result = promptManager.renderTemplate(template, variables);
expect(result).toBe('Special: <>&"\'');
});
@@ -183,8 +187,8 @@ describe('PromptManager', () => {
const prompts = promptManager.listPrompts();
expect(prompts).toBeInstanceOf(Array);
expect(prompts.length).toBeGreaterThan(0);
const ids = prompts.map(p => p.id);
const ids = prompts.map((p) => p.id);
expect(ids).toContain('analyze-complexity');
expect(ids).toContain('expand-task');
expect(ids).toContain('add-task');
@@ -192,7 +196,6 @@ describe('PromptManager', () => {
});
});
describe('validateTemplate', () => {
it('should validate a correct template', () => {
const result = promptManager.validateTemplate('research');
@@ -202,7 +205,7 @@ describe('PromptManager', () => {
it('should reject invalid template', () => {
const result = promptManager.validateTemplate('non-existent');
expect(result.valid).toBe(false);
expect(result.error).toContain("not found");
expect(result.error).toContain('not found');
});
});
});
});

View File

@@ -1,4 +1,20 @@
import { defineConfig } from 'tsup';
import { dotenvLoad } from 'dotenv-mono';
// Load .env from root level (monorepo support)
dotenvLoad();
// Get all TM_PUBLIC_* env variables for build-time injection
const getBuildTimeEnvs = () => {
const envs: Record<string, string> = {};
for (const [key, value] of Object.entries(process.env)) {
if (key.startsWith('TM_PUBLIC_')) {
// Return the actual value, not JSON.stringify'd
envs[key] = value || '';
}
}
return envs;
};
export default defineConfig({
entry: {
@@ -18,6 +34,8 @@ export default defineConfig({
'.js': 'jsx',
'.ts': 'ts'
},
// Replace process.env.TM_PUBLIC_* with actual values at build time
env: getBuildTimeEnvs(),
esbuildOptions(options) {
options.platform = 'node';
// Allow importing TypeScript from JavaScript
@@ -25,31 +43,9 @@ export default defineConfig({
},
// Bundle our monorepo packages but keep node_modules external
noExternal: [/@tm\/.*/],
external: [
// Keep native node modules external
'fs',
'path',
'child_process',
'crypto',
'os',
'url',
'util',
'stream',
'http',
'https',
'events',
'assert',
'buffer',
'querystring',
'readline',
'zlib',
'tty',
'net',
'dgram',
'dns',
'tls',
'cluster',
'process',
'module'
]
// Don't bundle any other dependencies (auto-external all node_modules)
// This regex matches anything that doesn't start with . or /
external: [/^[^./]/],
// Add success message for debugging
onSuccess: 'echo "✅ Build completed successfully"'
});
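
For reference, tsup's env option performs compile-time replacement: any process.env.TM_PUBLIC_* reference in the bundled sources is swapped for the literal value loaded from the root .env. A hypothetical consumer (the variable name and inlined value are illustrative):

// In source, anywhere bundled by this config:
const baseUrl = process.env.TM_PUBLIC_BASE_URL;

// In the built output the reference has been inlined, roughly:
// const baseUrl = "https://tryhamster.com";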