Compare commits

..

10 Commits

Author SHA1 Message Date
github-actions[bot]
3d11093732 docs: auto-update documentation based on changes in next branch
This PR was automatically generated to update documentation based on recent changes.

  Original commit: feat: implement workflow (wip)\n\n

  Co-authored-by: Claude <claude-assistant@anthropic.com>
2025-09-11 18:11:46 +00:00
Ralph Khreish
7c1d05958f feat: implement workflow (wip) 2025-09-11 11:03:31 -07:00
Ralph Khreish
3eeb19590a chore: fix CI with new typescript setup (#1194)
Co-authored-by: Ralph Khreish <Crunchyman-ralph@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
2025-09-09 23:35:47 +02:00
Ralph Khreish
587745046f chore: fix format 2025-09-09 03:32:48 +02:00
Ralph Khreish
c61c73f827 feat: implement tm list remote (#1185) 2025-09-09 03:32:48 +02:00
Ralph Khreish
15900d9fd5 chore: address oauth PR concerns (#1184) 2025-09-09 03:32:48 +02:00
Ralph Khreish
7cf4004038 feat: add oauth with remote server (#1178) 2025-09-09 03:32:48 +02:00
Ralph Khreish
0f3ab00f26 feat: create tm-core and apps/cli (#1093)
- add typescript
- add npm workspaces
2025-09-09 03:32:48 +02:00
Ralph Khreish
a7ad4c8e92 chore: improve Claude documentation workflows (#1155) 2025-09-08 22:11:46 +02:00
Ralph Khreish
0d54747894 chore: fix CI 2025-09-08 12:46:07 -07:00
77 changed files with 16974 additions and 5611 deletions

View File

@@ -1,5 +1,5 @@
{ {
"mode": "pre", "mode": "exit",
"tag": "rc", "tag": "rc",
"initialVersions": { "initialVersions": {
"task-master-ai": "0.25.1", "task-master-ai": "0.25.1",

View File

@@ -9,70 +9,109 @@ on:
branches: branches:
- main - main
- next - next
workflow_dispatch:
permissions: permissions:
contents: read contents: read
env:
DO_NOT_TRACK: 1
NODE_ENV: development
jobs: jobs:
setup: # Fast checks that can run in parallel
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-node@v4
with:
node-version: 20
cache: 'npm'
- name: Install Dependencies
id: install
run: npm ci
timeout-minutes: 2
- name: Cache node_modules
uses: actions/cache@v4
with:
path: node_modules
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
format-check: format-check:
needs: setup name: Format Check
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version: 20 node-version: 20
cache: "npm"
- name: Restore node_modules - name: Install dependencies
uses: actions/cache@v4 run: npm install --frozen-lockfile --prefer-offline
with: timeout-minutes: 5
path: node_modules
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
- name: Format Check - name: Format Check
run: npm run format-check run: npm run format-check
env: env:
FORCE_COLOR: 1 FORCE_COLOR: 1
test: typecheck:
needs: setup name: Typecheck
timeout-minutes: 10
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version: 20 node-version: 20
cache: "npm"
- name: Restore node_modules - name: Install dependencies
uses: actions/cache@v4 run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Typecheck
run: npm run typecheck
env:
FORCE_COLOR: 1
# Build job to ensure everything compiles
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with: with:
path: node_modules fetch-depth: 2
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- name: Install dependencies
run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Build
run: npm run build
env:
NODE_ENV: production
FORCE_COLOR: 1
test:
name: Test
timeout-minutes: 15
runs-on: ubuntu-latest
needs: [format-check, typecheck, build]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- name: Install dependencies
run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Build packages (required for tests)
run: npm run build:packages
env:
NODE_ENV: production
- name: Run Tests - name: Run Tests
run: | run: |
@@ -81,7 +120,6 @@ jobs:
NODE_ENV: test NODE_ENV: test
CI: true CI: true
FORCE_COLOR: 1 FORCE_COLOR: 1
timeout-minutes: 10
- name: Upload Test Results - name: Upload Test Results
if: always() if: always()

View File

@@ -0,0 +1,57 @@
name: Trigger Claude Documentation Update

on:
  push:
    branches:
      - next
    paths-ignore:
      - "apps/docs/**"
      - "*.md"
      - ".github/workflows/**"

jobs:
  trigger-docs-update:
    # Only run if changes were merged (not direct pushes from bots)
    if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      actions: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 2 # Need previous commit for comparison

      - name: Get changed files
        id: changed-files
        run: |
          echo "Changed files in this push:"
          git diff --name-only HEAD^ HEAD | tee changed_files.txt

          # Emit multiline values with GITHUB_OUTPUT heredoc syntax so the
          # receiving workflow sees real newlines. The previous `jq -Rs`
          # JSON-escaping leaked literal "\n" sequences into the generated
          # PR body (visible in past auto-generated commits).
          {
            echo "changed_files<<TM_EOF"
            git diff --name-only HEAD^ HEAD
            echo "TM_EOF"

            echo "commit_message<<TM_EOF"
            git log -1 --pretty=%B
            echo "TM_EOF"

            echo "commit_diff<<TM_EOF"
            git diff HEAD^ HEAD --stat
            echo "TM_EOF"

            echo "commit_sha=${{ github.sha }}"
          } >> "$GITHUB_OUTPUT"

      - name: Trigger Claude workflow
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Pass step outputs through env vars instead of interpolating
          # ${{ }} directly into the shell script body: commit messages are
          # attacker-influenced input, and inline interpolation into `run:`
          # allows shell/script injection.
          COMMIT_SHA: ${{ steps.changed-files.outputs.commit_sha }}
          COMMIT_MESSAGE: ${{ steps.changed-files.outputs.commit_message }}
          CHANGED_FILES: ${{ steps.changed-files.outputs.changed_files }}
          COMMIT_DIFF: ${{ steps.changed-files.outputs.commit_diff }}
        run: |
          # Trigger the Claude docs updater workflow with the change information
          gh workflow run claude-docs-updater.yml \
            --ref next \
            -f commit_sha="$COMMIT_SHA" \
            -f commit_message="$COMMIT_MESSAGE" \
            -f changed_files="$CHANGED_FILES" \
            -f commit_diff="$COMMIT_DIFF"

View File

@@ -1,18 +1,27 @@
name: Claude Documentation Updater name: Claude Documentation Updater
on: on:
push: workflow_dispatch:
branches: inputs:
- next commit_sha:
paths-ignore: description: 'The commit SHA that triggered this update'
- "apps/docs/**" required: true
- "*.md" type: string
- ".github/workflows/**" commit_message:
description: 'The commit message'
required: true
type: string
changed_files:
description: 'List of changed files'
required: true
type: string
commit_diff:
description: 'Diff summary of changes'
required: true
type: string
jobs: jobs:
update-docs: update-docs:
# Only run if changes were merged (not direct pushes from bots)
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
contents: write contents: write
@@ -22,28 +31,8 @@ jobs:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 2 # Need previous commit for comparison ref: next
fetch-depth: 0 # Need full history to checkout specific commit
- name: Get changed files
id: changed-files
run: |
echo "Changed files in this push:"
git diff --name-only HEAD^ HEAD | tee changed_files.txt
# Store changed files for Claude to analyze
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
git diff --name-only HEAD^ HEAD >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# Get the commit message and changes summary
echo "commit_message<<EOF" >> $GITHUB_OUTPUT
git log -1 --pretty=%B >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# Get diff for documentation context
echo "commit_diff<<EOF" >> $GITHUB_OUTPUT
git diff HEAD^ HEAD --stat >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Create docs update branch - name: Create docs update branch
id: create-branch id: create-branch
@@ -71,12 +60,12 @@ jobs:
You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly. You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.
Recent changes: Recent changes:
- Commit: ${{ steps.changed-files.outputs.commit_message }} - Commit: ${{ inputs.commit_message }}
- Changed files: - Changed files:
${{ steps.changed-files.outputs.changed_files }} ${{ inputs.changed_files }}
- Changes summary: - Changes summary:
${{ steps.changed-files.outputs.commit_diff }} ${{ inputs.commit_diff }}
Your task: Your task:
1. Analyze the changes to understand what functionality was added, modified, or removed 1. Analyze the changes to understand what functionality was added, modified, or removed
@@ -113,7 +102,7 @@ jobs:
This PR was automatically generated to update documentation based on recent changes. This PR was automatically generated to update documentation based on recent changes.
Original commit: ${{ steps.changed-files.outputs.commit_message }} Original commit: ${{ inputs.commit_message }}
Co-authored-by: Claude <claude-assistant@anthropic.com>" Co-authored-by: Claude <claude-assistant@anthropic.com>"
fi fi
@@ -133,12 +122,12 @@ jobs:
This PR automatically updates documentation based on recent changes merged to the \`next\` branch. This PR automatically updates documentation based on recent changes merged to the \`next\` branch.
### Original Changes ### Original Changes
**Commit:** ${{ github.sha }} **Commit:** ${{ inputs.commit_sha }}
**Message:** ${{ steps.changed-files.outputs.commit_message }} **Message:** ${{ inputs.commit_message }}
### Changed Files in Original Commit ### Changed Files in Original Commit
\`\`\` \`\`\`
${{ steps.changed-files.outputs.changed_files }} ${{ inputs.changed_files }}
\`\`\` \`\`\`
### Documentation Updates ### Documentation Updates

View File

@@ -4,12 +4,11 @@
"description": "Task Master CLI - Command line interface for task management", "description": "Task Master CLI - Command line interface for task management",
"type": "module", "type": "module",
"main": "./dist/index.js", "main": "./dist/index.js",
"types": "./dist/index.d.ts", "types": "./src/index.ts",
"exports": { "exports": {
".": { ".": {
"types": "./src/index.ts", "types": "./src/index.ts",
"import": "./dist/index.js", "import": "./dist/index.js"
"require": "./dist/index.js"
} }
}, },
"files": ["dist", "README.md"], "files": ["dist", "README.md"],
@@ -20,20 +19,26 @@
"lint": "biome check src", "lint": "biome check src",
"format": "biome format --write src", "format": "biome format --write src",
"test": "vitest run", "test": "vitest run",
"test:watch": "vitest" "test:watch": "vitest",
"test:coverage": "vitest run --coverage",
"test:unit": "vitest run -t unit",
"test:integration": "vitest run -t integration",
"test:e2e": "vitest run --dir tests/e2e",
"test:ci": "vitest run --coverage --reporter=dot"
}, },
"dependencies": { "dependencies": {
"@tm/core": "*", "@tm/core": "*",
"@tm/workflow-engine": "*",
"boxen": "^7.1.1", "boxen": "^7.1.1",
"chalk": "^5.3.0", "chalk": "5.6.2",
"cli-table3": "^0.6.5", "cli-table3": "^0.6.5",
"commander": "^12.1.0", "commander": "^12.1.0",
"inquirer": "^9.2.10", "inquirer": "^9.2.10",
"open": "^10.2.0",
"ora": "^8.1.0" "ora": "^8.1.0"
}, },
"devDependencies": { "devDependencies": {
"@biomejs/biome": "^1.9.4", "@biomejs/biome": "^1.9.4",
"@tm/build-config": "*",
"@types/inquirer": "^9.0.3", "@types/inquirer": "^9.0.3",
"@types/node": "^22.10.5", "@types/node": "^22.10.5",
"tsup": "^8.3.0", "tsup": "^8.3.0",

View File

@@ -494,17 +494,6 @@ export class AuthCommand extends Command {
/** /**
* Static method to register this command on an existing program * Static method to register this command on an existing program
* This is for gradual migration - allows commands.js to use this
*/
static registerOn(program: Command): Command {
const authCommand = new AuthCommand();
program.addCommand(authCommand);
return authCommand;
}
/**
* Alternative registration that returns the command for chaining
* Can also configure the command name if needed
*/ */
static register(program: Command, name?: string): AuthCommand { static register(program: Command, name?: string): AuthCommand {
const authCommand = new AuthCommand(name); const authCommand = new AuthCommand(name);

View File

@@ -0,0 +1,570 @@
/**
* @fileoverview Context command for managing org/brief selection
* Provides a clean interface for workspace context management
*/
import { Command } from 'commander';
import chalk from 'chalk';
import inquirer from 'inquirer';
import ora from 'ora';
import {
AuthManager,
AuthenticationError,
type UserContext
} from '@tm/core/auth';
import * as ui from '../utils/ui.js';
/**
 * Result type from context command
 *
 * Returned by each subcommand handler and retrievable afterwards via
 * getLastResult() for programmatic consumers.
 */
export interface ContextResult {
	/** Whether the requested action completed successfully */
	success: boolean;
	/** Which subcommand produced this result */
	action: 'show' | 'select-org' | 'select-brief' | 'clear' | 'set';
	/** The workspace context after the action, when available */
	context?: UserContext;
	/** Human-readable summary of the outcome */
	message?: string;
}
/**
 * ContextCommand extending Commander's Command class
 * Manages user's workspace context (org/brief selection)
 */
export class ContextCommand extends Command {
	private authManager: AuthManager;
	private lastResult?: ContextResult;

	constructor(name?: string) {
		super(name || 'context');

		// Initialize auth manager
		this.authManager = AuthManager.getInstance();

		// Configure the command
		this.description(
			'Manage workspace context (organization and brief selection)'
		);

		// Add subcommands
		this.addOrgCommand();
		this.addBriefCommand();
		this.addClearCommand();
		this.addSetCommand();

		// Default action shows current context
		this.action(async () => {
			await this.executeShow();
		});
	}

	/**
	 * Add org selection subcommand
	 */
	private addOrgCommand(): void {
		this.command('org')
			.description('Select an organization')
			.action(async () => {
				await this.executeSelectOrg();
			});
	}

	/**
	 * Add brief selection subcommand
	 */
	private addBriefCommand(): void {
		this.command('brief')
			.description('Select a brief within the current organization')
			.action(async () => {
				await this.executeSelectBrief();
			});
	}

	/**
	 * Add clear subcommand
	 */
	private addClearCommand(): void {
		this.command('clear')
			.description('Clear all context selections')
			.action(async () => {
				await this.executeClear();
			});
	}

	/**
	 * Add set subcommand for direct context setting
	 */
	private addSetCommand(): void {
		this.command('set')
			.description('Set context directly')
			.option('--org <id>', 'Organization ID')
			.option('--org-name <name>', 'Organization name')
			.option('--brief <id>', 'Brief ID')
			.option('--brief-name <name>', 'Brief name')
			.action(async (options) => {
				await this.executeSet(options);
			});
	}

	/**
	 * Execute show current context
	 */
	private async executeShow(): Promise<void> {
		try {
			const result = this.displayContext();
			this.setLastResult(result);
		} catch (error: any) {
			this.handleError(error);
			process.exit(1);
		}
	}

	/**
	 * Display current context
	 */
	private displayContext(): ContextResult {
		// Check authentication first
		if (!this.authManager.isAuthenticated()) {
			console.log(chalk.yellow('✗ Not authenticated'));
			console.log(chalk.gray('\n  Run "tm auth login" to authenticate first'));
			return {
				success: false,
				action: 'show',
				message: 'Not authenticated'
			};
		}

		const context = this.authManager.getContext();

		console.log(chalk.cyan('\n🌍 Workspace Context\n'));

		if (context && (context.orgId || context.briefId)) {
			if (context.orgName || context.orgId) {
				console.log(chalk.green('✓ Organization'));
				if (context.orgName) {
					console.log(chalk.white(`  ${context.orgName}`));
				}
				if (context.orgId) {
					console.log(chalk.gray(`  ID: ${context.orgId}`));
				}
			}

			if (context.briefName || context.briefId) {
				console.log(chalk.green('\n✓ Brief'));
				if (context.briefName) {
					console.log(chalk.white(`  ${context.briefName}`));
				}
				if (context.briefId) {
					console.log(chalk.gray(`  ID: ${context.briefId}`));
				}
			}

			if (context.updatedAt) {
				console.log(
					chalk.gray(
						`\n  Last updated: ${new Date(context.updatedAt).toLocaleString()}`
					)
				);
			}

			return {
				success: true,
				action: 'show',
				context,
				message: 'Context loaded'
			};
		} else {
			console.log(chalk.yellow('✗ No context selected'));
			console.log(
				chalk.gray('\n  Run "tm context org" to select an organization')
			);
			console.log(chalk.gray('  Run "tm context brief" to select a brief'));

			return {
				success: true,
				action: 'show',
				message: 'No context selected'
			};
		}
	}

	/**
	 * Execute org selection
	 */
	private async executeSelectOrg(): Promise<void> {
		try {
			// Check authentication
			if (!this.authManager.isAuthenticated()) {
				ui.displayError('Not authenticated. Run "tm auth login" first.');
				process.exit(1);
			}

			const result = await this.selectOrganization();
			this.setLastResult(result);

			if (!result.success) {
				process.exit(1);
			}
		} catch (error: any) {
			this.handleError(error);
			process.exit(1);
		}
	}

	/**
	 * Select an organization interactively
	 */
	private async selectOrganization(): Promise<ContextResult> {
		const spinner = ora('Fetching organizations...').start();

		try {
			// Fetch organizations from API
			const organizations = await this.authManager.getOrganizations();
			spinner.stop();

			if (organizations.length === 0) {
				ui.displayWarning('No organizations available');
				return {
					success: false,
					action: 'select-org',
					message: 'No organizations available'
				};
			}

			// Prompt for selection
			const { selectedOrg } = await inquirer.prompt([
				{
					type: 'list',
					name: 'selectedOrg',
					message: 'Select an organization:',
					choices: organizations.map((org) => ({
						name: org.name,
						value: org
					}))
				}
			]);

			// Update context
			await this.authManager.updateContext({
				orgId: selectedOrg.id,
				orgName: selectedOrg.name,
				// Clear brief when changing org
				briefId: undefined,
				briefName: undefined
			});

			ui.displaySuccess(`Selected organization: ${selectedOrg.name}`);

			return {
				success: true,
				action: 'select-org',
				context: this.authManager.getContext() || undefined,
				message: `Selected organization: ${selectedOrg.name}`
			};
		} catch (error) {
			spinner.fail('Failed to fetch organizations');
			throw error;
		}
	}

	/**
	 * Execute brief selection
	 */
	private async executeSelectBrief(): Promise<void> {
		try {
			// Check authentication
			if (!this.authManager.isAuthenticated()) {
				ui.displayError('Not authenticated. Run "tm auth login" first.');
				process.exit(1);
			}

			// Check if org is selected
			const context = this.authManager.getContext();
			if (!context?.orgId) {
				ui.displayError(
					'No organization selected. Run "tm context org" first.'
				);
				process.exit(1);
			}

			const result = await this.selectBrief(context.orgId);
			this.setLastResult(result);

			if (!result.success) {
				process.exit(1);
			}
		} catch (error: any) {
			this.handleError(error);
			process.exit(1);
		}
	}

	/**
	 * Select a brief within the current organization
	 */
	private async selectBrief(orgId: string): Promise<ContextResult> {
		const spinner = ora('Fetching briefs...').start();

		try {
			// Fetch briefs from API
			const briefs = await this.authManager.getBriefs(orgId);
			spinner.stop();

			if (briefs.length === 0) {
				ui.displayWarning('No briefs available in this organization');
				return {
					success: false,
					action: 'select-brief',
					message: 'No briefs available'
				};
			}

			// Prompt for selection
			const { selectedBrief } = await inquirer.prompt([
				{
					type: 'list',
					name: 'selectedBrief',
					message: 'Select a brief:',
					choices: [
						{ name: '(No brief - organization level)', value: null },
						...briefs.map((brief) => ({
							name: `Brief ${brief.id.slice(0, 8)} (${new Date(brief.createdAt).toLocaleDateString()})`,
							value: brief
						}))
					]
				}
			]);

			if (selectedBrief) {
				// Update context with brief
				const briefName = `Brief ${selectedBrief.id.slice(0, 8)}`;
				await this.authManager.updateContext({
					briefId: selectedBrief.id,
					briefName: briefName
				});

				ui.displaySuccess(`Selected brief: ${briefName}`);

				return {
					success: true,
					action: 'select-brief',
					context: this.authManager.getContext() || undefined,
					// Fix: briefs carry no `name` field (the label is derived above);
					// the previous `selectedBrief.name` rendered "undefined" here.
					message: `Selected brief: ${briefName}`
				};
			} else {
				// Clear brief selection
				await this.authManager.updateContext({
					briefId: undefined,
					briefName: undefined
				});

				ui.displaySuccess('Cleared brief selection (organization level)');

				return {
					success: true,
					action: 'select-brief',
					context: this.authManager.getContext() || undefined,
					message: 'Cleared brief selection'
				};
			}
		} catch (error) {
			spinner.fail('Failed to fetch briefs');
			throw error;
		}
	}

	/**
	 * Execute clear context
	 */
	private async executeClear(): Promise<void> {
		try {
			// Check authentication
			if (!this.authManager.isAuthenticated()) {
				ui.displayError('Not authenticated. Run "tm auth login" first.');
				process.exit(1);
			}

			const result = await this.clearContext();
			this.setLastResult(result);

			if (!result.success) {
				process.exit(1);
			}
		} catch (error: any) {
			this.handleError(error);
			process.exit(1);
		}
	}

	/**
	 * Clear all context selections
	 */
	private async clearContext(): Promise<ContextResult> {
		try {
			await this.authManager.clearContext();
			ui.displaySuccess('Context cleared');

			return {
				success: true,
				action: 'clear',
				message: 'Context cleared'
			};
		} catch (error) {
			ui.displayError(`Failed to clear context: ${(error as Error).message}`);
			return {
				success: false,
				action: 'clear',
				message: `Failed to clear context: ${(error as Error).message}`
			};
		}
	}

	/**
	 * Execute set context with options
	 */
	private async executeSet(options: any): Promise<void> {
		try {
			// Check authentication
			if (!this.authManager.isAuthenticated()) {
				ui.displayError('Not authenticated. Run "tm auth login" first.');
				process.exit(1);
			}

			const result = await this.setContext(options);
			this.setLastResult(result);

			if (!result.success) {
				process.exit(1);
			}
		} catch (error: any) {
			this.handleError(error);
			process.exit(1);
		}
	}

	/**
	 * Set context directly from options
	 */
	private async setContext(options: any): Promise<ContextResult> {
		try {
			const context: Partial<UserContext> = {};

			if (options.org) {
				context.orgId = options.org;
			}
			if (options.orgName) {
				context.orgName = options.orgName;
			}
			if (options.brief) {
				context.briefId = options.brief;
			}
			if (options.briefName) {
				context.briefName = options.briefName;
			}

			if (Object.keys(context).length === 0) {
				ui.displayWarning('No context options provided');
				return {
					success: false,
					action: 'set',
					message: 'No context options provided'
				};
			}

			await this.authManager.updateContext(context);
			ui.displaySuccess('Context updated');

			// Display what was set
			if (context.orgName || context.orgId) {
				console.log(
					chalk.gray(`  Organization: ${context.orgName || context.orgId}`)
				);
			}
			if (context.briefName || context.briefId) {
				console.log(
					chalk.gray(`  Brief: ${context.briefName || context.briefId}`)
				);
			}

			return {
				success: true,
				action: 'set',
				context: this.authManager.getContext() || undefined,
				message: 'Context updated'
			};
		} catch (error) {
			ui.displayError(`Failed to set context: ${(error as Error).message}`);
			return {
				success: false,
				action: 'set',
				message: `Failed to set context: ${(error as Error).message}`
			};
		}
	}

	/**
	 * Handle errors
	 */
	private handleError(error: any): void {
		if (error instanceof AuthenticationError) {
			console.error(chalk.red(`\n✗ ${error.message}`));

			if (error.code === 'NOT_AUTHENTICATED') {
				ui.displayWarning('Please authenticate first: tm auth login');
			}
		} else {
			const msg = error?.message ?? String(error);
			console.error(chalk.red(`Error: ${msg}`));
			// Guard with ?. — `error` may be nullish here (the msg path above
			// already tolerates that), so a bare `error.stack` could throw.
			if (error?.stack && process.env.DEBUG) {
				console.error(chalk.gray(error.stack));
			}
		}
	}

	/**
	 * Set the last result for programmatic access
	 */
	private setLastResult(result: ContextResult): void {
		this.lastResult = result;
	}

	/**
	 * Get the last result (for programmatic usage)
	 */
	getLastResult(): ContextResult | undefined {
		return this.lastResult;
	}

	/**
	 * Get current context (for programmatic usage)
	 */
	getContext(): UserContext | null {
		return this.authManager.getContext();
	}

	/**
	 * Clean up resources
	 */
	async cleanup(): Promise<void> {
		// No resources to clean up for context command
	}

	/**
	 * Static method to register this command on an existing program
	 *
	 * NOTE(review): sibling commands (auth, list) removed their registerOn
	 * variants in this changeset; kept here for backward compatibility —
	 * confirm whether any caller still uses it before removing.
	 */
	static registerOn(program: Command): Command {
		const contextCommand = new ContextCommand();
		program.addCommand(contextCommand);
		return contextCommand;
	}

	/**
	 * Alternative registration that returns the command for chaining
	 */
	static register(program: Command, name?: string): ContextCommand {
		const contextCommand = new ContextCommand(name);
		program.addCommand(contextCommand);
		return contextCommand;
	}
}

View File

@@ -0,0 +1,38 @@
/**
* Command registry - exports all CLI commands for central registration
*/
import type { Command } from 'commander';
import { ListTasksCommand } from './list.command.js';
import { AuthCommand } from './auth.command.js';
import WorkflowCommand from './workflow.command.js';
// Define interface for command classes that can register themselves
export interface CommandRegistrar {
	register(program: Command, name?: string): any;
}

// Future commands can be added here as they're created
// The pattern is: each command exports a class with a static register(program: Command, name?: string) method

/**
 * Auto-register all exported commands that implement the CommandRegistrar interface
 */
export function registerAllCommands(program: Command): void {
	// Every command class that should be wired onto the root program.
	// Add new commands here as they're imported above.
	const commandClasses = [ListTasksCommand, AuthCommand, WorkflowCommand];

	for (const commandClass of commandClasses) {
		// Skip any entry that doesn't expose the static register() hook
		const canRegister =
			'register' in commandClass &&
			typeof commandClass.register === 'function';
		if (canRegister) {
			commandClass.register(program);
		}
	}
}

View File

@@ -15,6 +15,7 @@ import {
STATUS_ICONS, STATUS_ICONS,
type OutputFormat type OutputFormat
} from '@tm/core'; } from '@tm/core';
import type { StorageType } from '@tm/core/types';
import * as ui from '../utils/ui.js'; import * as ui from '../utils/ui.js';
/** /**
@@ -37,7 +38,7 @@ export interface ListTasksResult {
total: number; total: number;
filtered: number; filtered: number;
tag?: string; tag?: string;
storageType: 'file' | 'api'; storageType: Exclude<StorageType, 'auto'>;
} }
/** /**
@@ -172,6 +173,13 @@ export class ListTasksCommand extends Command {
includeSubtasks: options.withSubtasks includeSubtasks: options.withSubtasks
}); });
// Runtime guard to prevent 'auto' from reaching CLI consumers
if (result.storageType === 'auto') {
throw new Error(
'Internal error: unresolved storage type reached CLI. Please check TaskService.getStorageType() implementation.'
);
}
return result as ListTasksResult; return result as ListTasksResult;
} }
@@ -307,17 +315,6 @@ export class ListTasksCommand extends Command {
/** /**
* Static method to register this command on an existing program * Static method to register this command on an existing program
* This is for gradual migration - allows commands.js to use this
*/
static registerOn(program: Command): Command {
const listCommand = new ListTasksCommand();
program.addCommand(listCommand);
return listCommand;
}
/**
* Alternative registration that returns the command for chaining
* Can also configure the command name if needed
*/ */
static register(program: Command, name?: string): ListTasksCommand { static register(program: Command, name?: string): ListTasksCommand {
const listCommand = new ListTasksCommand(name); const listCommand = new ListTasksCommand(name);

View File

@@ -0,0 +1,58 @@
/**
* @fileoverview Workflow Command
* Main workflow command with subcommands
*/
import { Command } from 'commander';
import {
WorkflowStartCommand,
WorkflowListCommand,
WorkflowStopCommand,
WorkflowStatusCommand
} from './workflow/index.js';
/**
 * WorkflowCommand - Main workflow command with subcommands
 */
export class WorkflowCommand extends Command {
	constructor(name?: string) {
		super(name || 'workflow');

		this.description(
			'Manage task execution workflows with git worktrees and Claude Code'
		).alias('wf');

		// Register subcommands
		this.addSubcommands();
	}

	private addSubcommands(): void {
		// Canonical subcommands, attached in a fixed order:
		// start, list, stop, status.
		const subcommandClasses = [
			WorkflowStartCommand,
			WorkflowListCommand,
			WorkflowStopCommand,
			WorkflowStatusCommand
		];
		for (const subcommandClass of subcommandClasses) {
			subcommandClass.register(this);
		}

		// Convenience aliases that reuse existing subcommand implementations:
		//   tm workflow run <task-id>      -> start
		//   tm workflow kill <workflow-id> -> stop
		//   tm workflow info <workflow-id> -> status
		this.addCommand(new WorkflowStartCommand('run'));
		this.addCommand(new WorkflowStopCommand('kill'));
		this.addCommand(new WorkflowStatusCommand('info'));
	}

	/**
	 * Static method to register this command on an existing program
	 */
	static register(program: Command, name?: string): WorkflowCommand {
		const workflowCommand = new WorkflowCommand(name);
		program.addCommand(workflowCommand);
		return workflowCommand;
	}
}

export default WorkflowCommand;

View File

@@ -0,0 +1,9 @@
/**
* @fileoverview Workflow Commands
* Exports for all workflow-related CLI commands
*/
export * from './workflow-start.command.js';
export * from './workflow-list.command.js';
export * from './workflow-stop.command.js';
export * from './workflow-status.command.js';

View File

@@ -0,0 +1,253 @@
/**
* @fileoverview Workflow List Command
* List active and recent workflow executions
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig,
type WorkflowExecutionContext
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowListOptions {
project?: string;
status?: string;
format?: 'text' | 'json' | 'compact';
worktreeBase?: string;
claude?: string;
all?: boolean;
}
/**
* WorkflowListCommand - List workflow executions
*/
export class WorkflowListCommand extends Command {
private workflowManager?: TaskExecutionManager;
constructor(name?: string) {
super(name || 'list');
this.description('List active and recent workflow executions')
.alias('ls')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.option('-s, --status <status>', 'Filter by status (running, completed, failed, etc.)')
.option('-f, --format <format>', 'Output format (text, json, compact)', 'text')
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
.option('--claude <path>', 'Claude Code executable path', 'claude')
.option('--all', 'Show all workflows including completed ones')
.action(async (options: WorkflowListOptions) => {
await this.executeCommand(options);
});
}
private async executeCommand(options: WorkflowListOptions): Promise<void> {
try {
// Initialize workflow manager
await this.initializeWorkflowManager(options);
// Get workflows
let workflows = this.workflowManager!.listWorkflows();
// Apply status filter
if (options.status) {
workflows = workflows.filter(w => w.status === options.status);
}
// Apply active filter (default behavior)
if (!options.all) {
workflows = workflows.filter(w =>
['pending', 'initializing', 'running', 'paused'].includes(w.status)
);
}
// Display results
this.displayResults(workflows, options);
} catch (error: any) {
ui.displayError(error.message || 'Failed to list workflows');
process.exit(1);
}
}
private async initializeWorkflowManager(options: WorkflowListOptions): Promise<void> {
if (!this.workflowManager) {
const projectRoot = options.project || process.cwd();
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
const config: TaskExecutionManagerConfig = {
projectRoot,
maxConcurrent: 5,
defaultTimeout: 60,
worktreeBase,
claudeExecutable: options.claude || 'claude',
debug: false
};
this.workflowManager = new TaskExecutionManager(config);
await this.workflowManager.initialize();
}
}
private displayResults(workflows: WorkflowExecutionContext[], options: WorkflowListOptions): void {
switch (options.format) {
case 'json':
this.displayJson(workflows);
break;
case 'compact':
this.displayCompact(workflows);
break;
case 'text':
default:
this.displayText(workflows);
break;
}
}
private displayJson(workflows: WorkflowExecutionContext[]): void {
console.log(JSON.stringify({
workflows: workflows.map(w => ({
workflowId: `workflow-${w.taskId}`,
taskId: w.taskId,
taskTitle: w.taskTitle,
status: w.status,
worktreePath: w.worktreePath,
branchName: w.branchName,
processId: w.processId,
startedAt: w.startedAt,
lastActivity: w.lastActivity,
metadata: w.metadata
})),
total: workflows.length,
timestamp: new Date().toISOString()
}, null, 2));
}
/** One line per workflow: id, status, title, elapsed time. */
private displayCompact(workflows: WorkflowExecutionContext[]): void {
	if (workflows.length === 0) {
		console.log(chalk.gray('No workflows found'));
		return;
	}
	for (const workflow of workflows) {
		const id = chalk.cyan(`workflow-${workflow.taskId}`);
		const status = this.getStatusDisplay(workflow.status);
		const elapsed = this.formatDuration(workflow.startedAt, workflow.lastActivity);
		console.log(`${id} ${status} ${workflow.taskTitle} ${chalk.gray(`(${elapsed})`)}`);
	}
}
/**
 * Rich text view: banner, per-status statistics, a table of all workflows,
 * and quick-action hints for up to three running workflows.
 */
private displayText(workflows: WorkflowExecutionContext[]): void {
ui.displayBanner('Active Workflows');
if (workflows.length === 0) {
ui.displayWarning('No workflows found');
console.log();
console.log(chalk.blue('💡 Start a new workflow with:'));
console.log(`  ${chalk.cyan('tm workflow start <task-id>')}`);
return;
}
// Statistics
console.log(chalk.blue.bold('\n📊 Statistics:\n'));
const statusCounts = this.getStatusCounts(workflows);
Object.entries(statusCounts).forEach(([status, count]) => {
console.log(`  ${this.getStatusDisplay(status)}: ${chalk.cyan(count)}`);
});
// Workflows table
console.log(chalk.blue.bold(`\n🔄 Workflows (${workflows.length}):\n`));
const tableData = workflows.map(workflow => {
const workflowId = `workflow-${workflow.taskId}`;
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
return [
chalk.cyan(workflowId),
chalk.yellow(workflow.taskId),
// Titles longer than 30 chars are truncated with an ellipsis.
workflow.taskTitle.substring(0, 30) + (workflow.taskTitle.length > 30 ? '...' : ''),
this.getStatusDisplay(workflow.status),
workflow.processId ? chalk.green(workflow.processId.toString()) : chalk.gray('N/A'),
chalk.gray(duration),
// Only the worktree's leaf directory name, to keep the column narrow.
chalk.gray(path.basename(workflow.worktreePath))
];
});
console.log(ui.createTable(
['Workflow ID', 'Task ID', 'Task Title', 'Status', 'PID', 'Duration', 'Worktree'],
tableData
));
// Running workflows actions — show attach hints for at most three.
const runningWorkflows = workflows.filter(w => w.status === 'running');
if (runningWorkflows.length > 0) {
console.log(chalk.blue.bold('\n🚀 Quick Actions:\n'));
runningWorkflows.slice(0, 3).forEach(workflow => {
const workflowId = `workflow-${workflow.taskId}`;
console.log(`  • Attach to ${chalk.cyan(workflowId)}: ${chalk.gray(`tm workflow attach ${workflowId}`)}`);
});
if (runningWorkflows.length > 3) {
console.log(`  ${chalk.gray(`... and ${runningWorkflows.length - 3} more`)}`);
}
}
}
/**
 * Map a workflow status to its icon + colored label.
 *
 * BUG FIX: chalk has no `.orange` method — calling it throws a TypeError at
 * runtime as soon as a 'paused' workflow is rendered. Use an explicit hex
 * color instead, matching the stop command's status palette.
 */
private getStatusDisplay(status: string): string {
	const statusMap = {
		pending: { icon: '⏳', color: chalk.yellow },
		initializing: { icon: '🔄', color: chalk.blue },
		running: { icon: '🚀', color: chalk.green },
		paused: { icon: '⏸️', color: chalk.hex('#FFA500') },
		completed: { icon: '✅', color: chalk.green },
		failed: { icon: '❌', color: chalk.red },
		cancelled: { icon: '🛑', color: chalk.gray },
		timeout: { icon: '⏰', color: chalk.red }
	};
	// Unknown statuses get a neutral fallback rather than crashing.
	const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
	return `${statusInfo.icon} ${statusInfo.color(status)}`;
}
/** Tally how many workflows are in each status. */
private getStatusCounts(workflows: WorkflowExecutionContext[]): Record<string, number> {
	return workflows.reduce<Record<string, number>>((acc, { status }) => {
		acc[status] = (acc[status] ?? 0) + 1;
		return acc;
	}, {});
}
/**
 * Render elapsed time coarsely: "Xh Ym" above an hour, "Xm" above a minute,
 * otherwise "<1m".
 */
private formatDuration(start: Date, end: Date): string {
	const elapsedMs = end.getTime() - start.getTime();
	const totalMinutes = Math.floor(elapsedMs / (1000 * 60));
	if (totalMinutes >= 60) {
		return `${Math.floor(totalMinutes / 60)}h ${totalMinutes % 60}m`;
	}
	return totalMinutes > 0 ? `${totalMinutes}m` : '<1m';
}
/** Detach event listeners so the process can exit; does NOT stop workflows. */
async cleanup(): Promise<void> {
if (this.workflowManager) {
this.workflowManager.removeAllListeners();
}
}
/** Construct this command and register it on a commander program. */
static register(program: Command, name?: string): WorkflowListCommand {
const command = new WorkflowListCommand(name);
program.addCommand(command);
return command;
}
}

View File

@@ -0,0 +1,239 @@
/**
* @fileoverview Workflow Start Command
* Start task execution in isolated worktree with Claude Code process
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
createTaskMasterCore,
type TaskMasterCore
} from '@tm/core';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowStartOptions {
	/** Project root directory (defaults to the current working directory). */
	project?: string;
	/** Custom branch name for the worktree. */
	branch?: string;
	/**
	 * Execution timeout in minutes.
	 *
	 * BUG FIX: declared as a string, not a number — commander delivers option
	 * values as raw strings (the option's default is the string '60'), and the
	 * consuming code parses it with parseInt, which requires a string.
	 */
	timeout?: string;
	/** Base directory for worktrees. */
	worktreeBase?: string;
	/** Claude Code executable path. */
	claude?: string;
	/** Enable debug logging (prints stack traces on failure). */
	debug?: boolean;
	/** Environment variables as "KEY=VALUE,KEY2=VALUE2". */
	env?: string;
}
/**
 * WorkflowStartCommand - Start task execution workflow
 *
 * Resolves the task through TaskMasterCore, then delegates to
 * TaskExecutionManager to create an isolated git worktree and spawn a
 * Claude Code process for it. Lifecycle events are echoed to the terminal.
 */
export class WorkflowStartCommand extends Command {
	private tmCore?: TaskMasterCore;
	private workflowManager?: TaskExecutionManager;

	constructor(name?: string) {
		super(name || 'start');
		this.description('Start task execution in isolated worktree')
			.argument('<task-id>', 'Task ID to execute')
			.option('-p, --project <path>', 'Project root directory', process.cwd())
			.option('-b, --branch <name>', 'Custom branch name for worktree')
			.option('-t, --timeout <minutes>', 'Execution timeout in minutes', '60')
			.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
			.option('--claude <path>', 'Claude Code executable path', 'claude')
			.option('--debug', 'Enable debug logging')
			.option('--env <vars>', 'Environment variables (KEY=VALUE,KEY2=VALUE2)')
			.action(async (taskId: string, options: WorkflowStartOptions) => {
				await this.executeCommand(taskId, options);
			});
	}

	/**
	 * Main entry point: validate the task and that it has no active workflow,
	 * start execution, and print follow-up instructions. Exits with code 1 on
	 * failure (printing the stack trace when --debug is set).
	 */
	private async executeCommand(taskId: string, options: WorkflowStartOptions): Promise<void> {
		try {
			await this.initializeCore(options.project || process.cwd());
			await this.initializeWorkflowManager(options);

			const task = await this.getTask(taskId);
			if (!task) {
				throw new Error(`Task ${taskId} not found`);
			}

			// A task may only have one active workflow at a time.
			const existingWorkflow = this.workflowManager!.getWorkflowByTaskId(taskId);
			if (existingWorkflow) {
				ui.displayWarning(`Task ${taskId} already has an active workflow`);
				console.log(`Workflow ID: ${chalk.cyan('workflow-' + taskId)}`);
				console.log(`Status: ${this.getStatusDisplay(existingWorkflow.status)}`);
				console.log(`Worktree: ${chalk.gray(existingWorkflow.worktreePath)}`);
				return;
			}

			const env = this.parseEnvironmentVariables(options.env);

			// Summarize the task before kicking off execution.
			ui.displayBanner(`Starting Workflow for Task ${taskId}`);
			console.log(`${chalk.blue('Task:')} ${task.title}`);
			console.log(`${chalk.blue('Description:')} ${task.description}`);
			if (task.dependencies?.length) {
				console.log(`${chalk.blue('Dependencies:')} ${task.dependencies.join(', ')}`);
			}
			console.log(`${chalk.blue('Priority:')} ${task.priority || 'normal'}`);
			console.log();

			ui.displaySpinner('Creating worktree and starting Claude Code process...');
			const workflowId = await this.workflowManager!.startTaskExecution(task, {
				branchName: options.branch,
				// Coerce to string and parse base-10: commander supplies option
				// values as strings, and parseInt without a radix is error-prone.
				timeout: parseInt(String(options.timeout ?? '60'), 10),
				env
			});
			const workflow = this.workflowManager!.getWorkflowStatus(workflowId);

			ui.displaySuccess('Workflow started successfully!');
			console.log();
			console.log(`${chalk.green('✓')} Workflow ID: ${chalk.cyan(workflowId)}`);
			console.log(`${chalk.green('✓')} Worktree: ${chalk.gray(workflow?.worktreePath)}`);
			console.log(`${chalk.green('✓')} Branch: ${chalk.gray(workflow?.branchName)}`);
			console.log(`${chalk.green('✓')} Process ID: ${chalk.gray(workflow?.processId)}`);
			console.log();

			console.log(chalk.blue.bold('📋 Next Steps:'));
			console.log(`  • Monitor: ${chalk.cyan(`tm workflow status ${workflowId}`)}`);
			console.log(`  • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
			console.log(`  • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
			console.log();

			// Stream workflow/process events to the terminal in real time.
			this.setupEventListeners();
		} catch (error: any) {
			ui.displayError(error.message || 'Failed to start workflow');
			if (options.debug && error.stack) {
				console.error(chalk.gray(error.stack));
			}
			process.exit(1);
		}
	}

	/** Lazily create the TaskMasterCore instance used for task lookups. */
	private async initializeCore(projectRoot: string): Promise<void> {
		if (!this.tmCore) {
			this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
		}
	}

	/** Lazily create and initialize the TaskExecutionManager. */
	private async initializeWorkflowManager(options: WorkflowStartOptions): Promise<void> {
		if (!this.workflowManager) {
			const projectRoot = options.project || process.cwd();
			const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
			const config: TaskExecutionManagerConfig = {
				projectRoot,
				maxConcurrent: 5,
				// See executeCommand: commander supplies option values as strings.
				defaultTimeout: parseInt(String(options.timeout ?? '60'), 10),
				worktreeBase,
				claudeExecutable: options.claude || 'claude',
				debug: options.debug || false
			};
			this.workflowManager = new TaskExecutionManager(config);
			await this.workflowManager.initialize();
		}
	}

	/**
	 * Find a task by ID in the task list.
	 *
	 * BUG FIX: IDs are compared as strings — the CLI argument is always a
	 * string while stored task IDs may not be, and the original strict `===`
	 * would silently miss such tasks.
	 */
	private async getTask(taskId: string) {
		if (!this.tmCore) {
			throw new Error('TaskMasterCore not initialized');
		}
		const result = await this.tmCore.getTaskList({});
		return result.tasks.find((task) => String(task.id) === String(taskId));
	}

	/**
	 * Parse "KEY=VALUE,KEY2=VALUE2" into an env record. Values may themselves
	 * contain '='. Returns undefined when nothing parses.
	 */
	private parseEnvironmentVariables(envString?: string): Record<string, string> | undefined {
		if (!envString) return undefined;
		const env: Record<string, string> = {};
		for (const pair of envString.split(',')) {
			const [key, ...valueParts] = pair.trim().split('=');
			if (key && valueParts.length > 0) {
				env[key] = valueParts.join('=');
			}
		}
		return Object.keys(env).length > 0 ? env : undefined;
	}

	/**
	 * Colorize a status label.
	 *
	 * BUG FIX: chalk has no `.orange` method — the original threw a TypeError
	 * whenever a 'paused' status was rendered. An explicit hex color is used
	 * instead, matching the stop command's palette.
	 */
	private getStatusDisplay(status: string): string {
		const colors = {
			pending: chalk.yellow,
			initializing: chalk.blue,
			running: chalk.green,
			paused: chalk.hex('#FFA500'),
			completed: chalk.green,
			failed: chalk.red,
			cancelled: chalk.gray,
			timeout: chalk.red
		};
		const color = colors[status as keyof typeof colors] || chalk.white;
		return color(status);
	}

	/** Subscribe to workflow/process events and echo them to the console. */
	private setupEventListeners(): void {
		if (!this.workflowManager) return;
		this.workflowManager.on('workflow.started', (event) => {
			console.log(`${chalk.green('🚀')} Workflow started: ${event.workflowId}`);
		});
		this.workflowManager.on('process.output', (event) => {
			if (event.data?.stream === 'stdout') {
				console.log(`${chalk.blue('[OUT]')} ${event.data.data.trim()}`);
			} else if (event.data?.stream === 'stderr') {
				console.log(`${chalk.red('[ERR]')} ${event.data.data.trim()}`);
			}
		});
		this.workflowManager.on('workflow.completed', (event) => {
			console.log(`${chalk.green('✅')} Workflow completed: ${event.workflowId}`);
		});
		this.workflowManager.on('workflow.failed', (event) => {
			console.log(`${chalk.red('❌')} Workflow failed: ${event.workflowId}`);
			if (event.error) {
				console.log(`${chalk.red('Error:')} ${event.error.message}`);
			}
		});
	}

	/** Release references and listeners; does not stop running workflows. */
	async cleanup(): Promise<void> {
		if (this.workflowManager) {
			// Don't cleanup workflows, just disconnect.
			this.workflowManager.removeAllListeners();
		}
		if (this.tmCore) {
			await this.tmCore.close();
			this.tmCore = undefined;
		}
	}

	/** Construct this command and register it on a commander program. */
	static register(program: Command, name?: string): WorkflowStartCommand {
		const command = new WorkflowStartCommand(name);
		program.addCommand(command);
		return command;
	}
}

View File

@@ -0,0 +1,339 @@
/**
* @fileoverview Workflow Status Command
* Show detailed status of a specific workflow
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowStatusOptions {
	/** Project root directory (defaults to the current working directory). */
	project?: string;
	/** Base directory for worktrees. */
	worktreeBase?: string;
	/** Claude Code executable path. */
	claude?: string;
	/** Refresh the display every 2 seconds until the workflow ends. */
	watch?: boolean;
	/** Output format for the one-shot display. */
	format?: 'text' | 'json';
}
/**
 * WorkflowStatusCommand - Show workflow execution status
 *
 * Looks a workflow up by workflow ID (falling back to plain task ID) and
 * renders its state once, or continuously in watch mode.
 */
export class WorkflowStatusCommand extends Command {
	private workflowManager?: TaskExecutionManager;

	constructor(name?: string) {
		super(name || 'status');
		this.description('Show detailed status of a workflow execution')
			.argument('<workflow-id>', 'Workflow ID or task ID to check')
			.option('-p, --project <path>', 'Project root directory', process.cwd())
			.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
			.option('--claude <path>', 'Claude Code executable path', 'claude')
			.option('-w, --watch', 'Watch for status changes (refresh every 2 seconds)')
			.option('-f, --format <format>', 'Output format (text, json)', 'text')
			.action(async (workflowId: string, options: WorkflowStatusOptions) => {
				await this.executeCommand(workflowId, options);
			});
	}

	/** Entry point: dispatch to one-shot display or watch mode. */
	private async executeCommand(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
		try {
			await this.initializeWorkflowManager(options);
			if (options.watch) {
				await this.watchWorkflowStatus(workflowId, options);
			} else {
				await this.showWorkflowStatus(workflowId, options);
			}
		} catch (error: any) {
			ui.displayError(error.message || 'Failed to get workflow status');
			process.exit(1);
		}
	}

	/** Lazily create and initialize the TaskExecutionManager. */
	private async initializeWorkflowManager(options: WorkflowStatusOptions): Promise<void> {
		if (!this.workflowManager) {
			const projectRoot = options.project || process.cwd();
			const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
			const config: TaskExecutionManagerConfig = {
				projectRoot,
				maxConcurrent: 5,
				defaultTimeout: 60,
				worktreeBase,
				claudeExecutable: options.claude || 'claude',
				debug: false
			};
			this.workflowManager = new TaskExecutionManager(config);
			await this.workflowManager.initialize();
		}
	}

	/** Look a workflow up first as a workflow ID, then as a plain task ID. */
	private resolveWorkflow(id: string) {
		return (
			this.workflowManager!.getWorkflowStatus(id) ||
			this.workflowManager!.getWorkflowByTaskId(id)
		);
	}

	/** Resolve the workflow and render it once in the requested format. */
	private async showWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
		const workflow = this.resolveWorkflow(workflowId);
		if (!workflow) {
			throw new Error(`Workflow not found: ${workflowId}`);
		}
		if (options.format === 'json') {
			this.displayJsonStatus(workflow);
		} else {
			this.displayTextStatus(workflow);
		}
	}

	/**
	 * Watch mode: re-render the status every 2 seconds until the workflow
	 * reaches a terminal state, disappears, or the user presses Ctrl+C.
	 *
	 * BUG FIX: the original never cleared the refresh interval once the
	 * workflow ended or was not found, so watch mode kept clearing and
	 * redrawing the screen forever. The timer is now cancelled, which also
	 * lets the event loop drain so the process can exit naturally (the
	 * never-resolving keep-alive promise is no longer needed).
	 */
	private async watchWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
		console.log(chalk.blue.bold('👀 Watching workflow status (Press Ctrl+C to exit)\n'));
		let updateCount = 0;
		let ended = false;
		let interval: ReturnType<typeof setInterval> | undefined;

		// Cancel the refresh timer and mark watch mode as finished.
		const stopWatching = () => {
			ended = true;
			if (interval) {
				clearInterval(interval);
				interval = undefined;
			}
		};

		const updateStatus = async () => {
			try {
				// Clear the screen on every refresh after the first render.
				if (updateCount > 0) {
					process.stdout.write('\x1b[2J\x1b[0f');
				}
				const workflow = this.resolveWorkflow(workflowId);
				if (!workflow) {
					console.log(chalk.red(`Workflow not found: ${workflowId}`));
					stopWatching();
					return;
				}
				console.log(chalk.blue.bold('👀 Watching Workflow Status'));
				console.log(chalk.gray(`Last updated: ${new Date().toLocaleTimeString()}\n`));
				this.displayTextStatus(workflow);
				// Stop refreshing once the workflow reaches a terminal state.
				if (['completed', 'failed', 'cancelled', 'timeout'].includes(workflow.status)) {
					console.log(chalk.yellow('\n⚠ Workflow has ended. Stopping watch mode.'));
					stopWatching();
					return;
				}
				updateCount++;
			} catch (error) {
				console.error(chalk.red('Error updating status:'), error);
			}
		};

		// Stop cleanly on Ctrl+C.
		process.on('SIGINT', () => {
			stopWatching();
			console.log(chalk.yellow('\n👋 Stopped watching workflow status'));
			process.exit(0);
		});

		// Initial render; only start the timer if the workflow is still live.
		await updateStatus();
		if (!ended) {
			// The live timer keeps the process alive until the workflow ends.
			interval = setInterval(updateStatus, 2000);
		}
	}

	/** Emit the workflow status as a single JSON document. */
	private displayJsonStatus(workflow: any): void {
		const status = {
			workflowId: `workflow-${workflow.taskId}`,
			taskId: workflow.taskId,
			taskTitle: workflow.taskTitle,
			taskDescription: workflow.taskDescription,
			status: workflow.status,
			worktreePath: workflow.worktreePath,
			branchName: workflow.branchName,
			processId: workflow.processId,
			startedAt: workflow.startedAt,
			lastActivity: workflow.lastActivity,
			// Duration in whole seconds (machine-friendly, unlike the text view).
			duration: this.calculateDuration(workflow.startedAt, workflow.lastActivity),
			metadata: workflow.metadata
		};
		console.log(JSON.stringify(status, null, 2));
	}

	/** Render the full human-readable status report. */
	private displayTextStatus(workflow: any): void {
		const workflowId = `workflow-${workflow.taskId}`;
		const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
		ui.displayBanner(`Workflow Status: ${workflowId}`);
		// Basic information
		console.log(chalk.blue.bold('\n📋 Basic Information:\n'));
		console.log(`  Workflow ID: ${chalk.cyan(workflowId)}`);
		console.log(`  Task ID: ${chalk.cyan(workflow.taskId)}`);
		console.log(`  Task Title: ${workflow.taskTitle}`);
		console.log(`  Status: ${this.getStatusDisplay(workflow.status)}`);
		console.log(`  Duration: ${chalk.gray(duration)}`);
		// Task details
		if (workflow.taskDescription) {
			console.log(chalk.blue.bold('\n📝 Task Details:\n'));
			console.log(`  ${workflow.taskDescription}`);
		}
		// Process information
		console.log(chalk.blue.bold('\n⚙ Process Information:\n'));
		console.log(`  Process ID: ${workflow.processId ? chalk.green(workflow.processId) : chalk.gray('N/A')}`);
		console.log(`  Worktree: ${chalk.gray(workflow.worktreePath)}`);
		console.log(`  Branch: ${chalk.gray(workflow.branchName)}`);
		// Timing information
		console.log(chalk.blue.bold('\n⏰ Timing:\n'));
		console.log(`  Started: ${chalk.gray(workflow.startedAt.toLocaleString())}`);
		console.log(`  Last Activity: ${chalk.gray(workflow.lastActivity.toLocaleString())}`);
		// Metadata (only when non-empty)
		if (workflow.metadata && Object.keys(workflow.metadata).length > 0) {
			console.log(chalk.blue.bold('\n🔖 Metadata:\n'));
			Object.entries(workflow.metadata).forEach(([key, value]) => {
				console.log(`  ${key}: ${chalk.gray(String(value))}`);
			});
		}
		// Status-specific hints, then the actions the user can take next.
		this.displayStatusSpecificInfo(workflow);
		this.displayAvailableActions(workflow);
	}

	/** Print a short explanation tailored to the current status. */
	private displayStatusSpecificInfo(workflow: any): void {
		const workflowId = `workflow-${workflow.taskId}`;
		switch (workflow.status) {
			case 'running':
				console.log(chalk.blue.bold('\n🚀 Running Status:\n'));
				console.log(`  ${chalk.green('●')} Process is actively executing`);
				console.log(`  ${chalk.blue('')} Monitor output with: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
				break;
			case 'paused':
				console.log(chalk.blue.bold('\n⏸ Paused Status:\n'));
				console.log(`  ${chalk.yellow('●')} Workflow is paused`);
				console.log(`  ${chalk.blue('')} Resume with: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
				break;
			case 'completed':
				console.log(chalk.blue.bold('\n✅ Completed Status:\n'));
				console.log(`  ${chalk.green('●')} Workflow completed successfully`);
				console.log(`  ${chalk.blue('')} Resources have been cleaned up`);
				break;
			case 'failed':
				console.log(chalk.blue.bold('\n❌ Failed Status:\n'));
				console.log(`  ${chalk.red('●')} Workflow execution failed`);
				console.log(`  ${chalk.blue('')} Check logs for error details`);
				break;
			case 'initializing':
				console.log(chalk.blue.bold('\n🔄 Initializing Status:\n'));
				console.log(`  ${chalk.blue('●')} Setting up worktree and process`);
				console.log(`  ${chalk.blue('')} This should complete shortly`);
				break;
		}
	}

	/** List the sub-commands that make sense for the current status. */
	private displayAvailableActions(workflow: any): void {
		const workflowId = `workflow-${workflow.taskId}`;
		console.log(chalk.blue.bold('\n🎯 Available Actions:\n'));
		switch (workflow.status) {
			case 'running':
				console.log(`  • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
				console.log(`  • Pause: ${chalk.cyan(`tm workflow pause ${workflowId}`)}`);
				console.log(`  • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
				break;
			case 'paused':
				console.log(`  • Resume: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
				console.log(`  • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
				break;
			case 'pending':
			case 'initializing':
				console.log(`  • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
				break;
			case 'completed':
			case 'failed':
			case 'cancelled':
				console.log(`  • View logs: ${chalk.cyan(`tm workflow logs ${workflowId}`)}`);
				console.log(`  • Start new: ${chalk.cyan(`tm workflow start ${workflow.taskId}`)}`);
				break;
		}
		console.log(`  • List all: ${chalk.cyan('tm workflow list')}`);
	}

	/**
	 * Map a workflow status to its icon + colored label.
	 *
	 * BUG FIX: chalk has no `.orange` method — the original threw a TypeError
	 * for 'paused'. Use an explicit hex color, matching the stop command.
	 */
	private getStatusDisplay(status: string): string {
		const statusMap = {
			pending: { icon: '⏳', color: chalk.yellow },
			initializing: { icon: '🔄', color: chalk.blue },
			running: { icon: '🚀', color: chalk.green },
			paused: { icon: '⏸️', color: chalk.hex('#FFA500') },
			completed: { icon: '✅', color: chalk.green },
			failed: { icon: '❌', color: chalk.red },
			cancelled: { icon: '🛑', color: chalk.gray },
			timeout: { icon: '⏰', color: chalk.red }
		};
		const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
		return `${statusInfo.icon} ${statusInfo.color(status)}`;
	}

	/** Human-readable elapsed time with seconds precision. */
	private formatDuration(start: Date, end: Date): string {
		const diff = end.getTime() - start.getTime();
		const minutes = Math.floor(diff / (1000 * 60));
		const hours = Math.floor(minutes / 60);
		const seconds = Math.floor((diff % (1000 * 60)) / 1000);
		if (hours > 0) {
			return `${hours}h ${minutes % 60}m ${seconds}s`;
		} else if (minutes > 0) {
			return `${minutes}m ${seconds}s`;
		} else {
			return `${seconds}s`;
		}
	}

	/** Elapsed time in whole seconds (used by the JSON output). */
	private calculateDuration(start: Date, end: Date): number {
		return Math.floor((end.getTime() - start.getTime()) / 1000);
	}

	/** Detach event listeners; does not stop workflows. */
	async cleanup(): Promise<void> {
		if (this.workflowManager) {
			this.workflowManager.removeAllListeners();
		}
	}

	/** Construct this command and register it on a commander program. */
	static register(program: Command, name?: string): WorkflowStatusCommand {
		const command = new WorkflowStatusCommand(name);
		program.addCommand(command);
		return command;
	}
}

View File

@@ -0,0 +1,260 @@
/**
* @fileoverview Workflow Stop Command
* Stop and clean up workflow execution
*/
import { Command } from 'commander';
import chalk from 'chalk';
import path from 'node:path';
import {
TaskExecutionManager,
type TaskExecutionManagerConfig
} from '@tm/workflow-engine';
import * as ui from '../../utils/ui.js';
export interface WorkflowStopOptions {
	/** Project root directory (defaults to the current working directory). */
	project?: string;
	/** Base directory for worktrees. */
	worktreeBase?: string;
	/** Claude Code executable path. */
	claude?: string;
	/** Kill immediately and skip the confirmation prompt. */
	force?: boolean;
	/** Stop every workflow in a non-terminal state. */
	all?: boolean;
}
/**
 * WorkflowStopCommand - Stop workflow execution
 *
 * Terminates one workflow (or every active one with --all), cleaning up the
 * associated worktree and process state.
 */
export class WorkflowStopCommand extends Command {
	private workflowManager?: TaskExecutionManager;

	constructor(name?: string) {
		super(name || 'stop');
		this.description('Stop workflow execution and clean up resources')
			.argument('[workflow-id]', 'Workflow ID to stop (or task ID)')
			.option('-p, --project <path>', 'Project root directory', process.cwd())
			.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
			.option('--claude <path>', 'Claude Code executable path', 'claude')
			.option('-f, --force', 'Force stop (kill process immediately)')
			.option('--all', 'Stop all running workflows')
			.action(async (workflowId: string | undefined, options: WorkflowStopOptions) => {
				await this.executeCommand(workflowId, options);
			});
	}

	/** Entry point: dispatch to single-workflow or stop-all mode. */
	private async executeCommand(workflowId: string | undefined, options: WorkflowStopOptions): Promise<void> {
		try {
			await this.initializeWorkflowManager(options);
			if (options.all) {
				await this.stopAllWorkflows(options);
			} else if (workflowId) {
				await this.stopSingleWorkflow(workflowId, options);
			} else {
				// Neither a target nor --all: nothing sensible to do.
				ui.displayError('Please specify a workflow ID or use --all flag');
				process.exit(1);
			}
		} catch (error: any) {
			ui.displayError(error.message || 'Failed to stop workflow');
			process.exit(1);
		}
	}

	/** Lazily build and initialize the TaskExecutionManager. */
	private async initializeWorkflowManager(options: WorkflowStopOptions): Promise<void> {
		if (this.workflowManager) {
			return;
		}
		const projectRoot = options.project || process.cwd();
		const config: TaskExecutionManagerConfig = {
			projectRoot,
			maxConcurrent: 5,
			defaultTimeout: 60,
			worktreeBase: path.resolve(projectRoot, options.worktreeBase || '../task-worktrees'),
			claudeExecutable: options.claude || 'claude',
			debug: false
		};
		this.workflowManager = new TaskExecutionManager(config);
		await this.workflowManager.initialize();
	}

	/** Stop one workflow, identified by workflow ID or bare task ID. */
	private async stopSingleWorkflow(workflowId: string, options: WorkflowStopOptions): Promise<void> {
		// Accept either the full workflow ID or the underlying task ID.
		const target =
			this.workflowManager!.getWorkflowStatus(workflowId) ||
			this.workflowManager!.getWorkflowByTaskId(workflowId);
		if (!target) {
			throw new Error(`Workflow not found: ${workflowId}`);
		}
		const actualWorkflowId = `workflow-${target.taskId}`;

		// Summarize what is about to be stopped.
		console.log(chalk.blue.bold(`🛑 Stopping Workflow: ${actualWorkflowId}`));
		console.log(`${chalk.blue('Task:')} ${target.taskTitle}`);
		console.log(`${chalk.blue('Status:')} ${this.getStatusDisplay(target.status)}`);
		console.log(`${chalk.blue('Worktree:')} ${chalk.gray(target.worktreePath)}`);
		if (target.processId) {
			console.log(`${chalk.blue('Process ID:')} ${chalk.gray(target.processId)}`);
		}
		console.log();

		// Ask before tearing down a live workflow, unless --force was given.
		const isLive = ['running', 'paused'].includes(target.status);
		if (!options.force && isLive) {
			const shouldProceed = await ui.confirm(
				`Are you sure you want to stop this ${target.status} workflow?`
			);
			if (!shouldProceed) {
				console.log(chalk.gray('Operation cancelled'));
				return;
			}
		}

		ui.displaySpinner('Stopping workflow and cleaning up resources...');
		await this.workflowManager!.stopTaskExecution(actualWorkflowId, options.force);

		ui.displaySuccess('Workflow stopped successfully!');
		console.log();
		console.log(`${chalk.green('✓')} Process terminated`);
		console.log(`${chalk.green('✓')} Worktree cleaned up`);
		console.log(`${chalk.green('✓')} State updated`);
	}

	/** Stop every workflow that is still in a non-terminal state. */
	private async stopAllWorkflows(options: WorkflowStopOptions): Promise<void> {
		const activeStates = ['pending', 'initializing', 'running', 'paused'];
		const active = this.workflowManager!
			.listWorkflows()
			.filter((w) => activeStates.includes(w.status));

		if (active.length === 0) {
			ui.displayWarning('No active workflows to stop');
			return;
		}

		console.log(chalk.blue.bold(`🛑 Stopping ${active.length} Active Workflows`));
		console.log();
		// List every workflow about to be stopped so the prompt is informed.
		for (const w of active) {
			console.log(
				`${chalk.cyan(`workflow-${w.taskId}`)} - ${w.taskTitle} ${this.getStatusDisplay(w.status)}`
			);
		}
		console.log();

		if (!options.force) {
			const shouldProceed = await ui.confirm(
				`Are you sure you want to stop all ${active.length} active workflows?`
			);
			if (!shouldProceed) {
				console.log(chalk.gray('Operation cancelled'));
				return;
			}
		}

		ui.displaySpinner('Stopping all workflows...');
		let stoppedCount = 0;
		let failedCount = 0;
		// Stop sequentially; a failure on one workflow must not abort the rest.
		for (const w of active) {
			try {
				await this.workflowManager!.stopTaskExecution(`workflow-${w.taskId}`, options.force);
				stoppedCount++;
			} catch (error) {
				console.error(
					`${chalk.red('✗')} Failed to stop workflow ${w.taskId}: ${error}`
				);
				failedCount++;
			}
		}

		console.log();
		if (stoppedCount > 0) {
			ui.displaySuccess(`Successfully stopped ${stoppedCount} workflows`);
		}
		if (failedCount > 0) {
			ui.displayWarning(`Failed to stop ${failedCount} workflows`);
		}
	}

	/** Map a workflow status to its icon + colored label. */
	private getStatusDisplay(status: string): string {
		const statusStyles = {
			pending: { icon: '⏳', color: chalk.yellow },
			initializing: { icon: '🔄', color: chalk.blue },
			running: { icon: '🚀', color: chalk.green },
			paused: { icon: '⏸️', color: chalk.hex('#FFA500') },
			completed: { icon: '✅', color: chalk.green },
			failed: { icon: '❌', color: chalk.red },
			cancelled: { icon: '🛑', color: chalk.gray },
			timeout: { icon: '⏰', color: chalk.red }
		};
		const { icon, color } =
			statusStyles[status as keyof typeof statusStyles] ?? { icon: '❓', color: chalk.white };
		return `${icon} ${color(status)}`;
	}

	/** Detach event listeners; does not stop workflows. */
	async cleanup(): Promise<void> {
		if (this.workflowManager) {
			this.workflowManager.removeAllListeners();
		}
	}

	/** Construct this command and register it on a commander program. */
	static register(program: Command, name?: string): WorkflowStopCommand {
		const command = new WorkflowStopCommand(name);
		program.addCommand(command);
		return command;
	}
}

View File

@@ -6,6 +6,11 @@
// Commands // Commands
export { ListTasksCommand } from './commands/list.command.js'; export { ListTasksCommand } from './commands/list.command.js';
export { AuthCommand } from './commands/auth.command.js'; export { AuthCommand } from './commands/auth.command.js';
export { WorkflowCommand } from './commands/workflow.command.js';
export { ContextCommand } from './commands/context.command.js';
// Command registry
export { registerAllCommands } from './commands/index.js';
// UI utilities (for other commands to use) // UI utilities (for other commands to use)
export * as ui from './utils/ui.js'; export * as ui from './utils/ui.js';

View File

@@ -6,7 +6,7 @@
import chalk from 'chalk'; import chalk from 'chalk';
import boxen from 'boxen'; import boxen from 'boxen';
import Table from 'cli-table3'; import Table from 'cli-table3';
import type { Task, TaskStatus, TaskPriority } from '@tm/core'; import type { Task, TaskStatus, TaskPriority } from '@tm/core/types';
/** /**
* Get colored status display with ASCII icons (matches scripts/modules/ui.js style) * Get colored status display with ASCII icons (matches scripts/modules/ui.js style)
@@ -324,3 +324,61 @@ export function createTaskTable(
return table.toString(); return table.toString();
} }
/**
 * Display a spinner glyph with a message (static mock — no animation).
 */
export function displaySpinner(message: string): void {
	const glyph = chalk.blue('◐');
	console.log(glyph, chalk.gray(message));
}
/**
 * Simple y/n confirmation prompt (mock implementation).
 *
 * Reads one chunk from stdin and resolves true for "y"/"yes"
 * (case-insensitive). In production this should use inquirer.
 */
export async function confirm(message: string): Promise<boolean> {
	console.log(chalk.yellow('?'), chalk.white(message), chalk.gray('(y/n)'));
	return new Promise((resolve) => {
		process.stdin.once('data', (data) => {
			// BUG FIX: pause stdin again after the answer — leaving it resumed
			// keeps an open handle that can prevent the process from exiting.
			process.stdin.pause();
			const answer = data.toString().trim().toLowerCase();
			resolve(answer === 'y' || answer === 'yes');
		});
		process.stdin.resume();
	});
}
/**
 * Create a generic table
 *
 * Renders headers in bold blue with gray box-drawing borders and returns the
 * table as a string (caller is responsible for printing it).
 */
export function createTable(headers: string[], rows: string[][]): string {
const table = new Table({
head: headers.map(h => chalk.blue.bold(h)),
style: {
head: [],
border: ['gray']
},
// Unicode box-drawing characters for a solid single-line border.
chars: {
'top': '─',
'top-mid': '┬',
'top-left': '┌',
'top-right': '┐',
'bottom': '─',
'bottom-mid': '┴',
'bottom-left': '└',
'bottom-right': '┘',
'left': '│',
'left-mid': '├',
'mid': '─',
'mid-mid': '┼',
'right': '│',
'right-mid': '┤',
'middle': '│'
}
});
rows.forEach(row => table.push(row));
return table.toString();
}

View File

@@ -1,15 +1,8 @@
import { defineConfig } from 'tsup'; import { defineConfig } from 'tsup';
import { cliConfig, mergeConfig } from '@tm/build-config';
export default defineConfig({ export default defineConfig(
entry: ['src/index.ts'], mergeConfig(cliConfig, {
format: ['esm'], entry: ['src/index.ts']
target: 'node18', })
splitting: false, );
sourcemap: true,
clean: true,
dts: true,
shims: true,
esbuildOptions(options) {
options.platform = 'node';
}
});

View File

@@ -200,6 +200,34 @@ sidebarTitle: "CLI Commands"
``` ```
</Accordion> </Accordion>
<Accordion title="Workflow Management">
```bash
# Start workflow execution for a task
task-master workflow start <task-id>
# or use alias
task-master workflow run <task-id>
# List all active workflows
task-master workflow list
# Check status of a specific workflow
task-master workflow status <workflow-id>
# or use alias
task-master workflow info <workflow-id>
# Stop a running workflow
task-master workflow stop <workflow-id>
# or use alias
task-master workflow kill <workflow-id>
```
The workflow system executes tasks in isolated git worktrees with dedicated Claude Code processes, providing:
- **Isolated Execution**: Each task runs in its own git worktree
- **Process Management**: Spawns dedicated Claude Code processes
- **Real-time Monitoring**: Track progress and output
- **Parallel Execution**: Run multiple tasks concurrently
</Accordion>
<Accordion title="Initialize a Project"> <Accordion title="Initialize a Project">
```bash ```bash
# Initialize a new project with Task Master structure # Initialize a new project with Task Master structure

View File

@@ -0,0 +1,221 @@
---
title: "Workflow Engine"
sidebarTitle: "Workflows"
---
The Task Master Workflow Engine provides advanced task execution capabilities with git worktree isolation and Claude Code process management.
## Overview
The workflow system extends Task Master with powerful execution features:
- **Git Worktree Isolation**: Each task runs in its own isolated git worktree
- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
- **Real-time Monitoring**: Track workflow progress and process output
- **State Management**: Persistent workflow state across sessions
- **Parallel Execution**: Run multiple tasks concurrently with resource limits
## Quick Start
### Starting a Workflow
```bash
# Start workflow for a specific task
task-master workflow start 1.2
# Using the alias
task-master workflow run 1.2
```
### Monitoring Workflows
```bash
# List all active workflows
task-master workflow list
# Check specific workflow status
task-master workflow status workflow-1.2-1234567890-abc123
# Using the alias
task-master workflow info workflow-1.2-1234567890-abc123
```
### Stopping Workflows
```bash
# Stop a running workflow
task-master workflow stop workflow-1.2-1234567890-abc123
# Force stop using alias
task-master workflow kill workflow-1.2-1234567890-abc123
```
## Workflow States
| State | Description |
|-------|-------------|
| `pending` | Created but not started |
| `initializing` | Setting up worktree and process |
| `running` | Active execution in progress |
| `paused` | Temporarily stopped |
| `completed` | Successfully finished |
| `failed` | Error occurred during execution |
| `cancelled` | User cancelled the workflow |
| `timeout` | Exceeded time limit |
## Environment Configuration
### Environment Variables
Set these environment variables to customize workflow behavior:
- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
### Example Configuration
```bash
# Enable debug mode
export TASKMASTER_WORKFLOW_DEBUG=true
# Set custom Claude path
export TASKMASTER_CLAUDE_PATH=/usr/local/bin/claude
# Set worktree base directory
export TASKMASTER_WORKTREE_BASE=./worktrees
# Limit concurrent workflows
export TASKMASTER_MAX_CONCURRENT=3
```
## Git Worktree Integration
### How It Works
When you start a workflow:
1. **Worktree Creation**: A new git worktree is created for the task
2. **Process Spawn**: A dedicated Claude Code process is launched in the worktree
3. **Task Execution**: The task runs in complete isolation
4. **State Tracking**: Progress is monitored and persisted
5. **Cleanup**: Worktree is removed when workflow completes
### Worktree Structure
```
project/
├── .git/ # Main repository
├── src/ # Main working directory
└── worktrees/ # Workflow worktrees
├── task-1.2/ # Worktree for task 1.2
├── task-2.1/ # Worktree for task 2.1
└── task-3.4/ # Worktree for task 3.4
```
## Best Practices
### When to Use Workflows
Use workflows for tasks that:
- Require isolated development environments
- Need dedicated Claude Code attention
- Benefit from parallel execution
- Require process monitoring and state tracking
### Workflow Management
- **Start workflows for complex tasks** that need focused execution
- **Monitor progress** using `workflow status` command
- **Clean up completed workflows** to free resources
- **Use meaningful task descriptions** for better workflow tracking
### Resource Management
- **Limit concurrent workflows** based on system resources
- **Monitor workflow output** for debugging and progress tracking
- **Stop unnecessary workflows** to free up resources
## Troubleshooting
### Common Issues
**Worktree Creation Fails**
```bash
# Check git version (requires 2.5+)
git --version
# Verify project is a git repository
git status
```
**Claude Code Not Found**
```bash
# Check Claude installation
which claude
# Set custom path
export TASKMASTER_CLAUDE_PATH=/path/to/claude
```
**Permission Errors**
```bash
# Check worktree directory permissions
chmod -R 755 ./worktrees
```
### Debug Mode
Enable debug logging for troubleshooting:
```bash
export TASKMASTER_WORKFLOW_DEBUG=true
task-master workflow start 1.2
```
## Integration Examples
### With VS Code Extension
The workflow engine integrates with the Task Master VS Code extension to provide:
- **Workflow Tree View**: Visual workflow management
- **Process Monitoring**: Real-time output streaming
- **Worktree Navigation**: Quick access to isolated workspaces
- **Status Indicators**: Visual workflow state tracking
### With Task Management
```bash
# Typical workflow
task-master next # Find next task
task-master workflow start 1.2 # Start workflow
task-master workflow status <id> # Monitor progress
task-master set-status --id=1.2 --status=done # Mark complete
```
## Advanced Features
### Parallel Execution
Run multiple workflows simultaneously:
```bash
# Start multiple workflows
task-master workflow start 1.2
task-master workflow start 2.1
task-master workflow start 3.4
# Monitor all active workflows
task-master workflow list
```
### Process Monitoring
Each workflow provides real-time output monitoring and process management through the workflow engine's event system.
### State Persistence
Workflow state is automatically persisted across sessions, allowing you to resume monitoring workflows after restarting the CLI.

View File

@@ -49,6 +49,7 @@
"pages": [ "pages": [
"capabilities/mcp", "capabilities/mcp",
"capabilities/cli-root-commands", "capabilities/cli-root-commands",
"capabilities/workflows",
"capabilities/task-structure" "capabilities/task-structure"
] ]
} }

View File

@@ -3,4 +3,38 @@ title: "What's New"
sidebarTitle: "What's New" sidebarTitle: "What's New"
--- ---
## New Workflow Engine (Latest)
Task Master now includes a powerful workflow engine that revolutionizes how tasks are executed:
### 🚀 Key Features
- **Git Worktree Isolation**: Each task runs in its own isolated git worktree
- **Claude Code Integration**: Spawns dedicated Claude Code processes for task execution
- **Real-time Monitoring**: Track workflow progress and process output
- **Parallel Execution**: Run multiple tasks concurrently with resource management
- **State Persistence**: Workflow state is maintained across sessions
### 🔧 New CLI Commands
```bash
# Start workflow execution
task-master workflow start <task-id>
# Monitor active workflows
task-master workflow list
# Check workflow status
task-master workflow status <workflow-id>
# Stop running workflow
task-master workflow stop <workflow-id>
```
### 📖 Learn More
Check out the new [Workflow Documentation](/capabilities/workflows) for comprehensive usage guides and best practices.
---
An easy way to see the latest releases An easy way to see the latest releases

40
output.txt Normal file

File diff suppressed because one or more lines are too long

14618
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -13,16 +13,24 @@
"scripts": { "scripts": {
"build": "npm run build:packages && tsup", "build": "npm run build:packages && tsup",
"dev": "npm run build:packages && npm link && (npm run dev:packages & tsup --watch --onSuccess 'echo Build complete && npm link')", "dev": "npm run build:packages && npm link && (npm run dev:packages & tsup --watch --onSuccess 'echo Build complete && npm link')",
"dev:packages": "(cd packages/tm-core && npm run dev) & (cd apps/cli && npm run dev) & wait", "dev:packages": "(cd packages/tm-core && npm run dev) & (cd packages/workflow-engine && npm run dev) & (cd apps/cli && npm run dev) & wait",
"dev:core": "cd packages/tm-core && npm run dev", "dev:core": "cd packages/tm-core && npm run dev",
"dev:workflow": "cd packages/workflow-engine && npm run dev",
"dev:cli": "cd apps/cli && npm run dev", "dev:cli": "cd apps/cli && npm run dev",
"build:packages": "npm run build:core && npm run build:cli", "build:packages": "npm run build:core && npm run build:workflow && npm run build:cli",
"build:core": "cd packages/tm-core && npm run build", "build:core": "cd packages/tm-core && npm run build",
"build:workflow": "cd packages/workflow-engine && npm run build",
"build:cli": "cd apps/cli && npm run build", "build:cli": "cd apps/cli && npm run build",
"typecheck": "npm run typecheck:core && npm run typecheck:cli",
"typecheck:core": "cd packages/tm-core && npm run typecheck",
"typecheck:cli": "cd apps/cli && npm run typecheck",
"test": "node --experimental-vm-modules node_modules/.bin/jest", "test": "node --experimental-vm-modules node_modules/.bin/jest",
"test:unit": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=unit",
"test:integration": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=integration",
"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures", "test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
"test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch", "test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
"test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage", "test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
"test:ci": "node --experimental-vm-modules node_modules/.bin/jest --coverage --ci",
"test:e2e": "./tests/e2e/run_e2e.sh", "test:e2e": "./tests/e2e/run_e2e.sh",
"test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log", "test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
"postpack": "chmod +x dist/task-master.js dist/mcp-server.js", "postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
@@ -67,7 +75,7 @@
"ajv": "^8.17.1", "ajv": "^8.17.1",
"ajv-formats": "^3.0.1", "ajv-formats": "^3.0.1",
"boxen": "^8.0.1", "boxen": "^8.0.1",
"chalk": "^5.4.1", "chalk": "5.6.2",
"cli-highlight": "^2.1.11", "cli-highlight": "^2.1.11",
"cli-progress": "^3.12.0", "cli-progress": "^3.12.0",
"cli-table3": "^0.6.5", "cli-table3": "^0.6.5",

View File

@@ -0,0 +1,31 @@
{
"name": "@tm/build-config",
"version": "1.0.0",
"description": "Shared build configuration for Task Master monorepo",
"type": "module",
"main": "./dist/tsup.base.js",
"types": "./dist/tsup.base.d.ts",
"exports": {
".": {
"types": "./src/tsup.base.ts",
"import": "./dist/tsup.base.js",
"require": "./dist/tsup.base.cjs"
}
},
"files": ["dist", "src"],
"keywords": ["build-config", "tsup", "monorepo"],
"author": "",
"license": "MIT",
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"tsup": "^8.5.0",
"typescript": "^5.7.3"
},
"peerDependencies": {
"tsup": "^8.0.0"
}
}

View File

@@ -0,0 +1,151 @@
/**
* Base tsup configuration for Task Master monorepo
* Provides shared configuration that can be extended by individual packages
*/
import type { Options } from 'tsup';
// Build-mode flags, derived once at load time; every config below keys off these.
const isProduction = (process.env.NODE_ENV ?? '') === 'production';
const isDevelopment = isProduction === false;
/**
 * Base configuration for library packages (tm-core, etc.)
 *
 * Emits dual CJS/ESM output with type declarations. Production builds are
 * minified and tree-shaken; development builds keep sourcemaps and original
 * symbol names for easier debugging.
 */
export const libraryConfig: Partial<Options> = {
	format: ['cjs', 'esm'],
	target: 'es2022',
	clean: true,
	dts: true,
	bundle: true,
	// Sourcemaps only in development to keep production bundles small.
	sourcemap: isDevelopment,
	// Production-only optimizations.
	splitting: isProduction,
	treeshake: isProduction,
	minify: isProduction,
	esbuildOptions(options) {
		options.conditions = ['module'];
		// Embed source text and keep original names only while developing.
		options.sourcesContent = isDevelopment;
		options.keepNames = isDevelopment;
	},
	// Rebuild automatically on src changes during development.
	watch: isDevelopment ? ['src'] : false
};
/**
 * Base configuration for CLI packages
 *
 * Node 18 ESM output with shims. Production builds are minified and
 * tree-shaken; development builds keep sourcemaps and symbol names.
 */
export const cliConfig: Partial<Options> = {
	format: ['esm'],
	target: 'node18',
	splitting: false,
	clean: true,
	dts: true,
	shims: true,
	// Sourcemaps only in development to keep production bundles small.
	sourcemap: isDevelopment,
	// Production-only optimizations for smaller bundles.
	minify: isProduction,
	treeshake: isProduction,
	esbuildOptions(options) {
		options.platform = 'node';
		// Embed source text and keep original names only while developing.
		options.sourcesContent = isDevelopment;
		options.keepNames = isDevelopment;
	}
};
/**
 * Base configuration for executable bundles (root level)
 *
 * Produces a single self-contained Node 18 ESM file. TypeScript sources
 * can be imported directly from JavaScript entry points.
 */
export const executableConfig: Partial<Options> = {
	format: ['esm'],
	target: 'node18',
	splitting: false,
	clean: true,
	// Bundle everything into one file so the result is directly executable.
	bundle: true,
	// Sourcemaps only in development to keep production bundles small.
	sourcemap: isDevelopment,
	// Minify in production for smaller executables.
	minify: isProduction,
	// Handle TypeScript imports transparently.
	loader: {
		'.js': 'jsx',
		'.ts': 'ts'
	},
	esbuildOptions(options) {
		options.platform = 'node';
		// Allow importing TypeScript from JavaScript.
		options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
		// Embed source text and keep original names only while developing.
		options.sourcesContent = isDevelopment;
		options.keepNames = isDevelopment;
	}
};
/**
 * Common external modules that should not be bundled.
 *
 * Each Node.js builtin is listed both bare ('fs') and with the 'node:'
 * scheme ('node:fs'): esbuild matches externals against the exact import
 * specifier, and modern code commonly imports via the 'node:' prefix, so
 * both forms must be excluded from bundling.
 */
export const commonExternals = [
	// Native Node.js modules
	'fs',
	'path',
	'child_process',
	'crypto',
	'os',
	'url',
	'util',
	'stream',
	'http',
	'https',
	'events',
	'assert',
	'buffer',
	'querystring',
	'readline',
	'zlib',
	'tty',
	'net',
	'dgram',
	'dns',
	'tls',
	'cluster',
	'process',
	'module'
].flatMap((name) => [name, `node:${name}`]);
/**
 * Utility function to merge configurations.
 *
 * Later values win (overrides over base), with two exceptions:
 * - `external` arrays are concatenated and de-duplicated rather than
 *   replaced, so repeated merges don't accumulate duplicate entries
 * - `esbuildOptions` callbacks are chained: the base callback runs first,
 *   then the overrides callback, so both configs can adjust esbuild options
 *
 * @param baseConfig - shared defaults (e.g. libraryConfig, cliConfig)
 * @param overrides - package-specific settings that take precedence
 * @returns the merged tsup configuration
 */
export function mergeConfig(
	baseConfig: Partial<Options>,
	overrides: Partial<Options>
): Options {
	// Concatenate then de-duplicate while preserving first-seen order.
	const external = [
		...new Set([
			...(baseConfig.external ?? []),
			...(overrides.external ?? [])
		])
	];

	return {
		...baseConfig,
		...overrides,
		external,
		// Chain callbacks instead of letting the override clobber the base.
		esbuildOptions(options, context) {
			baseConfig.esbuildOptions?.(options, context);
			overrides.esbuildOptions?.(options, context);
		}
	} as Options;
}
/**
 * Environment helpers
 *
 * A snapshot of the build environment taken at module load time.
 */
const resolvedNodeEnv = process.env.NODE_ENV || 'development';

export const env = {
	isProduction,
	isDevelopment,
	NODE_ENV: resolvedNodeEnv
};

View File

@@ -0,0 +1,20 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022"],
"module": "ESNext",
"moduleResolution": "bundler",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"noEmit": true,
"resolveJsonModule": true,
"isolatedModules": true,
"declaration": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1,23 @@
import { defineConfig } from 'tsup';
// Build mode drives sourcemap/minify behavior below.
const isProd = process.env.NODE_ENV === 'production';

export default defineConfig({
	entry: ['src/tsup.base.ts'],
	format: ['esm', 'cjs'],
	target: 'node18',
	clean: true,
	dts: true,
	// tsup itself is a peer dependency; never bundle it.
	external: ['tsup'],
	// Development builds: sourcemaps for debugging.
	sourcemap: !isProd,
	// Production builds: minified and tree-shaken.
	minify: isProd,
	treeshake: isProd,
	esbuildOptions(options) {
		// Embed source text and keep original names only while developing.
		options.sourcesContent = !isProd;
		options.keepNames = !isProd;
	}
});

View File

@@ -1,66 +1,62 @@
{ {
"name": "@tm/core", "name": "@tm/core",
"version": "1.0.0", "version": "1.0.0",
"private": true,
"description": "Core library for Task Master - TypeScript task management system", "description": "Core library for Task Master - TypeScript task management system",
"type": "module", "type": "module",
"types": "./dist/index.d.ts", "types": "./src/index.ts",
"main": "./dist/index.js", "main": "./dist/index.js",
"exports": { "exports": {
".": { ".": {
"types": "./src/index.ts", "types": "./src/index.ts",
"import": "./dist/index.js", "import": "./dist/index.js"
"require": "./dist/index.js"
}, },
"./auth": { "./auth": {
"types": "./src/auth/index.ts", "types": "./src/auth/index.ts",
"import": "./dist/auth/index.js", "import": "./dist/auth/index.js"
"require": "./dist/auth/index.js"
}, },
"./storage": { "./storage": {
"types": "./src/storage/index.ts", "types": "./src/storage/index.ts",
"import": "./dist/storage/index.js", "import": "./dist/storage/index.js"
"require": "./dist/storage/index.js"
}, },
"./config": { "./config": {
"types": "./src/config/index.ts", "types": "./src/config/index.ts",
"import": "./dist/config/index.js", "import": "./dist/config/index.js"
"require": "./dist/config/index.js"
}, },
"./providers": { "./providers": {
"types": "./src/providers/index.ts", "types": "./src/providers/index.ts",
"import": "./dist/providers/index.js", "import": "./dist/providers/index.js"
"require": "./dist/providers/index.js"
}, },
"./services": { "./services": {
"types": "./src/services/index.ts", "types": "./src/services/index.ts",
"import": "./dist/services/index.js", "import": "./dist/services/index.js"
"require": "./dist/services/index.js"
}, },
"./errors": { "./errors": {
"types": "./src/errors/index.ts", "types": "./src/errors/index.ts",
"import": "./dist/errors/index.js", "import": "./dist/errors/index.js"
"require": "./dist/errors/index.js"
}, },
"./logger": { "./logger": {
"types": "./src/logger/index.ts", "types": "./src/logger/index.ts",
"import": "./dist/logger/index.js", "import": "./dist/logger/index.js"
"require": "./dist/logger/index.js"
}, },
"./types": { "./types": {
"types": "./src/types/index.ts", "types": "./src/types/index.ts",
"import": "./dist/types/index.js", "import": "./dist/types/index.js"
"require": "./dist/types/index.js"
}, },
"./interfaces": { "./interfaces": {
"types": "./src/interfaces/index.ts", "types": "./src/interfaces/index.ts",
"import": "./dist/interfaces/index.js", "import": "./dist/interfaces/index.js"
"require": "./dist/interfaces/index.js"
}, },
"./utils": { "./utils": {
"types": "./src/utils/index.ts", "types": "./src/utils/index.ts",
"import": "./dist/utils/index.js", "import": "./dist/utils/index.js",
"require": "./dist/utils/index.js" "require": "./dist/utils/index.js"
}, },
"./workflow": {
"types": "./src/workflow/index.ts",
"import": "./dist/workflow/index.js",
"require": "./dist/workflow/index.js"
},
"./package.json": "./package.json" "./package.json": "./package.json"
}, },
"scripts": { "scripts": {
@@ -78,14 +74,15 @@
}, },
"dependencies": { "dependencies": {
"@supabase/supabase-js": "^2.57.0", "@supabase/supabase-js": "^2.57.0",
"@tm/workflow-engine": "*",
"chalk": "^5.3.0", "chalk": "^5.3.0",
"zod": "^3.22.4" "zod": "^3.22.4"
}, },
"devDependencies": { "devDependencies": {
"@biomejs/biome": "^1.9.4", "@biomejs/biome": "^1.9.4",
"@tm/build-config": "*",
"@types/node": "^20.11.30", "@types/node": "^20.11.30",
"@vitest/coverage-v8": "^2.0.5", "@vitest/coverage-v8": "^2.0.5",
"dotenv-mono": "^1.5.1",
"ts-node": "^10.9.2", "ts-node": "^10.9.2",
"tsup": "^8.0.2", "tsup": "^8.0.2",
"typescript": "^5.4.3", "typescript": "^5.4.3",

View File

@@ -6,11 +6,18 @@ import {
AuthCredentials, AuthCredentials,
OAuthFlowOptions, OAuthFlowOptions,
AuthenticationError, AuthenticationError,
AuthConfig AuthConfig,
UserContext
} from './types.js'; } from './types.js';
import { CredentialStore } from './credential-store.js'; import { CredentialStore } from './credential-store.js';
import { OAuthService } from './oauth-service.js'; import { OAuthService } from './oauth-service.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js'; import { SupabaseAuthClient } from '../clients/supabase-client.js';
import {
OrganizationService,
type Organization,
type Brief,
type RemoteTask
} from '../services/organization.service.js';
import { getLogger } from '../logger/index.js'; import { getLogger } from '../logger/index.js';
/** /**
@@ -21,11 +28,28 @@ export class AuthManager {
private credentialStore: CredentialStore; private credentialStore: CredentialStore;
private oauthService: OAuthService; private oauthService: OAuthService;
private supabaseClient: SupabaseAuthClient; private supabaseClient: SupabaseAuthClient;
private organizationService?: OrganizationService;
private constructor(config?: Partial<AuthConfig>) { private constructor(config?: Partial<AuthConfig>) {
this.credentialStore = new CredentialStore(config); this.credentialStore = new CredentialStore(config);
this.supabaseClient = new SupabaseAuthClient(); this.supabaseClient = new SupabaseAuthClient();
this.oauthService = new OAuthService(this.credentialStore, config); this.oauthService = new OAuthService(this.credentialStore, config);
// Initialize Supabase client with session restoration
this.initializeSupabaseSession();
}
/**
* Initialize Supabase session from stored credentials
*/
private async initializeSupabaseSession(): Promise<void> {
try {
await this.supabaseClient.initialize();
} catch (error) {
// Log but don't throw - session might not exist yet
const logger = getLogger('AuthManager');
logger.debug('No existing session to restore');
}
} }
/** /**
@@ -75,39 +99,48 @@ export class AuthManager {
} }
/** /**
* Refresh authentication token * Refresh authentication token using Supabase session
*/ */
async refreshToken(): Promise<AuthCredentials> { async refreshToken(): Promise<AuthCredentials> {
const authData = this.credentialStore.getCredentials({
allowExpired: true
});
if (!authData || !authData.refreshToken) {
throw new AuthenticationError(
'No refresh token available',
'NO_REFRESH_TOKEN'
);
}
try { try {
// Use Supabase client to refresh the token // Use Supabase's built-in session refresh
const response = await this.supabaseClient.refreshSession( const session = await this.supabaseClient.refreshSession();
authData.refreshToken
);
// Update authentication data if (!session) {
throw new AuthenticationError(
'Failed to refresh session',
'REFRESH_FAILED'
);
}
// Get existing credentials to preserve context
const existingCredentials = this.credentialStore.getCredentials({
allowExpired: true
});
// Update authentication data from session
const newAuthData: AuthCredentials = { const newAuthData: AuthCredentials = {
...authData, token: session.access_token,
token: response.token, refreshToken: session.refresh_token,
refreshToken: response.refreshToken, userId: session.user.id,
expiresAt: response.expiresAt, email: session.user.email,
savedAt: new Date().toISOString() expiresAt: session.expires_at
? new Date(session.expires_at * 1000).toISOString()
: undefined,
savedAt: new Date().toISOString(),
selectedContext: existingCredentials?.selectedContext
}; };
this.credentialStore.saveCredentials(newAuthData); this.credentialStore.saveCredentials(newAuthData);
return newAuthData; return newAuthData;
} catch (error) { } catch (error) {
throw error; if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Token refresh failed: ${(error as Error).message}`,
'REFRESH_FAILED'
);
} }
} }
@@ -133,4 +166,114 @@ export class AuthManager {
isAuthenticated(): boolean { isAuthenticated(): boolean {
return this.credentialStore.hasValidCredentials(); return this.credentialStore.hasValidCredentials();
} }
/**
* Get the current user context (org/brief selection)
*/
getContext(): UserContext | null {
const credentials = this.getCredentials();
return credentials?.selectedContext || null;
}
/**
* Update the user context (org/brief selection)
*/
async updateContext(context: Partial<UserContext>): Promise<void> {
const credentials = this.getCredentials();
if (!credentials) {
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
}
// Merge with existing context
const existingContext = credentials.selectedContext || {};
const newContext: UserContext = {
...existingContext,
...context,
updatedAt: new Date().toISOString()
};
// Save updated credentials with new context
const updatedCredentials: AuthCredentials = {
...credentials,
selectedContext: newContext
};
this.credentialStore.saveCredentials(updatedCredentials);
}
/**
* Clear the user context
*/
async clearContext(): Promise<void> {
const credentials = this.getCredentials();
if (!credentials) {
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
}
// Remove context from credentials
const { selectedContext, ...credentialsWithoutContext } = credentials;
this.credentialStore.saveCredentials(credentialsWithoutContext);
}
/**
* Get the organization service instance
* Uses the Supabase client with the current session or token
*/
private async getOrganizationService(): Promise<OrganizationService> {
if (!this.organizationService) {
// First check if we have credentials with a token
const credentials = this.getCredentials();
if (!credentials || !credentials.token) {
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
}
// Initialize session if needed (this will load from our storage adapter)
await this.supabaseClient.initialize();
// Use the SupabaseAuthClient which now has the session
const supabaseClient = this.supabaseClient.getClient();
this.organizationService = new OrganizationService(supabaseClient as any);
}
return this.organizationService;
}
/**
* Get all organizations for the authenticated user
*/
async getOrganizations(): Promise<Organization[]> {
const service = await this.getOrganizationService();
return service.getOrganizations();
}
/**
* Get all briefs for a specific organization
*/
async getBriefs(orgId: string): Promise<Brief[]> {
const service = await this.getOrganizationService();
return service.getBriefs(orgId);
}
/**
* Get a specific organization by ID
*/
async getOrganization(orgId: string): Promise<Organization | null> {
const service = await this.getOrganizationService();
return service.getOrganization(orgId);
}
/**
* Get a specific brief by ID
*/
async getBrief(briefId: string): Promise<Brief | null> {
const service = await this.getOrganizationService();
return service.getBrief(briefId);
}
/**
* Get all tasks for a specific brief
*/
async getTasks(briefId: string): Promise<RemoteTask[]> {
const service = await this.getOrganizationService();
return service.getTasks(briefId);
}
} }

View File

@@ -5,12 +5,19 @@
export { AuthManager } from './auth-manager.js'; export { AuthManager } from './auth-manager.js';
export { CredentialStore } from './credential-store.js'; export { CredentialStore } from './credential-store.js';
export { OAuthService } from './oauth-service.js'; export { OAuthService } from './oauth-service.js';
export { SupabaseSessionStorage } from './supabase-session-storage';
export type {
Organization,
Brief,
RemoteTask
} from '../services/organization.service.js';
export type { export type {
AuthCredentials, AuthCredentials,
OAuthFlowOptions, OAuthFlowOptions,
AuthConfig, AuthConfig,
CliData CliData,
UserContext
} from './types.js'; } from './types.js';
export { AuthenticationError } from './types.js'; export { AuthenticationError } from './types.js';

View File

@@ -181,8 +181,8 @@ export class OAuthService {
timestamp: Date.now() timestamp: Date.now()
}; };
// Build authorization URL for web app sign-in page // Build authorization URL for CLI-specific sign-in page
const authUrl = new URL(`${this.baseUrl}/auth/sign-in`); const authUrl = new URL(`${this.baseUrl}/auth/cli/sign-in`);
// Encode CLI data as base64 // Encode CLI data as base64
const cliParam = Buffer.from(JSON.stringify(cliData)).toString( const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
@@ -272,7 +272,49 @@ export class OAuthService {
return; return;
} }
// Handle direct token response from server // Handle authorization code for PKCE flow
const code = url.searchParams.get('code');
if (code && type === 'pkce_callback') {
try {
this.logger.info('Received authorization code for PKCE flow');
// Exchange code for session using PKCE
const session = await this.supabaseClient.exchangeCodeForSession(code);
// Save authentication data
const authData: AuthCredentials = {
token: session.access_token,
refreshToken: session.refresh_token,
userId: session.user.id,
email: session.user.email,
expiresAt: session.expires_at
? new Date(session.expires_at * 1000).toISOString()
: undefined,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
this.credentialStore.saveCredentials(authData);
if (server.listening) {
server.close();
}
// Clear timeout since authentication succeeded
if (timeoutId) {
clearTimeout(timeoutId);
}
resolve(authData);
return;
} catch (error) {
if (server.listening) {
server.close();
}
reject(error);
return;
}
}
// Handle direct token response from server (legacy flow)
if ( if (
accessToken && accessToken &&
(type === 'oauth_success' || type === 'session_transfer') (type === 'oauth_success' || type === 'session_transfer')
@@ -280,8 +322,23 @@ export class OAuthService {
try { try {
this.logger.info(`Received tokens via ${type}`); this.logger.info(`Received tokens via ${type}`);
// Get user info using the access token if possible // Create a session with the tokens and set it in Supabase client
const user = await this.supabaseClient.getUser(accessToken); const session = {
access_token: accessToken,
refresh_token: refreshToken || '',
expires_at: expiresIn
? Math.floor(Date.now() / 1000) + parseInt(expiresIn)
: undefined,
expires_in: expiresIn ? parseInt(expiresIn) : undefined,
token_type: 'bearer',
user: null as any // Will be populated by setSession
};
// Set the session in Supabase client
await this.supabaseClient.setSession(session as any);
// Get user info from the session
const user = await this.supabaseClient.getUser();
// Calculate expiration time // Calculate expiration time
const expiresAt = expiresIn const expiresAt = expiresIn

View File

@@ -0,0 +1,155 @@
/**
* Custom storage adapter for Supabase Auth sessions in CLI environment
* Implements the SupportedStorage interface required by Supabase Auth
*
* This adapter bridges Supabase's session management with our existing
* auth.json credential storage, maintaining backward compatibility
*/
import { SupportedStorage } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store';
import { AuthCredentials } from './types';
import { getLogger } from '../logger';
// Storage key under which Supabase Auth persists its session entry.
const STORAGE_KEY = 'sb-taskmaster-auth-token';
export class SupabaseSessionStorage implements SupportedStorage {
	private store: CredentialStore;
	private logger = getLogger('SupabaseSessionStorage');

	constructor(store: CredentialStore) {
		this.store = store;
	}

	/**
	 * True when `key` refers to the Supabase auth-token session entry;
	 * all other keys are ignored by this adapter.
	 */
	private isSessionKey(key: string): boolean {
		return key === STORAGE_KEY || key.includes('auth-token');
	}

	/**
	 * Build the session object Supabase expects from our stored credentials.
	 * Fields we don't track (role, metadata, timestamps) are filled with
	 * placeholder values.
	 */
	private buildSessionFromCredentials(credentials: AuthCredentials): any {
		const nowIso = new Date().toISOString();
		const expiresAtSeconds = credentials.expiresAt
			? Math.floor(new Date(credentials.expiresAt).getTime() / 1000)
			: Math.floor(Date.now() / 1000) + 3600; // Default to 1 hour

		return {
			access_token: credentials.token,
			refresh_token: credentials.refreshToken || '',
			expires_at: expiresAtSeconds,
			token_type: 'bearer',
			user: {
				id: credentials.userId,
				email: credentials.email || '',
				aud: 'authenticated',
				role: 'authenticated',
				email_confirmed_at: nowIso,
				app_metadata: {},
				user_metadata: {},
				created_at: nowIso,
				updated_at: nowIso
			}
		};
	}

	/**
	 * Parse a serialized Supabase session back into partial credentials.
	 * Returns an empty object when the payload cannot be parsed.
	 */
	private parseSessionToCredentials(
		sessionData: any
	): Partial<AuthCredentials> {
		try {
			const session = JSON.parse(sessionData);
			return {
				token: session.access_token,
				refreshToken: session.refresh_token,
				userId: session.user?.id || 'unknown',
				email: session.user?.email,
				expiresAt: session.expires_at
					? new Date(session.expires_at * 1000).toISOString()
					: undefined
			};
		} catch (error) {
			this.logger.error('Error parsing session:', error);
			return {};
		}
	}

	/**
	 * Get item from storage - Supabase requests the session by key.
	 * Reconstructs a session from auth.json credentials; null when there
	 * is no stored token or the key is not a session key.
	 */
	getItem(key: string): string | null {
		if (!this.isSessionKey(key)) {
			return null;
		}
		try {
			const credentials = this.store.getCredentials({ allowExpired: true });
			if (credentials?.token) {
				return JSON.stringify(this.buildSessionFromCredentials(credentials));
			}
		} catch (error) {
			this.logger.error('Error getting session:', error);
		}
		return null;
	}

	/**
	 * Set item in storage - persists a Supabase session into auth.json,
	 * preserving non-session data such as selectedContext.
	 */
	setItem(key: string, value: string): void {
		if (!this.isSessionKey(key)) {
			return;
		}
		try {
			const sessionUpdates = this.parseSessionToCredentials(value);
			const existing = this.store.getCredentials({ allowExpired: true });
			if (sessionUpdates.token) {
				const merged: AuthCredentials = {
					...existing,
					...sessionUpdates,
					savedAt: new Date().toISOString(),
					selectedContext: existing?.selectedContext
				} as AuthCredentials;
				this.store.saveCredentials(merged);
			}
		} catch (error) {
			this.logger.error('Error setting session:', error);
		}
	}

	/**
	 * Remove item from storage - called on sign-out. Clears only the auth
	 * tokens; the rest of the credentials (e.g. selectedContext) survive.
	 */
	removeItem(key: string): void {
		if (!this.isSessionKey(key)) {
			return;
		}
		try {
			const credentials = this.store.getCredentials({ allowExpired: true });
			if (!credentials) {
				return;
			}
			const cleared: AuthCredentials = {
				...credentials,
				token: '',
				refreshToken: undefined,
				expiresAt: undefined
			} as AuthCredentials;
			this.store.saveCredentials(cleared);
		} catch (error) {
			this.logger.error('Error removing session:', error);
		}
	}

	/**
	 * Clear all session data (tokens only; context is preserved).
	 */
	clear(): void {
		this.removeItem(STORAGE_KEY);
	}
}

View File

@@ -10,6 +10,15 @@ export interface AuthCredentials {
expiresAt?: string | number; expiresAt?: string | number;
tokenType?: 'standard'; tokenType?: 'standard';
savedAt: string; savedAt: string;
selectedContext?: UserContext;
}
export interface UserContext {
orgId?: string;
orgName?: string;
briefId?: string;
briefName?: string;
updatedAt: string;
} }
export interface OAuthFlowOptions { export interface OAuthFlowOptions {
@@ -67,7 +76,11 @@ export type AuthErrorCode =
| 'STORAGE_ERROR' | 'STORAGE_ERROR'
| 'NOT_SUPPORTED' | 'NOT_SUPPORTED'
| 'REFRESH_FAILED' | 'REFRESH_FAILED'
| 'INVALID_RESPONSE'; | 'INVALID_RESPONSE'
| 'PKCE_INIT_FAILED'
| 'PKCE_FAILED'
| 'CODE_EXCHANGE_FAILED'
| 'SESSION_SET_FAILED';
/** /**
* Authentication error class * Authentication error class

View File

@@ -1,19 +1,32 @@
/** /**
* Supabase client for authentication * Supabase authentication client for CLI auth flows
*/ */
import { createClient, SupabaseClient, User } from '@supabase/supabase-js'; import {
createClient,
SupabaseClient as SupabaseJSClient,
User,
Session
} from '@supabase/supabase-js';
import { AuthenticationError } from '../auth/types.js'; import { AuthenticationError } from '../auth/types.js';
import { getLogger } from '../logger/index.js'; import { getLogger } from '../logger/index.js';
import { SupabaseSessionStorage } from '../auth/supabase-session-storage';
import { CredentialStore } from '../auth/credential-store';
export class SupabaseAuthClient { export class SupabaseAuthClient {
private client: SupabaseClient | null = null; private client: SupabaseJSClient | null = null;
private sessionStorage: SupabaseSessionStorage;
private logger = getLogger('SupabaseAuthClient'); private logger = getLogger('SupabaseAuthClient');
constructor() {
const credentialStore = new CredentialStore();
this.sessionStorage = new SupabaseSessionStorage(credentialStore);
}
/** /**
* Initialize Supabase client * Get Supabase client with proper session management
*/ */
private getClient(): SupabaseClient { getClient(): SupabaseJSClient {
if (!this.client) { if (!this.client) {
// Get Supabase configuration from environment - using TM_PUBLIC prefix // Get Supabase configuration from environment - using TM_PUBLIC prefix
const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL; const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
@@ -26,10 +39,12 @@ export class SupabaseAuthClient {
); );
} }
// Create client with custom storage adapter (similar to React Native AsyncStorage)
this.client = createClient(supabaseUrl, supabaseAnonKey, { this.client = createClient(supabaseUrl, supabaseAnonKey, {
auth: { auth: {
storage: this.sessionStorage,
autoRefreshToken: true, autoRefreshToken: true,
persistSession: false, // We handle persistence ourselves persistSession: true,
detectSessionInUrl: false detectSessionInUrl: false
} }
}); });
@@ -39,40 +54,159 @@ export class SupabaseAuthClient {
} }
/** /**
* Note: Code exchange is now handled server-side * Initialize the client and restore session if available
* The server returns tokens directly to avoid PKCE issues
* This method is kept for potential future use
*/ */
async exchangeCodeForSession(_code: string): Promise<{ async initialize(): Promise<Session | null> {
token: string; const client = this.getClient();
refreshToken?: string;
userId: string; try {
email?: string; // Get the current session from storage
expiresAt?: string; const {
}> { data: { session },
throw new AuthenticationError( error
'Code exchange is handled server-side. CLI receives tokens directly.', } = await client.auth.getSession();
'NOT_SUPPORTED'
); if (error) {
this.logger.warn('Failed to restore session:', error);
return null;
}
if (session) {
this.logger.info('Session restored successfully');
}
return session;
} catch (error) {
this.logger.error('Error initializing session:', error);
return null;
}
} }
/** /**
* Refresh an access token * Sign in with PKCE flow (for CLI auth)
*/ */
async refreshSession(refreshToken: string): Promise<{ async signInWithPKCE(): Promise<{ url: string; codeVerifier: string }> {
token: string; const client = this.getClient();
refreshToken?: string;
expiresAt?: string;
}> {
try {
const client = this.getClient();
try {
// Generate PKCE challenge
const { data, error } = await client.auth.signInWithOAuth({
provider: 'github',
options: {
redirectTo:
process.env.TM_AUTH_CALLBACK_URL ||
'http://localhost:3421/auth/callback',
scopes: 'email'
}
});
if (error) {
throw new AuthenticationError(
`Failed to initiate PKCE flow: ${error.message}`,
'PKCE_INIT_FAILED'
);
}
if (!data?.url) {
throw new AuthenticationError(
'No authorization URL returned',
'INVALID_RESPONSE'
);
}
// Extract code_verifier from the URL or generate it
// Note: Supabase handles PKCE internally, we just need to handle the callback
return {
url: data.url,
codeVerifier: '' // Supabase manages this internally
};
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Failed to start PKCE flow: ${(error as Error).message}`,
'PKCE_FAILED'
);
}
}
/**
* Exchange authorization code for session (PKCE flow)
*/
async exchangeCodeForSession(code: string): Promise<Session> {
const client = this.getClient();
try {
const { data, error } = await client.auth.exchangeCodeForSession(code);
if (error) {
throw new AuthenticationError(
`Failed to exchange code: ${error.message}`,
'CODE_EXCHANGE_FAILED'
);
}
if (!data?.session) {
throw new AuthenticationError(
'No session returned from code exchange',
'INVALID_RESPONSE'
);
}
this.logger.info('Successfully exchanged code for session');
return data.session;
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Code exchange failed: ${(error as Error).message}`,
'CODE_EXCHANGE_FAILED'
);
}
}
/**
* Get the current session
*/
async getSession(): Promise<Session | null> {
const client = this.getClient();
try {
const {
data: { session },
error
} = await client.auth.getSession();
if (error) {
this.logger.warn('Failed to get session:', error);
return null;
}
return session;
} catch (error) {
this.logger.error('Error getting session:', error);
return null;
}
}
/**
* Refresh the current session
*/
async refreshSession(): Promise<Session | null> {
const client = this.getClient();
try {
this.logger.info('Refreshing session...'); this.logger.info('Refreshing session...');
// Set the session with refresh token // Supabase will automatically use the stored refresh token
const { data, error } = await client.auth.refreshSession({ const {
refresh_token: refreshToken data: { session },
}); error
} = await client.auth.refreshSession();
if (error) { if (error) {
this.logger.error('Failed to refresh session:', error); this.logger.error('Failed to refresh session:', error);
@@ -82,22 +216,11 @@ export class SupabaseAuthClient {
); );
} }
if (!data.session) { if (session) {
throw new AuthenticationError( this.logger.info('Successfully refreshed session');
'No session data returned',
'INVALID_RESPONSE'
);
} }
this.logger.info('Successfully refreshed session'); return session;
return {
token: data.session.access_token,
refreshToken: data.session.refresh_token,
expiresAt: data.session.expires_at
? new Date(data.session.expires_at * 1000).toISOString()
: undefined
};
} catch (error) { } catch (error) {
if (error instanceof AuthenticationError) { if (error instanceof AuthenticationError) {
throw error; throw error;
@@ -111,21 +234,23 @@ export class SupabaseAuthClient {
} }
/** /**
* Get user details from token * Get current user from session
*/ */
async getUser(token: string): Promise<User | null> { async getUser(): Promise<User | null> {
try { const client = this.getClient();
const client = this.getClient();
// Get user with the token try {
const { data, error } = await client.auth.getUser(token); const {
data: { user },
error
} = await client.auth.getUser();
if (error) { if (error) {
this.logger.warn('Failed to get user:', error); this.logger.warn('Failed to get user:', error);
return null; return null;
} }
return data.user; return user;
} catch (error) { } catch (error) {
this.logger.error('Error getting user:', error); this.logger.error('Error getting user:', error);
return null; return null;
@@ -133,22 +258,55 @@ export class SupabaseAuthClient {
} }
/** /**
* Sign out (revoke tokens) * Sign out and clear session
* Note: This requires the user to be authenticated with the current session.
* For remote token revocation, a server-side admin API with service_role key would be needed.
*/ */
async signOut(): Promise<void> { async signOut(): Promise<void> {
try { const client = this.getClient();
const client = this.getClient();
// Sign out the current session with global scope to revoke all refresh tokens try {
// Sign out with global scope to revoke all refresh tokens
const { error } = await client.auth.signOut({ scope: 'global' }); const { error } = await client.auth.signOut({ scope: 'global' });
if (error) { if (error) {
this.logger.warn('Failed to sign out:', error); this.logger.warn('Failed to sign out:', error);
} }
// Clear cached session data
this.sessionStorage.clear();
} catch (error) { } catch (error) {
this.logger.error('Error during sign out:', error); this.logger.error('Error during sign out:', error);
} }
} }
/**
* Set session from external auth (e.g., from server callback)
*/
async setSession(session: Session): Promise<void> {
const client = this.getClient();
try {
const { error } = await client.auth.setSession({
access_token: session.access_token,
refresh_token: session.refresh_token
});
if (error) {
throw new AuthenticationError(
`Failed to set session: ${error.message}`,
'SESSION_SET_FAILED'
);
}
this.logger.info('Session set successfully');
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Failed to set session: ${(error as Error).message}`,
'SESSION_SET_FAILED'
);
}
}
} }

View File

@@ -177,7 +177,7 @@ describe('ConfigManager', () => {
it('should return storage configuration', () => { it('should return storage configuration', () => {
const storage = manager.getStorageConfig(); const storage = manager.getStorageConfig();
expect(storage).toEqual({ type: 'auto', apiConfigured: false }); expect(storage).toEqual({ type: 'file' });
}); });
it('should return API storage configuration when configured', async () => { it('should return API storage configuration when configured', async () => {
@@ -206,65 +206,7 @@ describe('ConfigManager', () => {
expect(storage).toEqual({ expect(storage).toEqual({
type: 'api', type: 'api',
apiEndpoint: 'https://api.example.com', apiEndpoint: 'https://api.example.com',
apiAccessToken: 'token123', apiAccessToken: 'token123'
apiConfigured: true
});
});
it('should return auto storage configuration with apiConfigured flag', async () => {
// Create a new instance with auto storage config and partial API settings
vi.mocked(ConfigMerger).mockImplementationOnce(
() =>
({
addSource: vi.fn(),
clearSources: vi.fn(),
merge: vi.fn().mockReturnValue({
storage: {
type: 'auto',
apiEndpoint: 'https://api.example.com'
// No apiAccessToken - partial config
}
}),
getSources: vi.fn().mockReturnValue([])
}) as any
);
const autoManager = await ConfigManager.create(testProjectRoot);
const storage = autoManager.getStorageConfig();
expect(storage).toEqual({
type: 'auto',
apiEndpoint: 'https://api.example.com',
apiAccessToken: undefined,
apiConfigured: true // true because apiEndpoint is provided
});
});
it('should return auto storage with apiConfigured false when no API settings', async () => {
// Create a new instance with auto storage but no API settings
vi.mocked(ConfigMerger).mockImplementationOnce(
() =>
({
addSource: vi.fn(),
clearSources: vi.fn(),
merge: vi.fn().mockReturnValue({
storage: {
type: 'auto'
// No API settings at all
}
}),
getSources: vi.fn().mockReturnValue([])
}) as any
);
const autoManager = await ConfigManager.create(testProjectRoot);
const storage = autoManager.getStorageConfig();
expect(storage).toEqual({
type: 'auto',
apiEndpoint: undefined,
apiAccessToken: undefined,
apiConfigured: false // false because no API settings
}); });
}); });

View File

@@ -85,11 +85,6 @@ describe('EnvironmentConfigProvider', () => {
provider = new EnvironmentConfigProvider(); // Reset provider provider = new EnvironmentConfigProvider(); // Reset provider
config = provider.loadConfig(); config = provider.loadConfig();
expect(config.storage?.type).toBe('api'); expect(config.storage?.type).toBe('api');
process.env.TASKMASTER_STORAGE_TYPE = 'auto';
provider = new EnvironmentConfigProvider(); // Reset provider
config = provider.loadConfig();
expect(config.storage?.type).toBe('auto');
}); });
it('should handle nested configuration paths', () => { it('should handle nested configuration paths', () => {

View File

@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
{ {
env: 'TASKMASTER_STORAGE_TYPE', env: 'TASKMASTER_STORAGE_TYPE',
path: ['storage', 'type'], path: ['storage', 'type'],
validate: (v: string) => ['file', 'api', 'auto'].includes(v) validate: (v: string) => ['file', 'api'].includes(v)
}, },
{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] }, { env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] }, { env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },

View File

@@ -55,3 +55,7 @@ export {
// Re-export logger // Re-export logger
export { getLogger, createLogger, setGlobalLogger } from './logger/index.js'; export { getLogger, createLogger, setGlobalLogger } from './logger/index.js';
// Re-export workflow
export { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
export type * from './workflow/index.js';

View File

@@ -3,7 +3,11 @@
* This file defines the contract for configuration management * This file defines the contract for configuration management
*/ */
import type { TaskComplexity, TaskPriority } from '../types/index.js'; import type {
TaskComplexity,
TaskPriority,
StorageType
} from '../types/index.js';
/** /**
* Model configuration for different AI roles * Model configuration for different AI roles
@@ -73,14 +77,6 @@ export interface TagSettings {
tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case'; tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case';
} }
/**
* Storage type options
* - 'file': Local file system storage
* - 'api': Remote API storage (Hamster integration)
* - 'auto': Automatically detect based on auth status
*/
export type StorageType = 'file' | 'api' | 'auto';
/** /**
* Runtime storage configuration used for storage backend selection * Runtime storage configuration used for storage backend selection
* This is what getStorageConfig() returns and what StorageFactory expects * This is what getStorageConfig() returns and what StorageFactory expects

View File

@@ -0,0 +1,170 @@
import { Task, Subtask } from '../types/index.js';
import { Database, Tables } from '../types/database.types.js';
type TaskRow = Tables<'tasks'>;
type DependencyRow = Tables<'task_dependencies'>;
export class TaskMapper {
	/**
	 * Maps database tasks to internal Task format
	 *
	 * Rows with a parent_task_id are folded into their parent's `subtasks`
	 * array; every other row becomes a top-level Task.
	 */
	static mapDatabaseTasksToTasks(
		dbTasks: TaskRow[],
		dbDependencies: DependencyRow[]
	): Task[] {
		if (!dbTasks || dbTasks.length === 0) {
			return [];
		}

		const depsByTaskId = this.groupDependenciesByTaskId(dbDependencies);
		const childrenByParent = this.groupSubtasksByParentId(dbTasks);

		// Only rows without a parent are top-level tasks
		return dbTasks
			.filter((row) => !row.parent_task_id)
			.map((row) =>
				this.mapDatabaseTaskToTask(
					row,
					childrenByParent.get(row.id) ?? [],
					depsByTaskId
				)
			);
	}

	/**
	 * Maps a single database task to internal Task format
	 */
	static mapDatabaseTaskToTask(
		dbTask: TaskRow,
		dbSubtasks: TaskRow[],
		dependenciesByTaskId: Map<string, string[]>
	): Task {
		// Subtasks are numbered 1..n in their given (already sorted) order
		const subtasks: Subtask[] = dbSubtasks.map((row, index) => ({
			id: index + 1,
			parentId: dbTask.id,
			title: row.title,
			description: row.description || '',
			status: this.mapStatus(row.status),
			priority: this.mapPriority(row.priority),
			dependencies: dependenciesByTaskId.get(row.id) || [],
			details: (row.metadata as any)?.details || '',
			testStrategy: (row.metadata as any)?.testStrategy || '',
			createdAt: row.created_at,
			updatedAt: row.updated_at,
			assignee: row.assignee_id || undefined,
			complexity: row.complexity
				? this.mapComplexityToInternal(row.complexity)
				: undefined
		}));

		return {
			id: dbTask.display_id || dbTask.id, // Prefer human-readable display_id
			title: dbTask.title,
			description: dbTask.description || '',
			status: this.mapStatus(dbTask.status),
			priority: this.mapPriority(dbTask.priority),
			dependencies: dependenciesByTaskId.get(dbTask.id) || [],
			details: (dbTask.metadata as any)?.details || '',
			testStrategy: (dbTask.metadata as any)?.testStrategy || '',
			subtasks,
			createdAt: dbTask.created_at,
			updatedAt: dbTask.updated_at,
			assignee: dbTask.assignee_id || undefined,
			complexity: dbTask.complexity
				? this.mapComplexityToInternal(dbTask.complexity)
				: undefined,
			effort: dbTask.estimated_hours || undefined,
			actualEffort: dbTask.actual_hours || undefined
		};
	}

	/**
	 * Groups dependencies by task ID
	 */
	private static groupDependenciesByTaskId(
		dependencies: DependencyRow[]
	): Map<string, string[]> {
		const byTask = new Map<string, string[]>();
		for (const dep of dependencies ?? []) {
			const bucket = byTask.get(dep.task_id);
			if (bucket) {
				bucket.push(dep.depends_on_task_id);
			} else {
				byTask.set(dep.task_id, [dep.depends_on_task_id]);
			}
		}
		return byTask;
	}

	/**
	 * Groups subtasks by their parent ID
	 */
	private static groupSubtasksByParentId(
		tasks: TaskRow[]
	): Map<string, TaskRow[]> {
		const byParent = new Map<string, TaskRow[]>();
		for (const task of tasks) {
			if (!task.parent_task_id) {
				continue;
			}
			const siblings = byParent.get(task.parent_task_id);
			if (siblings) {
				siblings.push(task);
			} else {
				byParent.set(task.parent_task_id, [task]);
			}
		}
		// Keep siblings in their persisted display order
		for (const siblings of byParent.values()) {
			siblings.sort((a, b) => a.subtask_position - b.subtask_position);
		}
		return byParent;
	}

	/**
	 * Maps database status to internal status
	 */
	private static mapStatus(
		status: Database['public']['Enums']['task_status']
	): Task['status'] {
		if (status === 'in_progress') {
			return 'in-progress';
		}
		if (status === 'done') {
			return 'done';
		}
		// 'todo' and any unrecognized value fall back to pending
		return 'pending';
	}

	/**
	 * Maps database priority to internal priority
	 */
	private static mapPriority(
		priority: Database['public']['Enums']['task_priority']
	): Task['priority'] {
		// Remote 'urgent' maps to internal 'critical'; the rest line up 1:1
		return priority === 'urgent' ? 'critical' : (priority as Task['priority']);
	}

	/**
	 * Maps numeric complexity to descriptive complexity
	 */
	private static mapComplexityToInternal(
		complexity: number
	): Task['complexity'] {
		const scale: Array<[number, Task['complexity']]> = [
			[2, 'simple'],
			[5, 'moderate'],
			[8, 'complex']
		];
		for (const [max, label] of scale) {
			if (complexity <= max) {
				return label;
			}
		}
		return 'very-complex';
	}
}

View File

@@ -0,0 +1,110 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { Task } from '../types/index.js';
import { Database } from '../types/database.types.js';
import { TaskMapper } from '../mappers/TaskMapper.js';
import { AuthManager } from '../auth/auth-manager.js';
export class SupabaseTaskRepository {
	constructor(private supabase: SupabaseClient<Database>) {}

	/**
	 * Fetch all tasks (with subtasks and dependencies) for the currently
	 * selected brief.
	 *
	 * @param _projectId Unused; the brief is resolved from the auth context.
	 * @returns Tasks mapped to the internal format, subtasks nested.
	 * @throws Error when no brief is selected or any query fails.
	 */
	async getTasks(_projectId?: string): Promise<Task[]> {
		// Get the current context to determine briefId
		const authManager = AuthManager.getInstance();
		const context = authManager.getContext();

		if (!context || !context.briefId) {
			throw new Error(
				'No brief selected. Please select a brief first using: tm context brief'
			);
		}

		// Get all tasks for the brief, joined with their source document
		const { data: tasks, error } = await this.supabase
			.from('tasks')
			.select(`
        *,
        document:document_id (
          id,
          document_name,
          title,
          description
        )
      `)
			.eq('brief_id', context.briefId)
			.order('position', { ascending: true })
			.order('subtask_position', { ascending: true })
			.order('created_at', { ascending: true });

		if (error) {
			throw new Error(`Failed to fetch tasks: ${error.message}`);
		}

		if (!tasks || tasks.length === 0) {
			return [];
		}

		// Get all dependencies for these tasks in one round trip
		const taskIds = tasks.map((t: any) => t.id);
		const { data: depsData, error: depsError } = await this.supabase
			.from('task_dependencies')
			.select('*')
			.in('task_id', taskIds);

		if (depsError) {
			throw new Error(
				`Failed to fetch task dependencies: ${depsError.message}`
			);
		}

		// Use mapper to convert to internal format
		return TaskMapper.mapDatabaseTasksToTasks(tasks, depsData || []);
	}

	/**
	 * Fetch a single task by id, including its subtasks and the dependency
	 * lists for both the parent and each subtask.
	 *
	 * @param accountId Account (organization) the task belongs to.
	 * @param taskId Primary key of the task to fetch.
	 * @returns The mapped task, or null when no row matches.
	 * @throws Error when any query fails.
	 */
	async getTask(accountId: string, taskId: string): Promise<Task | null> {
		const { data, error } = await this.supabase
			.from('tasks')
			.select('*')
			.eq('account_id', accountId)
			.eq('id', taskId)
			.single();

		if (error) {
			if (error.code === 'PGRST116') {
				return null; // Not found
			}
			throw new Error(`Failed to fetch task: ${error.message}`);
		}

		// Get subtasks if this is a parent task.
		// Fix: surface query errors instead of silently ignoring them.
		const { data: subtasksData, error: subtasksError } = await this.supabase
			.from('tasks')
			.select('*')
			.eq('parent_task_id', taskId)
			.order('subtask_position', { ascending: true });

		if (subtasksError) {
			throw new Error(`Failed to fetch subtasks: ${subtasksError.message}`);
		}

		// Fix: fetch dependencies for the parent AND its subtasks in one query,
		// so subtask dependency lists are populated (matching getTasks).
		const relatedIds = [taskId, ...(subtasksData ?? []).map((s: any) => s.id)];
		const { data: depsData, error: depsError } = await this.supabase
			.from('task_dependencies')
			.select('*')
			.in('task_id', relatedIds);

		if (depsError) {
			throw new Error(
				`Failed to fetch task dependencies: ${depsError.message}`
			);
		}

		// Group dependency rows by the task they belong to
		const dependenciesByTaskId = new Map<string, string[]>();
		for (const dep of depsData ?? []) {
			const deps = dependenciesByTaskId.get(dep.task_id) || [];
			deps.push(dep.depends_on_task_id);
			dependenciesByTaskId.set(dep.task_id, deps);
		}

		// Use mapper to convert single task
		return TaskMapper.mapDatabaseTaskToTask(
			data,
			subtasksData || [],
			dependenciesByTaskId
		);
	}
}

View File

@@ -0,0 +1,36 @@
import { Task, TaskTag } from '../types/index.js';
/**
 * Contract for task persistence backends (e.g. file storage, Supabase).
 *
 * All operations are scoped to a `projectId`; implementations decide how
 * that id maps onto their own storage (account, brief, directory, ...).
 */
export interface TaskRepository {
	// Task operations
	/** List all tasks in the project. */
	getTasks(projectId: string): Promise<Task[]>;
	/** Fetch one task by id; resolves to null when it does not exist. */
	getTask(projectId: string, taskId: string): Promise<Task | null>;
	/** Create a task; the backend assigns the id. */
	createTask(projectId: string, task: Omit<Task, 'id'>): Promise<Task>;
	/** Apply a partial update to a task and return the updated record. */
	updateTask(
		projectId: string,
		taskId: string,
		updates: Partial<Task>
	): Promise<Task>;
	/** Delete a task by id. */
	deleteTask(projectId: string, taskId: string): Promise<void>;
	// Tag operations
	/** List all tags in the project. */
	getTags(projectId: string): Promise<TaskTag[]>;
	/** Fetch one tag by name; resolves to null when it does not exist. */
	getTag(projectId: string, tagName: string): Promise<TaskTag | null>;
	/** Create a new tag. */
	createTag(projectId: string, tag: TaskTag): Promise<TaskTag>;
	/** Apply a partial update to a tag and return the updated record. */
	updateTag(
		projectId: string,
		tagName: string,
		updates: Partial<TaskTag>
	): Promise<TaskTag>;
	/** Delete a tag by name. */
	deleteTag(projectId: string, tagName: string): Promise<void>;
	// Bulk operations
	/** Create many tasks at once; returns them with assigned ids. */
	bulkCreateTasks(
		projectId: string,
		tasks: Omit<Task, 'id'>[]
	): Promise<Task[]>;
	/** Apply per-task partial updates in one call. */
	bulkUpdateTasks(
		projectId: string,
		updates: Array<{ id: string; updates: Partial<Task> }>
	): Promise<Task[]>;
	/** Delete many tasks by id in one call. */
	bulkDeleteTasks(projectId: string, taskIds: string[]): Promise<void>;
}

View File

@@ -4,3 +4,5 @@
*/ */
export { TaskService } from './task-service.js'; export { TaskService } from './task-service.js';
export { OrganizationService } from './organization.service.js';
export type { Organization, Brief } from './organization.service.js';

View File

@@ -0,0 +1,363 @@
/**
* @fileoverview Organization and Brief management service
* Handles fetching and managing organizations and briefs from the API
*/
import { SupabaseClient } from '@supabase/supabase-js';
import { Database } from '../types/database.types.js';
import { TaskMasterError, ERROR_CODES } from '../errors/task-master-error.js';
import { getLogger } from '../logger/index.js';
/**
 * Organization data structure
 */
export interface Organization {
	/** Unique account id */
	id: string;
	/** Display name of the organization */
	name: string;
	/** URL-friendly identifier; callers fall back to the id when no slug is set */
	slug: string;
}
/**
 * Brief data structure
 */
export interface Brief {
	/** Unique brief id */
	id: string;
	/** Owning organization (account) id */
	accountId: string;
	/** Id of the document this brief is associated with */
	documentId: string;
	/** Raw status value as stored in the database */
	status: string;
	/** Creation timestamp, as returned by the database */
	createdAt: string;
	/** Last-update timestamp, as returned by the database */
	updatedAt: string;
}
/**
 * Task data structure from the remote database
 */
export interface RemoteTask {
	/** Unique task id */
	id: string;
	/** Brief this task belongs to */
	briefId: string;
	/** Id of the document the task was generated from */
	documentId: string;
	/** Sort position among top-level tasks (null when unset) */
	position: number | null;
	/** Sort position among sibling subtasks (null when unset) */
	subtaskPosition: number | null;
	/** Raw status value as stored in the database */
	status: string;
	/** Creation timestamp, as returned by the database */
	createdAt: string;
	/** Last-update timestamp, as returned by the database */
	updatedAt: string;
	// Document details from join
	document?: {
		id: string;
		document_name: string;
		title: string;
		description: string;
	};
}
/**
 * Service for managing organizations and briefs
 *
 * Thin read-only wrapper around the Supabase client. Row visibility is
 * enforced by RLS on the authenticated client, so no explicit user filter
 * appears in the queries. All failures are normalized to TaskMasterError
 * with ERROR_CODES.API_ERROR and an `operation` context field.
 */
export class OrganizationService {
	private logger = getLogger('OrganizationService');

	constructor(private supabaseClient: SupabaseClient<Database>) {}

	/**
	 * Get all organizations for the authenticated user
	 *
	 * @returns Organizations visible to the current user; empty array when none.
	 * @throws TaskMasterError (API_ERROR) when the query fails.
	 */
	async getOrganizations(): Promise<Organization[]> {
		try {
			// The user is already authenticated via the Authorization header
			// Query the user_accounts view/table (filtered by RLS for current user)
			const { data, error } = await this.supabaseClient
				.from('user_accounts')
				.select(`
          id,
          name,
          slug
        `);

			if (error) {
				throw new TaskMasterError(
					`Failed to fetch organizations: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getOrganizations' },
					error
				);
			}

			if (!data || data.length === 0) {
				this.logger.debug('No organizations found for user');
				return [];
			}

			// Map to our Organization interface
			// NOTE(review): view columns are nullable, hence the '' fallbacks
			return data.map((org) => ({
				id: org.id ?? '',
				name: org.name ?? '',
				slug: org.slug ?? org.id ?? '' // Use ID as fallback if slug is null
			}));
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			// Wrap unexpected failures so callers see a uniform error type
			throw new TaskMasterError(
				'Failed to fetch organizations',
				ERROR_CODES.API_ERROR,
				{ operation: 'getOrganizations' },
				error as Error
			);
		}
	}

	/**
	 * Get a specific organization by ID
	 *
	 * @param orgId Account id to look up.
	 * @returns The organization, or null when not found.
	 * @throws TaskMasterError (API_ERROR) when the query fails.
	 */
	async getOrganization(orgId: string): Promise<Organization | null> {
		try {
			const { data, error } = await this.supabaseClient
				.from('accounts')
				.select(`
          id,
          name,
          slug
        `)
				.eq('id', orgId)
				.single();

			if (error) {
				if (error.code === 'PGRST116') {
					// No rows found
					return null;
				}
				throw new TaskMasterError(
					`Failed to fetch organization: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getOrganization', orgId },
					error
				);
			}

			if (!data) {
				return null;
			}

			const accountData =
				data as Database['public']['Tables']['accounts']['Row'];
			return {
				id: accountData.id,
				name: accountData.name,
				slug: accountData.slug || accountData.id
			};
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch organization',
				ERROR_CODES.API_ERROR,
				{ operation: 'getOrganization', orgId },
				error as Error
			);
		}
	}

	/**
	 * Get all briefs for a specific organization
	 *
	 * @param orgId Account id whose briefs to list.
	 * @returns Briefs for the organization; empty array when none.
	 * @throws TaskMasterError (API_ERROR) when the query fails.
	 */
	async getBriefs(orgId: string): Promise<Brief[]> {
		try {
			const { data, error } = await this.supabaseClient
				.from('brief')
				.select(`
          id,
          account_id,
          document_id,
          status,
          created_at,
          updated_at
        `)
				.eq('account_id', orgId);

			if (error) {
				throw new TaskMasterError(
					`Failed to fetch briefs: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getBriefs', orgId },
					error
				);
			}

			if (!data || data.length === 0) {
				this.logger.debug(`No briefs found for organization ${orgId}`);
				return [];
			}

			// Map to our Brief interface (snake_case columns -> camelCase fields)
			return data.map((brief: any) => ({
				id: brief.id,
				accountId: brief.account_id,
				documentId: brief.document_id,
				status: brief.status,
				createdAt: brief.created_at,
				updatedAt: brief.updated_at
			}));
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch briefs',
				ERROR_CODES.API_ERROR,
				{ operation: 'getBriefs', orgId },
				error as Error
			);
		}
	}

	/**
	 * Get a specific brief by ID
	 *
	 * @param briefId Brief id to look up.
	 * @returns The brief, or null when not found.
	 * @throws TaskMasterError (API_ERROR) when the query fails.
	 */
	async getBrief(briefId: string): Promise<Brief | null> {
		try {
			const { data, error } = await this.supabaseClient
				.from('brief')
				.select(`
          id,
          account_id,
          document_id,
          status,
          created_at,
          updated_at
        `)
				.eq('id', briefId)
				.single();

			if (error) {
				if (error.code === 'PGRST116') {
					// No rows found
					return null;
				}
				throw new TaskMasterError(
					`Failed to fetch brief: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getBrief', briefId },
					error
				);
			}

			if (!data) {
				return null;
			}

			const briefData = data as any;
			return {
				id: briefData.id,
				accountId: briefData.account_id,
				documentId: briefData.document_id,
				status: briefData.status,
				createdAt: briefData.created_at,
				updatedAt: briefData.updated_at
			};
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch brief',
				ERROR_CODES.API_ERROR,
				{ operation: 'getBrief', briefId },
				error as Error
			);
		}
	}

	/**
	 * Validate that a user has access to an organization
	 *
	 * @returns true when the org is visible to the user; false on not-found
	 *          or on any error (errors are logged, never thrown).
	 */
	async validateOrgAccess(orgId: string): Promise<boolean> {
		try {
			const org = await this.getOrganization(orgId);
			return org !== null;
		} catch (error) {
			this.logger.error(`Failed to validate org access: ${error}`);
			return false;
		}
	}

	/**
	 * Validate that a user has access to a brief
	 *
	 * @returns true when the brief is visible to the user; false on not-found
	 *          or on any error (errors are logged, never thrown).
	 */
	async validateBriefAccess(briefId: string): Promise<boolean> {
		try {
			const brief = await this.getBrief(briefId);
			return brief !== null;
		} catch (error) {
			this.logger.error(`Failed to validate brief access: ${error}`);
			return false;
		}
	}

	/**
	 * Get all tasks for a specific brief
	 *
	 * Results are ordered by position, then subtask_position, then created_at,
	 * with the source document joined in for display purposes.
	 *
	 * @param briefId Brief id whose tasks to list.
	 * @returns Raw remote tasks; empty array when none.
	 * @throws TaskMasterError (API_ERROR) when the query fails.
	 */
	async getTasks(briefId: string): Promise<RemoteTask[]> {
		try {
			const { data, error } = await this.supabaseClient
				.from('tasks')
				.select(`
          *,
          document:document_id (
            id,
            document_name,
            title,
            description
          )
        `)
				.eq('brief_id', briefId)
				.order('position', { ascending: true })
				.order('subtask_position', { ascending: true })
				.order('created_at', { ascending: true });

			if (error) {
				throw new TaskMasterError(
					`Failed to fetch tasks: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getTasks', briefId },
					error
				);
			}

			if (!data || data.length === 0) {
				this.logger.debug(`No tasks found for brief ${briefId}`);
				return [];
			}

			// Map to our RemoteTask interface (snake_case -> camelCase)
			return data.map((task: any) => ({
				id: task.id,
				briefId: task.brief_id,
				documentId: task.document_id,
				position: task.position,
				subtaskPosition: task.subtask_position,
				status: task.status,
				createdAt: task.created_at,
				updatedAt: task.updated_at,
				document: task.document
					? {
							id: task.document.id,
							document_name: task.document.document_name,
							title: task.document.title,
							description: task.document.description
						}
					: undefined
			}));
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch tasks',
				ERROR_CODES.API_ERROR,
				{ operation: 'getTasks', briefId },
				error as Error
			);
		}
	}
}

View File

@@ -3,7 +3,12 @@
* Core service for task operations - handles business logic between storage and API * Core service for task operations - handles business logic between storage and API
*/ */
import type { Task, TaskFilter, TaskStatus } from '../types/index.js'; import type {
Task,
TaskFilter,
TaskStatus,
StorageType
} from '../types/index.js';
import type { IStorage } from '../interfaces/storage.interface.js'; import type { IStorage } from '../interfaces/storage.interface.js';
import { ConfigManager } from '../config/config-manager.js'; import { ConfigManager } from '../config/config-manager.js';
import { StorageFactory } from '../storage/storage-factory.js'; import { StorageFactory } from '../storage/storage-factory.js';
@@ -22,8 +27,8 @@ export interface TaskListResult {
filtered: number; filtered: number;
/** The tag these tasks belong to (only present if explicitly provided) */ /** The tag these tasks belong to (only present if explicitly provided) */
tag?: string; tag?: string;
/** Storage type being used - includes 'auto' for automatic detection */ /** Storage type being used */
storageType: 'file' | 'api' | 'auto'; storageType: StorageType;
} }
/** /**
@@ -113,7 +118,7 @@ export class TaskService {
total: rawTasks.length, total: rawTasks.length,
filtered: filteredEntities.length, filtered: filteredEntities.length,
tag: options.tag, // Only include tag if explicitly provided tag: options.tag, // Only include tag if explicitly provided
storageType: this.configManager.getStorageConfig().type storageType: this.getStorageType()
}; };
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
@@ -166,7 +171,7 @@ export class TaskService {
byStatus: Record<TaskStatus, number>; byStatus: Record<TaskStatus, number>;
withSubtasks: number; withSubtasks: number;
blocked: number; blocked: number;
storageType: 'file' | 'api' | 'auto'; storageType: StorageType;
}> { }> {
const result = await this.getTaskList({ const result = await this.getTaskList({
tag, tag,
@@ -334,8 +339,12 @@ export class TaskService {
/** /**
* Get current storage type * Get current storage type
*/ */
getStorageType(): 'file' | 'api' | 'auto' { getStorageType(): StorageType {
return this.configManager.getStorageConfig().type; // Prefer the runtime storage type if available to avoid exposing 'auto'
const s = this.storage as { getType?: () => 'file' | 'api' } | null;
const runtimeType = s?.getType?.();
return (runtimeType ??
this.configManager.getStorageConfig().type) as StorageType;
} }
/** /**

View File

@@ -1,27 +1,29 @@
/** /**
* @fileoverview API-based storage implementation for Hamster integration * @fileoverview API-based storage implementation using repository pattern
* This provides storage via REST API instead of local file system * This provides storage via repository abstraction for flexibility
*/ */
import type { import type {
IStorage, IStorage,
StorageStats StorageStats
} from '../interfaces/storage.interface.js'; } from '../interfaces/storage.interface.js';
import type { Task, TaskMetadata } from '../types/index.js'; import type { Task, TaskMetadata, TaskTag } from '../types/index.js';
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js'; import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
import { TaskRepository } from '../repositories/task-repository.interface.js';
import { SupabaseTaskRepository } from '../repositories/supabase-task-repository.js';
import { SupabaseClient } from '@supabase/supabase-js';
import { AuthManager } from '../auth/auth-manager.js';
/** /**
* API storage configuration * API storage configuration
*/ */
export interface ApiStorageConfig { export interface ApiStorageConfig {
/** API endpoint base URL */ /** Supabase client instance */
endpoint: string; supabaseClient?: SupabaseClient;
/** Access token for authentication */ /** Custom repository implementation */
accessToken: string; repository?: TaskRepository;
/** Optional project ID */ /** Project ID for scoping */
projectId?: string; projectId: string;
/** Request timeout in milliseconds */
timeout?: number;
/** Enable request retries */ /** Enable request retries */
enableRetry?: boolean; enableRetry?: boolean;
/** Maximum retry attempts */ /** Maximum retry attempts */
@@ -29,64 +31,58 @@ export interface ApiStorageConfig {
} }
/** /**
* API response wrapper * ApiStorage implementation using repository pattern
*/ * Provides flexibility to swap between different backend implementations
interface ApiResponse<T> {
success: boolean;
data?: T;
error?: string;
message?: string;
}
/**
* ApiStorage implementation for Hamster integration
* Fetches and stores tasks via REST API
*/ */
export class ApiStorage implements IStorage { export class ApiStorage implements IStorage {
private readonly config: Required<ApiStorageConfig>; private readonly repository: TaskRepository;
private readonly projectId: string;
private readonly enableRetry: boolean;
private readonly maxRetries: number;
private initialized = false; private initialized = false;
private tagsCache: Map<string, TaskTag> = new Map();
constructor(config: ApiStorageConfig) { constructor(config: ApiStorageConfig) {
this.validateConfig(config); this.validateConfig(config);
this.config = { // Use provided repository or create Supabase repository
endpoint: config.endpoint.replace(/\/$/, ''), // Remove trailing slash if (config.repository) {
accessToken: config.accessToken, this.repository = config.repository;
projectId: config.projectId || 'default', } else if (config.supabaseClient) {
timeout: config.timeout || 30000, // TODO: SupabaseTaskRepository doesn't implement all TaskRepository methods yet
enableRetry: config.enableRetry ?? true, // Cast for now until full implementation is complete
maxRetries: config.maxRetries || 3 this.repository = new SupabaseTaskRepository(
}; config.supabaseClient
) as unknown as TaskRepository;
} else {
throw new TaskMasterError(
'Either repository or supabaseClient must be provided',
ERROR_CODES.MISSING_CONFIGURATION
);
}
this.projectId = config.projectId;
this.enableRetry = config.enableRetry ?? true;
this.maxRetries = config.maxRetries ?? 3;
} }
/** /**
* Validate API storage configuration * Validate API storage configuration
*/ */
private validateConfig(config: ApiStorageConfig): void { private validateConfig(config: ApiStorageConfig): void {
if (!config.endpoint) { if (!config.projectId) {
throw new TaskMasterError( throw new TaskMasterError(
'API endpoint is required for API storage', 'Project ID is required for API storage',
ERROR_CODES.MISSING_CONFIGURATION ERROR_CODES.MISSING_CONFIGURATION
); );
} }
if (!config.accessToken) { if (!config.repository && !config.supabaseClient) {
throw new TaskMasterError( throw new TaskMasterError(
'Access token is required for API storage', 'Either repository or supabaseClient must be provided',
ERROR_CODES.MISSING_CONFIGURATION ERROR_CODES.MISSING_CONFIGURATION
); );
} }
// Validate endpoint URL format
try {
new URL(config.endpoint);
} catch {
throw new TaskMasterError(
'Invalid API endpoint URL',
ERROR_CODES.INVALID_INPUT,
{ endpoint: config.endpoint }
);
}
} }
/** /**
@@ -96,8 +92,8 @@ export class ApiStorage implements IStorage {
if (this.initialized) return; if (this.initialized) return;
try { try {
// Verify API connectivity // Load initial tags
await this.verifyConnection(); await this.loadTagsIntoCache();
this.initialized = true; this.initialized = true;
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
@@ -110,39 +106,71 @@ export class ApiStorage implements IStorage {
} }
/** /**
* Verify API connection * Load tags into cache
* In our API-based system, "tags" represent briefs
*/ */
private async verifyConnection(): Promise<void> { private async loadTagsIntoCache(): Promise<void> {
const response = await this.makeRequest<{ status: string }>('/health'); try {
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!response.success) { // If we have a selected brief, create a virtual "tag" for it
throw new Error(`API health check failed: ${response.error}`); if (context?.briefId) {
// Create a virtual tag representing the current brief
const briefTag: TaskTag = {
name: context.briefId,
tasks: [], // Will be populated when tasks are loaded
metadata: {
briefId: context.briefId,
briefName: context.briefName,
organizationId: context.orgId
}
};
this.tagsCache.clear();
this.tagsCache.set(context.briefId, briefTag);
}
} catch (error) {
// If no brief is selected, that's okay - user needs to select one first
console.debug('No brief selected, starting with empty cache');
} }
} }
/** /**
* Load tasks from API * Load tasks from API
* In our system, the tag parameter represents a brief ID
*/ */
async loadTasks(tag?: string): Promise<Task[]> { async loadTasks(tag?: string): Promise<Task[]> {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag const authManager = AuthManager.getInstance();
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}` const context = authManager.getContext();
: `/projects/${this.config.projectId}/tasks`;
const response = await this.makeRequest<{ tasks: Task[] }>(endpoint); // If no brief is selected in context, throw an error
if (!context?.briefId) {
if (!response.success) { throw new Error(
throw new Error(response.error || 'Failed to load tasks'); 'No brief selected. Please select a brief first using: tm context brief <brief-id>'
);
} }
return response.data?.tasks || []; // Load tasks from the current brief context
const tasks = await this.retryOperation(() =>
this.repository.getTasks(this.projectId)
);
// Update the tag cache with the loaded task IDs
const briefTag = this.tagsCache.get(context.briefId);
if (briefTag) {
briefTag.tasks = tasks.map((task) => task.id);
}
return tasks;
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to load tasks from API', 'Failed to load tasks from API',
ERROR_CODES.STORAGE_ERROR, ERROR_CODES.STORAGE_ERROR,
{ operation: 'loadTasks', tag }, { operation: 'loadTasks', tag, context: 'brief-based loading' },
error as Error error as Error
); );
} }
@@ -155,15 +183,29 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag if (tag) {
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}` // Update tag with task IDs
: `/projects/${this.config.projectId}/tasks`; const tagData = this.tagsCache.get(tag) || {
name: tag,
tasks: [],
metadata: {}
};
tagData.tasks = tasks.map((t) => t.id);
const response = await this.makeRequest(endpoint, 'PUT', { tasks }); // Save or update tag
if (this.tagsCache.has(tag)) {
await this.repository.updateTag(this.projectId, tag, tagData);
} else {
await this.repository.createTag(this.projectId, tagData);
}
if (!response.success) { this.tagsCache.set(tag, tagData);
throw new Error(response.error || 'Failed to save tasks');
} }
// Save tasks using bulk operation
await this.retryOperation(() =>
this.repository.bulkCreateTasks(this.projectId, tasks)
);
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to save tasks to API', 'Failed to save tasks to API',
@@ -181,20 +223,17 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag if (tag) {
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}` // Check if task is in tag
: `/projects/${this.config.projectId}/tasks/${taskId}`; const tagData = this.tagsCache.get(tag);
if (!tagData || !tagData.tasks.includes(taskId)) {
const response = await this.makeRequest<{ task: Task }>(endpoint);
if (!response.success) {
if (response.error?.includes('not found')) {
return null; return null;
} }
throw new Error(response.error || 'Failed to load task');
} }
return response.data?.task || null; return await this.retryOperation(() =>
this.repository.getTask(this.projectId, taskId)
);
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to load task from API', 'Failed to load task from API',
@@ -212,14 +251,26 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag // Check if task exists
? `/projects/${this.config.projectId}/tasks/${task.id}?tag=${encodeURIComponent(tag)}` const existing = await this.repository.getTask(this.projectId, task.id);
: `/projects/${this.config.projectId}/tasks/${task.id}`;
const response = await this.makeRequest(endpoint, 'PUT', { task }); if (existing) {
await this.retryOperation(() =>
this.repository.updateTask(this.projectId, task.id, task)
);
} else {
await this.retryOperation(() =>
this.repository.createTask(this.projectId, task)
);
}
if (!response.success) { // Update tag if specified
throw new Error(response.error || 'Failed to save task'); if (tag) {
const tagData = this.tagsCache.get(tag);
if (tagData && !tagData.tasks.includes(task.id)) {
tagData.tasks.push(task.id);
await this.repository.updateTag(this.projectId, tag, tagData);
}
} }
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
@@ -238,14 +289,17 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag await this.retryOperation(() =>
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}` this.repository.deleteTask(this.projectId, taskId)
: `/projects/${this.config.projectId}/tasks/${taskId}`; );
const response = await this.makeRequest(endpoint, 'DELETE'); // Remove from tag if specified
if (tag) {
if (!response.success) { const tagData = this.tagsCache.get(tag);
throw new Error(response.error || 'Failed to delete task'); if (tagData) {
tagData.tasks = tagData.tasks.filter((id) => id !== taskId);
await this.repository.updateTag(this.projectId, tag, tagData);
}
} }
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
@@ -258,21 +312,24 @@ export class ApiStorage implements IStorage {
} }
/** /**
* List available tags * List available tags (briefs in our system)
*/ */
async listTags(): Promise<string[]> { async listTags(): Promise<string[]> {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest<{ tags: string[] }>( const authManager = AuthManager.getInstance();
`/projects/${this.config.projectId}/tags` const context = authManager.getContext();
);
if (!response.success) { // In our API-based system, we only have one "tag" at a time - the current brief
throw new Error(response.error || 'Failed to list tags'); if (context?.briefId) {
// Ensure the current brief is in our cache
await this.loadTagsIntoCache();
return [context.briefId];
} }
return response.data?.tags || []; // No brief selected, return empty array
return [];
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to list tags from API', 'Failed to list tags from API',
@@ -290,19 +347,15 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag if (tag) {
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}` const tagData = this.tagsCache.get(tag);
: `/projects/${this.config.projectId}/metadata`; return (tagData?.metadata as TaskMetadata) || null;
const response = await this.makeRequest<{ metadata: TaskMetadata }>(
endpoint
);
if (!response.success) {
return null;
} }
return response.data?.metadata || null; // Return global metadata if no tag specified
// This could be stored in a special system tag
const systemTag = await this.repository.getTag(this.projectId, '_system');
return (systemTag?.metadata as TaskMetadata) || null;
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to load metadata from API', 'Failed to load metadata from API',
@@ -320,14 +373,38 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const endpoint = tag if (tag) {
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}` const tagData = this.tagsCache.get(tag) || {
: `/projects/${this.config.projectId}/metadata`; name: tag,
tasks: [],
metadata: {}
};
tagData.metadata = metadata as any;
const response = await this.makeRequest(endpoint, 'PUT', { metadata }); if (this.tagsCache.has(tag)) {
await this.repository.updateTag(this.projectId, tag, tagData);
} else {
await this.repository.createTag(this.projectId, tagData);
}
if (!response.success) { this.tagsCache.set(tag, tagData);
throw new Error(response.error || 'Failed to save metadata'); } else {
// Save to system tag
const systemTag: TaskTag = {
name: '_system',
tasks: [],
metadata: metadata as any
};
const existing = await this.repository.getTag(
this.projectId,
'_system'
);
if (existing) {
await this.repository.updateTag(this.projectId, '_system', systemTag);
} else {
await this.repository.createTag(this.projectId, systemTag);
}
} }
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
@@ -358,14 +435,30 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
// First load existing tasks // Use bulk create - repository should handle duplicates
const existingTasks = await this.loadTasks(tag); await this.retryOperation(() =>
this.repository.bulkCreateTasks(this.projectId, tasks)
);
// Append new tasks // Update tag if specified
const allTasks = [...existingTasks, ...tasks]; if (tag) {
const tagData = this.tagsCache.get(tag) || {
name: tag,
tasks: [],
metadata: {}
};
// Save all tasks const newTaskIds = tasks.map((t) => t.id);
await this.saveTasks(allTasks, tag); tagData.tasks = [...new Set([...tagData.tasks, ...newTaskIds])];
if (this.tagsCache.has(tag)) {
await this.repository.updateTag(this.projectId, tag, tagData);
} else {
await this.repository.createTag(this.projectId, tagData);
}
this.tagsCache.set(tag, tagData);
}
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to append tasks to API', 'Failed to append tasks to API',
@@ -387,18 +480,9 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
// Load the task await this.retryOperation(() =>
const task = await this.loadTask(taskId, tag); this.repository.updateTask(this.projectId, taskId, updates)
);
if (!task) {
throw new Error(`Task ${taskId} not found`);
}
// Merge updates
const updatedTask = { ...task, ...updates, id: taskId };
// Save updated task
await this.saveTask(updatedTask, tag);
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to update task via API', 'Failed to update task via API',
@@ -423,14 +507,11 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest( await this.retryOperation(() =>
`/projects/${this.config.projectId}/tags/${encodeURIComponent(tag)}`, this.repository.deleteTag(this.projectId, tag)
'DELETE'
); );
if (!response.success) { this.tagsCache.delete(tag);
throw new Error(response.error || 'Failed to delete tag');
}
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to delete tag via API', 'Failed to delete tag via API',
@@ -448,15 +529,21 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest( const tagData = this.tagsCache.get(oldTag);
`/projects/${this.config.projectId}/tags/${encodeURIComponent(oldTag)}/rename`, if (!tagData) {
'POST', throw new Error(`Tag ${oldTag} not found`);
{ newTag }
);
if (!response.success) {
throw new Error(response.error || 'Failed to rename tag');
} }
// Create new tag with same data
const newTagData = { ...tagData, name: newTag };
await this.repository.createTag(this.projectId, newTagData);
// Delete old tag
await this.repository.deleteTag(this.projectId, oldTag);
// Update cache
this.tagsCache.delete(oldTag);
this.tagsCache.set(newTag, newTagData);
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to rename tag via API', 'Failed to rename tag via API',
@@ -474,15 +561,17 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest( const sourceData = this.tagsCache.get(sourceTag);
`/projects/${this.config.projectId}/tags/${encodeURIComponent(sourceTag)}/copy`, if (!sourceData) {
'POST', throw new Error(`Source tag ${sourceTag} not found`);
{ targetTag }
);
if (!response.success) {
throw new Error(response.error || 'Failed to copy tag');
} }
// Create new tag with copied data
const targetData = { ...sourceData, name: targetTag };
await this.repository.createTag(this.projectId, targetData);
// Update cache
this.tagsCache.set(targetTag, targetData);
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to copy tag via API', 'Failed to copy tag via API',
@@ -500,24 +589,22 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest<{ const tasks = await this.repository.getTasks(this.projectId);
stats: StorageStats; const tags = await this.repository.getTags(this.projectId);
}>(`/projects/${this.config.projectId}/stats`);
if (!response.success) { const tagStats = tags.map((tag) => ({
throw new Error(response.error || 'Failed to get stats'); tag: tag.name,
} taskCount: tag.tasks.length,
lastModified: new Date().toISOString() // TODO: Get actual last modified from tag data
}));
// Return stats or default values return {
return ( totalTasks: tasks.length,
response.data?.stats || { totalTags: tags.length,
totalTasks: 0, storageSize: 0, // Not applicable for API storage
totalTags: 0, lastModified: new Date().toISOString(),
storageSize: 0, tagStats
lastModified: new Date().toISOString(), };
tagStats: []
}
);
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to get stats from API', 'Failed to get stats from API',
@@ -535,16 +622,15 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest<{ backupId: string }>( // Export all data
`/projects/${this.config.projectId}/backup`, await this.repository.getTasks(this.projectId);
'POST' await this.repository.getTags(this.projectId);
);
if (!response.success) { // TODO: In a real implementation, this would:
throw new Error(response.error || 'Failed to create backup'); // 1. Create backup data structure with tasks and tags
} // 2. Save the backup to a storage service
// For now, return a backup identifier
return response.data?.backupId || 'unknown'; return `backup-${this.projectId}-${Date.now()}`;
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to create backup via API', 'Failed to create backup via API',
@@ -558,27 +644,16 @@ export class ApiStorage implements IStorage {
/** /**
* Restore from backup * Restore from backup
*/ */
async restore(backupPath: string): Promise<void> { async restore(backupId: string): Promise<void> {
await this.ensureInitialized(); await this.ensureInitialized();
try { // This would restore from a backup service
const response = await this.makeRequest( // Implementation depends on backup strategy
`/projects/${this.config.projectId}/restore`, throw new TaskMasterError(
'POST', 'Restore not implemented for API storage',
{ backupId: backupPath } ERROR_CODES.NOT_IMPLEMENTED,
); { operation: 'restore', backupId }
);
if (!response.success) {
throw new Error(response.error || 'Failed to restore backup');
}
} catch (error) {
throw new TaskMasterError(
'Failed to restore backup via API',
ERROR_CODES.STORAGE_ERROR,
{ operation: 'restore', backupPath },
error as Error
);
}
} }
/** /**
@@ -588,14 +663,23 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest( // Delete all tasks
`/projects/${this.config.projectId}/clear`, const tasks = await this.repository.getTasks(this.projectId);
'POST' if (tasks.length > 0) {
); await this.repository.bulkDeleteTasks(
this.projectId,
if (!response.success) { tasks.map((t) => t.id)
throw new Error(response.error || 'Failed to clear data'); );
} }
// Delete all tags
const tags = await this.repository.getTags(this.projectId);
for (const tag of tags) {
await this.repository.deleteTag(this.projectId, tag.name);
}
// Clear cache
this.tagsCache.clear();
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to clear data via API', 'Failed to clear data via API',
@@ -611,6 +695,7 @@ export class ApiStorage implements IStorage {
*/ */
async close(): Promise<void> { async close(): Promise<void> {
this.initialized = false; this.initialized = false;
this.tagsCache.clear();
} }
/** /**
@@ -623,102 +708,21 @@ export class ApiStorage implements IStorage {
} }
/** /**
* Make HTTP request to API * Retry an operation with exponential backoff
*/ */
private async makeRequest<T>( private async retryOperation<T>(
path: string, operation: () => Promise<T>,
method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET', attempt: number = 1
body?: unknown ): Promise<T> {
): Promise<ApiResponse<T>> {
const url = `${this.config.endpoint}${path}`;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
try { try {
const options: RequestInit = { return await operation();
method, } catch (error) {
headers: { if (this.enableRetry && attempt < this.maxRetries) {
Authorization: `Bearer ${this.config.accessToken}`, const delay = Math.pow(2, attempt) * 1000;
'Content-Type': 'application/json', await new Promise((resolve) => setTimeout(resolve, delay));
Accept: 'application/json' return this.retryOperation(operation, attempt + 1);
},
signal: controller.signal
};
if (body && (method === 'POST' || method === 'PUT')) {
options.body = JSON.stringify(body);
} }
throw error;
let lastError: Error | null = null;
let attempt = 0;
while (attempt < this.config.maxRetries) {
attempt++;
try {
const response = await fetch(url, options);
const data = await response.json();
if (response.ok) {
return { success: true, data: data as T };
}
// Handle specific error codes
if (response.status === 401) {
return {
success: false,
error: 'Authentication failed - check access token'
};
}
if (response.status === 404) {
return {
success: false,
error: 'Resource not found'
};
}
if (response.status === 429) {
// Rate limited - retry with backoff
if (this.config.enableRetry && attempt < this.config.maxRetries) {
await this.delay(Math.pow(2, attempt) * 1000);
continue;
}
}
const errorData = data as any;
return {
success: false,
error:
errorData.error ||
errorData.message ||
`HTTP ${response.status}: ${response.statusText}`
};
} catch (error) {
lastError = error as Error;
// Retry on network errors
if (this.config.enableRetry && attempt < this.config.maxRetries) {
await this.delay(Math.pow(2, attempt) * 1000);
continue;
}
}
}
// All retries exhausted
return {
success: false,
error: lastError?.message || 'Request failed after retries'
};
} finally {
clearTimeout(timeoutId);
} }
} }
/**
* Delay helper for retries
*/
private delay(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
} }

View File

@@ -13,6 +13,7 @@ import { ApiStorage } from './api-storage.js';
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js'; import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
import { AuthManager } from '../auth/auth-manager.js'; import { AuthManager } from '../auth/auth-manager.js';
import { getLogger } from '../logger/index.js'; import { getLogger } from '../logger/index.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
/** /**
* Factory for creating storage implementations based on configuration * Factory for creating storage implementations based on configuration
@@ -148,29 +149,13 @@ export class StorageFactory {
* Create API storage implementation * Create API storage implementation
*/ */
private static createApiStorage(config: Partial<IConfiguration>): ApiStorage { private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
const { apiEndpoint, apiAccessToken } = config.storage || {}; // Use our SupabaseAuthClient instead of creating a raw Supabase client
const supabaseAuthClient = new SupabaseAuthClient();
if (!apiEndpoint) { const supabaseClient = supabaseAuthClient.getClient();
throw new TaskMasterError(
'API endpoint is required for API storage',
ERROR_CODES.MISSING_CONFIGURATION,
{ storageType: 'api' }
);
}
if (!apiAccessToken) {
throw new TaskMasterError(
'API access token is required for API storage',
ERROR_CODES.MISSING_CONFIGURATION,
{ storageType: 'api' }
);
}
return new ApiStorage({ return new ApiStorage({
endpoint: apiEndpoint, supabaseClient,
accessToken: apiAccessToken, projectId: config.projectPath || '',
projectId: config.projectPath,
timeout: config.retry?.requestTimeout,
enableRetry: config.retry?.retryOnNetworkError, enableRetry: config.retry?.retryOnNetworkError,
maxRetries: config.retry?.retryAttempts maxRetries: config.retry?.retryAttempts
}); });

View File

@@ -10,7 +10,16 @@ import {
} from './services/task-service.js'; } from './services/task-service.js';
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js'; import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
import type { IConfiguration } from './interfaces/configuration.interface.js'; import type { IConfiguration } from './interfaces/configuration.interface.js';
import type { Task, TaskStatus, TaskFilter } from './types/index.js'; import type {
Task,
TaskStatus,
TaskFilter,
StorageType
} from './types/index.js';
import {
WorkflowService,
type WorkflowServiceConfig
} from './workflow/index.js';
/** /**
* Options for creating TaskMasterCore instance * Options for creating TaskMasterCore instance
@@ -18,6 +27,7 @@ import type { Task, TaskStatus, TaskFilter } from './types/index.js';
export interface TaskMasterCoreOptions { export interface TaskMasterCoreOptions {
projectPath: string; projectPath: string;
configuration?: Partial<IConfiguration>; configuration?: Partial<IConfiguration>;
workflow?: Partial<WorkflowServiceConfig>;
} }
/** /**
@@ -33,6 +43,7 @@ export type { GetTaskListOptions } from './services/task-service.js';
export class TaskMasterCore { export class TaskMasterCore {
private configManager: ConfigManager; private configManager: ConfigManager;
private taskService: TaskService; private taskService: TaskService;
private workflowService: WorkflowService;
/** /**
* Create and initialize a new TaskMasterCore instance * Create and initialize a new TaskMasterCore instance
@@ -55,6 +66,7 @@ export class TaskMasterCore {
// Services will be initialized in the initialize() method // Services will be initialized in the initialize() method
this.configManager = null as any; this.configManager = null as any;
this.taskService = null as any; this.taskService = null as any;
this.workflowService = null as any;
} }
/** /**
@@ -81,6 +93,28 @@ export class TaskMasterCore {
// Create task service // Create task service
this.taskService = new TaskService(this.configManager); this.taskService = new TaskService(this.configManager);
await this.taskService.initialize(); await this.taskService.initialize();
// Create workflow service
const workflowConfig: WorkflowServiceConfig = {
projectRoot: options.projectPath,
...options.workflow
};
// Pass task retrieval function to workflow service
this.workflowService = new WorkflowService(
workflowConfig,
async (taskId: string) => {
const task = await this.getTask(taskId);
if (!task) {
throw new TaskMasterError(
`Task ${taskId} not found`,
ERROR_CODES.TASK_NOT_FOUND
);
}
return task;
}
);
await this.workflowService.initialize();
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to initialize TaskMasterCore', 'Failed to initialize TaskMasterCore',
@@ -152,7 +186,7 @@ export class TaskMasterCore {
/** /**
* Get current storage type * Get current storage type
*/ */
getStorageType(): 'file' | 'api' | 'auto' { getStorageType(): StorageType {
return this.taskService.getStorageType(); return this.taskService.getStorageType();
} }
@@ -170,11 +204,21 @@ export class TaskMasterCore {
await this.configManager.setActiveTag(tag); await this.configManager.setActiveTag(tag);
} }
/**
* Get workflow service for workflow operations
*/
get workflow(): WorkflowService {
return this.workflowService;
}
/** /**
* Close and cleanup resources * Close and cleanup resources
*/ */
async close(): Promise<void> { async close(): Promise<void> {
// TaskService handles storage cleanup internally // TaskService handles storage cleanup internally
if (this.workflowService) {
await this.workflowService.dispose();
}
} }
} }

View File

@@ -0,0 +1,491 @@
/**
 * Any JSON-serializable value, matching what Postgres `json`/`jsonb`
 * columns can hold.
 *
 * NOTE(review): this file appears to be auto-generated (Supabase
 * `gen types` output) — prefer regenerating over hand-editing; confirm.
 */
export type Json =
	| string
	| number
	| boolean
	| null
	| { [key: string]: Json | undefined }
	| Json[];
export type Database = {
public: {
Tables: {
accounts: {
Row: {
created_at: string | null;
created_by: string | null;
email: string | null;
id: string;
is_personal_account: boolean;
name: string;
picture_url: string | null;
primary_owner_user_id: string;
public_data: Json;
slug: string | null;
updated_at: string | null;
updated_by: string | null;
};
Insert: {
created_at?: string | null;
created_by?: string | null;
email?: string | null;
id?: string;
is_personal_account?: boolean;
name: string;
picture_url?: string | null;
primary_owner_user_id?: string;
public_data?: Json;
slug?: string | null;
updated_at?: string | null;
updated_by?: string | null;
};
Update: {
created_at?: string | null;
created_by?: string | null;
email?: string | null;
id?: string;
is_personal_account?: boolean;
name?: string;
picture_url?: string | null;
primary_owner_user_id?: string;
public_data?: Json;
slug?: string | null;
updated_at?: string | null;
updated_by?: string | null;
};
Relationships: [];
};
brief: {
Row: {
account_id: string;
created_at: string;
created_by: string;
document_id: string;
id: string;
plan_generation_completed_at: string | null;
plan_generation_error: string | null;
plan_generation_started_at: string | null;
plan_generation_status: Database['public']['Enums']['plan_generation_status'];
status: Database['public']['Enums']['brief_status'];
updated_at: string;
};
Insert: {
account_id: string;
created_at?: string;
created_by: string;
document_id: string;
id?: string;
plan_generation_completed_at?: string | null;
plan_generation_error?: string | null;
plan_generation_started_at?: string | null;
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
status?: Database['public']['Enums']['brief_status'];
updated_at?: string;
};
Update: {
account_id?: string;
created_at?: string;
created_by?: string;
document_id?: string;
id?: string;
plan_generation_completed_at?: string | null;
plan_generation_error?: string | null;
plan_generation_started_at?: string | null;
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
status?: Database['public']['Enums']['brief_status'];
updated_at?: string;
};
Relationships: [
{
foreignKeyName: 'brief_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'brief_document_id_fkey';
columns: ['document_id'];
isOneToOne: false;
referencedRelation: 'document';
referencedColumns: ['id'];
}
];
};
document: {
Row: {
account_id: string;
created_at: string;
created_by: string;
description: string | null;
document_name: string;
document_type: Database['public']['Enums']['document_type'];
file_path: string | null;
file_size: number | null;
id: string;
metadata: Json | null;
mime_type: string | null;
processed_at: string | null;
processing_error: string | null;
processing_status:
| Database['public']['Enums']['document_processing_status']
| null;
source_id: string | null;
source_type: string | null;
title: string;
updated_at: string;
};
Insert: {
account_id: string;
created_at?: string;
created_by: string;
description?: string | null;
document_name: string;
document_type?: Database['public']['Enums']['document_type'];
file_path?: string | null;
file_size?: number | null;
id?: string;
metadata?: Json | null;
mime_type?: string | null;
processed_at?: string | null;
processing_error?: string | null;
processing_status?:
| Database['public']['Enums']['document_processing_status']
| null;
source_id?: string | null;
source_type?: string | null;
title: string;
updated_at?: string;
};
Update: {
account_id?: string;
created_at?: string;
created_by?: string;
description?: string | null;
document_name?: string;
document_type?: Database['public']['Enums']['document_type'];
file_path?: string | null;
file_size?: number | null;
id?: string;
metadata?: Json | null;
mime_type?: string | null;
processed_at?: string | null;
processing_error?: string | null;
processing_status?:
| Database['public']['Enums']['document_processing_status']
| null;
source_id?: string | null;
source_type?: string | null;
title?: string;
updated_at?: string;
};
Relationships: [
{
foreignKeyName: 'document_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
}
];
};
tasks: {
Row: {
account_id: string;
actual_hours: number;
assignee_id: string | null;
brief_id: string | null;
completed_subtasks: number;
complexity: number | null;
created_at: string;
created_by: string;
description: string | null;
display_id: string | null;
document_id: string | null;
due_date: string | null;
estimated_hours: number | null;
id: string;
metadata: Json;
parent_task_id: string | null;
position: number;
priority: Database['public']['Enums']['task_priority'];
status: Database['public']['Enums']['task_status'];
subtask_position: number;
title: string;
total_subtasks: number;
updated_at: string;
updated_by: string;
};
Insert: {
account_id: string;
actual_hours?: number;
assignee_id?: string | null;
brief_id?: string | null;
completed_subtasks?: number;
complexity?: number | null;
created_at?: string;
created_by: string;
description?: string | null;
display_id?: string | null;
document_id?: string | null;
due_date?: string | null;
estimated_hours?: number | null;
id?: string;
metadata?: Json;
parent_task_id?: string | null;
position?: number;
priority?: Database['public']['Enums']['task_priority'];
status?: Database['public']['Enums']['task_status'];
subtask_position?: number;
title: string;
total_subtasks?: number;
updated_at?: string;
updated_by: string;
};
Update: {
account_id?: string;
actual_hours?: number;
assignee_id?: string | null;
brief_id?: string | null;
completed_subtasks?: number;
complexity?: number | null;
created_at?: string;
created_by?: string;
description?: string | null;
display_id?: string | null;
document_id?: string | null;
due_date?: string | null;
estimated_hours?: number | null;
id?: string;
metadata?: Json;
parent_task_id?: string | null;
position?: number;
priority?: Database['public']['Enums']['task_priority'];
status?: Database['public']['Enums']['task_status'];
subtask_position?: number;
title?: string;
total_subtasks?: number;
updated_at?: string;
updated_by?: string;
};
Relationships: [
{
foreignKeyName: 'tasks_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_brief_id_fkey';
columns: ['brief_id'];
isOneToOne: false;
referencedRelation: 'brief';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_document_id_fkey';
columns: ['document_id'];
isOneToOne: false;
referencedRelation: 'document';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_parent_task_id_fkey';
columns: ['parent_task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
}
];
};
task_dependencies: {
Row: {
account_id: string;
created_at: string;
depends_on_task_id: string;
id: string;
task_id: string;
};
Insert: {
account_id: string;
created_at?: string;
depends_on_task_id: string;
id?: string;
task_id: string;
};
Update: {
account_id?: string;
created_at?: string;
depends_on_task_id?: string;
id?: string;
task_id?: string;
};
Relationships: [
{
foreignKeyName: 'task_dependencies_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'task_dependencies_depends_on_task_id_fkey';
columns: ['depends_on_task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
},
{
foreignKeyName: 'task_dependencies_task_id_fkey';
columns: ['task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
}
];
};
user_accounts: {
Row: {
id: string | null;
name: string | null;
picture_url: string | null;
role: string | null;
slug: string | null;
};
Insert: {
id?: string | null;
name?: string | null;
picture_url?: string | null;
role?: string | null;
slug?: string | null;
};
Update: {
id?: string | null;
name?: string | null;
picture_url?: string | null;
role?: string | null;
slug?: string | null;
};
Relationships: [];
};
};
Views: {
[_ in never]: never;
};
Functions: {
[_ in never]: never;
};
Enums: {
brief_status:
| 'draft'
| 'refining'
| 'aligned'
| 'delivering'
| 'delivered'
| 'done'
| 'archived';
document_processing_status: 'pending' | 'processing' | 'ready' | 'failed';
document_type:
| 'brief'
| 'blueprint'
| 'file'
| 'note'
| 'transcript'
| 'generated_plan'
| 'generated_task'
| 'generated_summary'
| 'method'
| 'task';
plan_generation_status:
| 'not_started'
| 'generating'
| 'completed'
| 'failed';
task_priority: 'low' | 'medium' | 'high' | 'urgent';
task_status: 'todo' | 'in_progress' | 'done';
};
CompositeTypes: {
[_ in never]: never;
};
};
};
/**
 * Resolve the `Row` shape of a table or view by name,
 * e.g. `Tables<'tasks'>`. Also accepts `{ schema: ... }` to address
 * tables in non-`public` schemas.
 */
export type Tables<
	PublicTableNameOrOptions extends
		| keyof (Database['public']['Tables'] & Database['public']['Views'])
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
				Database[PublicTableNameOrOptions['schema']]['Views'])
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
			Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
			Row: infer R;
		}
		? R
		: never
	: PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
				Database['public']['Views'])
		? (Database['public']['Tables'] &
				Database['public']['Views'])[PublicTableNameOrOptions] extends {
				Row: infer R;
			}
			? R
			: never
		: never;

/**
 * Resolve the `Insert` payload shape of a table by name,
 * e.g. `TablesInsert<'tasks'>` (columns with defaults become optional).
 */
export type TablesInsert<
	PublicTableNameOrOptions extends
		| keyof Database['public']['Tables']
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
			Insert: infer I;
		}
		? I
		: never
	: PublicTableNameOrOptions extends keyof Database['public']['Tables']
		? Database['public']['Tables'][PublicTableNameOrOptions] extends {
				Insert: infer I;
			}
			? I
			: never
		: never;

/**
 * Resolve the `Update` payload shape of a table by name,
 * e.g. `TablesUpdate<'tasks'>` (all columns optional).
 */
export type TablesUpdate<
	PublicTableNameOrOptions extends
		| keyof Database['public']['Tables']
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
			Update: infer U;
		}
		? U
		: never
	: PublicTableNameOrOptions extends keyof Database['public']['Tables']
		? Database['public']['Tables'][PublicTableNameOrOptions] extends {
				Update: infer U;
			}
			? U
			: never
		: never;

/**
 * Resolve a Postgres enum's string-literal union by name,
 * e.g. `Enums<'task_status'>` yields `'todo' | 'in_progress' | 'done'`.
 */
export type Enums<
	PublicEnumNameOrOptions extends
		| keyof Database['public']['Enums']
		| { schema: keyof Database },
	EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicEnumNameOrOptions['schema']]['Enums']
		: never = never
> = PublicEnumNameOrOptions extends { schema: keyof Database }
	? Database[PublicEnumNameOrOptions['schema']]['Enums'][EnumName]
	: PublicEnumNameOrOptions extends keyof Database['public']['Enums']
		? Database['public']['Enums'][PublicEnumNameOrOptions]
		: never;

View File

@@ -2,6 +2,14 @@
* Core type definitions for Task Master * Core type definitions for Task Master
*/ */
/**
* Storage type options
* - 'file': Local file system storage
* - 'api': Remote API storage (Hamster integration)
* - 'auto': Automatically detect based on auth status
*/
export type StorageType = 'file' | 'api' | 'auto';
// ============================================================================ // ============================================================================
// Type Literals // Type Literals
// ============================================================================ // ============================================================================
@@ -96,6 +104,15 @@ export interface TaskCollection {
metadata: TaskMetadata; metadata: TaskMetadata;
} }
/**
 * Task tag for organizing tasks into named groups.
 */
export interface TaskTag {
	/** Unique tag name identifying this group */
	name: string;
	tasks: string[]; // Task IDs belonging to this tag
	/** Arbitrary per-tag metadata; shape is not constrained here */
	metadata: Record<string, any>;
}
// ============================================================================ // ============================================================================
// Utility Types // Utility Types
// ============================================================================ // ============================================================================

View File

@@ -0,0 +1,17 @@
/**
* @fileoverview Workflow Module
* Public exports for workflow functionality
*/
export { WorkflowService, type WorkflowServiceConfig } from './workflow-service.js';
// Re-export workflow engine types for convenience
export type {
WorkflowExecutionContext,
WorkflowStatus,
WorkflowEvent,
WorkflowEventType,
WorkflowProcess,
ProcessStatus,
WorktreeInfo
} from '@tm/workflow-engine';

View File

@@ -0,0 +1,218 @@
/**
* @fileoverview Workflow Service
* Integrates workflow engine into Task Master Core
*/
import path from 'node:path';

import {
	TaskExecutionManager,
	type TaskExecutionManagerConfig,
	type WorkflowExecutionContext
} from '@tm/workflow-engine';

import type { Task } from '../types/index.js';
import { TaskMasterError } from '../errors/index.js';
export interface WorkflowServiceConfig {
	/** Project root directory (also used to derive the default worktree base) */
	projectRoot: string;
	/** Maximum number of concurrent workflows (default: 5) */
	maxConcurrent?: number;
	/** Default timeout for workflow execution, in minutes (default: 60) */
	defaultTimeout?: number;
	/** Base directory for worktrees (default: `<projectRoot>/../task-worktrees`) */
	worktreeBase?: string;
	/** Claude Code executable path (default: 'claude') */
	claudeExecutable?: string;
	/** Enable debug logging (default: false) */
	debug?: boolean;
}
/**
* WorkflowService provides Task Master workflow capabilities through core
*/
/**
 * WorkflowService provides Task Master workflow capabilities through core.
 *
 * Thin facade over `@tm/workflow-engine`'s TaskExecutionManager: it builds
 * the engine configuration from {@link WorkflowServiceConfig}, resolves
 * tasks through the injected `getTask` callback, and wraps engine failures
 * in TaskMasterError so callers get a consistent error surface.
 */
export class WorkflowService {
	private workflowEngine: TaskExecutionManager;

	/**
	 * @param config Service configuration; unset fields fall back to defaults
	 * @param getTask Callback used to resolve a task by id (injected by core
	 *   so this service does not depend on the task service directly)
	 */
	constructor(
		config: WorkflowServiceConfig,
		private getTask: (taskId: string) => Promise<Task>
	) {
		const engineConfig: TaskExecutionManagerConfig = {
			projectRoot: config.projectRoot,
			// `??` rather than `||` so explicit falsy values (e.g.
			// `debug: false`) are honored instead of replaced by defaults.
			maxConcurrent: config.maxConcurrent ?? 5,
			defaultTimeout: config.defaultTimeout ?? 60,
			// Default worktree base is a *sibling* of the project so
			// worktrees never land inside the main checkout. Uses the
			// `node:path` import — `require()` is unavailable in this
			// ESM module and would throw at runtime.
			worktreeBase:
				config.worktreeBase ??
				path.join(config.projectRoot, '..', 'task-worktrees'),
			claudeExecutable: config.claudeExecutable ?? 'claude',
			debug: config.debug ?? false
		};

		this.workflowEngine = new TaskExecutionManager(engineConfig);
	}

	/**
	 * Initialize the workflow service (delegates to engine initialization).
	 */
	async initialize(): Promise<void> {
		await this.workflowEngine.initialize();
	}

	/**
	 * Start a workflow for a task.
	 *
	 * @param taskId Task identifier, resolved via the injected `getTask`
	 * @param options Optional branch name, timeout (minutes), and extra env
	 * @returns The id of the newly started workflow
	 * @throws TaskMasterError ('WORKFLOW_START_FAILED') on any failure,
	 *   including the task not being resolvable
	 */
	async start(
		taskId: string,
		options?: {
			branchName?: string;
			timeout?: number;
			env?: Record<string, string>;
		}
	): Promise<string> {
		try {
			// Resolve the task first so a missing task surfaces here rather
			// than deep inside the engine.
			const task = await this.getTask(taskId);

			return await this.workflowEngine.startTaskExecution(task, options);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to start workflow for task ${taskId}`,
				'WORKFLOW_START_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Stop a workflow.
	 *
	 * @param workflowId Workflow to stop
	 * @param force When true, terminate without graceful shutdown
	 * @throws TaskMasterError ('WORKFLOW_STOP_FAILED') on failure
	 */
	async stop(workflowId: string, force = false): Promise<void> {
		try {
			await this.workflowEngine.stopTaskExecution(workflowId, force);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to stop workflow ${workflowId}`,
				'WORKFLOW_STOP_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Pause a running workflow.
	 *
	 * @throws TaskMasterError ('WORKFLOW_PAUSE_FAILED') on failure
	 */
	async pause(workflowId: string): Promise<void> {
		try {
			await this.workflowEngine.pauseTaskExecution(workflowId);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to pause workflow ${workflowId}`,
				'WORKFLOW_PAUSE_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Resume a paused workflow.
	 *
	 * @throws TaskMasterError ('WORKFLOW_RESUME_FAILED') on failure
	 */
	async resume(workflowId: string): Promise<void> {
		try {
			await this.workflowEngine.resumeTaskExecution(workflowId);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to resume workflow ${workflowId}`,
				'WORKFLOW_RESUME_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Get workflow status, or undefined if the workflow is unknown.
	 */
	getStatus(workflowId: string): WorkflowExecutionContext | undefined {
		return this.workflowEngine.getWorkflowStatus(workflowId);
	}

	/**
	 * Get the workflow associated with a task id, if any.
	 */
	getByTaskId(taskId: string): WorkflowExecutionContext | undefined {
		return this.workflowEngine.getWorkflowByTaskId(taskId);
	}

	/**
	 * List all workflows known to the engine.
	 */
	list(): WorkflowExecutionContext[] {
		return this.workflowEngine.listWorkflows();
	}

	/**
	 * List only the currently active workflows.
	 */
	listActive(): WorkflowExecutionContext[] {
		return this.workflowEngine.listActiveWorkflows();
	}

	/**
	 * Send input to a running workflow's process.
	 *
	 * @throws TaskMasterError ('WORKFLOW_INPUT_FAILED') on failure
	 */
	async sendInput(workflowId: string, input: string): Promise<void> {
		try {
			await this.workflowEngine.sendInputToWorkflow(workflowId, input);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to send input to workflow ${workflowId}`,
				'WORKFLOW_INPUT_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Clean up all workflows (worktrees, processes, persisted state).
	 *
	 * @param force When true, clean up even workflows that resist shutdown
	 * @throws TaskMasterError ('WORKFLOW_CLEANUP_FAILED') on failure
	 */
	async cleanup(force = false): Promise<void> {
		try {
			await this.workflowEngine.cleanup(force);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to cleanup workflows',
				'WORKFLOW_CLEANUP_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Subscribe to workflow engine events.
	 */
	on(event: string, listener: (...args: any[]) => void): void {
		this.workflowEngine.on(event, listener);
	}

	/**
	 * Unsubscribe from workflow engine events.
	 */
	off(event: string, listener: (...args: any[]) => void): void {
		this.workflowEngine.off(event, listener);
	}

	/**
	 * Get the underlying workflow engine instance (for advanced usage).
	 */
	getEngine(): TaskExecutionManager {
		return this.workflowEngine;
	}

	/**
	 * Dispose of the workflow service: force-cleans all workflows and
	 * removes every registered event listener.
	 */
	async dispose(): Promise<void> {
		await this.cleanup(true);
		this.workflowEngine.removeAllListeners();
	}
}

View File

@@ -7,6 +7,7 @@
"declarationMap": true, "declarationMap": true,
"sourceMap": true, "sourceMap": true,
"outDir": "./dist", "outDir": "./dist",
"baseUrl": ".",
"rootDir": "./src", "rootDir": "./src",
"strict": true, "strict": true,
"noImplicitAny": true, "noImplicitAny": true,
@@ -27,21 +28,7 @@
"moduleDetection": "force", "moduleDetection": "force",
"types": ["node"], "types": ["node"],
"resolveJsonModule": true, "resolveJsonModule": true,
"isolatedModules": true, "isolatedModules": true
"paths": {
"@/*": ["./src/*"],
"@/auth": ["./src/auth"],
"@/config": ["./src/config"],
"@/errors": ["./src/errors"],
"@/interfaces": ["./src/interfaces"],
"@/logger": ["./src/logger"],
"@/parser": ["./src/parser"],
"@/providers": ["./src/providers"],
"@/services": ["./src/services"],
"@/storage": ["./src/storage"],
"@/types": ["./src/types"],
"@/utils": ["./src/utils"]
}
}, },
"include": ["src/**/*"], "include": ["src/**/*"],
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"] "exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]

View File

@@ -27,7 +27,8 @@ export default defineConfig({
'services/index': 'src/services/index.ts', 'services/index': 'src/services/index.ts',
'storage/index': 'src/storage/index.ts', 'storage/index': 'src/storage/index.ts',
'types/index': 'src/types/index.ts', 'types/index': 'src/types/index.ts',
'utils/index': 'src/utils/index.ts' 'utils/index': 'src/utils/index.ts',
'workflow/index': 'src/workflow/index.ts'
}, },
format: ['cjs', 'esm'], format: ['cjs', 'esm'],
dts: true, dts: true,

View File

@@ -0,0 +1,371 @@
# @tm/workflow-engine
Enhanced Task Master workflow execution engine with git worktree isolation and Claude Code process management.
## Overview
The Workflow Engine extends Task Master with advanced execution capabilities:
- **Git Worktree Isolation**: Each task runs in its own isolated worktree
- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
- **Real-time Monitoring**: Track workflow progress and process output
- **State Management**: Persistent workflow state across sessions
- **Parallel Execution**: Run multiple tasks concurrently with resource limits
## Architecture
```
TaskExecutionManager
├── WorktreeManager # Git worktree lifecycle
├── ProcessSandbox # Claude Code process management
└── WorkflowStateManager # Persistent state tracking
```
## Quick Start
```typescript
import { TaskExecutionManager } from '@tm/workflow-engine';
const manager = new TaskExecutionManager({
projectRoot: '/path/to/project',
worktreeBase: '/path/to/worktrees',
claudeExecutable: 'claude',
maxConcurrent: 3,
defaultTimeout: 60,
debug: true
});
await manager.initialize();
// Start task execution
const workflowId = await manager.startTaskExecution({
id: '1.2',
title: 'Implement authentication',
description: 'Add JWT-based auth system',
status: 'pending',
priority: 'high'
});
// Monitor workflow
const workflow = manager.getWorkflowStatus(workflowId);
console.log(`Status: ${workflow?.status}`);
// Stop when complete
await manager.stopTaskExecution(workflowId);
```
## CLI Integration
```bash
# Start workflow
tm workflow start 1.2
# List active workflows
tm workflow list
# Check status
tm workflow status workflow-1.2-1234567890-abc123
# Stop workflow
tm workflow stop workflow-1.2-1234567890-abc123
```
## VS Code Extension
The workflow engine integrates with the Task Master VS Code extension to provide:
- **Workflow Tree View**: Visual workflow management
- **Process Monitoring**: Real-time output streaming
- **Worktree Navigation**: Quick access to isolated workspaces
- **Status Indicators**: Visual workflow state tracking
## Core Components
### TaskExecutionManager
Orchestrates complete workflow lifecycle:
```typescript
// Event-driven workflow management
manager.on('workflow.started', (event) => {
console.log(`Started: ${event.workflowId}`);
});
manager.on('process.output', (event) => {
console.log(`[${event.data.stream}]: ${event.data.data}`);
});
```
### WorktreeManager
Manages git worktree operations:
```typescript
import { WorktreeManager } from '@tm/workflow-engine';
const manager = new WorktreeManager({
worktreeBase: './worktrees',
projectRoot: process.cwd(),
autoCleanup: true
});
// Create isolated workspace
const worktree = await manager.createWorktree('task-1.2');
console.log(`Created: ${worktree.path}`);
// List all worktrees
const worktrees = await manager.listWorktrees();
// Cleanup
await manager.removeWorktree('task-1.2');
```
### ProcessSandbox
Spawns and manages Claude Code processes:
```typescript
import { ProcessSandbox } from '@tm/workflow-engine';
const sandbox = new ProcessSandbox({
claudeExecutable: 'claude',
defaultTimeout: 30,
debug: true
});
// Start isolated process
const process = await sandbox.startProcess(
'workflow-123',
'task-1.2',
'Implement user authentication with JWT tokens',
{ cwd: '/path/to/worktree' }
);
// Send input
await sandbox.sendInput('workflow-123', 'npm test');
// Monitor output
sandbox.on('process.output', (event) => {
console.log(event.data.data);
});
```
### WorkflowStateManager
Persistent workflow state management:
```typescript
import { WorkflowStateManager } from '@tm/workflow-engine';
const stateManager = new WorkflowStateManager({
projectRoot: process.cwd()
});
await stateManager.loadState();
// Register workflow
const workflowId = await stateManager.registerWorkflow({
taskId: '1.2',
taskTitle: 'Authentication',
// ... other context
});
// Update status
await stateManager.updateWorkflowStatus(workflowId, 'running');
// Query workflows
const running = stateManager.listWorkflowsByStatus('running');
```
## Configuration
### Environment Variables
- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
### Config Object
```typescript
interface TaskExecutionManagerConfig {
projectRoot: string; // Project root directory
worktreeBase: string; // Worktree base path
claudeExecutable: string; // Claude executable
maxConcurrent: number; // Concurrent limit
defaultTimeout: number; // Timeout (minutes)
debug: boolean; // Debug logging
}
```
## Workflow States
| State | Description |
|-------|-------------|
| `pending` | Created but not started |
| `initializing` | Setting up worktree/process |
| `running` | Active execution |
| `paused` | Temporarily stopped |
| `completed` | Successfully finished |
| `failed` | Error occurred |
| `cancelled` | User cancelled |
| `timeout` | Exceeded time limit |
## Events
The workflow engine emits events for real-time monitoring:
```typescript
// Workflow lifecycle
manager.on('workflow.started', (event) => {});
manager.on('workflow.completed', (event) => {});
manager.on('workflow.failed', (event) => {});
// Process events
manager.on('process.started', (event) => {});
manager.on('process.output', (event) => {});
manager.on('process.stopped', (event) => {});
// Worktree events
manager.on('worktree.created', (event) => {});
manager.on('worktree.deleted', (event) => {});
```
## Error Handling
The workflow engine provides specialized error types:
```typescript
import {
WorkflowError,
WorktreeError,
ProcessError,
MaxConcurrentWorkflowsError
} from '@tm/workflow-engine';
try {
await manager.startTaskExecution(task);
} catch (error) {
if (error instanceof MaxConcurrentWorkflowsError) {
console.log('Too many concurrent workflows');
} else if (error instanceof WorktreeError) {
console.log('Worktree operation failed');
}
}
```
## Development
```bash
# Install dependencies
npm install
# Build package
npm run build
# Run tests
npm test
# Development mode
npm run dev
```
## Integration Examples
### With Task Master Core
```typescript
import { createTaskMasterCore } from '@tm/core';
import { TaskExecutionManager } from '@tm/workflow-engine';
const core = await createTaskMasterCore({ projectPath: '.' });
const workflows = new TaskExecutionManager({ /*...*/ });
// Get task from core
const tasks = await core.getTaskList({});
const task = tasks.tasks.find(t => t.id === '1.2');
// Execute with workflow engine
if (task) {
const workflowId = await workflows.startTaskExecution(task);
}
```
### With VS Code Extension
```typescript
import { WorkflowProvider } from './workflow-provider';
// Register tree view
const provider = new WorkflowProvider(context);
vscode.window.createTreeView('taskmaster.workflows', {
treeDataProvider: provider
});
// Register commands
vscode.commands.registerCommand('taskmaster.workflow.start',
async (taskId) => {
await provider.startWorkflow(taskId);
}
);
```
## Troubleshooting
### Common Issues
1. **Worktree Creation Fails**
```bash
# Check git version (requires 2.5+)
git --version
# Verify project is git repository
git status
```
2. **Claude Code Not Found**
```bash
# Check Claude installation
which claude
# Set custom path
export TASKMASTER_CLAUDE_PATH=/path/to/claude
```
3. **Permission Errors**
```bash
# Check worktree directory permissions
chmod -R 755 ./worktrees
```
### Debug Mode
Enable debug logging for troubleshooting:
```typescript
const manager = new TaskExecutionManager({
// ... other config
debug: true
});
```
Or via environment:
```bash
export TASKMASTER_WORKFLOW_DEBUG=true
tm workflow start 1.2
```
## Roadmap
- [ ] Process resource monitoring (CPU, memory)
- [ ] Workflow templates and presets
- [ ] Integration with CI/CD pipelines
- [ ] Workflow scheduling and queueing
- [ ] Multi-machine workflow distribution
- [ ] Advanced debugging and profiling tools
## License
MIT WITH Commons-Clause

View File

@@ -0,0 +1,56 @@
{
"name": "@tm/workflow-engine",
"version": "0.1.0",
"description": "Task Master workflow execution engine with git worktree and process management",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"exports": {
".": {
"import": "./dist/index.js",
"types": "./dist/index.d.ts"
},
"./task-execution": {
"import": "./dist/task-execution/index.js",
"types": "./dist/task-execution/index.d.ts"
},
"./worktree": {
"import": "./dist/worktree/index.js",
"types": "./dist/worktree/index.d.ts"
},
"./process": {
"import": "./dist/process/index.js",
"types": "./dist/process/index.d.ts"
},
"./state": {
"import": "./dist/state/index.js",
"types": "./dist/state/index.d.ts"
}
},
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"test": "vitest",
"test:watch": "vitest --watch",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@tm/core": "*"
},
"devDependencies": {
"@types/node": "^22.0.0",
"tsup": "^8.0.0",
"typescript": "^5.5.0",
"vitest": "^2.0.0"
},
"files": ["dist"],
"keywords": [
"task-master",
"workflow",
"git-worktree",
"process-management",
"claude-code"
],
"author": "Task Master AI Team",
"license": "MIT"
}

View File

@@ -0,0 +1,6 @@
/**
* @fileoverview Workflow Engine Errors
* Public error exports
*/
export * from './workflow.errors.js';

View File

@@ -0,0 +1,59 @@
/**
* @fileoverview Workflow Engine Errors
* Custom error classes for workflow operations
*/
/**
 * Base class for all workflow-engine errors. Carries a machine-readable
 * `code` plus optional workflow/task identifiers and the underlying cause,
 * so callers can branch on failure type without matching message strings.
 */
export class WorkflowError extends Error {
	code: string;
	workflowId?: string;
	taskId?: string;
	cause?: Error;

	constructor(
		message: string,
		code: string,
		workflowId?: string,
		taskId?: string,
		cause?: Error
	) {
		super(message);
		this.name = 'WorkflowError';
		this.code = code;
		this.workflowId = workflowId;
		this.taskId = taskId;
		this.cause = cause;
	}
}

/** Raised when a git worktree operation fails. */
export class WorktreeError extends WorkflowError {
	path?: string;

	constructor(message: string, path?: string, cause?: Error) {
		super(message, 'WORKTREE_ERROR', undefined, undefined, cause);
		this.name = 'WorktreeError';
		this.path = path;
	}
}

/** Raised when a sandboxed process fails to spawn, run, or exit cleanly. */
export class ProcessError extends WorkflowError {
	pid?: number;

	constructor(message: string, pid?: number, cause?: Error) {
		super(message, 'PROCESS_ERROR', undefined, undefined, cause);
		this.name = 'ProcessError';
		this.pid = pid;
	}
}

/** Raised when a workflow exceeds its configured time budget. */
export class WorkflowTimeoutError extends WorkflowError {
	constructor(workflowId: string, timeoutMinutes: number) {
		super(
			`Workflow ${workflowId} timed out after ${timeoutMinutes} minutes`,
			'WORKFLOW_TIMEOUT',
			workflowId
		);
		this.name = 'WorkflowTimeoutError';
	}
}

/** Raised when a workflow id cannot be resolved to a known workflow. */
export class WorkflowNotFoundError extends WorkflowError {
	constructor(workflowId: string) {
		super(`Workflow ${workflowId} not found`, 'WORKFLOW_NOT_FOUND', workflowId);
		this.name = 'WorkflowNotFoundError';
	}
}

/** Raised when starting a workflow would exceed the concurrency limit. */
export class MaxConcurrentWorkflowsError extends WorkflowError {
	constructor(maxConcurrent: number) {
		super(
			`Maximum concurrent workflows (${maxConcurrent}) reached`,
			'MAX_CONCURRENT_WORKFLOWS'
		);
		this.name = 'MaxConcurrentWorkflowsError';
	}
}

View File

@@ -0,0 +1,19 @@
/**
 * @fileoverview Workflow Engine
 * Main entry point for the Task Master workflow execution engine
 */
// Core task execution
export * from './task-execution/index.js';
// Component managers
export * from './worktree/index.js';
export * from './process/index.js';
export * from './state/index.js';
// Types and errors
export * from './types/index.js';
export * from './errors/index.js';
// Convenience exports
// Alias: external consumers can import the orchestrator under the
// friendlier `WorkflowEngine` name.
export { TaskExecutionManager as WorkflowEngine } from './task-execution/index.js';

View File

@@ -0,0 +1,6 @@
/**
 * @fileoverview Process Management
 * Public exports for process operations
 *
 * Barrel file for the process sandbox implementation.
 */
export * from './process-sandbox.js';

View File

@@ -0,0 +1,378 @@
/**
* @fileoverview Process Sandbox
* Manages Claude Code process execution in isolated environments
*/
import { spawn, ChildProcess } from 'node:child_process';
import { EventEmitter } from 'node:events';
import type {
WorkflowProcess,
WorkflowEvent,
WorkflowEventType
} from '../types/workflow.types.js';
import { ProcessError } from '../errors/workflow.errors.js';
export interface ProcessSandboxConfig {
	/** Claude Code executable path */
	claudeExecutable: string;
	/** Default timeout for processes (minutes); values <= 0 disable the timeout */
	defaultTimeout: number;
	/** Environment variables to pass to processes (override inherited env) */
	environment?: Record<string, string>;
	/** Enable debug output (echoes child stdout/stderr to the console) */
	debug: boolean;
}
export interface ProcessOptions {
	/** Working directory for the process */
	cwd: string;
	/** Environment variables (merged with config; these take precedence) */
	env?: Record<string, string>;
	/** Timeout in minutes (overrides default; must be > 0 to arm the timer) */
	timeout?: number;
	/** Additional Claude Code arguments appended after the prompt */
	args?: string[];
}
/**
 * ProcessSandbox manages Claude Code process lifecycle
 * Single responsibility: Process spawning, monitoring, and cleanup
 *
 * Every lifecycle change is emitted twice: once under the generic 'event'
 * name and once under the specific WorkflowEventType, so consumers can
 * subscribe at either granularity.
 */
export class ProcessSandbox extends EventEmitter {
	private config: ProcessSandboxConfig;
	// Keyed by workflowId: plain-data process metadata returned to callers.
	private activeProcesses = new Map<string, WorkflowProcess>();
	// Keyed by workflowId: live ChildProcess handles, kept separate so the
	// WorkflowProcess records stay serializable.
	private childProcesses = new Map<string, ChildProcess>();
	// Keyed by workflowId: pending timeout timers, cleared on stop/exit.
	private timeouts = new Map<string, NodeJS.Timeout>();
	constructor(config: ProcessSandboxConfig) {
		super();
		this.config = config;
		// NOTE(review): registers SIGINT/SIGTERM/exit listeners per instance;
		// constructing many sandboxes accumulates process-level listeners.
		this.setupCleanupHandlers();
	}
	/**
	 * Start a Claude Code process for task execution
	 *
	 * @param workflowId - Unique workflow identifier (one process per workflow)
	 * @param taskId - Task being executed; exported to the child via env
	 * @param taskPrompt - Prompt passed to Claude Code via `-p` (print mode)
	 * @param options - Working dir, env overrides, timeout, extra CLI args
	 * @returns Metadata for the spawned process (status set to 'running')
	 * @throws ProcessError if a process is already tracked for this workflow
	 *   or spawning throws synchronously
	 */
	async startProcess(
		workflowId: string,
		taskId: string,
		taskPrompt: string,
		options: ProcessOptions
	): Promise<WorkflowProcess> {
		if (this.activeProcesses.has(workflowId)) {
			throw new ProcessError(
				`Process already running for workflow ${workflowId}`
			);
		}
		// Prepare command and arguments
		const args = [
			'-p', // Print mode for non-interactive execution
			taskPrompt,
			...(options.args || [])
		];
		// Prepare environment. Later spreads win: options.env overrides
		// config.environment, which overrides the inherited parent env.
		const env = {
			...process.env,
			...this.config.environment,
			...options.env,
			// Ensure task context is available
			TASKMASTER_WORKFLOW_ID: workflowId,
			TASKMASTER_TASK_ID: taskId
		};
		try {
			// Spawn Claude Code process
			// NOTE(review): spawn failures (e.g. missing executable) surface
			// asynchronously via the 'error' event, not in this try/catch.
			const childProcess = spawn(this.config.claudeExecutable, args, {
				cwd: options.cwd,
				env,
				stdio: ['pipe', 'pipe', 'pipe']
			});
			const workflowProcess: WorkflowProcess = {
				pid: childProcess.pid!,
				command: this.config.claudeExecutable,
				args,
				cwd: options.cwd,
				env,
				startedAt: new Date(),
				status: 'starting'
			};
			// Store process references
			this.activeProcesses.set(workflowId, workflowProcess);
			this.childProcesses.set(workflowId, childProcess);
			// Setup process event handlers
			this.setupProcessHandlers(workflowId, taskId, childProcess);
			// Setup timeout if specified (<= 0 disables the timeout)
			const timeoutMinutes = options.timeout || this.config.defaultTimeout;
			if (timeoutMinutes > 0) {
				this.setupProcessTimeout(workflowId, timeoutMinutes);
			}
			// Emit process started event
			this.emitEvent('process.started', workflowId, taskId, {
				pid: workflowProcess.pid,
				command: workflowProcess.command
			});
			workflowProcess.status = 'running';
			return workflowProcess;
		} catch (error) {
			throw new ProcessError(
				`Failed to start process for workflow ${workflowId}`,
				undefined,
				error as Error
			);
		}
	}
	/**
	 * Stop a running process
	 *
	 * @param workflowId - Workflow whose process should be stopped
	 * @param force - When true, SIGKILL immediately; otherwise SIGTERM with a
	 *   5 second SIGKILL fallback
	 * @throws ProcessError if no process is tracked for the workflow
	 */
	async stopProcess(workflowId: string, force = false): Promise<void> {
		const process = this.activeProcesses.get(workflowId);
		const childProcess = this.childProcesses.get(workflowId);
		if (!process || !childProcess) {
			throw new ProcessError(
				`No running process found for workflow ${workflowId}`
			);
		}
		try {
			// Clear timeout
			const timeout = this.timeouts.get(workflowId);
			if (timeout) {
				clearTimeout(timeout);
				this.timeouts.delete(workflowId);
			}
			// Kill the process
			if (force) {
				childProcess.kill('SIGKILL');
			} else {
				childProcess.kill('SIGTERM');
				// Give it 5 seconds to gracefully exit, then force kill
				// NOTE(review): this fallback timer is never cleared when the
				// child exits promptly, and it is not unref'd — confirm intended.
				setTimeout(() => {
					if (!childProcess.killed) {
						childProcess.kill('SIGKILL');
					}
				}, 5000);
			}
			process.status = 'stopped';
			// Emit process stopped event
			// NOTE(review): the third argument fills emitEvent's taskId slot but
			// receives the PID as a string — confirm consumers expect this.
			this.emitEvent('process.stopped', workflowId, process.pid.toString(), {
				pid: process.pid,
				forced: force
			});
		} catch (error) {
			throw new ProcessError(
				`Failed to stop process for workflow ${workflowId}`,
				process.pid,
				error as Error
			);
		}
	}
	/**
	 * Send input to a running process
	 *
	 * Writes `input` followed by a newline to the child's stdin.
	 *
	 * @throws ProcessError if no process is tracked or the write fails
	 */
	async sendInput(workflowId: string, input: string): Promise<void> {
		const childProcess = this.childProcesses.get(workflowId);
		if (!childProcess) {
			throw new ProcessError(
				`No running process found for workflow ${workflowId}`
			);
		}
		try {
			childProcess.stdin?.write(input);
			childProcess.stdin?.write('\n');
		} catch (error) {
			throw new ProcessError(
				`Failed to send input to process for workflow ${workflowId}`,
				childProcess.pid,
				error as Error
			);
		}
	}
	/**
	 * Get process information
	 *
	 * @returns The tracked WorkflowProcess, or undefined if none exists
	 */
	getProcess(workflowId: string): WorkflowProcess | undefined {
		return this.activeProcesses.get(workflowId);
	}
	/**
	 * List all active processes
	 */
	listProcesses(): WorkflowProcess[] {
		return Array.from(this.activeProcesses.values());
	}
	/**
	 * Check if a process is running
	 *
	 * Treats both 'starting' and 'running' states as alive.
	 */
	isProcessRunning(workflowId: string): boolean {
		const process = this.activeProcesses.get(workflowId);
		return process?.status === 'running' || process?.status === 'starting';
	}
	/**
	 * Clean up all processes
	 *
	 * Stops every tracked process in parallel; individual failures are logged
	 * and do not abort cleanup of the remaining processes.
	 */
	async cleanupAll(force = false): Promise<void> {
		const workflowIds = Array.from(this.activeProcesses.keys());
		await Promise.all(
			workflowIds.map(async (workflowId) => {
				try {
					await this.stopProcess(workflowId, force);
				} catch (error) {
					console.error(
						`Failed to cleanup process for workflow ${workflowId}:`,
						error
					);
				}
			})
		);
	}
	/**
	 * Setup process event handlers
	 *
	 * Wires stdout/stderr forwarding, exit-status bookkeeping, and
	 * error-driven cleanup for one child process.
	 */
	private setupProcessHandlers(
		workflowId: string,
		taskId: string,
		childProcess: ChildProcess
	): void {
		// NOTE: local `process` shadows the Node global within this method.
		const process = this.activeProcesses.get(workflowId);
		if (!process) return;
		// Handle stdout
		childProcess.stdout?.on('data', (data) => {
			const output = data.toString();
			if (this.config.debug) {
				console.log(`[${workflowId}] STDOUT:`, output);
			}
			this.emitEvent('process.output', workflowId, taskId, {
				stream: 'stdout',
				data: output
			});
		});
		// Handle stderr
		childProcess.stderr?.on('data', (data) => {
			const output = data.toString();
			if (this.config.debug) {
				console.error(`[${workflowId}] STDERR:`, output);
			}
			this.emitEvent('process.output', workflowId, taskId, {
				stream: 'stderr',
				data: output
			});
		});
		// Handle process exit: non-zero exit codes are recorded as 'crashed'.
		childProcess.on('exit', (code, signal) => {
			process.status = code === 0 ? 'stopped' : 'crashed';
			this.emitEvent('process.stopped', workflowId, taskId, {
				pid: process.pid,
				exitCode: code,
				signal
			});
			// Cleanup all per-workflow tracking (maps and pending timeout)
			this.activeProcesses.delete(workflowId);
			this.childProcesses.delete(workflowId);
			const timeout = this.timeouts.get(workflowId);
			if (timeout) {
				clearTimeout(timeout);
				this.timeouts.delete(workflowId);
			}
		});
		// Handle process errors (e.g. spawn failure, kill failure)
		childProcess.on('error', (error) => {
			process.status = 'crashed';
			this.emitEvent('process.error', workflowId, taskId, undefined, error);
			// Cleanup
			// NOTE(review): unlike the 'exit' path, a pending timeout timer is
			// not cleared here — confirm whether that is intentional.
			this.activeProcesses.delete(workflowId);
			this.childProcesses.delete(workflowId);
		});
	}
	/**
	 * Setup process timeout
	 *
	 * After `timeoutMinutes`, force-stops the workflow's process.
	 */
	private setupProcessTimeout(
		workflowId: string,
		timeoutMinutes: number
	): void {
		const timeout = setTimeout(
			async () => {
				console.warn(`Process timeout reached for workflow ${workflowId}`);
				try {
					await this.stopProcess(workflowId, true);
				} catch (error) {
					console.error('Failed to stop timed out process:', error);
				}
			},
			timeoutMinutes * 60 * 1000
		);
		this.timeouts.set(workflowId, timeout);
	}
	/**
	 * Emit workflow event
	 *
	 * Emits under both the generic 'event' channel and the specific type.
	 */
	private emitEvent(
		type: WorkflowEventType,
		workflowId: string,
		taskId: string,
		data?: any,
		error?: Error
	): void {
		const event: WorkflowEvent = {
			type,
			workflowId,
			taskId,
			timestamp: new Date(),
			data,
			error
		};
		this.emit('event', event);
		this.emit(type, event);
	}
	/**
	 * Setup cleanup handlers for graceful shutdown
	 *
	 * NOTE(review): cleanupAll is async, so work scheduled from the 'exit'
	 * handler will not complete — Node does not wait for pending promises at
	 * exit. Only the synchronous portion (the kill() calls) is guaranteed.
	 */
	private setupCleanupHandlers(): void {
		const cleanup = () => {
			console.log('Cleaning up processes...');
			this.cleanupAll(true).catch(console.error);
		};
		process.on('SIGINT', cleanup);
		process.on('SIGTERM', cleanup);
		process.on('exit', cleanup);
	}
}

View File

@@ -0,0 +1,6 @@
/**
 * @fileoverview State Management
 * Public exports for workflow state operations
 *
 * Barrel file for the workflow state manager.
 */
export * from './workflow-state-manager.js';

View File

@@ -0,0 +1,320 @@
/**
* @fileoverview Workflow State Manager
* Extends tm-core RuntimeStateManager with workflow tracking capabilities
*/
import { promises as fs } from 'node:fs';
import path from 'node:path';
import type {
WorkflowExecutionContext,
WorkflowStatus,
WorkflowEvent
} from '../types/workflow.types.js';
import { WorkflowError } from '../errors/workflow.errors.js';
export interface WorkflowStateConfig {
	/** Project root directory */
	projectRoot: string;
	/** Custom state directory relative to projectRoot (defaults to .taskmaster) */
	stateDir?: string;
}
export interface WorkflowRegistryEntry {
	/** Workflow ID */
	workflowId: string;
	/** Task ID being executed */
	taskId: string;
	/** Workflow status */
	status: WorkflowStatus;
	/** Worktree path */
	worktreePath: string;
	/** Process ID if running */
	processId?: number;
	/** Start timestamp (ISO-8601 string, serialized form of Date) */
	startedAt: string;
	/** Last activity timestamp (ISO-8601 string) */
	lastActivity: string;
	/** Branch name */
	branchName: string;
	/** Additional metadata */
	metadata?: Record<string, any>;
}
/**
 * WorkflowStateManager manages workflow execution state
 * Extends the concept of RuntimeStateManager to track active workflows globally
 *
 * State is persisted as a JSON registry (keyed by workflow id) under
 * `<projectRoot>/<stateDir>/workflows.json` and mirrored in an in-memory map.
 */
export class WorkflowStateManager {
	private config: WorkflowStateConfig;
	// Absolute path to the workflows.json registry file.
	private stateFilePath: string;
	// In-memory mirror of the registry, keyed by workflow id.
	private activeWorkflows = new Map<string, WorkflowExecutionContext>();
	constructor(config: WorkflowStateConfig) {
		this.config = config;
		const stateDir = config.stateDir || '.taskmaster';
		this.stateFilePath = path.join(config.projectRoot, stateDir, 'workflows.json');
	}
	/**
	 * Load workflow state from disk
	 *
	 * A missing file is treated as an empty registry; any other read/parse
	 * failure is logged as a warning and leaves the in-memory state untouched.
	 */
	async loadState(): Promise<void> {
		try {
			const stateData = await fs.readFile(this.stateFilePath, 'utf-8');
			const registry = JSON.parse(stateData) as Record<string, WorkflowRegistryEntry>;
			// Convert registry entries to WorkflowExecutionContext
			for (const [workflowId, entry] of Object.entries(registry)) {
				const context: WorkflowExecutionContext = {
					taskId: entry.taskId,
					taskTitle: `Task ${entry.taskId}`, // Will be updated when task details are loaded
					taskDescription: '',
					projectRoot: this.config.projectRoot,
					worktreePath: entry.worktreePath,
					branchName: entry.branchName,
					processId: entry.processId,
					startedAt: new Date(entry.startedAt),
					status: entry.status,
					lastActivity: new Date(entry.lastActivity),
					metadata: entry.metadata
				};
				this.activeWorkflows.set(workflowId, context);
			}
		} catch (error: any) {
			if (error.code === 'ENOENT') {
				// Workflows file doesn't exist, start with empty state
				console.debug('No workflows.json found, starting with empty state');
				return;
			}
			console.warn('Failed to load workflow state:', error.message);
		}
	}
	/**
	 * Save workflow state to disk
	 *
	 * Serializes the in-memory map back into the JSON registry (Dates become
	 * ISO strings). Creates the state directory if needed.
	 *
	 * NOTE(review): the write is not atomic (no temp-file + rename) — a crash
	 * mid-write can leave a truncated workflows.json. Confirm acceptable.
	 *
	 * @throws WorkflowError (code WORKFLOW_STATE_SAVE_ERROR) on I/O failure
	 */
	async saveState(): Promise<void> {
		const stateDir = path.dirname(this.stateFilePath);
		try {
			await fs.mkdir(stateDir, { recursive: true });
			// Convert contexts to registry entries
			const registry: Record<string, WorkflowRegistryEntry> = {};
			for (const [workflowId, context] of this.activeWorkflows.entries()) {
				registry[workflowId] = {
					workflowId,
					taskId: context.taskId,
					status: context.status,
					worktreePath: context.worktreePath,
					processId: context.processId,
					startedAt: context.startedAt.toISOString(),
					lastActivity: context.lastActivity.toISOString(),
					branchName: context.branchName,
					metadata: context.metadata
				};
			}
			await fs.writeFile(
				this.stateFilePath,
				JSON.stringify(registry, null, 2),
				'utf-8'
			);
		} catch (error) {
			throw new WorkflowError(
				'Failed to save workflow state',
				'WORKFLOW_STATE_SAVE_ERROR',
				undefined,
				undefined,
				error as Error
			);
		}
	}
	/**
	 * Register a new workflow
	 *
	 * @param context - Execution context to track (lastActivity is refreshed)
	 * @returns The generated workflow id
	 */
	async registerWorkflow(context: WorkflowExecutionContext): Promise<string> {
		const workflowId = this.generateWorkflowId(context.taskId);
		this.activeWorkflows.set(workflowId, {
			...context,
			lastActivity: new Date()
		});
		await this.saveState();
		return workflowId;
	}
	/**
	 * Update workflow context
	 *
	 * Shallow-merges `updates` over the existing context, refreshes
	 * lastActivity, and persists.
	 *
	 * @throws WorkflowError (code WORKFLOW_NOT_FOUND) if the id is unknown
	 */
	async updateWorkflow(
		workflowId: string,
		updates: Partial<WorkflowExecutionContext>
	): Promise<void> {
		const existing = this.activeWorkflows.get(workflowId);
		if (!existing) {
			throw new WorkflowError(
				`Workflow ${workflowId} not found`,
				'WORKFLOW_NOT_FOUND',
				workflowId
			);
		}
		const updated = {
			...existing,
			...updates,
			lastActivity: new Date()
		};
		this.activeWorkflows.set(workflowId, updated);
		await this.saveState();
	}
	/**
	 * Update workflow status
	 */
	async updateWorkflowStatus(workflowId: string, status: WorkflowStatus): Promise<void> {
		await this.updateWorkflow(workflowId, { status });
	}
	/**
	 * Unregister a workflow (remove from state)
	 *
	 * @throws WorkflowError (code WORKFLOW_NOT_FOUND) if the id is unknown
	 */
	async unregisterWorkflow(workflowId: string): Promise<void> {
		if (!this.activeWorkflows.has(workflowId)) {
			throw new WorkflowError(
				`Workflow ${workflowId} not found`,
				'WORKFLOW_NOT_FOUND',
				workflowId
			);
		}
		this.activeWorkflows.delete(workflowId);
		await this.saveState();
	}
	/**
	 * Get workflow context by ID
	 */
	getWorkflow(workflowId: string): WorkflowExecutionContext | undefined {
		return this.activeWorkflows.get(workflowId);
	}
	/**
	 * Get workflow by task ID
	 *
	 * Linear scan; returns the first context whose taskId matches.
	 */
	getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
		for (const context of this.activeWorkflows.values()) {
			if (context.taskId === taskId) {
				return context;
			}
		}
		return undefined;
	}
	/**
	 * List all active workflows
	 */
	listWorkflows(): WorkflowExecutionContext[] {
		return Array.from(this.activeWorkflows.values());
	}
	/**
	 * List workflows by status
	 */
	listWorkflowsByStatus(status: WorkflowStatus): WorkflowExecutionContext[] {
		return this.listWorkflows().filter(w => w.status === status);
	}
	/**
	 * Get running workflows count
	 */
	getRunningCount(): number {
		return this.listWorkflowsByStatus('running').length;
	}
	/**
	 * Check if a task has an active workflow
	 */
	hasActiveWorkflow(taskId: string): boolean {
		return this.getWorkflowByTaskId(taskId) !== undefined;
	}
	/**
	 * Clean up completed/failed workflows older than specified time
	 *
	 * Removes entries that are BOTH finished (completed/failed/cancelled/
	 * timeout) AND idle for longer than `olderThanHours`.
	 *
	 * @returns Number of entries removed
	 */
	async cleanupOldWorkflows(olderThanHours = 24): Promise<number> {
		const cutoffTime = new Date(Date.now() - (olderThanHours * 60 * 60 * 1000));
		let cleaned = 0;
		for (const [workflowId, context] of this.activeWorkflows.entries()) {
			const isOld = context.lastActivity < cutoffTime;
			const isFinished = ['completed', 'failed', 'cancelled', 'timeout'].includes(context.status);
			if (isOld && isFinished) {
				this.activeWorkflows.delete(workflowId);
				cleaned++;
			}
		}
		if (cleaned > 0) {
			await this.saveState();
		}
		return cleaned;
	}
	/**
	 * Clear all workflow state
	 *
	 * Deletes workflows.json (ignoring a missing file) and empties the map.
	 */
	async clearState(): Promise<void> {
		try {
			await fs.unlink(this.stateFilePath);
		} catch (error: any) {
			if (error.code !== 'ENOENT') {
				throw error;
			}
		}
		this.activeWorkflows.clear();
	}
	/**
	 * Record workflow event (for audit trail)
	 *
	 * Refreshes the workflow's lastActivity to the event timestamp and
	 * persists. Note: this writes the whole registry to disk on every event.
	 */
	async recordEvent(event: WorkflowEvent): Promise<void> {
		// Update workflow last activity
		const workflow = this.activeWorkflows.get(event.workflowId);
		if (workflow) {
			workflow.lastActivity = event.timestamp;
			await this.saveState();
		}
		// Optional: Could extend to maintain event log file
		if (process.env.TASKMASTER_DEBUG) {
			console.log('Workflow Event:', {
				type: event.type,
				workflowId: event.workflowId,
				taskId: event.taskId,
				timestamp: event.timestamp.toISOString(),
				data: event.data
			});
		}
	}
	/**
	 * Generate unique workflow ID
	 *
	 * Format: `workflow-<taskId>-<epochMillis>-<6 random base36 chars>`.
	 */
	private generateWorkflowId(taskId: string): string {
		const timestamp = Date.now();
		const random = Math.random().toString(36).substring(2, 8);
		return `workflow-${taskId}-${timestamp}-${random}`;
	}
}

View File

@@ -0,0 +1,6 @@
/**
 * @fileoverview Task Execution Management
 * Public exports for task execution operations
 *
 * Barrel file for the task execution orchestrator.
 */
export * from './task-execution-manager.js';

View File

@@ -0,0 +1,433 @@
/**
* @fileoverview Task Execution Manager
* Orchestrates the complete task execution workflow using worktrees and processes
*/
import { EventEmitter } from 'node:events';
import path from 'node:path';
import type { Task } from '@tm/core';
import {
WorktreeManager,
type WorktreeManagerConfig
} from '../worktree/worktree-manager.js';
import {
ProcessSandbox,
type ProcessSandboxConfig
} from '../process/process-sandbox.js';
import {
WorkflowStateManager,
type WorkflowStateConfig
} from '../state/workflow-state-manager.js';
import type {
WorkflowConfig,
WorkflowExecutionContext,
WorkflowStatus,
WorkflowEvent
} from '../types/workflow.types.js';
import {
WorkflowError,
WorkflowNotFoundError,
MaxConcurrentWorkflowsError,
WorkflowTimeoutError
} from '../errors/workflow.errors.js';
export interface TaskExecutionManagerConfig extends WorkflowConfig {
	/** Project root directory (base for worktrees and state storage) */
	projectRoot: string;
}
/**
 * TaskExecutionManager orchestrates the complete task execution workflow
 * Coordinates worktree creation, process spawning, and state management
 *
 * Composes three single-responsibility managers (worktree, process, state)
 * and re-emits process events to its own listeners.
 */
export class TaskExecutionManager extends EventEmitter {
	private config: TaskExecutionManagerConfig;
	private worktreeManager: WorktreeManager;
	private processSandbox: ProcessSandbox;
	private stateManager: WorkflowStateManager;
	// Guards initialize() so state is loaded at most once.
	private initialized = false;
	constructor(config: TaskExecutionManagerConfig) {
		super();
		this.config = config;
		// Initialize component managers
		const worktreeConfig: WorktreeManagerConfig = {
			worktreeBase: config.worktreeBase,
			projectRoot: config.projectRoot,
			autoCleanup: true
		};
		const processConfig: ProcessSandboxConfig = {
			claudeExecutable: config.claudeExecutable,
			defaultTimeout: config.defaultTimeout,
			debug: config.debug
		};
		const stateConfig: WorkflowStateConfig = {
			projectRoot: config.projectRoot
		};
		this.worktreeManager = new WorktreeManager(worktreeConfig);
		this.processSandbox = new ProcessSandbox(processConfig);
		this.stateManager = new WorkflowStateManager(stateConfig);
		// Forward events from components. recordEvent is async but its promise
		// is intentionally not awaited here (fire-and-forget persistence).
		this.processSandbox.on('event', (event: WorkflowEvent) => {
			this.stateManager.recordEvent(event);
			this.emit('event', event);
		});
	}
	/**
	 * Initialize the task execution manager
	 *
	 * Loads persisted workflow state and reconciles stale entries. Safe to
	 * call multiple times; subsequent calls are no-ops.
	 */
	async initialize(): Promise<void> {
		if (this.initialized) return;
		await this.stateManager.loadState();
		// Clean up any stale workflows
		await this.cleanupStaleWorkflows();
		this.initialized = true;
	}
	/**
	 * Start task execution workflow
	 *
	 * Creates a worktree, registers the workflow, and spawns a Claude Code
	 * process inside the worktree. On process-start failure, the worktree and
	 * registry entry are rolled back.
	 *
	 * @param task - Task to execute
	 * @param options - Optional branch name, timeout (minutes), env overrides
	 * @returns The new workflow id
	 * @throws MaxConcurrentWorkflowsError when the concurrency cap is reached
	 * @throws WorkflowError (TASK_ALREADY_EXECUTING) for duplicate tasks, or
	 *   (TASK_EXECUTION_START_ERROR) wrapping any setup failure
	 */
	async startTaskExecution(
		task: Task,
		options?: {
			branchName?: string;
			timeout?: number;
			env?: Record<string, string>;
		}
	): Promise<string> {
		if (!this.initialized) {
			await this.initialize();
		}
		// Check concurrent workflow limit
		const runningCount = this.stateManager.getRunningCount();
		if (runningCount >= this.config.maxConcurrent) {
			throw new MaxConcurrentWorkflowsError(this.config.maxConcurrent);
		}
		// Check if task already has an active workflow
		if (this.stateManager.hasActiveWorkflow(task.id)) {
			throw new WorkflowError(
				`Task ${task.id} already has an active workflow`,
				'TASK_ALREADY_EXECUTING',
				undefined,
				task.id
			);
		}
		try {
			// Create worktree
			const worktreeInfo = await this.worktreeManager.createWorktree(
				task.id,
				options?.branchName
			);
			// Prepare task context
			const context: WorkflowExecutionContext = {
				taskId: task.id,
				taskTitle: task.title,
				taskDescription: task.description,
				taskDetails: task.details,
				projectRoot: this.config.projectRoot,
				worktreePath: worktreeInfo.path,
				branchName: worktreeInfo.branch,
				startedAt: new Date(),
				status: 'initializing',
				lastActivity: new Date(),
				metadata: {
					priority: task.priority,
					dependencies: task.dependencies
				}
			};
			// Register workflow
			const workflowId = await this.stateManager.registerWorkflow(context);
			try {
				// Prepare task prompt for Claude Code
				const taskPrompt = this.generateTaskPrompt(task);
				// Start Claude Code process
				const process = await this.processSandbox.startProcess(
					workflowId,
					task.id,
					taskPrompt,
					{
						cwd: worktreeInfo.path,
						timeout: options?.timeout,
						env: options?.env
					}
				);
				// Update workflow with process information
				await this.stateManager.updateWorkflow(workflowId, {
					processId: process.pid,
					status: 'running'
				});
				// Emit workflow started event
				this.emitEvent('workflow.started', workflowId, task.id, {
					worktreePath: worktreeInfo.path,
					processId: process.pid
				});
				return workflowId;
			} catch (error) {
				// Clean up worktree if process failed to start
				await this.worktreeManager.removeWorktree(task.id, true);
				await this.stateManager.unregisterWorkflow(workflowId);
				throw error;
			}
		} catch (error) {
			throw new WorkflowError(
				`Failed to start task execution for ${task.id}`,
				'TASK_EXECUTION_START_ERROR',
				undefined,
				task.id,
				error as Error
			);
		}
	}
	/**
	 * Stop task execution workflow
	 *
	 * Stops the process (if running), removes the worktree, and unregisters
	 * the workflow.
	 *
	 * NOTE(review): a non-forced stop records status 'completed' regardless of
	 * whether the task actually finished — confirm this is the intended
	 * semantics for a manual stop.
	 *
	 * @throws WorkflowNotFoundError for unknown ids; WorkflowError
	 *   (WORKFLOW_STOP_ERROR) wrapping any teardown failure
	 */
	async stopTaskExecution(workflowId: string, force = false): Promise<void> {
		const workflow = this.stateManager.getWorkflow(workflowId);
		if (!workflow) {
			throw new WorkflowNotFoundError(workflowId);
		}
		try {
			// Stop the process if running
			if (this.processSandbox.isProcessRunning(workflowId)) {
				await this.processSandbox.stopProcess(workflowId, force);
			}
			// Update workflow status
			const status: WorkflowStatus = force ? 'cancelled' : 'completed';
			await this.stateManager.updateWorkflowStatus(workflowId, status);
			// Clean up worktree
			await this.worktreeManager.removeWorktree(workflow.taskId, force);
			// Emit workflow stopped event
			this.emitEvent('workflow.completed', workflowId, workflow.taskId, {
				status,
				forced: force
			});
			// Unregister workflow
			await this.stateManager.unregisterWorkflow(workflowId);
		} catch (error) {
			throw new WorkflowError(
				`Failed to stop workflow ${workflowId}`,
				'WORKFLOW_STOP_ERROR',
				workflowId,
				workflow.taskId,
				error as Error
			);
		}
	}
	/**
	 * Pause task execution
	 *
	 * Currently only flips the recorded status; the underlying process keeps
	 * running (see inline comment below).
	 *
	 * @throws WorkflowNotFoundError / WorkflowError (WORKFLOW_NOT_RUNNING)
	 */
	async pauseTaskExecution(workflowId: string): Promise<void> {
		const workflow = this.stateManager.getWorkflow(workflowId);
		if (!workflow) {
			throw new WorkflowNotFoundError(workflowId);
		}
		if (workflow.status !== 'running') {
			throw new WorkflowError(
				`Cannot pause workflow ${workflowId} - not currently running`,
				'WORKFLOW_NOT_RUNNING',
				workflowId,
				workflow.taskId
			);
		}
		// For now, we'll just mark as paused - in the future could implement
		// process suspension or other pause mechanisms
		await this.stateManager.updateWorkflowStatus(workflowId, 'paused');
		this.emitEvent('workflow.paused', workflowId, workflow.taskId);
	}
	/**
	 * Resume paused task execution
	 *
	 * Counterpart of pauseTaskExecution: only flips the recorded status.
	 *
	 * @throws WorkflowNotFoundError / WorkflowError (WORKFLOW_NOT_PAUSED)
	 */
	async resumeTaskExecution(workflowId: string): Promise<void> {
		const workflow = this.stateManager.getWorkflow(workflowId);
		if (!workflow) {
			throw new WorkflowNotFoundError(workflowId);
		}
		if (workflow.status !== 'paused') {
			throw new WorkflowError(
				`Cannot resume workflow ${workflowId} - not currently paused`,
				'WORKFLOW_NOT_PAUSED',
				workflowId,
				workflow.taskId
			);
		}
		await this.stateManager.updateWorkflowStatus(workflowId, 'running');
		this.emitEvent('workflow.resumed', workflowId, workflow.taskId);
	}
	/**
	 * Get workflow status
	 */
	getWorkflowStatus(workflowId: string): WorkflowExecutionContext | undefined {
		return this.stateManager.getWorkflow(workflowId);
	}
	/**
	 * Get workflow by task ID
	 */
	getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
		return this.stateManager.getWorkflowByTaskId(taskId);
	}
	/**
	 * List all workflows
	 */
	listWorkflows(): WorkflowExecutionContext[] {
		return this.stateManager.listWorkflows();
	}
	/**
	 * List active workflows
	 */
	listActiveWorkflows(): WorkflowExecutionContext[] {
		return this.stateManager.listWorkflowsByStatus('running');
	}
	/**
	 * Send input to a running workflow
	 *
	 * @throws WorkflowNotFoundError / WorkflowError (PROCESS_NOT_RUNNING)
	 */
	async sendInputToWorkflow(workflowId: string, input: string): Promise<void> {
		const workflow = this.stateManager.getWorkflow(workflowId);
		if (!workflow) {
			throw new WorkflowNotFoundError(workflowId);
		}
		if (!this.processSandbox.isProcessRunning(workflowId)) {
			throw new WorkflowError(
				`Cannot send input to workflow ${workflowId} - process not running`,
				'PROCESS_NOT_RUNNING',
				workflowId,
				workflow.taskId
			);
		}
		await this.processSandbox.sendInput(workflowId, input);
	}
	/**
	 * Clean up all workflows
	 *
	 * Stops every process, removes every worktree, and wipes persisted state.
	 */
	async cleanup(force = false): Promise<void> {
		// Stop all processes
		await this.processSandbox.cleanupAll(force);
		// Clean up all worktrees
		await this.worktreeManager.cleanupAll(force);
		// Clear workflow state
		await this.stateManager.clearState();
	}
	/**
	 * Generate task prompt for Claude Code
	 *
	 * Builds a multi-line prompt from the task's title, description, optional
	 * details/test strategy/dependencies, and a closing instruction.
	 */
	private generateTaskPrompt(task: Task): string {
		const prompt = [
			`Work on Task ${task.id}: ${task.title}`,
			'',
			`Description: ${task.description}`
		];
		if (task.details) {
			prompt.push('', `Details: ${task.details}`);
		}
		if (task.testStrategy) {
			prompt.push('', `Test Strategy: ${task.testStrategy}`);
		}
		if (task.dependencies?.length) {
			prompt.push('', `Dependencies: ${task.dependencies.join(', ')}`);
		}
		prompt.push(
			'',
			'Please implement this task following the project conventions and best practices.',
			'When complete, update the task status appropriately using the available Task Master commands.'
		);
		return prompt.join('\n');
	}
	/**
	 * Clean up stale workflows from previous sessions
	 *
	 * NOTE(review): this reconstructs ids as `workflow-${taskId}`, but
	 * WorkflowStateManager.generateWorkflowId produces
	 * `workflow-<taskId>-<timestamp>-<random>`. The reconstructed id will
	 * never match a registered workflow, so updateWorkflowStatus throws
	 * WORKFLOW_NOT_FOUND and the isProcessRunning check keys on a
	 * non-existent id. The real workflow id needs to be surfaced from the
	 * state manager alongside each context.
	 */
	private async cleanupStaleWorkflows(): Promise<void> {
		const workflows = this.stateManager.listWorkflows();
		for (const workflow of workflows) {
			const isStale =
				workflow.status === 'running' &&
				!this.processSandbox.isProcessRunning(`workflow-${workflow.taskId}`);
			if (isStale) {
				console.log(`Cleaning up stale workflow for task ${workflow.taskId}`);
				try {
					await this.stateManager.updateWorkflowStatus(
						`workflow-${workflow.taskId}`,
						'failed'
					);
					// Try to clean up worktree
					await this.worktreeManager.removeWorktree(workflow.taskId, true);
				} catch (error) {
					console.error(`Failed to cleanup stale workflow:`, error);
				}
			}
		}
	}
	/**
	 * Emit workflow event
	 *
	 * Emits under both the generic 'event' channel and the specific type.
	 * NOTE(review): `type` is widened via `as any` — callers can emit event
	 * names outside WorkflowEventType (e.g. 'workflow.resumed' is used above
	 * but absent from the union in some revisions); tighten when possible.
	 */
	private emitEvent(
		type: string,
		workflowId: string,
		taskId: string,
		data?: any
	): void {
		const event: WorkflowEvent = {
			type: type as any,
			workflowId,
			taskId,
			timestamp: new Date(),
			data
		};
		this.emit('event', event);
		this.emit(type, event);
	}
}

View File

@@ -0,0 +1,6 @@
/**
 * @fileoverview Workflow Engine Types
 * Public type exports
 *
 * Barrel file for workflow type definitions.
 */
export * from './workflow.types.js';

View File

@@ -0,0 +1,119 @@
/**
 * @fileoverview Workflow Engine Types
 * Core types for workflow execution system
 */
export interface WorkflowConfig {
	/** Maximum number of concurrent workflows */
	maxConcurrent: number;
	/** Default timeout for workflow execution (minutes) */
	defaultTimeout: number;
	/** Base directory for worktrees */
	worktreeBase: string;
	/** Claude Code executable path */
	claudeExecutable: string;
	/** Enable debug logging */
	debug: boolean;
}
/**
 * Runtime snapshot of a single task-execution workflow, tracked in memory
 * and persisted (in serialized form) by the workflow state manager.
 */
export interface WorkflowExecutionContext {
	/** Task ID being executed */
	taskId: string;
	/** Task title for display */
	taskTitle: string;
	/** Full task description */
	taskDescription: string;
	/** Task implementation details */
	taskDetails?: string;
	/** Project root path */
	projectRoot: string;
	/** Worktree path */
	worktreePath: string;
	/** Branch name for this workflow */
	branchName: string;
	/** Process ID of running Claude Code */
	processId?: number;
	/** Workflow start time */
	startedAt: Date;
	/** Workflow status */
	status: WorkflowStatus;
	/** Last activity timestamp */
	lastActivity: Date;
	/** Execution metadata */
	metadata?: Record<string, any>;
}
export type WorkflowStatus =
	| 'pending' // Created but not started
	| 'initializing' // Setting up worktree/process
	| 'running' // Active execution
	| 'paused' // Temporarily stopped
	| 'completed' // Successfully finished
	| 'failed' // Error occurred
	| 'cancelled' // User cancelled
	| 'timeout'; // Exceeded time limit
/**
 * Event envelope emitted by the engine components; `data` is an untyped,
 * event-specific payload whose shape varies by `type`.
 */
export interface WorkflowEvent {
	type: WorkflowEventType;
	workflowId: string;
	taskId: string;
	timestamp: Date;
	data?: any;
	error?: Error;
}
// NOTE(review): 'workflow.resumed' is emitted by TaskExecutionManager but
// absent from this union — confirm and add if intended.
export type WorkflowEventType =
	| 'workflow.created'
	| 'workflow.started'
	| 'workflow.paused'
	| 'workflow.resumed'
	| 'workflow.completed'
	| 'workflow.failed'
	| 'workflow.cancelled'
	| 'worktree.created'
	| 'worktree.deleted'
	| 'process.started'
	| 'process.stopped'
	| 'process.output'
	| 'process.error';
/** Plain-data record describing a spawned child process. */
export interface WorkflowProcess {
	/** Process ID */
	pid: number;
	/** Command that was executed */
	command: string;
	/** Command arguments */
	args: string[];
	/** Working directory */
	cwd: string;
	/** Environment variables */
	env?: Record<string, string>;
	/** Process start time */
	startedAt: Date;
	/** Process status */
	status: ProcessStatus;
}
export type ProcessStatus =
	| 'starting'
	| 'running'
	| 'stopped'
	| 'crashed'
	| 'killed';
/** Metadata about a git worktree created for a task. */
export interface WorktreeInfo {
	/** Worktree path */
	path: string;
	/** Branch name */
	branch: string;
	/** Creation timestamp */
	createdAt: Date;
	/** Associated task ID */
	taskId: string;
	/** Git commit hash */
	commit?: string;
	/** Worktree lock status */
	locked: boolean;
	/** Lock reason if applicable */
	lockReason?: string;
}

View File

@@ -0,0 +1,6 @@
/**
 * @fileoverview Worktree Management
 * Public exports for worktree operations
 *
 * Barrel file for the git worktree manager.
 */
export * from './worktree-manager.js';

View File

@@ -0,0 +1,351 @@
/**
* @fileoverview Worktree Manager
* Manages git worktree lifecycle for task execution
*/
import { spawn } from 'node:child_process';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import type { WorktreeInfo } from '../types/workflow.types.js';
import { WorktreeError } from '../errors/workflow.errors.js';
export interface WorktreeManagerConfig {
	/** Base directory for all worktrees (created on demand) */
	worktreeBase: string;
	/** Project root directory (git commands run from here) */
	projectRoot: string;
	/** Auto-cleanup on process exit (registers signal/exit handlers) */
	autoCleanup: boolean;
}
/**
* WorktreeManager handles git worktree operations
* Single responsibility: Git worktree lifecycle management
*/
export class WorktreeManager {
private config: WorktreeManagerConfig;
private activeWorktrees = new Map<string, WorktreeInfo>();
constructor(config: WorktreeManagerConfig) {
this.config = config;
if (config.autoCleanup) {
this.setupCleanupHandlers();
}
}
/**
* Create a new worktree for task execution
*/
async createWorktree(taskId: string, branchName?: string): Promise<WorktreeInfo> {
const sanitizedTaskId = this.sanitizeTaskId(taskId);
const worktreePath = path.join(this.config.worktreeBase, `task-${sanitizedTaskId}`);
// Ensure base directory exists
await fs.mkdir(this.config.worktreeBase, { recursive: true });
// Generate unique branch name if not provided
const branch = branchName || `task/${sanitizedTaskId}-${Date.now()}`;
try {
// Check if worktree path already exists
if (await this.pathExists(worktreePath)) {
throw new WorktreeError(`Worktree path already exists: ${worktreePath}`);
}
// Create the worktree
await this.executeGitCommand(['worktree', 'add', '-b', branch, worktreePath], {
cwd: this.config.projectRoot
});
const worktreeInfo: WorktreeInfo = {
path: worktreePath,
branch,
createdAt: new Date(),
taskId,
locked: false
};
// Get commit hash
try {
const commit = await this.executeGitCommand(['rev-parse', 'HEAD'], {
cwd: worktreePath
});
worktreeInfo.commit = commit.trim();
} catch (error) {
console.warn('Failed to get commit hash for worktree:', error);
}
this.activeWorktrees.set(taskId, worktreeInfo);
return worktreeInfo;
} catch (error) {
throw new WorktreeError(
`Failed to create worktree for task ${taskId}`,
worktreePath,
error as Error
);
}
}
/**
 * Remove a worktree and clean up.
 *
 * Deletes the worktree directory via git and, for auto-generated
 * `task/...` branches, deletes the branch as well. On success the task is
 * dropped from the active-worktree registry.
 *
 * @param taskId - Task whose worktree should be removed.
 * @param force - Pass `--force` to git (removes dirty/locked worktrees).
 * @throws WorktreeError if the task has no registered worktree or git fails.
 */
async removeWorktree(taskId: string, force = false): Promise<void> {
  const info = this.activeWorktrees.get(taskId);
  if (!info) {
    throw new WorktreeError(`No active worktree found for task ${taskId}`);
  }
  const gitCwd = { cwd: this.config.projectRoot };
  try {
    const removeArgs = force
      ? ['worktree', 'remove', info.path, '--force']
      : ['worktree', 'remove', info.path];
    await this.executeGitCommand(removeArgs, gitCwd);
    // Only auto-generated task branches are deleted; user-supplied
    // branches are left intact. Branch deletion failure is non-fatal.
    if (info.branch.startsWith('task/')) {
      try {
        await this.executeGitCommand(['branch', '-D', info.branch], gitCwd);
      } catch (error) {
        console.warn(`Failed to delete branch ${info.branch}:`, error);
      }
    }
    this.activeWorktrees.delete(taskId);
  } catch (error) {
    throw new WorktreeError(
      `Failed to remove worktree for task ${taskId}`,
      info.path,
      error as Error
    );
  }
}
/**
 * List all active worktrees for this project.
 *
 * Parses `git worktree list --porcelain` output and returns only the
 * worktrees that live under `worktreeBase` on a `task/` branch.
 *
 * @returns Metadata for this manager's task worktrees.
 * @throws WorktreeError when the git command fails.
 */
async listWorktrees(): Promise<WorktreeInfo[]> {
  try {
    const output = await this.executeGitCommand(['worktree', 'list', '--porcelain'], {
      cwd: this.config.projectRoot
    });
    const worktrees: WorktreeInfo[] = [];
    const lines = output.trim().split('\n');
    let currentWorktree: Partial<WorktreeInfo> = {};
    for (const line of lines) {
      if (line.startsWith('worktree ')) {
        // A new record begins; flush the previous one.
        if (currentWorktree.path) {
          worktrees.push(this.completeWorktreeInfo(currentWorktree));
        }
        currentWorktree = { path: line.substring(9) };
      } else if (line.startsWith('HEAD ')) {
        currentWorktree.commit = line.substring(5);
      } else if (line.startsWith('branch ')) {
        currentWorktree.branch = line.substring(7).replace('refs/heads/', '');
      } else if (line === 'locked') {
        currentWorktree.locked = true;
      } else if (line.startsWith('locked ')) {
        currentWorktree.locked = true;
        currentWorktree.lockReason = line.substring(7);
      }
    }
    // Flush the final record.
    if (currentWorktree.path) {
      worktrees.push(this.completeWorktreeInfo(currentWorktree));
    }
    // FIX: compare against the base directory with a trailing separator so
    // a sibling directory such as `<worktreeBase>-other` is not treated as
    // one of ours by a bare prefix check.
    const base = this.config.worktreeBase.endsWith(path.sep)
      ? this.config.worktreeBase
      : this.config.worktreeBase + path.sep;
    return worktrees.filter(
      (wt) => wt.path.startsWith(base) && wt.branch?.startsWith('task/')
    );
  } catch (error) {
    throw new WorktreeError('Failed to list worktrees', undefined, error as Error);
  }
}
/**
 * Get worktree info for a specific task.
 *
 * @param taskId - Task identifier used when the worktree was created.
 * @returns Metadata for the task's worktree, or `undefined` when this
 * manager instance is not tracking one for the task.
 */
getWorktreeInfo(taskId: string): WorktreeInfo | undefined {
return this.activeWorktrees.get(taskId);
}
/**
 * Lock a worktree to prevent cleanup.
 *
 * Issues `git worktree lock` (optionally with a reason) and mirrors the
 * lock state onto the tracked worktree record.
 *
 * @param taskId - Task whose worktree should be locked.
 * @param reason - Optional human-readable reason stored by git.
 * @throws WorktreeError if the task has no registered worktree or git fails.
 */
async lockWorktree(taskId: string, reason?: string): Promise<void> {
  const info = this.activeWorktrees.get(taskId);
  if (!info) {
    throw new WorktreeError(`No active worktree found for task ${taskId}`);
  }
  try {
    const lockArgs = reason
      ? ['worktree', 'lock', info.path, '--reason', reason]
      : ['worktree', 'lock', info.path];
    await this.executeGitCommand(lockArgs, { cwd: this.config.projectRoot });
    info.locked = true;
    info.lockReason = reason;
  } catch (error) {
    throw new WorktreeError(
      `Failed to lock worktree for task ${taskId}`,
      info.path,
      error as Error
    );
  }
}
/**
 * Unlock a worktree.
 *
 * Issues `git worktree unlock` and clears the lock state on the tracked
 * worktree record so it can be cleaned up again.
 *
 * @param taskId - Task whose worktree should be unlocked.
 * @throws WorktreeError if the task has no registered worktree or git fails.
 */
async unlockWorktree(taskId: string): Promise<void> {
  const info = this.activeWorktrees.get(taskId);
  if (!info) {
    throw new WorktreeError(`No active worktree found for task ${taskId}`);
  }
  try {
    await this.executeGitCommand(['worktree', 'unlock', info.path], {
      cwd: this.config.projectRoot
    });
    info.locked = false;
    delete info.lockReason;
  } catch (error) {
    throw new WorktreeError(
      `Failed to unlock worktree for task ${taskId}`,
      info.path,
      error as Error
    );
  }
}
/**
 * Clean up all task-related worktrees.
 *
 * Removes every worktree reported by `listWorktrees()`. Worktrees created
 * by this instance go through `removeWorktree` (which also deletes the
 * task branch). Worktrees left over from earlier runs are not present in
 * `activeWorktrees`, and `removeWorktree` would throw for them — so they
 * are removed directly via git instead. Individual failures are logged
 * and do not abort the rest of the cleanup.
 *
 * @param force - Pass `--force` to git (removes dirty/locked worktrees).
 */
async cleanupAll(force = false): Promise<void> {
  const worktrees = await this.listWorktrees();
  for (const worktree of worktrees) {
    if (!worktree.taskId) {
      continue;
    }
    try {
      if (this.activeWorktrees.has(worktree.taskId)) {
        await this.removeWorktree(worktree.taskId, force);
      } else {
        // FIX: stale worktree from a previous process — removeWorktree
        // only handles registered tasks, so remove this one directly.
        const args = ['worktree', 'remove', worktree.path];
        if (force) {
          args.push('--force');
        }
        await this.executeGitCommand(args, { cwd: this.config.projectRoot });
      }
    } catch (error) {
      console.error(`Failed to cleanup worktree for task ${worktree.taskId}:`, error);
    }
  }
}
/**
 * Execute git command and return output.
 *
 * Spawns the `git` executable with the given arguments and resolves with
 * its captured stdout.
 *
 * @param args - Arguments passed to `git`.
 * @param options - `cwd` in which to run the command.
 * @returns The process's stdout.
 * @throws Error when git exits non-zero or fails to spawn.
 */
private async executeGitCommand(
  args: string[],
  options: { cwd: string }
): Promise<string> {
  return new Promise((resolve, reject) => {
    const child = spawn('git', args, {
      cwd: options.cwd,
      stdio: ['ignore', 'pipe', 'pipe']
    });
    const outChunks: string[] = [];
    const errChunks: string[] = [];
    child.stdout.on('data', (chunk) => outChunks.push(chunk.toString()));
    child.stderr.on('data', (chunk) => errChunks.push(chunk.toString()));
    child.on('error', reject);
    child.on('close', (code) => {
      if (code !== 0) {
        // Prefer stderr for diagnostics; some git commands report errors
        // on stdout instead.
        const detail = errChunks.join('') || outChunks.join('');
        reject(new Error(`Git command failed (${code}): ${detail}`));
        return;
      }
      resolve(outChunks.join(''));
    });
  });
}
/**
 * Sanitize task ID for use in filesystem paths.
 *
 * Every character outside `[a-zA-Z0-9.-]` is replaced with a hyphen.
 *
 * @param taskId - Raw task identifier.
 * @returns A string safe to embed in a directory or branch name.
 */
private sanitizeTaskId(taskId: string): string {
  const unsafeChars = /[^a-zA-Z0-9.-]/g;
  return taskId.replace(unsafeChars, '-');
}
/**
 * Check if path exists.
 *
 * @param path - Filesystem path to probe.
 * @returns true when `fs.access` succeeds, false on any error (including
 * permission failures, not only missing paths).
 */
private async pathExists(path: string): Promise<boolean> {
  return fs.access(path).then(
    () => true,
    () => false
  );
}
/**
 * Complete worktree info with defaults.
 *
 * Fills in missing fields of a partially parsed `--porcelain` record. The
 * task ID is recovered from auto-generated branch names of the form
 * `task/<taskId>-<timestamp>` (see `createWorktree`).
 *
 * @param partial - Fields parsed from `git worktree list --porcelain`.
 * @returns A fully populated WorktreeInfo with sensible defaults.
 */
private completeWorktreeInfo(partial: Partial<WorktreeInfo>): WorktreeInfo {
  const branch = partial.branch || 'unknown';
  // FIX: match greedily up to the trailing `-<timestamp>` so task IDs that
  // themselves contain hyphens (e.g. "ABC-123" → branch
  // "task/ABC-123-1699...") are not truncated at the first hyphen, which
  // the old non-greedy /^task\/(.+?)-/ did. Branches without a numeric
  // suffix keep everything after "task/".
  const taskIdMatch =
    branch.match(/^task\/(.+)-\d+$/) ?? branch.match(/^task\/(.+)$/);
  return {
    path: partial.path || '',
    branch,
    createdAt: partial.createdAt || new Date(),
    taskId: taskIdMatch?.[1] || partial.taskId || 'unknown',
    commit: partial.commit,
    locked: partial.locked || false,
    lockReason: partial.lockReason
  };
}
/**
 * Setup cleanup handlers for graceful shutdown.
 *
 * Installs SIGINT/SIGTERM handlers that remove all task worktrees before
 * the process exits. Two fixes over the naive version:
 * - No 'exit' handler: asynchronous work (spawning git) cannot run inside
 *   'exit' because the event loop has already stopped, so registering the
 *   async cleanup there was a silent no-op.
 * - Installing a signal handler suppresses Node's default termination, so
 *   the handler must call `process.exit` itself once cleanup settles —
 *   otherwise Ctrl-C would leave the process hanging.
 */
private setupCleanupHandlers(): void {
  const cleanup = (exitCode: number) => {
    console.log('Cleaning up worktrees...');
    this.cleanupAll(true)
      .catch(console.error)
      .finally(() => process.exit(exitCode));
  };
  // 128 + signal number: the conventional exit codes for SIGINT/SIGTERM.
  process.once('SIGINT', () => cleanup(130));
  process.once('SIGTERM', () => cleanup(143));
}
}

View File

@@ -0,0 +1,19 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": [
"src/**/*"
],
"exclude": [
"dist",
"node_modules",
"**/*.test.ts",
"**/*.spec.ts"
]
}

View File

@@ -0,0 +1,17 @@
import { defineConfig } from 'tsup';
// tsup bundler configuration for this package.
export default defineConfig({
// One bundle per public entry point so consumers can deep-import the
// individual subsystems (task-execution, worktree, process, state).
entry: [
'src/index.ts',
'src/task-execution/index.ts',
'src/worktree/index.ts',
'src/process/index.ts',
'src/state/index.ts'
],
// Emit ES modules only.
format: ['esm'],
// Generate .d.ts declarations (and maps) alongside the JS output.
dts: true,
sourcemap: true,
// Wipe the output directory before each build.
clean: true,
// Keep each entry self-contained instead of emitting shared chunks.
splitting: false,
treeshake: true
});

View File

@@ -0,0 +1,19 @@
import { defineConfig } from 'vitest/config';
// Vitest configuration for this package.
export default defineConfig({
test: {
// Run tests in a plain Node environment (no DOM emulation).
environment: 'node',
// Expose describe/it/expect as globals so test files need no imports.
globals: true,
coverage: {
// Use V8's built-in coverage instrumentation.
provider: 'v8',
reporter: ['text', 'json', 'html'],
// Keep build output, type declarations and the tests themselves out
// of the coverage figures.
exclude: [
'node_modules/',
'dist/',
'**/*.d.ts',
'**/*.test.ts',
'**/*.spec.ts'
]
}
}
});

View File

@@ -15,8 +15,8 @@ import search from '@inquirer/search';
import ora from 'ora'; // Import ora import ora from 'ora'; // Import ora
import { log, readJSON } from './utils.js'; import { log, readJSON } from './utils.js';
// Import new commands from @tm/cli // Import command registry from @tm/cli
import { ListTasksCommand, AuthCommand } from '@tm/cli'; import { registerAllCommands } from '@tm/cli';
import { import {
parsePRD, parsePRD,
@@ -1737,13 +1737,9 @@ function registerCommands(programInstance) {
}); });
}); });
// NEW: Register the new list command from @tm/cli // Register all commands from @tm/cli using the command registry
// This command handles all its own configuration and logic // This automatically registers ListTasksCommand, AuthCommand, and any future commands
ListTasksCommand.registerOn(programInstance); registerAllCommands(programInstance);
// Register the auth command from @tm/cli
// Handles authentication with tryhamster.com
AuthCommand.registerOn(programInstance);
// expand command // expand command
programInstance programInstance

View File

@@ -198,11 +198,13 @@ jest.unstable_mockModule('fs', () => ({
default: { default: {
existsSync: jest.fn(() => false), existsSync: jest.fn(() => false),
readFileSync: jest.fn(), readFileSync: jest.fn(),
writeFileSync: mockWriteFileSync writeFileSync: mockWriteFileSync,
unlinkSync: jest.fn()
}, },
existsSync: jest.fn(() => false), existsSync: jest.fn(() => false),
readFileSync: jest.fn(), readFileSync: jest.fn(),
writeFileSync: mockWriteFileSync writeFileSync: mockWriteFileSync,
unlinkSync: jest.fn()
})); }));
jest.unstable_mockModule( jest.unstable_mockModule(

View File

@@ -1,51 +1,20 @@
import { defineConfig } from 'tsup'; import { defineConfig } from 'tsup';
import { dotenvLoad } from 'dotenv-mono'; import {
executableConfig,
mergeConfig,
commonExternals
} from '@tm/build-config';
// Load .env from root level (monorepo support) export default defineConfig(
dotenvLoad(); mergeConfig(executableConfig, {
entry: {
// Get all TM_PUBLIC_* env variables for build-time injection 'task-master': 'bin/task-master.js',
const getBuildTimeEnvs = () => { 'mcp-server': 'mcp-server/server.js'
const envs: Record<string, string> = {}; },
for (const [key, value] of Object.entries(process.env)) { outDir: 'dist',
if (key.startsWith('TM_PUBLIC_')) { publicDir: 'public',
// Return the actual value, not JSON.stringify'd // Bundle our monorepo packages but keep node_modules external
envs[key] = value || ''; noExternal: [/@tm\/.*/],
} external: commonExternals
} })
return envs; );
};
export default defineConfig({
entry: {
'task-master': 'bin/task-master.js',
'mcp-server': 'mcp-server/server.js'
},
format: ['esm'],
target: 'node18',
splitting: false,
sourcemap: true,
clean: true,
bundle: true, // Bundle everything into one file
outDir: 'dist',
publicDir: 'public',
// Handle TypeScript imports transparently
loader: {
'.js': 'jsx',
'.ts': 'ts'
},
// Replace process.env.TM_PUBLIC_* with actual values at build time
env: getBuildTimeEnvs(),
esbuildOptions(options) {
options.platform = 'node';
// Allow importing TypeScript from JavaScript
options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
},
// Bundle our monorepo packages but keep node_modules external
noExternal: [/@tm\/.*/],
// Don't bundle any other dependencies (auto-external all node_modules)
// This regex matches anything that doesn't start with . or /
external: [/^[^./]/],
// Add success message for debugging
onSuccess: 'echo "✅ Build completed successfully"'
});