Compare commits
4 Commits
docs/auto-
...
ralph/feat
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d4826e0258 | ||
|
|
b9e3eecafe | ||
|
|
6dd910fc52 | ||
|
|
19ec52181d |
@@ -6,7 +6,7 @@
|
|||||||
"repo": "eyaltoledano/claude-task-master"
|
"repo": "eyaltoledano/claude-task-master"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"commit": true,
|
"commit": false,
|
||||||
"fixed": [],
|
"fixed": [],
|
||||||
"linked": [],
|
"linked": [],
|
||||||
"access": "public",
|
"access": "public",
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"mode": "exit",
|
"mode": "pre",
|
||||||
"tag": "rc",
|
"tag": "rc",
|
||||||
"initialVersions": {
|
"initialVersions": {
|
||||||
"task-master-ai": "0.25.1",
|
"task-master-ai": "0.25.1",
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
---
|
|
||||||
"extension": minor
|
|
||||||
---
|
|
||||||
|
|
||||||
Added a Start Build button to the VSCODE Task Properties Right Panel
|
|
||||||
110
.github/workflows/ci.yml
vendored
110
.github/workflows/ci.yml
vendored
@@ -9,109 +9,70 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- next
|
- next
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
|
|
||||||
env:
|
|
||||||
DO_NOT_TRACK: 1
|
|
||||||
NODE_ENV: development
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Fast checks that can run in parallel
|
setup:
|
||||||
format-check:
|
|
||||||
name: Format Check
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 2
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
cache: "npm"
|
cache: 'npm'
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install Dependencies
|
||||||
run: npm install --frozen-lockfile --prefer-offline
|
id: install
|
||||||
timeout-minutes: 5
|
run: npm ci
|
||||||
|
timeout-minutes: 2
|
||||||
|
|
||||||
|
- name: Cache node_modules
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: node_modules
|
||||||
|
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
|
||||||
|
|
||||||
|
format-check:
|
||||||
|
needs: setup
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Restore node_modules
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: node_modules
|
||||||
|
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
|
||||||
|
|
||||||
- name: Format Check
|
- name: Format Check
|
||||||
run: npm run format-check
|
run: npm run format-check
|
||||||
env:
|
env:
|
||||||
FORCE_COLOR: 1
|
FORCE_COLOR: 1
|
||||||
|
|
||||||
typecheck:
|
|
||||||
name: Typecheck
|
|
||||||
timeout-minutes: 10
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 2
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: "npm"
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm install --frozen-lockfile --prefer-offline
|
|
||||||
timeout-minutes: 5
|
|
||||||
|
|
||||||
- name: Typecheck
|
|
||||||
run: npm run typecheck
|
|
||||||
env:
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
|
|
||||||
# Build job to ensure everything compiles
|
|
||||||
build:
|
|
||||||
name: Build
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 2
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: "npm"
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm install --frozen-lockfile --prefer-offline
|
|
||||||
timeout-minutes: 5
|
|
||||||
|
|
||||||
- name: Build
|
|
||||||
run: npm run build
|
|
||||||
env:
|
|
||||||
NODE_ENV: production
|
|
||||||
FORCE_COLOR: 1
|
|
||||||
|
|
||||||
test:
|
test:
|
||||||
name: Test
|
needs: setup
|
||||||
timeout-minutes: 15
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: [format-check, typecheck, build]
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
|
||||||
fetch-depth: 2
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
cache: "npm"
|
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Restore node_modules
|
||||||
run: npm install --frozen-lockfile --prefer-offline
|
uses: actions/cache@v4
|
||||||
timeout-minutes: 5
|
with:
|
||||||
|
path: node_modules
|
||||||
- name: Build packages (required for tests)
|
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
|
||||||
run: npm run build:packages
|
|
||||||
env:
|
|
||||||
NODE_ENV: production
|
|
||||||
|
|
||||||
- name: Run Tests
|
- name: Run Tests
|
||||||
run: |
|
run: |
|
||||||
@@ -120,6 +81,7 @@ jobs:
|
|||||||
NODE_ENV: test
|
NODE_ENV: test
|
||||||
CI: true
|
CI: true
|
||||||
FORCE_COLOR: 1
|
FORCE_COLOR: 1
|
||||||
|
timeout-minutes: 10
|
||||||
|
|
||||||
- name: Upload Test Results
|
- name: Upload Test Results
|
||||||
if: always()
|
if: always()
|
||||||
|
|||||||
57
.github/workflows/claude-docs-trigger.yml
vendored
57
.github/workflows/claude-docs-trigger.yml
vendored
@@ -1,57 +0,0 @@
|
|||||||
name: Trigger Claude Documentation Update
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- next
|
|
||||||
paths-ignore:
|
|
||||||
- "apps/docs/**"
|
|
||||||
- "*.md"
|
|
||||||
- ".github/workflows/**"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
trigger-docs-update:
|
|
||||||
# Only run if changes were merged (not direct pushes from bots)
|
|
||||||
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
actions: write
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 2 # Need previous commit for comparison
|
|
||||||
|
|
||||||
- name: Get changed files
|
|
||||||
id: changed-files
|
|
||||||
run: |
|
|
||||||
echo "Changed files in this push:"
|
|
||||||
git diff --name-only HEAD^ HEAD | tee changed_files.txt
|
|
||||||
|
|
||||||
# Store changed files for Claude to analyze (escaped for JSON)
|
|
||||||
CHANGED_FILES=$(git diff --name-only HEAD^ HEAD | jq -Rs .)
|
|
||||||
echo "changed_files=$CHANGED_FILES" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
# Get the commit message (escaped for JSON)
|
|
||||||
COMMIT_MSG=$(git log -1 --pretty=%B | jq -Rs .)
|
|
||||||
echo "commit_message=$COMMIT_MSG" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
# Get diff for documentation context (escaped for JSON)
|
|
||||||
COMMIT_DIFF=$(git diff HEAD^ HEAD --stat | jq -Rs .)
|
|
||||||
echo "commit_diff=$COMMIT_DIFF" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
# Get commit SHA
|
|
||||||
echo "commit_sha=${{ github.sha }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Trigger Claude workflow
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
run: |
|
|
||||||
# Trigger the Claude docs updater workflow with the change information
|
|
||||||
gh workflow run claude-docs-updater.yml \
|
|
||||||
--ref next \
|
|
||||||
-f commit_sha="${{ steps.changed-files.outputs.commit_sha }}" \
|
|
||||||
-f commit_message=${{ steps.changed-files.outputs.commit_message }} \
|
|
||||||
-f changed_files=${{ steps.changed-files.outputs.changed_files }} \
|
|
||||||
-f commit_diff=${{ steps.changed-files.outputs.commit_diff }}
|
|
||||||
65
.github/workflows/claude-docs-updater.yml
vendored
65
.github/workflows/claude-docs-updater.yml
vendored
@@ -1,27 +1,18 @@
|
|||||||
name: Claude Documentation Updater
|
name: Claude Documentation Updater
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
push:
|
||||||
inputs:
|
branches:
|
||||||
commit_sha:
|
- next
|
||||||
description: 'The commit SHA that triggered this update'
|
paths-ignore:
|
||||||
required: true
|
- "apps/docs/**"
|
||||||
type: string
|
- "*.md"
|
||||||
commit_message:
|
- ".github/workflows/**"
|
||||||
description: 'The commit message'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
changed_files:
|
|
||||||
description: 'List of changed files'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
commit_diff:
|
|
||||||
description: 'Diff summary of changes'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
update-docs:
|
update-docs:
|
||||||
|
# Only run if changes were merged (not direct pushes from bots)
|
||||||
|
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
@@ -31,8 +22,28 @@ jobs:
|
|||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: next
|
fetch-depth: 2 # Need previous commit for comparison
|
||||||
fetch-depth: 0 # Need full history to checkout specific commit
|
|
||||||
|
- name: Get changed files
|
||||||
|
id: changed-files
|
||||||
|
run: |
|
||||||
|
echo "Changed files in this push:"
|
||||||
|
git diff --name-only HEAD^ HEAD | tee changed_files.txt
|
||||||
|
|
||||||
|
# Store changed files for Claude to analyze
|
||||||
|
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
git diff --name-only HEAD^ HEAD >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Get the commit message and changes summary
|
||||||
|
echo "commit_message<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
git log -1 --pretty=%B >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Get diff for documentation context
|
||||||
|
echo "commit_diff<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
git diff HEAD^ HEAD --stat >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Create docs update branch
|
- name: Create docs update branch
|
||||||
id: create-branch
|
id: create-branch
|
||||||
@@ -60,12 +71,12 @@ jobs:
|
|||||||
You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.
|
You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.
|
||||||
|
|
||||||
Recent changes:
|
Recent changes:
|
||||||
- Commit: ${{ inputs.commit_message }}
|
- Commit: ${{ steps.changed-files.outputs.commit_message }}
|
||||||
- Changed files:
|
- Changed files:
|
||||||
${{ inputs.changed_files }}
|
${{ steps.changed-files.outputs.changed_files }}
|
||||||
|
|
||||||
- Changes summary:
|
- Changes summary:
|
||||||
${{ inputs.commit_diff }}
|
${{ steps.changed-files.outputs.commit_diff }}
|
||||||
|
|
||||||
Your task:
|
Your task:
|
||||||
1. Analyze the changes to understand what functionality was added, modified, or removed
|
1. Analyze the changes to understand what functionality was added, modified, or removed
|
||||||
@@ -102,7 +113,7 @@ jobs:
|
|||||||
|
|
||||||
This PR was automatically generated to update documentation based on recent changes.
|
This PR was automatically generated to update documentation based on recent changes.
|
||||||
|
|
||||||
Original commit: ${{ inputs.commit_message }}
|
Original commit: ${{ steps.changed-files.outputs.commit_message }}
|
||||||
|
|
||||||
Co-authored-by: Claude <claude-assistant@anthropic.com>"
|
Co-authored-by: Claude <claude-assistant@anthropic.com>"
|
||||||
fi
|
fi
|
||||||
@@ -122,12 +133,12 @@ jobs:
|
|||||||
This PR automatically updates documentation based on recent changes merged to the \`next\` branch.
|
This PR automatically updates documentation based on recent changes merged to the \`next\` branch.
|
||||||
|
|
||||||
### Original Changes
|
### Original Changes
|
||||||
**Commit:** ${{ inputs.commit_sha }}
|
**Commit:** ${{ github.sha }}
|
||||||
**Message:** ${{ inputs.commit_message }}
|
**Message:** ${{ steps.changed-files.outputs.commit_message }}
|
||||||
|
|
||||||
### Changed Files in Original Commit
|
### Changed Files in Original Commit
|
||||||
\`\`\`
|
\`\`\`
|
||||||
${{ inputs.changed_files }}
|
${{ steps.changed-files.outputs.changed_files }}
|
||||||
\`\`\`
|
\`\`\`
|
||||||
|
|
||||||
### Documentation Updates
|
### Documentation Updates
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -93,7 +93,4 @@ dev-debug.log
|
|||||||
apps/extension/.vscode-test/
|
apps/extension/.vscode-test/
|
||||||
|
|
||||||
# apps/extension
|
# apps/extension
|
||||||
apps/extension/vsix-build/
|
apps/extension/vsix-build/
|
||||||
|
|
||||||
# turbo
|
|
||||||
.turbo
|
|
||||||
@@ -4,41 +4,40 @@
|
|||||||
"description": "Task Master CLI - Command line interface for task management",
|
"description": "Task Master CLI - Command line interface for task management",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "./dist/index.js",
|
"main": "./dist/index.js",
|
||||||
"types": "./src/index.ts",
|
"types": "./dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
".": {
|
".": {
|
||||||
"types": "./src/index.ts",
|
"types": "./src/index.ts",
|
||||||
"import": "./dist/index.js"
|
"import": "./dist/index.js",
|
||||||
|
"require": "./dist/index.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"files": ["dist", "README.md"],
|
"files": ["dist", "README.md"],
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsup",
|
||||||
"dev": "tsc --watch",
|
"dev": "tsup --watch",
|
||||||
"typecheck": "tsc --noEmit",
|
"typecheck": "tsc --noEmit",
|
||||||
"lint": "biome check src",
|
"lint": "biome check src",
|
||||||
"format": "biome format --write src",
|
"format": "biome format --write src",
|
||||||
"test": "vitest run",
|
"test": "vitest run",
|
||||||
"test:watch": "vitest",
|
"test:watch": "vitest"
|
||||||
"test:coverage": "vitest run --coverage",
|
|
||||||
"test:unit": "vitest run -t unit",
|
|
||||||
"test:integration": "vitest run -t integration",
|
|
||||||
"test:e2e": "vitest run --dir tests/e2e",
|
|
||||||
"test:ci": "vitest run --coverage --reporter=dot"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@tm/core": "*",
|
"@tm/core": "*",
|
||||||
|
"@tm/workflow-engine": "*",
|
||||||
"boxen": "^7.1.1",
|
"boxen": "^7.1.1",
|
||||||
"chalk": "5.6.2",
|
"chalk": "^5.3.0",
|
||||||
"cli-table3": "^0.6.5",
|
"cli-table3": "^0.6.5",
|
||||||
"commander": "^12.1.0",
|
"commander": "^12.1.0",
|
||||||
"inquirer": "^9.2.10",
|
"inquirer": "^9.2.10",
|
||||||
|
"open": "^10.2.0",
|
||||||
"ora": "^8.1.0"
|
"ora": "^8.1.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@biomejs/biome": "^1.9.4",
|
"@biomejs/biome": "^1.9.4",
|
||||||
"@types/inquirer": "^9.0.3",
|
"@types/inquirer": "^9.0.3",
|
||||||
"@types/node": "^22.10.5",
|
"@types/node": "^22.10.5",
|
||||||
|
"tsup": "^8.3.0",
|
||||||
"tsx": "^4.20.4",
|
"tsx": "^4.20.4",
|
||||||
"typescript": "^5.7.3",
|
"typescript": "^5.7.3",
|
||||||
"vitest": "^2.1.8"
|
"vitest": "^2.1.8"
|
||||||
|
|||||||
@@ -494,17 +494,6 @@ export class AuthCommand extends Command {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Static method to register this command on an existing program
|
* Static method to register this command on an existing program
|
||||||
* This is for gradual migration - allows commands.js to use this
|
|
||||||
*/
|
|
||||||
static registerOn(program: Command): Command {
|
|
||||||
const authCommand = new AuthCommand();
|
|
||||||
program.addCommand(authCommand);
|
|
||||||
return authCommand;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Alternative registration that returns the command for chaining
|
|
||||||
* Can also configure the command name if needed
|
|
||||||
*/
|
*/
|
||||||
static register(program: Command, name?: string): AuthCommand {
|
static register(program: Command, name?: string): AuthCommand {
|
||||||
const authCommand = new AuthCommand(name);
|
const authCommand = new AuthCommand(name);
|
||||||
|
|||||||
@@ -1,570 +0,0 @@
|
|||||||
/**
|
|
||||||
* @fileoverview Context command for managing org/brief selection
|
|
||||||
* Provides a clean interface for workspace context management
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { Command } from 'commander';
|
|
||||||
import chalk from 'chalk';
|
|
||||||
import inquirer from 'inquirer';
|
|
||||||
import ora from 'ora';
|
|
||||||
import {
|
|
||||||
AuthManager,
|
|
||||||
AuthenticationError,
|
|
||||||
type UserContext
|
|
||||||
} from '@tm/core/auth';
|
|
||||||
import * as ui from '../utils/ui.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result type from context command
|
|
||||||
*/
|
|
||||||
export interface ContextResult {
|
|
||||||
success: boolean;
|
|
||||||
action: 'show' | 'select-org' | 'select-brief' | 'clear' | 'set';
|
|
||||||
context?: UserContext;
|
|
||||||
message?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* ContextCommand extending Commander's Command class
|
|
||||||
* Manages user's workspace context (org/brief selection)
|
|
||||||
*/
|
|
||||||
export class ContextCommand extends Command {
|
|
||||||
private authManager: AuthManager;
|
|
||||||
private lastResult?: ContextResult;
|
|
||||||
|
|
||||||
constructor(name?: string) {
|
|
||||||
super(name || 'context');
|
|
||||||
|
|
||||||
// Initialize auth manager
|
|
||||||
this.authManager = AuthManager.getInstance();
|
|
||||||
|
|
||||||
// Configure the command
|
|
||||||
this.description(
|
|
||||||
'Manage workspace context (organization and brief selection)'
|
|
||||||
);
|
|
||||||
|
|
||||||
// Add subcommands
|
|
||||||
this.addOrgCommand();
|
|
||||||
this.addBriefCommand();
|
|
||||||
this.addClearCommand();
|
|
||||||
this.addSetCommand();
|
|
||||||
|
|
||||||
// Default action shows current context
|
|
||||||
this.action(async () => {
|
|
||||||
await this.executeShow();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add org selection subcommand
|
|
||||||
*/
|
|
||||||
private addOrgCommand(): void {
|
|
||||||
this.command('org')
|
|
||||||
.description('Select an organization')
|
|
||||||
.action(async () => {
|
|
||||||
await this.executeSelectOrg();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add brief selection subcommand
|
|
||||||
*/
|
|
||||||
private addBriefCommand(): void {
|
|
||||||
this.command('brief')
|
|
||||||
.description('Select a brief within the current organization')
|
|
||||||
.action(async () => {
|
|
||||||
await this.executeSelectBrief();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add clear subcommand
|
|
||||||
*/
|
|
||||||
private addClearCommand(): void {
|
|
||||||
this.command('clear')
|
|
||||||
.description('Clear all context selections')
|
|
||||||
.action(async () => {
|
|
||||||
await this.executeClear();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add set subcommand for direct context setting
|
|
||||||
*/
|
|
||||||
private addSetCommand(): void {
|
|
||||||
this.command('set')
|
|
||||||
.description('Set context directly')
|
|
||||||
.option('--org <id>', 'Organization ID')
|
|
||||||
.option('--org-name <name>', 'Organization name')
|
|
||||||
.option('--brief <id>', 'Brief ID')
|
|
||||||
.option('--brief-name <name>', 'Brief name')
|
|
||||||
.action(async (options) => {
|
|
||||||
await this.executeSet(options);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute show current context
|
|
||||||
*/
|
|
||||||
private async executeShow(): Promise<void> {
|
|
||||||
try {
|
|
||||||
const result = this.displayContext();
|
|
||||||
this.setLastResult(result);
|
|
||||||
} catch (error: any) {
|
|
||||||
this.handleError(error);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Display current context
|
|
||||||
*/
|
|
||||||
private displayContext(): ContextResult {
|
|
||||||
// Check authentication first
|
|
||||||
if (!this.authManager.isAuthenticated()) {
|
|
||||||
console.log(chalk.yellow('✗ Not authenticated'));
|
|
||||||
console.log(chalk.gray('\n Run "tm auth login" to authenticate first'));
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
action: 'show',
|
|
||||||
message: 'Not authenticated'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const context = this.authManager.getContext();
|
|
||||||
|
|
||||||
console.log(chalk.cyan('\n🌍 Workspace Context\n'));
|
|
||||||
|
|
||||||
if (context && (context.orgId || context.briefId)) {
|
|
||||||
if (context.orgName || context.orgId) {
|
|
||||||
console.log(chalk.green('✓ Organization'));
|
|
||||||
if (context.orgName) {
|
|
||||||
console.log(chalk.white(` ${context.orgName}`));
|
|
||||||
}
|
|
||||||
if (context.orgId) {
|
|
||||||
console.log(chalk.gray(` ID: ${context.orgId}`));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (context.briefName || context.briefId) {
|
|
||||||
console.log(chalk.green('\n✓ Brief'));
|
|
||||||
if (context.briefName) {
|
|
||||||
console.log(chalk.white(` ${context.briefName}`));
|
|
||||||
}
|
|
||||||
if (context.briefId) {
|
|
||||||
console.log(chalk.gray(` ID: ${context.briefId}`));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (context.updatedAt) {
|
|
||||||
console.log(
|
|
||||||
chalk.gray(
|
|
||||||
`\n Last updated: ${new Date(context.updatedAt).toLocaleString()}`
|
|
||||||
)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'show',
|
|
||||||
context,
|
|
||||||
message: 'Context loaded'
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
console.log(chalk.yellow('✗ No context selected'));
|
|
||||||
console.log(
|
|
||||||
chalk.gray('\n Run "tm context org" to select an organization')
|
|
||||||
);
|
|
||||||
console.log(chalk.gray(' Run "tm context brief" to select a brief'));
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'show',
|
|
||||||
message: 'No context selected'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute org selection
|
|
||||||
*/
|
|
||||||
private async executeSelectOrg(): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Check authentication
|
|
||||||
if (!this.authManager.isAuthenticated()) {
|
|
||||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await this.selectOrganization();
|
|
||||||
this.setLastResult(result);
|
|
||||||
|
|
||||||
if (!result.success) {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
this.handleError(error);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Select an organization interactively
|
|
||||||
*/
|
|
||||||
private async selectOrganization(): Promise<ContextResult> {
|
|
||||||
const spinner = ora('Fetching organizations...').start();
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Fetch organizations from API
|
|
||||||
const organizations = await this.authManager.getOrganizations();
|
|
||||||
spinner.stop();
|
|
||||||
|
|
||||||
if (organizations.length === 0) {
|
|
||||||
ui.displayWarning('No organizations available');
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
action: 'select-org',
|
|
||||||
message: 'No organizations available'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prompt for selection
|
|
||||||
const { selectedOrg } = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'selectedOrg',
|
|
||||||
message: 'Select an organization:',
|
|
||||||
choices: organizations.map((org) => ({
|
|
||||||
name: org.name,
|
|
||||||
value: org
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
]);
|
|
||||||
|
|
||||||
// Update context
|
|
||||||
await this.authManager.updateContext({
|
|
||||||
orgId: selectedOrg.id,
|
|
||||||
orgName: selectedOrg.name,
|
|
||||||
// Clear brief when changing org
|
|
||||||
briefId: undefined,
|
|
||||||
briefName: undefined
|
|
||||||
});
|
|
||||||
|
|
||||||
ui.displaySuccess(`Selected organization: ${selectedOrg.name}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'select-org',
|
|
||||||
context: this.authManager.getContext() || undefined,
|
|
||||||
message: `Selected organization: ${selectedOrg.name}`
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
spinner.fail('Failed to fetch organizations');
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute brief selection
|
|
||||||
*/
|
|
||||||
private async executeSelectBrief(): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Check authentication
|
|
||||||
if (!this.authManager.isAuthenticated()) {
|
|
||||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if org is selected
|
|
||||||
const context = this.authManager.getContext();
|
|
||||||
if (!context?.orgId) {
|
|
||||||
ui.displayError(
|
|
||||||
'No organization selected. Run "tm context org" first.'
|
|
||||||
);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await this.selectBrief(context.orgId);
|
|
||||||
this.setLastResult(result);
|
|
||||||
|
|
||||||
if (!result.success) {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
this.handleError(error);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Select a brief within the current organization
|
|
||||||
*/
|
|
||||||
private async selectBrief(orgId: string): Promise<ContextResult> {
|
|
||||||
const spinner = ora('Fetching briefs...').start();
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Fetch briefs from API
|
|
||||||
const briefs = await this.authManager.getBriefs(orgId);
|
|
||||||
spinner.stop();
|
|
||||||
|
|
||||||
if (briefs.length === 0) {
|
|
||||||
ui.displayWarning('No briefs available in this organization');
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
action: 'select-brief',
|
|
||||||
message: 'No briefs available'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prompt for selection
|
|
||||||
const { selectedBrief } = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'selectedBrief',
|
|
||||||
message: 'Select a brief:',
|
|
||||||
choices: [
|
|
||||||
{ name: '(No brief - organization level)', value: null },
|
|
||||||
...briefs.map((brief) => ({
|
|
||||||
name: `Brief ${brief.id.slice(0, 8)} (${new Date(brief.createdAt).toLocaleDateString()})`,
|
|
||||||
value: brief
|
|
||||||
}))
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]);
|
|
||||||
|
|
||||||
if (selectedBrief) {
|
|
||||||
// Update context with brief
|
|
||||||
const briefName = `Brief ${selectedBrief.id.slice(0, 8)}`;
|
|
||||||
await this.authManager.updateContext({
|
|
||||||
briefId: selectedBrief.id,
|
|
||||||
briefName: briefName
|
|
||||||
});
|
|
||||||
|
|
||||||
ui.displaySuccess(`Selected brief: ${briefName}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'select-brief',
|
|
||||||
context: this.authManager.getContext() || undefined,
|
|
||||||
message: `Selected brief: ${selectedBrief.name}`
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
// Clear brief selection
|
|
||||||
await this.authManager.updateContext({
|
|
||||||
briefId: undefined,
|
|
||||||
briefName: undefined
|
|
||||||
});
|
|
||||||
|
|
||||||
ui.displaySuccess('Cleared brief selection (organization level)');
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'select-brief',
|
|
||||||
context: this.authManager.getContext() || undefined,
|
|
||||||
message: 'Cleared brief selection'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
spinner.fail('Failed to fetch briefs');
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute clear context
|
|
||||||
*/
|
|
||||||
private async executeClear(): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Check authentication
|
|
||||||
if (!this.authManager.isAuthenticated()) {
|
|
||||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await this.clearContext();
|
|
||||||
this.setLastResult(result);
|
|
||||||
|
|
||||||
if (!result.success) {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
this.handleError(error);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clear all context selections
|
|
||||||
*/
|
|
||||||
private async clearContext(): Promise<ContextResult> {
|
|
||||||
try {
|
|
||||||
await this.authManager.clearContext();
|
|
||||||
ui.displaySuccess('Context cleared');
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'clear',
|
|
||||||
message: 'Context cleared'
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ui.displayError(`Failed to clear context: ${(error as Error).message}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
action: 'clear',
|
|
||||||
message: `Failed to clear context: ${(error as Error).message}`
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute set context with options
|
|
||||||
*/
|
|
||||||
private async executeSet(options: any): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Check authentication
|
|
||||||
if (!this.authManager.isAuthenticated()) {
|
|
||||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await this.setContext(options);
|
|
||||||
this.setLastResult(result);
|
|
||||||
|
|
||||||
if (!result.success) {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
this.handleError(error);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set context directly from options
|
|
||||||
*/
|
|
||||||
private async setContext(options: any): Promise<ContextResult> {
|
|
||||||
try {
|
|
||||||
const context: Partial<UserContext> = {};
|
|
||||||
|
|
||||||
if (options.org) {
|
|
||||||
context.orgId = options.org;
|
|
||||||
}
|
|
||||||
if (options.orgName) {
|
|
||||||
context.orgName = options.orgName;
|
|
||||||
}
|
|
||||||
if (options.brief) {
|
|
||||||
context.briefId = options.brief;
|
|
||||||
}
|
|
||||||
if (options.briefName) {
|
|
||||||
context.briefName = options.briefName;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Object.keys(context).length === 0) {
|
|
||||||
ui.displayWarning('No context options provided');
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
action: 'set',
|
|
||||||
message: 'No context options provided'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.authManager.updateContext(context);
|
|
||||||
ui.displaySuccess('Context updated');
|
|
||||||
|
|
||||||
// Display what was set
|
|
||||||
if (context.orgName || context.orgId) {
|
|
||||||
console.log(
|
|
||||||
chalk.gray(` Organization: ${context.orgName || context.orgId}`)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if (context.briefName || context.briefId) {
|
|
||||||
console.log(
|
|
||||||
chalk.gray(` Brief: ${context.briefName || context.briefId}`)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
action: 'set',
|
|
||||||
context: this.authManager.getContext() || undefined,
|
|
||||||
message: 'Context updated'
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ui.displayError(`Failed to set context: ${(error as Error).message}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
action: 'set',
|
|
||||||
message: `Failed to set context: ${(error as Error).message}`
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle errors
|
|
||||||
*/
|
|
||||||
private handleError(error: any): void {
|
|
||||||
if (error instanceof AuthenticationError) {
|
|
||||||
console.error(chalk.red(`\n✗ ${error.message}`));
|
|
||||||
|
|
||||||
if (error.code === 'NOT_AUTHENTICATED') {
|
|
||||||
ui.displayWarning('Please authenticate first: tm auth login');
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const msg = error?.message ?? String(error);
|
|
||||||
console.error(chalk.red(`Error: ${msg}`));
|
|
||||||
|
|
||||||
if (error.stack && process.env.DEBUG) {
|
|
||||||
console.error(chalk.gray(error.stack));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
	/**
	 * Record the most recent context operation so embedding callers can
	 * retrieve it later via getLastResult().
	 */
	private setLastResult(result: ContextResult): void {
		this.lastResult = result;
	}
|
|
||||||
|
|
||||||
	/**
	 * Return the result of the most recent context operation, or
	 * `undefined` if no operation has run yet (for programmatic usage).
	 */
	getLastResult(): ContextResult | undefined {
		return this.lastResult;
	}
|
|
||||||
|
|
||||||
	/**
	 * Current user context as reported by the auth manager, or `null`
	 * when none is set (for programmatic usage).
	 */
	getContext(): UserContext | null {
		return this.authManager.getContext();
	}
|
|
||||||
|
|
||||||
	/**
	 * Clean up resources. The context command holds no external
	 * resources, so this is intentionally a no-op kept for parity with
	 * the other commands' cleanup() methods.
	 */
	async cleanup(): Promise<void> {
		// No resources to clean up for context command
	}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Static method to register this command on an existing program
|
|
||||||
*/
|
|
||||||
static registerOn(program: Command): Command {
|
|
||||||
const contextCommand = new ContextCommand();
|
|
||||||
program.addCommand(contextCommand);
|
|
||||||
return contextCommand;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Alternative registration that returns the command for chaining
|
|
||||||
*/
|
|
||||||
static register(program: Command, name?: string): ContextCommand {
|
|
||||||
const contextCommand = new ContextCommand(name);
|
|
||||||
program.addCommand(contextCommand);
|
|
||||||
return contextCommand;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
38
apps/cli/src/commands/index.ts
Normal file
38
apps/cli/src/commands/index.ts
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
/**
|
||||||
|
* Command registry - exports all CLI commands for central registration
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Command } from 'commander';
|
||||||
|
import { ListTasksCommand } from './list.command.js';
|
||||||
|
import { AuthCommand } from './auth.command.js';
|
||||||
|
import WorkflowCommand from './workflow.command.js';
|
||||||
|
|
||||||
|
// Define interface for command classes that can register themselves
|
||||||
|
export interface CommandRegistrar {
|
||||||
|
register(program: Command, name?: string): any;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Future commands can be added here as they're created
|
||||||
|
// The pattern is: each command exports a class with a static register(program: Command, name?: string) method
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Auto-register all exported commands that implement the CommandRegistrar interface
|
||||||
|
*/
|
||||||
|
export function registerAllCommands(program: Command): void {
|
||||||
|
// Get all exports from this module
|
||||||
|
const commands = [
|
||||||
|
ListTasksCommand,
|
||||||
|
AuthCommand,
|
||||||
|
WorkflowCommand
|
||||||
|
// Add new commands here as they're imported above
|
||||||
|
];
|
||||||
|
|
||||||
|
commands.forEach((CommandClass) => {
|
||||||
|
if (
|
||||||
|
'register' in CommandClass &&
|
||||||
|
typeof CommandClass.register === 'function'
|
||||||
|
) {
|
||||||
|
CommandClass.register(program);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -15,7 +15,6 @@ import {
|
|||||||
STATUS_ICONS,
|
STATUS_ICONS,
|
||||||
type OutputFormat
|
type OutputFormat
|
||||||
} from '@tm/core';
|
} from '@tm/core';
|
||||||
import type { StorageType } from '@tm/core/types';
|
|
||||||
import * as ui from '../utils/ui.js';
|
import * as ui from '../utils/ui.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -38,7 +37,7 @@ export interface ListTasksResult {
|
|||||||
total: number;
|
total: number;
|
||||||
filtered: number;
|
filtered: number;
|
||||||
tag?: string;
|
tag?: string;
|
||||||
storageType: Exclude<StorageType, 'auto'>;
|
storageType: 'file' | 'api';
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -308,17 +307,6 @@ export class ListTasksCommand extends Command {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Static method to register this command on an existing program
|
* Static method to register this command on an existing program
|
||||||
* This is for gradual migration - allows commands.js to use this
|
|
||||||
*/
|
|
||||||
static registerOn(program: Command): Command {
|
|
||||||
const listCommand = new ListTasksCommand();
|
|
||||||
program.addCommand(listCommand);
|
|
||||||
return listCommand;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Alternative registration that returns the command for chaining
|
|
||||||
* Can also configure the command name if needed
|
|
||||||
*/
|
*/
|
||||||
static register(program: Command, name?: string): ListTasksCommand {
|
static register(program: Command, name?: string): ListTasksCommand {
|
||||||
const listCommand = new ListTasksCommand(name);
|
const listCommand = new ListTasksCommand(name);
|
||||||
|
|||||||
@@ -1,406 +0,0 @@
|
|||||||
/**
|
|
||||||
* @fileoverview ShowCommand using Commander's native class pattern
|
|
||||||
* Extends Commander.Command for better integration with the framework
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { Command } from 'commander';
|
|
||||||
import chalk from 'chalk';
|
|
||||||
import boxen from 'boxen';
|
|
||||||
import { createTaskMasterCore, type Task, type TaskMasterCore } from '@tm/core';
|
|
||||||
import type { StorageType } from '@tm/core/types';
|
|
||||||
import * as ui from '../utils/ui.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Options interface for the show command
|
|
||||||
*/
|
|
||||||
export interface ShowCommandOptions {
|
|
||||||
id?: string;
|
|
||||||
status?: string;
|
|
||||||
format?: 'text' | 'json';
|
|
||||||
silent?: boolean;
|
|
||||||
project?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result type from show command
|
|
||||||
*/
|
|
||||||
export interface ShowTaskResult {
|
|
||||||
task: Task | null;
|
|
||||||
found: boolean;
|
|
||||||
storageType: Exclude<StorageType, 'auto'>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result type for multiple tasks
|
|
||||||
*/
|
|
||||||
export interface ShowMultipleTasksResult {
|
|
||||||
tasks: Task[];
|
|
||||||
notFound: string[];
|
|
||||||
storageType: Exclude<StorageType, 'auto'>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* ShowCommand extending Commander's Command class
|
|
||||||
* This is a thin presentation layer over @tm/core
|
|
||||||
*/
|
|
||||||
export class ShowCommand extends Command {
|
|
||||||
private tmCore?: TaskMasterCore;
|
|
||||||
private lastResult?: ShowTaskResult | ShowMultipleTasksResult;
|
|
||||||
|
|
||||||
constructor(name?: string) {
|
|
||||||
super(name || 'show');
|
|
||||||
|
|
||||||
// Configure the command
|
|
||||||
this.description('Display detailed information about one or more tasks')
|
|
||||||
.argument('[id]', 'Task ID(s) to show (comma-separated for multiple)')
|
|
||||||
.option(
|
|
||||||
'-i, --id <id>',
|
|
||||||
'Task ID(s) to show (comma-separated for multiple)'
|
|
||||||
)
|
|
||||||
.option('-s, --status <status>', 'Filter subtasks by status')
|
|
||||||
.option('-f, --format <format>', 'Output format (text, json)', 'text')
|
|
||||||
.option('--silent', 'Suppress output (useful for programmatic usage)')
|
|
||||||
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
|
||||||
.action(
|
|
||||||
async (taskId: string | undefined, options: ShowCommandOptions) => {
|
|
||||||
await this.executeCommand(taskId, options);
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the show command
|
|
||||||
*/
|
|
||||||
private async executeCommand(
|
|
||||||
taskId: string | undefined,
|
|
||||||
options: ShowCommandOptions
|
|
||||||
): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Validate options
|
|
||||||
if (!this.validateOptions(options)) {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize tm-core
|
|
||||||
await this.initializeCore(options.project || process.cwd());
|
|
||||||
|
|
||||||
// Get the task ID from argument or option
|
|
||||||
const idArg = taskId || options.id;
|
|
||||||
if (!idArg) {
|
|
||||||
console.error(chalk.red('Error: Please provide a task ID'));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if multiple IDs are provided (comma-separated)
|
|
||||||
const taskIds = idArg
|
|
||||||
.split(',')
|
|
||||||
.map((id) => id.trim())
|
|
||||||
.filter((id) => id.length > 0);
|
|
||||||
|
|
||||||
// Get tasks from core
|
|
||||||
const result =
|
|
||||||
taskIds.length > 1
|
|
||||||
? await this.getMultipleTasks(taskIds, options)
|
|
||||||
: await this.getSingleTask(taskIds[0], options);
|
|
||||||
|
|
||||||
// Store result for programmatic access
|
|
||||||
this.setLastResult(result);
|
|
||||||
|
|
||||||
// Display results
|
|
||||||
if (!options.silent) {
|
|
||||||
this.displayResults(result, options);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
const msg = error?.getSanitizedDetails?.() ?? {
|
|
||||||
message: error?.message ?? String(error)
|
|
||||||
};
|
|
||||||
console.error(chalk.red(`Error: ${msg.message || 'Unexpected error'}`));
|
|
||||||
if (error.stack && process.env.DEBUG) {
|
|
||||||
console.error(chalk.gray(error.stack));
|
|
||||||
}
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate command options
|
|
||||||
*/
|
|
||||||
private validateOptions(options: ShowCommandOptions): boolean {
|
|
||||||
// Validate format
|
|
||||||
if (options.format && !['text', 'json'].includes(options.format)) {
|
|
||||||
console.error(chalk.red(`Invalid format: ${options.format}`));
|
|
||||||
console.error(chalk.gray(`Valid formats: text, json`));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize TaskMasterCore
|
|
||||||
*/
|
|
||||||
private async initializeCore(projectRoot: string): Promise<void> {
|
|
||||||
if (!this.tmCore) {
|
|
||||||
this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a single task from tm-core
|
|
||||||
*/
|
|
||||||
private async getSingleTask(
|
|
||||||
taskId: string,
|
|
||||||
_options: ShowCommandOptions
|
|
||||||
): Promise<ShowTaskResult> {
|
|
||||||
if (!this.tmCore) {
|
|
||||||
throw new Error('TaskMasterCore not initialized');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the task
|
|
||||||
const task = await this.tmCore.getTask(taskId);
|
|
||||||
|
|
||||||
// Get storage type
|
|
||||||
const storageType = this.tmCore.getStorageType();
|
|
||||||
|
|
||||||
return {
|
|
||||||
task,
|
|
||||||
found: task !== null,
|
|
||||||
storageType: storageType as Exclude<StorageType, 'auto'>
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get multiple tasks from tm-core
|
|
||||||
*/
|
|
||||||
private async getMultipleTasks(
|
|
||||||
taskIds: string[],
|
|
||||||
_options: ShowCommandOptions
|
|
||||||
): Promise<ShowMultipleTasksResult> {
|
|
||||||
if (!this.tmCore) {
|
|
||||||
throw new Error('TaskMasterCore not initialized');
|
|
||||||
}
|
|
||||||
|
|
||||||
const tasks: Task[] = [];
|
|
||||||
const notFound: string[] = [];
|
|
||||||
|
|
||||||
// Get each task individually
|
|
||||||
for (const taskId of taskIds) {
|
|
||||||
const task = await this.tmCore.getTask(taskId);
|
|
||||||
if (task) {
|
|
||||||
tasks.push(task);
|
|
||||||
} else {
|
|
||||||
notFound.push(taskId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get storage type
|
|
||||||
const storageType = this.tmCore.getStorageType();
|
|
||||||
|
|
||||||
return {
|
|
||||||
tasks,
|
|
||||||
notFound,
|
|
||||||
storageType: storageType as Exclude<StorageType, 'auto'>
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Display results based on format
|
|
||||||
*/
|
|
||||||
private displayResults(
|
|
||||||
result: ShowTaskResult | ShowMultipleTasksResult,
|
|
||||||
options: ShowCommandOptions
|
|
||||||
): void {
|
|
||||||
const format = options.format || 'text';
|
|
||||||
|
|
||||||
switch (format) {
|
|
||||||
case 'json':
|
|
||||||
this.displayJson(result);
|
|
||||||
break;
|
|
||||||
|
|
||||||
case 'text':
|
|
||||||
default:
|
|
||||||
if ('task' in result) {
|
|
||||||
// Single task result
|
|
||||||
this.displaySingleTask(result, options);
|
|
||||||
} else {
|
|
||||||
// Multiple tasks result
|
|
||||||
this.displayMultipleTasks(result, options);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Display in JSON format
|
|
||||||
*/
|
|
||||||
private displayJson(result: ShowTaskResult | ShowMultipleTasksResult): void {
|
|
||||||
console.log(JSON.stringify(result, null, 2));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Display a single task in text format
|
|
||||||
*/
|
|
||||||
private displaySingleTask(
|
|
||||||
result: ShowTaskResult,
|
|
||||||
options: ShowCommandOptions
|
|
||||||
): void {
|
|
||||||
if (!result.found || !result.task) {
|
|
||||||
console.log(
|
|
||||||
boxen(chalk.yellow(`Task not found!`), {
|
|
||||||
padding: { top: 0, bottom: 0, left: 1, right: 1 },
|
|
||||||
borderColor: 'yellow',
|
|
||||||
borderStyle: 'round',
|
|
||||||
margin: { top: 1 }
|
|
||||||
})
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const task = result.task;
|
|
||||||
|
|
||||||
// Header
|
|
||||||
console.log(
|
|
||||||
boxen(chalk.white.bold(`Task #${task.id} - ${task.title}`), {
|
|
||||||
padding: { top: 0, bottom: 0, left: 1, right: 1 },
|
|
||||||
borderColor: 'blue',
|
|
||||||
borderStyle: 'round',
|
|
||||||
margin: { top: 1 }
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
// Task details
|
|
||||||
console.log(
|
|
||||||
`\n${chalk.blue.bold('Status:')} ${ui.getStatusWithColor(task.status)}`
|
|
||||||
);
|
|
||||||
console.log(
|
|
||||||
`${chalk.blue.bold('Priority:')} ${ui.getPriorityWithColor(task.priority)}`
|
|
||||||
);
|
|
||||||
|
|
||||||
if (task.description) {
|
|
||||||
console.log(`\n${chalk.blue.bold('Description:')}`);
|
|
||||||
console.log(task.description);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (task.details) {
|
|
||||||
console.log(`\n${chalk.blue.bold('Details:')}`);
|
|
||||||
console.log(task.details);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Dependencies
|
|
||||||
if (task.dependencies && task.dependencies.length > 0) {
|
|
||||||
console.log(`\n${chalk.blue.bold('Dependencies:')}`);
|
|
||||||
task.dependencies.forEach((dep) => {
|
|
||||||
console.log(` - ${chalk.cyan(dep)}`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Subtasks
|
|
||||||
if (task.subtasks && task.subtasks.length > 0) {
|
|
||||||
console.log(`\n${chalk.blue.bold('Subtasks:')}`);
|
|
||||||
|
|
||||||
// Filter subtasks by status if provided
|
|
||||||
const filteredSubtasks = options.status
|
|
||||||
? task.subtasks.filter((sub) => sub.status === options.status)
|
|
||||||
: task.subtasks;
|
|
||||||
|
|
||||||
if (filteredSubtasks.length === 0 && options.status) {
|
|
||||||
console.log(
|
|
||||||
chalk.gray(` No subtasks with status '${options.status}'`)
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
filteredSubtasks.forEach((subtask) => {
|
|
||||||
console.log(
|
|
||||||
` ${chalk.cyan(`${task.id}.${subtask.id}`)} ${ui.getStatusWithColor(subtask.status)} ${subtask.title}`
|
|
||||||
);
|
|
||||||
if (subtask.description) {
|
|
||||||
console.log(` ${chalk.gray(subtask.description)}`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (task.testStrategy) {
|
|
||||||
console.log(`\n${chalk.blue.bold('Test Strategy:')}`);
|
|
||||||
console.log(task.testStrategy);
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log(`\n${chalk.gray('Storage: ' + result.storageType)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Display multiple tasks in text format
|
|
||||||
*/
|
|
||||||
private displayMultipleTasks(
|
|
||||||
result: ShowMultipleTasksResult,
|
|
||||||
_options: ShowCommandOptions
|
|
||||||
): void {
|
|
||||||
// Header
|
|
||||||
ui.displayBanner(`Tasks (${result.tasks.length} found)`);
|
|
||||||
|
|
||||||
if (result.notFound.length > 0) {
|
|
||||||
console.log(chalk.yellow(`\n⚠ Not found: ${result.notFound.join(', ')}`));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.tasks.length === 0) {
|
|
||||||
ui.displayWarning('No tasks found matching the criteria.');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Task table
|
|
||||||
console.log(chalk.blue.bold(`\n📋 Tasks:\n`));
|
|
||||||
console.log(
|
|
||||||
ui.createTaskTable(result.tasks, {
|
|
||||||
showSubtasks: true,
|
|
||||||
showDependencies: true
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log(`\n${chalk.gray('Storage: ' + result.storageType)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set the last result for programmatic access
|
|
||||||
*/
|
|
||||||
private setLastResult(
|
|
||||||
result: ShowTaskResult | ShowMultipleTasksResult
|
|
||||||
): void {
|
|
||||||
this.lastResult = result;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the last result (for programmatic usage)
|
|
||||||
*/
|
|
||||||
getLastResult(): ShowTaskResult | ShowMultipleTasksResult | undefined {
|
|
||||||
return this.lastResult;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clean up resources
|
|
||||||
*/
|
|
||||||
async cleanup(): Promise<void> {
|
|
||||||
if (this.tmCore) {
|
|
||||||
await this.tmCore.close();
|
|
||||||
this.tmCore = undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Static method to register this command on an existing program
|
|
||||||
* This is for gradual migration - allows commands.js to use this
|
|
||||||
*/
|
|
||||||
static registerOn(program: Command): Command {
|
|
||||||
const showCommand = new ShowCommand();
|
|
||||||
program.addCommand(showCommand);
|
|
||||||
return showCommand;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Alternative registration that returns the command for chaining
|
|
||||||
* Can also configure the command name if needed
|
|
||||||
*/
|
|
||||||
static register(program: Command, name?: string): ShowCommand {
|
|
||||||
const showCommand = new ShowCommand(name);
|
|
||||||
program.addCommand(showCommand);
|
|
||||||
return showCommand;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
58
apps/cli/src/commands/workflow.command.ts
Normal file
58
apps/cli/src/commands/workflow.command.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Command
|
||||||
|
* Main workflow command with subcommands
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Command } from 'commander';
|
||||||
|
import {
|
||||||
|
WorkflowStartCommand,
|
||||||
|
WorkflowListCommand,
|
||||||
|
WorkflowStopCommand,
|
||||||
|
WorkflowStatusCommand
|
||||||
|
} from './workflow/index.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowCommand - Main workflow command with subcommands
|
||||||
|
*/
|
||||||
|
export class WorkflowCommand extends Command {
|
||||||
|
constructor(name?: string) {
|
||||||
|
super(name || 'workflow');
|
||||||
|
|
||||||
|
this.description('Manage task execution workflows with git worktrees and Claude Code')
|
||||||
|
.alias('wf');
|
||||||
|
|
||||||
|
// Register subcommands
|
||||||
|
this.addSubcommands();
|
||||||
|
}
|
||||||
|
|
||||||
|
private addSubcommands(): void {
|
||||||
|
// Start workflow
|
||||||
|
WorkflowStartCommand.register(this);
|
||||||
|
|
||||||
|
// List workflows
|
||||||
|
WorkflowListCommand.register(this);
|
||||||
|
|
||||||
|
// Stop workflow
|
||||||
|
WorkflowStopCommand.register(this);
|
||||||
|
|
||||||
|
// Show workflow status
|
||||||
|
WorkflowStatusCommand.register(this);
|
||||||
|
|
||||||
|
// Alias commands for convenience
|
||||||
|
this.addCommand(new WorkflowStartCommand('run')); // tm workflow run <task-id>
|
||||||
|
this.addCommand(new WorkflowStopCommand('kill')); // tm workflow kill <workflow-id>
|
||||||
|
this.addCommand(new WorkflowStatusCommand('info')); // tm workflow info <workflow-id>
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Static method to register this command on an existing program
|
||||||
|
*/
|
||||||
|
static register(program: Command, name?: string): WorkflowCommand {
|
||||||
|
const workflowCommand = new WorkflowCommand(name);
|
||||||
|
program.addCommand(workflowCommand);
|
||||||
|
return workflowCommand;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
export default WorkflowCommand;
|
||||||
9
apps/cli/src/commands/workflow/index.ts
Normal file
9
apps/cli/src/commands/workflow/index.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Commands
|
||||||
|
* Exports for all workflow-related CLI commands
|
||||||
|
*/
|
||||||
|
|
||||||
|
export * from './workflow-start.command.js';
|
||||||
|
export * from './workflow-list.command.js';
|
||||||
|
export * from './workflow-stop.command.js';
|
||||||
|
export * from './workflow-status.command.js';
|
||||||
253
apps/cli/src/commands/workflow/workflow-list.command.ts
Normal file
253
apps/cli/src/commands/workflow/workflow-list.command.ts
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow List Command
|
||||||
|
* List active and recent workflow executions
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Command } from 'commander';
|
||||||
|
import chalk from 'chalk';
|
||||||
|
import path from 'node:path';
|
||||||
|
import {
|
||||||
|
TaskExecutionManager,
|
||||||
|
type TaskExecutionManagerConfig,
|
||||||
|
type WorkflowExecutionContext
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
|
import * as ui from '../../utils/ui.js';
|
||||||
|
|
||||||
|
/** Options accepted by `tm workflow list`. */
export interface WorkflowListOptions {
	/** Project root directory (defaults to the current working directory). */
	project?: string;
	/** Only show workflows whose status matches exactly. */
	status?: string;
	/** Output format for the listing. */
	format?: 'text' | 'json' | 'compact';
	/** Base directory that holds the git worktrees. */
	worktreeBase?: string;
	/** Path to the Claude Code executable. */
	claude?: string;
	/** Include finished workflows (completed/failed/etc.) as well. */
	all?: boolean;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowListCommand - List workflow executions
|
||||||
|
*/
|
||||||
|
export class WorkflowListCommand extends Command {
|
||||||
|
private workflowManager?: TaskExecutionManager;
|
||||||
|
|
||||||
|
constructor(name?: string) {
|
||||||
|
super(name || 'list');
|
||||||
|
|
||||||
|
this.description('List active and recent workflow executions')
|
||||||
|
.alias('ls')
|
||||||
|
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||||
|
.option('-s, --status <status>', 'Filter by status (running, completed, failed, etc.)')
|
||||||
|
.option('-f, --format <format>', 'Output format (text, json, compact)', 'text')
|
||||||
|
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
|
||||||
|
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||||
|
.option('--all', 'Show all workflows including completed ones')
|
||||||
|
.action(async (options: WorkflowListOptions) => {
|
||||||
|
await this.executeCommand(options);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeCommand(options: WorkflowListOptions): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Initialize workflow manager
|
||||||
|
await this.initializeWorkflowManager(options);
|
||||||
|
|
||||||
|
// Get workflows
|
||||||
|
let workflows = this.workflowManager!.listWorkflows();
|
||||||
|
|
||||||
|
// Apply status filter
|
||||||
|
if (options.status) {
|
||||||
|
workflows = workflows.filter(w => w.status === options.status);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply active filter (default behavior)
|
||||||
|
if (!options.all) {
|
||||||
|
workflows = workflows.filter(w =>
|
||||||
|
['pending', 'initializing', 'running', 'paused'].includes(w.status)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display results
|
||||||
|
this.displayResults(workflows, options);
|
||||||
|
|
||||||
|
} catch (error: any) {
|
||||||
|
ui.displayError(error.message || 'Failed to list workflows');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async initializeWorkflowManager(options: WorkflowListOptions): Promise<void> {
|
||||||
|
if (!this.workflowManager) {
|
||||||
|
const projectRoot = options.project || process.cwd();
|
||||||
|
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
|
||||||
|
|
||||||
|
const config: TaskExecutionManagerConfig = {
|
||||||
|
projectRoot,
|
||||||
|
maxConcurrent: 5,
|
||||||
|
defaultTimeout: 60,
|
||||||
|
worktreeBase,
|
||||||
|
claudeExecutable: options.claude || 'claude',
|
||||||
|
debug: false
|
||||||
|
};
|
||||||
|
|
||||||
|
this.workflowManager = new TaskExecutionManager(config);
|
||||||
|
await this.workflowManager.initialize();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayResults(workflows: WorkflowExecutionContext[], options: WorkflowListOptions): void {
|
||||||
|
switch (options.format) {
|
||||||
|
case 'json':
|
||||||
|
this.displayJson(workflows);
|
||||||
|
break;
|
||||||
|
case 'compact':
|
||||||
|
this.displayCompact(workflows);
|
||||||
|
break;
|
||||||
|
case 'text':
|
||||||
|
default:
|
||||||
|
this.displayText(workflows);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayJson(workflows: WorkflowExecutionContext[]): void {
|
||||||
|
console.log(JSON.stringify({
|
||||||
|
workflows: workflows.map(w => ({
|
||||||
|
workflowId: `workflow-${w.taskId}`,
|
||||||
|
taskId: w.taskId,
|
||||||
|
taskTitle: w.taskTitle,
|
||||||
|
status: w.status,
|
||||||
|
worktreePath: w.worktreePath,
|
||||||
|
branchName: w.branchName,
|
||||||
|
processId: w.processId,
|
||||||
|
startedAt: w.startedAt,
|
||||||
|
lastActivity: w.lastActivity,
|
||||||
|
metadata: w.metadata
|
||||||
|
})),
|
||||||
|
total: workflows.length,
|
||||||
|
timestamp: new Date().toISOString()
|
||||||
|
}, null, 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayCompact(workflows: WorkflowExecutionContext[]): void {
|
||||||
|
if (workflows.length === 0) {
|
||||||
|
console.log(chalk.gray('No workflows found'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
workflows.forEach(workflow => {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
const statusDisplay = this.getStatusDisplay(workflow.status);
|
||||||
|
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`${chalk.cyan(workflowId)} ${statusDisplay} ${workflow.taskTitle} ${chalk.gray(`(${duration})`)}`
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayText(workflows: WorkflowExecutionContext[]): void {
|
||||||
|
ui.displayBanner('Active Workflows');
|
||||||
|
|
||||||
|
if (workflows.length === 0) {
|
||||||
|
ui.displayWarning('No workflows found');
|
||||||
|
console.log();
|
||||||
|
console.log(chalk.blue('💡 Start a new workflow with:'));
|
||||||
|
console.log(` ${chalk.cyan('tm workflow start <task-id>')}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Statistics
|
||||||
|
console.log(chalk.blue.bold('\n📊 Statistics:\n'));
|
||||||
|
const statusCounts = this.getStatusCounts(workflows);
|
||||||
|
Object.entries(statusCounts).forEach(([status, count]) => {
|
||||||
|
console.log(` ${this.getStatusDisplay(status)}: ${chalk.cyan(count)}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Workflows table
|
||||||
|
console.log(chalk.blue.bold(`\n🔄 Workflows (${workflows.length}):\n`));
|
||||||
|
|
||||||
|
const tableData = workflows.map(workflow => {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
|
||||||
|
|
||||||
|
return [
|
||||||
|
chalk.cyan(workflowId),
|
||||||
|
chalk.yellow(workflow.taskId),
|
||||||
|
workflow.taskTitle.substring(0, 30) + (workflow.taskTitle.length > 30 ? '...' : ''),
|
||||||
|
this.getStatusDisplay(workflow.status),
|
||||||
|
workflow.processId ? chalk.green(workflow.processId.toString()) : chalk.gray('N/A'),
|
||||||
|
chalk.gray(duration),
|
||||||
|
chalk.gray(path.basename(workflow.worktreePath))
|
||||||
|
];
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(ui.createTable(
|
||||||
|
['Workflow ID', 'Task ID', 'Task Title', 'Status', 'PID', 'Duration', 'Worktree'],
|
||||||
|
tableData
|
||||||
|
));
|
||||||
|
|
||||||
|
// Running workflows actions
|
||||||
|
const runningWorkflows = workflows.filter(w => w.status === 'running');
|
||||||
|
if (runningWorkflows.length > 0) {
|
||||||
|
console.log(chalk.blue.bold('\n🚀 Quick Actions:\n'));
|
||||||
|
runningWorkflows.slice(0, 3).forEach(workflow => {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
console.log(` • Attach to ${chalk.cyan(workflowId)}: ${chalk.gray(`tm workflow attach ${workflowId}`)}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (runningWorkflows.length > 3) {
|
||||||
|
console.log(` ${chalk.gray(`... and ${runningWorkflows.length - 3} more`)}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private getStatusDisplay(status: string): string {
|
||||||
|
const statusMap = {
|
||||||
|
pending: { icon: '⏳', color: chalk.yellow },
|
||||||
|
initializing: { icon: '🔄', color: chalk.blue },
|
||||||
|
running: { icon: '🚀', color: chalk.green },
|
||||||
|
paused: { icon: '⏸️', color: chalk.orange },
|
||||||
|
completed: { icon: '✅', color: chalk.green },
|
||||||
|
failed: { icon: '❌', color: chalk.red },
|
||||||
|
cancelled: { icon: '🛑', color: chalk.gray },
|
||||||
|
timeout: { icon: '⏰', color: chalk.red }
|
||||||
|
};
|
||||||
|
|
||||||
|
const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
|
||||||
|
return `${statusInfo.icon} ${statusInfo.color(status)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getStatusCounts(workflows: WorkflowExecutionContext[]): Record<string, number> {
|
||||||
|
const counts: Record<string, number> = {};
|
||||||
|
|
||||||
|
workflows.forEach(workflow => {
|
||||||
|
counts[workflow.status] = (counts[workflow.status] || 0) + 1;
|
||||||
|
});
|
||||||
|
|
||||||
|
return counts;
|
||||||
|
}
|
||||||
|
|
||||||
|
private formatDuration(start: Date, end: Date): string {
|
||||||
|
const diff = end.getTime() - start.getTime();
|
||||||
|
const minutes = Math.floor(diff / (1000 * 60));
|
||||||
|
const hours = Math.floor(minutes / 60);
|
||||||
|
|
||||||
|
if (hours > 0) {
|
||||||
|
return `${hours}h ${minutes % 60}m`;
|
||||||
|
} else if (minutes > 0) {
|
||||||
|
return `${minutes}m`;
|
||||||
|
} else {
|
||||||
|
return '<1m';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async cleanup(): Promise<void> {
|
||||||
|
if (this.workflowManager) {
|
||||||
|
this.workflowManager.removeAllListeners();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static register(program: Command, name?: string): WorkflowListCommand {
|
||||||
|
const command = new WorkflowListCommand(name);
|
||||||
|
program.addCommand(command);
|
||||||
|
return command;
|
||||||
|
}
|
||||||
|
}
|
||||||
239
apps/cli/src/commands/workflow/workflow-start.command.ts
Normal file
239
apps/cli/src/commands/workflow/workflow-start.command.ts
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Start Command
|
||||||
|
* Start task execution in isolated worktree with Claude Code process
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Command } from 'commander';
|
||||||
|
import chalk from 'chalk';
|
||||||
|
import path from 'node:path';
|
||||||
|
import {
|
||||||
|
createTaskMasterCore,
|
||||||
|
type TaskMasterCore
|
||||||
|
} from '@tm/core';
|
||||||
|
import {
|
||||||
|
TaskExecutionManager,
|
||||||
|
type TaskExecutionManagerConfig
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
|
import * as ui from '../../utils/ui.js';
|
||||||
|
|
||||||
|
export interface WorkflowStartOptions {
|
||||||
|
project?: string;
|
||||||
|
branch?: string;
|
||||||
|
timeout?: number;
|
||||||
|
worktreeBase?: string;
|
||||||
|
claude?: string;
|
||||||
|
debug?: boolean;
|
||||||
|
env?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowStartCommand - Start task execution workflow
|
||||||
|
*/
|
||||||
|
export class WorkflowStartCommand extends Command {
|
||||||
|
private tmCore?: TaskMasterCore;
|
||||||
|
private workflowManager?: TaskExecutionManager;
|
||||||
|
|
||||||
|
constructor(name?: string) {
|
||||||
|
super(name || 'start');
|
||||||
|
|
||||||
|
this.description('Start task execution in isolated worktree')
|
||||||
|
.argument('<task-id>', 'Task ID to execute')
|
||||||
|
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||||
|
.option('-b, --branch <name>', 'Custom branch name for worktree')
|
||||||
|
.option('-t, --timeout <minutes>', 'Execution timeout in minutes', '60')
|
||||||
|
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
|
||||||
|
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||||
|
.option('--debug', 'Enable debug logging')
|
||||||
|
.option('--env <vars>', 'Environment variables (KEY=VALUE,KEY2=VALUE2)')
|
||||||
|
.action(async (taskId: string, options: WorkflowStartOptions) => {
|
||||||
|
await this.executeCommand(taskId, options);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeCommand(taskId: string, options: WorkflowStartOptions): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Initialize components
|
||||||
|
await this.initializeCore(options.project || process.cwd());
|
||||||
|
await this.initializeWorkflowManager(options);
|
||||||
|
|
||||||
|
// Get task details
|
||||||
|
const task = await this.getTask(taskId);
|
||||||
|
if (!task) {
|
||||||
|
throw new Error(`Task ${taskId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if task already has active workflow
|
||||||
|
const existingWorkflow = this.workflowManager!.getWorkflowByTaskId(taskId);
|
||||||
|
if (existingWorkflow) {
|
||||||
|
ui.displayWarning(`Task ${taskId} already has an active workflow`);
|
||||||
|
console.log(`Workflow ID: ${chalk.cyan('workflow-' + taskId)}`);
|
||||||
|
console.log(`Status: ${this.getStatusDisplay(existingWorkflow.status)}`);
|
||||||
|
console.log(`Worktree: ${chalk.gray(existingWorkflow.worktreePath)}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse environment variables
|
||||||
|
const env = this.parseEnvironmentVariables(options.env);
|
||||||
|
|
||||||
|
// Display task info
|
||||||
|
ui.displayBanner(`Starting Workflow for Task ${taskId}`);
|
||||||
|
console.log(`${chalk.blue('Task:')} ${task.title}`);
|
||||||
|
console.log(`${chalk.blue('Description:')} ${task.description}`);
|
||||||
|
|
||||||
|
if (task.dependencies?.length) {
|
||||||
|
console.log(`${chalk.blue('Dependencies:')} ${task.dependencies.join(', ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`${chalk.blue('Priority:')} ${task.priority || 'normal'}`);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// Start workflow
|
||||||
|
ui.displaySpinner('Creating worktree and starting Claude Code process...');
|
||||||
|
|
||||||
|
const workflowId = await this.workflowManager!.startTaskExecution(task, {
|
||||||
|
branchName: options.branch,
|
||||||
|
timeout: parseInt(options.timeout || '60'),
|
||||||
|
env
|
||||||
|
});
|
||||||
|
|
||||||
|
const workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||||
|
|
||||||
|
ui.displaySuccess('Workflow started successfully!');
|
||||||
|
console.log();
|
||||||
|
console.log(`${chalk.green('✓')} Workflow ID: ${chalk.cyan(workflowId)}`);
|
||||||
|
console.log(`${chalk.green('✓')} Worktree: ${chalk.gray(workflow?.worktreePath)}`);
|
||||||
|
console.log(`${chalk.green('✓')} Branch: ${chalk.gray(workflow?.branchName)}`);
|
||||||
|
console.log(`${chalk.green('✓')} Process ID: ${chalk.gray(workflow?.processId)}`);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// Display next steps
|
||||||
|
console.log(chalk.blue.bold('📋 Next Steps:'));
|
||||||
|
console.log(` • Monitor: ${chalk.cyan(`tm workflow status ${workflowId}`)}`);
|
||||||
|
console.log(` • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
|
||||||
|
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// Setup event listeners for real-time updates
|
||||||
|
this.setupEventListeners();
|
||||||
|
|
||||||
|
} catch (error: any) {
|
||||||
|
ui.displayError(error.message || 'Failed to start workflow');
|
||||||
|
|
||||||
|
if (options.debug && error.stack) {
|
||||||
|
console.error(chalk.gray(error.stack));
|
||||||
|
}
|
||||||
|
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async initializeCore(projectRoot: string): Promise<void> {
|
||||||
|
if (!this.tmCore) {
|
||||||
|
this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async initializeWorkflowManager(options: WorkflowStartOptions): Promise<void> {
|
||||||
|
if (!this.workflowManager) {
|
||||||
|
const projectRoot = options.project || process.cwd();
|
||||||
|
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
|
||||||
|
|
||||||
|
const config: TaskExecutionManagerConfig = {
|
||||||
|
projectRoot,
|
||||||
|
maxConcurrent: 5,
|
||||||
|
defaultTimeout: parseInt(options.timeout || '60'),
|
||||||
|
worktreeBase,
|
||||||
|
claudeExecutable: options.claude || 'claude',
|
||||||
|
debug: options.debug || false
|
||||||
|
};
|
||||||
|
|
||||||
|
this.workflowManager = new TaskExecutionManager(config);
|
||||||
|
await this.workflowManager.initialize();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async getTask(taskId: string) {
|
||||||
|
if (!this.tmCore) {
|
||||||
|
throw new Error('TaskMasterCore not initialized');
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await this.tmCore.getTaskList({});
|
||||||
|
return result.tasks.find(task => task.id === taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
private parseEnvironmentVariables(envString?: string): Record<string, string> | undefined {
|
||||||
|
if (!envString) return undefined;
|
||||||
|
|
||||||
|
const env: Record<string, string> = {};
|
||||||
|
|
||||||
|
for (const pair of envString.split(',')) {
|
||||||
|
const [key, ...valueParts] = pair.trim().split('=');
|
||||||
|
if (key && valueParts.length > 0) {
|
||||||
|
env[key] = valueParts.join('=');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Object.keys(env).length > 0 ? env : undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getStatusDisplay(status: string): string {
|
||||||
|
const colors = {
|
||||||
|
pending: chalk.yellow,
|
||||||
|
initializing: chalk.blue,
|
||||||
|
running: chalk.green,
|
||||||
|
paused: chalk.orange,
|
||||||
|
completed: chalk.green,
|
||||||
|
failed: chalk.red,
|
||||||
|
cancelled: chalk.gray,
|
||||||
|
timeout: chalk.red
|
||||||
|
};
|
||||||
|
|
||||||
|
const color = colors[status as keyof typeof colors] || chalk.white;
|
||||||
|
return color(status);
|
||||||
|
}
|
||||||
|
|
||||||
|
private setupEventListeners(): void {
|
||||||
|
if (!this.workflowManager) return;
|
||||||
|
|
||||||
|
this.workflowManager.on('workflow.started', (event) => {
|
||||||
|
console.log(`${chalk.green('🚀')} Workflow started: ${event.workflowId}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.workflowManager.on('process.output', (event) => {
|
||||||
|
if (event.data?.stream === 'stdout') {
|
||||||
|
console.log(`${chalk.blue('[OUT]')} ${event.data.data.trim()}`);
|
||||||
|
} else if (event.data?.stream === 'stderr') {
|
||||||
|
console.log(`${chalk.red('[ERR]')} ${event.data.data.trim()}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.workflowManager.on('workflow.completed', (event) => {
|
||||||
|
console.log(`${chalk.green('✅')} Workflow completed: ${event.workflowId}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.workflowManager.on('workflow.failed', (event) => {
|
||||||
|
console.log(`${chalk.red('❌')} Workflow failed: ${event.workflowId}`);
|
||||||
|
if (event.error) {
|
||||||
|
console.log(`${chalk.red('Error:')} ${event.error.message}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async cleanup(): Promise<void> {
|
||||||
|
if (this.workflowManager) {
|
||||||
|
// Don't cleanup workflows, just disconnect
|
||||||
|
this.workflowManager.removeAllListeners();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.tmCore) {
|
||||||
|
await this.tmCore.close();
|
||||||
|
this.tmCore = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static register(program: Command, name?: string): WorkflowStartCommand {
|
||||||
|
const command = new WorkflowStartCommand(name);
|
||||||
|
program.addCommand(command);
|
||||||
|
return command;
|
||||||
|
}
|
||||||
|
}
|
||||||
339
apps/cli/src/commands/workflow/workflow-status.command.ts
Normal file
339
apps/cli/src/commands/workflow/workflow-status.command.ts
Normal file
@@ -0,0 +1,339 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Status Command
|
||||||
|
* Show detailed status of a specific workflow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Command } from 'commander';
|
||||||
|
import chalk from 'chalk';
|
||||||
|
import path from 'node:path';
|
||||||
|
import {
|
||||||
|
TaskExecutionManager,
|
||||||
|
type TaskExecutionManagerConfig
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
|
import * as ui from '../../utils/ui.js';
|
||||||
|
|
||||||
|
export interface WorkflowStatusOptions {
|
||||||
|
project?: string;
|
||||||
|
worktreeBase?: string;
|
||||||
|
claude?: string;
|
||||||
|
watch?: boolean;
|
||||||
|
format?: 'text' | 'json';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowStatusCommand - Show workflow execution status
|
||||||
|
*/
|
||||||
|
export class WorkflowStatusCommand extends Command {
|
||||||
|
private workflowManager?: TaskExecutionManager;
|
||||||
|
|
||||||
|
constructor(name?: string) {
|
||||||
|
super(name || 'status');
|
||||||
|
|
||||||
|
this.description('Show detailed status of a workflow execution')
|
||||||
|
.argument('<workflow-id>', 'Workflow ID or task ID to check')
|
||||||
|
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||||
|
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
|
||||||
|
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||||
|
.option('-w, --watch', 'Watch for status changes (refresh every 2 seconds)')
|
||||||
|
.option('-f, --format <format>', 'Output format (text, json)', 'text')
|
||||||
|
.action(async (workflowId: string, options: WorkflowStatusOptions) => {
|
||||||
|
await this.executeCommand(workflowId, options);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeCommand(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Initialize workflow manager
|
||||||
|
await this.initializeWorkflowManager(options);
|
||||||
|
|
||||||
|
if (options.watch) {
|
||||||
|
await this.watchWorkflowStatus(workflowId, options);
|
||||||
|
} else {
|
||||||
|
await this.showWorkflowStatus(workflowId, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error: any) {
|
||||||
|
ui.displayError(error.message || 'Failed to get workflow status');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async initializeWorkflowManager(options: WorkflowStatusOptions): Promise<void> {
|
||||||
|
if (!this.workflowManager) {
|
||||||
|
const projectRoot = options.project || process.cwd();
|
||||||
|
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
|
||||||
|
|
||||||
|
const config: TaskExecutionManagerConfig = {
|
||||||
|
projectRoot,
|
||||||
|
maxConcurrent: 5,
|
||||||
|
defaultTimeout: 60,
|
||||||
|
worktreeBase,
|
||||||
|
claudeExecutable: options.claude || 'claude',
|
||||||
|
debug: false
|
||||||
|
};
|
||||||
|
|
||||||
|
this.workflowManager = new TaskExecutionManager(config);
|
||||||
|
await this.workflowManager.initialize();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async showWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
|
||||||
|
// Try to find workflow by ID or task ID
|
||||||
|
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
// Try as task ID
|
||||||
|
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
throw new Error(`Workflow not found: ${workflowId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.format === 'json') {
|
||||||
|
this.displayJsonStatus(workflow);
|
||||||
|
} else {
|
||||||
|
this.displayTextStatus(workflow);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async watchWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
|
||||||
|
console.log(chalk.blue.bold('👀 Watching workflow status (Press Ctrl+C to exit)\n'));
|
||||||
|
|
||||||
|
let lastStatus = '';
|
||||||
|
let updateCount = 0;
|
||||||
|
|
||||||
|
const updateStatus = async () => {
|
||||||
|
try {
|
||||||
|
// Clear screen and move cursor to top
|
||||||
|
if (updateCount > 0) {
|
||||||
|
process.stdout.write('\x1b[2J\x1b[0f');
|
||||||
|
}
|
||||||
|
|
||||||
|
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
console.log(chalk.red(`Workflow not found: ${workflowId}`));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display header with timestamp
|
||||||
|
console.log(chalk.blue.bold('👀 Watching Workflow Status'));
|
||||||
|
console.log(chalk.gray(`Last updated: ${new Date().toLocaleTimeString()}\n`));
|
||||||
|
|
||||||
|
this.displayTextStatus(workflow);
|
||||||
|
|
||||||
|
// Check if workflow has ended
|
||||||
|
if (['completed', 'failed', 'cancelled', 'timeout'].includes(workflow.status)) {
|
||||||
|
console.log(chalk.yellow('\n⚠️ Workflow has ended. Stopping watch mode.'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
updateCount++;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error(chalk.red('Error updating status:'), error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initial display
|
||||||
|
await updateStatus();
|
||||||
|
|
||||||
|
// Setup interval for updates
|
||||||
|
const interval = setInterval(updateStatus, 2000);
|
||||||
|
|
||||||
|
// Handle Ctrl+C
|
||||||
|
process.on('SIGINT', () => {
|
||||||
|
clearInterval(interval);
|
||||||
|
console.log(chalk.yellow('\n👋 Stopped watching workflow status'));
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Keep the process alive
|
||||||
|
await new Promise(() => {});
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayJsonStatus(workflow: any): void {
|
||||||
|
const status = {
|
||||||
|
workflowId: `workflow-${workflow.taskId}`,
|
||||||
|
taskId: workflow.taskId,
|
||||||
|
taskTitle: workflow.taskTitle,
|
||||||
|
taskDescription: workflow.taskDescription,
|
||||||
|
status: workflow.status,
|
||||||
|
worktreePath: workflow.worktreePath,
|
||||||
|
branchName: workflow.branchName,
|
||||||
|
processId: workflow.processId,
|
||||||
|
startedAt: workflow.startedAt,
|
||||||
|
lastActivity: workflow.lastActivity,
|
||||||
|
duration: this.calculateDuration(workflow.startedAt, workflow.lastActivity),
|
||||||
|
metadata: workflow.metadata
|
||||||
|
};
|
||||||
|
|
||||||
|
console.log(JSON.stringify(status, null, 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayTextStatus(workflow: any): void {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
|
||||||
|
|
||||||
|
ui.displayBanner(`Workflow Status: ${workflowId}`);
|
||||||
|
|
||||||
|
// Basic information
|
||||||
|
console.log(chalk.blue.bold('\n📋 Basic Information:\n'));
|
||||||
|
console.log(` Workflow ID: ${chalk.cyan(workflowId)}`);
|
||||||
|
console.log(` Task ID: ${chalk.cyan(workflow.taskId)}`);
|
||||||
|
console.log(` Task Title: ${workflow.taskTitle}`);
|
||||||
|
console.log(` Status: ${this.getStatusDisplay(workflow.status)}`);
|
||||||
|
console.log(` Duration: ${chalk.gray(duration)}`);
|
||||||
|
|
||||||
|
// Task details
|
||||||
|
if (workflow.taskDescription) {
|
||||||
|
console.log(chalk.blue.bold('\n📝 Task Details:\n'));
|
||||||
|
console.log(` ${workflow.taskDescription}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process information
|
||||||
|
console.log(chalk.blue.bold('\n⚙️ Process Information:\n'));
|
||||||
|
console.log(` Process ID: ${workflow.processId ? chalk.green(workflow.processId) : chalk.gray('N/A')}`);
|
||||||
|
console.log(` Worktree: ${chalk.gray(workflow.worktreePath)}`);
|
||||||
|
console.log(` Branch: ${chalk.gray(workflow.branchName)}`);
|
||||||
|
|
||||||
|
// Timing information
|
||||||
|
console.log(chalk.blue.bold('\n⏰ Timing:\n'));
|
||||||
|
console.log(` Started: ${chalk.gray(workflow.startedAt.toLocaleString())}`);
|
||||||
|
console.log(` Last Activity: ${chalk.gray(workflow.lastActivity.toLocaleString())}`);
|
||||||
|
|
||||||
|
// Metadata
|
||||||
|
if (workflow.metadata && Object.keys(workflow.metadata).length > 0) {
|
||||||
|
console.log(chalk.blue.bold('\n🔖 Metadata:\n'));
|
||||||
|
Object.entries(workflow.metadata).forEach(([key, value]) => {
|
||||||
|
console.log(` ${key}: ${chalk.gray(String(value))}`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Status-specific information
|
||||||
|
this.displayStatusSpecificInfo(workflow);
|
||||||
|
|
||||||
|
// Actions
|
||||||
|
this.displayAvailableActions(workflow);
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayStatusSpecificInfo(workflow: any): void {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
|
||||||
|
switch (workflow.status) {
|
||||||
|
case 'running':
|
||||||
|
console.log(chalk.blue.bold('\n🚀 Running Status:\n'));
|
||||||
|
console.log(` ${chalk.green('●')} Process is actively executing`);
|
||||||
|
console.log(` ${chalk.blue('ℹ')} Monitor output with: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'paused':
|
||||||
|
console.log(chalk.blue.bold('\n⏸️ Paused Status:\n'));
|
||||||
|
console.log(` ${chalk.yellow('●')} Workflow is paused`);
|
||||||
|
console.log(` ${chalk.blue('ℹ')} Resume with: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'completed':
|
||||||
|
console.log(chalk.blue.bold('\n✅ Completed Status:\n'));
|
||||||
|
console.log(` ${chalk.green('●')} Workflow completed successfully`);
|
||||||
|
console.log(` ${chalk.blue('ℹ')} Resources have been cleaned up`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'failed':
|
||||||
|
console.log(chalk.blue.bold('\n❌ Failed Status:\n'));
|
||||||
|
console.log(` ${chalk.red('●')} Workflow execution failed`);
|
||||||
|
console.log(` ${chalk.blue('ℹ')} Check logs for error details`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'initializing':
|
||||||
|
console.log(chalk.blue.bold('\n🔄 Initializing Status:\n'));
|
||||||
|
console.log(` ${chalk.blue('●')} Setting up worktree and process`);
|
||||||
|
console.log(` ${chalk.blue('ℹ')} This should complete shortly`);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private displayAvailableActions(workflow: any): void {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
console.log(chalk.blue.bold('\n🎯 Available Actions:\n'));
|
||||||
|
|
||||||
|
switch (workflow.status) {
|
||||||
|
case 'running':
|
||||||
|
console.log(` • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
|
||||||
|
console.log(` • Pause: ${chalk.cyan(`tm workflow pause ${workflowId}`)}`);
|
||||||
|
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'paused':
|
||||||
|
console.log(` • Resume: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
|
||||||
|
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'pending':
|
||||||
|
case 'initializing':
|
||||||
|
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'completed':
|
||||||
|
case 'failed':
|
||||||
|
case 'cancelled':
|
||||||
|
console.log(` • View logs: ${chalk.cyan(`tm workflow logs ${workflowId}`)}`);
|
||||||
|
console.log(` • Start new: ${chalk.cyan(`tm workflow start ${workflow.taskId}`)}`);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` • List all: ${chalk.cyan('tm workflow list')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
private getStatusDisplay(status: string): string {
|
||||||
|
const statusMap = {
|
||||||
|
pending: { icon: '⏳', color: chalk.yellow },
|
||||||
|
initializing: { icon: '🔄', color: chalk.blue },
|
||||||
|
running: { icon: '🚀', color: chalk.green },
|
||||||
|
paused: { icon: '⏸️', color: chalk.orange },
|
||||||
|
completed: { icon: '✅', color: chalk.green },
|
||||||
|
failed: { icon: '❌', color: chalk.red },
|
||||||
|
cancelled: { icon: '🛑', color: chalk.gray },
|
||||||
|
timeout: { icon: '⏰', color: chalk.red }
|
||||||
|
};
|
||||||
|
|
||||||
|
const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
|
||||||
|
return `${statusInfo.icon} ${statusInfo.color(status)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
private formatDuration(start: Date, end: Date): string {
|
||||||
|
const diff = end.getTime() - start.getTime();
|
||||||
|
const minutes = Math.floor(diff / (1000 * 60));
|
||||||
|
const hours = Math.floor(minutes / 60);
|
||||||
|
const seconds = Math.floor((diff % (1000 * 60)) / 1000);
|
||||||
|
|
||||||
|
if (hours > 0) {
|
||||||
|
return `${hours}h ${minutes % 60}m ${seconds}s`;
|
||||||
|
} else if (minutes > 0) {
|
||||||
|
return `${minutes}m ${seconds}s`;
|
||||||
|
} else {
|
||||||
|
return `${seconds}s`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private calculateDuration(start: Date, end: Date): number {
|
||||||
|
return Math.floor((end.getTime() - start.getTime()) / 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
async cleanup(): Promise<void> {
|
||||||
|
if (this.workflowManager) {
|
||||||
|
this.workflowManager.removeAllListeners();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static register(program: Command, name?: string): WorkflowStatusCommand {
|
||||||
|
const command = new WorkflowStatusCommand(name);
|
||||||
|
program.addCommand(command);
|
||||||
|
return command;
|
||||||
|
}
|
||||||
|
}
|
||||||
260
apps/cli/src/commands/workflow/workflow-stop.command.ts
Normal file
260
apps/cli/src/commands/workflow/workflow-stop.command.ts
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Stop Command
|
||||||
|
* Stop and clean up workflow execution
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Command } from 'commander';
|
||||||
|
import chalk from 'chalk';
|
||||||
|
import path from 'node:path';
|
||||||
|
import {
|
||||||
|
TaskExecutionManager,
|
||||||
|
type TaskExecutionManagerConfig
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
|
import * as ui from '../../utils/ui.js';
|
||||||
|
|
||||||
|
export interface WorkflowStopOptions {
|
||||||
|
project?: string;
|
||||||
|
worktreeBase?: string;
|
||||||
|
claude?: string;
|
||||||
|
force?: boolean;
|
||||||
|
all?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowStopCommand - Stop workflow execution
|
||||||
|
*/
|
||||||
|
export class WorkflowStopCommand extends Command {
|
||||||
|
private workflowManager?: TaskExecutionManager;
|
||||||
|
|
||||||
|
constructor(name?: string) {
|
||||||
|
super(name || 'stop');
|
||||||
|
|
||||||
|
this.description('Stop workflow execution and clean up resources')
|
||||||
|
.argument('[workflow-id]', 'Workflow ID to stop (or task ID)')
|
||||||
|
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||||
|
.option(
|
||||||
|
'--worktree-base <path>',
|
||||||
|
'Base directory for worktrees',
|
||||||
|
'../task-worktrees'
|
||||||
|
)
|
||||||
|
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||||
|
.option('-f, --force', 'Force stop (kill process immediately)')
|
||||||
|
.option('--all', 'Stop all running workflows')
|
||||||
|
.action(
|
||||||
|
async (
|
||||||
|
workflowId: string | undefined,
|
||||||
|
options: WorkflowStopOptions
|
||||||
|
) => {
|
||||||
|
await this.executeCommand(workflowId, options);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeCommand(
|
||||||
|
workflowId: string | undefined,
|
||||||
|
options: WorkflowStopOptions
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Initialize workflow manager
|
||||||
|
await this.initializeWorkflowManager(options);
|
||||||
|
|
||||||
|
if (options.all) {
|
||||||
|
await this.stopAllWorkflows(options);
|
||||||
|
} else if (workflowId) {
|
||||||
|
await this.stopSingleWorkflow(workflowId, options);
|
||||||
|
} else {
|
||||||
|
ui.displayError('Please specify a workflow ID or use --all flag');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
ui.displayError(error.message || 'Failed to stop workflow');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async initializeWorkflowManager(
|
||||||
|
options: WorkflowStopOptions
|
||||||
|
): Promise<void> {
|
||||||
|
if (!this.workflowManager) {
|
||||||
|
const projectRoot = options.project || process.cwd();
|
||||||
|
const worktreeBase = path.resolve(
|
||||||
|
projectRoot,
|
||||||
|
options.worktreeBase || '../task-worktrees'
|
||||||
|
);
|
||||||
|
|
||||||
|
const config: TaskExecutionManagerConfig = {
|
||||||
|
projectRoot,
|
||||||
|
maxConcurrent: 5,
|
||||||
|
defaultTimeout: 60,
|
||||||
|
worktreeBase,
|
||||||
|
claudeExecutable: options.claude || 'claude',
|
||||||
|
debug: false
|
||||||
|
};
|
||||||
|
|
||||||
|
this.workflowManager = new TaskExecutionManager(config);
|
||||||
|
await this.workflowManager.initialize();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async stopSingleWorkflow(
|
||||||
|
workflowId: string,
|
||||||
|
options: WorkflowStopOptions
|
||||||
|
): Promise<void> {
|
||||||
|
// Try to find workflow by ID or task ID
|
||||||
|
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
// Try as task ID
|
||||||
|
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
throw new Error(`Workflow not found: ${workflowId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const actualWorkflowId = `workflow-${workflow.taskId}`;
|
||||||
|
|
||||||
|
// Display workflow info
|
||||||
|
console.log(chalk.blue.bold(`🛑 Stopping Workflow: ${actualWorkflowId}`));
|
||||||
|
console.log(`${chalk.blue('Task:')} ${workflow.taskTitle}`);
|
||||||
|
console.log(
|
||||||
|
`${chalk.blue('Status:')} ${this.getStatusDisplay(workflow.status)}`
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
`${chalk.blue('Worktree:')} ${chalk.gray(workflow.worktreePath)}`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (workflow.processId) {
|
||||||
|
console.log(
|
||||||
|
`${chalk.blue('Process ID:')} ${chalk.gray(workflow.processId)}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// Confirm if not forced
|
||||||
|
if (!options.force && ['running', 'paused'].includes(workflow.status)) {
|
||||||
|
const shouldProceed = await ui.confirm(
|
||||||
|
`Are you sure you want to stop this ${workflow.status} workflow?`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!shouldProceed) {
|
||||||
|
console.log(chalk.gray('Operation cancelled'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop the workflow
|
||||||
|
ui.displaySpinner('Stopping workflow and cleaning up resources...');
|
||||||
|
|
||||||
|
await this.workflowManager!.stopTaskExecution(
|
||||||
|
actualWorkflowId,
|
||||||
|
options.force
|
||||||
|
);
|
||||||
|
|
||||||
|
ui.displaySuccess('Workflow stopped successfully!');
|
||||||
|
console.log();
|
||||||
|
console.log(`${chalk.green('✓')} Process terminated`);
|
||||||
|
console.log(`${chalk.green('✓')} Worktree cleaned up`);
|
||||||
|
console.log(`${chalk.green('✓')} State updated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async stopAllWorkflows(options: WorkflowStopOptions): Promise<void> {
|
||||||
|
const workflows = this.workflowManager!.listWorkflows();
|
||||||
|
const activeWorkflows = workflows.filter((w) =>
|
||||||
|
['pending', 'initializing', 'running', 'paused'].includes(w.status)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (activeWorkflows.length === 0) {
|
||||||
|
ui.displayWarning('No active workflows to stop');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
chalk.blue.bold(`🛑 Stopping ${activeWorkflows.length} Active Workflows`)
|
||||||
|
);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// List workflows to be stopped
|
||||||
|
activeWorkflows.forEach((workflow) => {
|
||||||
|
console.log(
|
||||||
|
` • ${chalk.cyan(`workflow-${workflow.taskId}`)} - ${workflow.taskTitle} ${this.getStatusDisplay(workflow.status)}`
|
||||||
|
);
|
||||||
|
});
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// Confirm if not forced
|
||||||
|
if (!options.force) {
|
||||||
|
const shouldProceed = await ui.confirm(
|
||||||
|
`Are you sure you want to stop all ${activeWorkflows.length} active workflows?`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!shouldProceed) {
|
||||||
|
console.log(chalk.gray('Operation cancelled'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop all workflows
|
||||||
|
ui.displaySpinner('Stopping all workflows...');
|
||||||
|
|
||||||
|
let stopped = 0;
|
||||||
|
let failed = 0;
|
||||||
|
|
||||||
|
for (const workflow of activeWorkflows) {
|
||||||
|
try {
|
||||||
|
const workflowId = `workflow-${workflow.taskId}`;
|
||||||
|
await this.workflowManager!.stopTaskExecution(
|
||||||
|
workflowId,
|
||||||
|
options.force
|
||||||
|
);
|
||||||
|
stopped++;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`${chalk.red('✗')} Failed to stop workflow ${workflow.taskId}: ${error}`
|
||||||
|
);
|
||||||
|
failed++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
if (stopped > 0) {
|
||||||
|
ui.displaySuccess(`Successfully stopped ${stopped} workflows`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (failed > 0) {
|
||||||
|
ui.displayWarning(`Failed to stop ${failed} workflows`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private getStatusDisplay(status: string): string {
|
||||||
|
const statusMap = {
|
||||||
|
pending: { icon: '⏳', color: chalk.yellow },
|
||||||
|
initializing: { icon: '🔄', color: chalk.blue },
|
||||||
|
running: { icon: '🚀', color: chalk.green },
|
||||||
|
paused: { icon: '⏸️', color: chalk.hex('#FFA500') },
|
||||||
|
completed: { icon: '✅', color: chalk.green },
|
||||||
|
failed: { icon: '❌', color: chalk.red },
|
||||||
|
cancelled: { icon: '🛑', color: chalk.gray },
|
||||||
|
timeout: { icon: '⏰', color: chalk.red }
|
||||||
|
};
|
||||||
|
|
||||||
|
const statusInfo = statusMap[status as keyof typeof statusMap] || {
|
||||||
|
icon: '❓',
|
||||||
|
color: chalk.white
|
||||||
|
};
|
||||||
|
return `${statusInfo.icon} ${statusInfo.color(status)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
async cleanup(): Promise<void> {
|
||||||
|
if (this.workflowManager) {
|
||||||
|
this.workflowManager.removeAllListeners();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static register(program: Command, name?: string): WorkflowStopCommand {
|
||||||
|
const command = new WorkflowStopCommand(name);
|
||||||
|
program.addCommand(command);
|
||||||
|
return command;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,9 +5,11 @@
|
|||||||
|
|
||||||
// Commands
|
// Commands
|
||||||
export { ListTasksCommand } from './commands/list.command.js';
|
export { ListTasksCommand } from './commands/list.command.js';
|
||||||
export { ShowCommand } from './commands/show.command.js';
|
|
||||||
export { AuthCommand } from './commands/auth.command.js';
|
export { AuthCommand } from './commands/auth.command.js';
|
||||||
export { ContextCommand } from './commands/context.command.js';
|
export { WorkflowCommand } from './commands/workflow.command.js';
|
||||||
|
|
||||||
|
// Command registry
|
||||||
|
export { registerAllCommands } from './commands/index.js';
|
||||||
|
|
||||||
// UI utilities (for other commands to use)
|
// UI utilities (for other commands to use)
|
||||||
export * as ui from './utils/ui.js';
|
export * as ui from './utils/ui.js';
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
import chalk from 'chalk';
|
import chalk from 'chalk';
|
||||||
import boxen from 'boxen';
|
import boxen from 'boxen';
|
||||||
import Table from 'cli-table3';
|
import Table from 'cli-table3';
|
||||||
import type { Task, TaskStatus, TaskPriority } from '@tm/core/types';
|
import type { Task, TaskStatus, TaskPriority } from '@tm/core';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get colored status display with ASCII icons (matches scripts/modules/ui.js style)
|
* Get colored status display with ASCII icons (matches scripts/modules/ui.js style)
|
||||||
@@ -324,3 +324,61 @@ export function createTaskTable(
|
|||||||
|
|
||||||
return table.toString();
|
return table.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Display a spinner with message (mock implementation)
|
||||||
|
*/
|
||||||
|
export function displaySpinner(message: string): void {
|
||||||
|
console.log(chalk.blue('◐'), chalk.gray(message));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple confirmation prompt
|
||||||
|
*/
|
||||||
|
export async function confirm(message: string): Promise<boolean> {
|
||||||
|
// For now, return true. In a real implementation, use inquirer
|
||||||
|
console.log(chalk.yellow('?'), chalk.white(message), chalk.gray('(y/n)'));
|
||||||
|
|
||||||
|
// Mock implementation - in production this would use inquirer
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
process.stdin.once('data', (data) => {
|
||||||
|
const answer = data.toString().trim().toLowerCase();
|
||||||
|
resolve(answer === 'y' || answer === 'yes');
|
||||||
|
});
|
||||||
|
process.stdin.resume();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a generic table
|
||||||
|
*/
|
||||||
|
export function createTable(headers: string[], rows: string[][]): string {
|
||||||
|
const table = new Table({
|
||||||
|
head: headers.map(h => chalk.blue.bold(h)),
|
||||||
|
style: {
|
||||||
|
head: [],
|
||||||
|
border: ['gray']
|
||||||
|
},
|
||||||
|
chars: {
|
||||||
|
'top': '─',
|
||||||
|
'top-mid': '┬',
|
||||||
|
'top-left': '┌',
|
||||||
|
'top-right': '┐',
|
||||||
|
'bottom': '─',
|
||||||
|
'bottom-mid': '┴',
|
||||||
|
'bottom-left': '└',
|
||||||
|
'bottom-right': '┘',
|
||||||
|
'left': '│',
|
||||||
|
'left-mid': '├',
|
||||||
|
'mid': '─',
|
||||||
|
'mid-mid': '┼',
|
||||||
|
'right': '│',
|
||||||
|
'right-mid': '┤',
|
||||||
|
'middle': '│'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
rows.forEach(row => table.push(row));
|
||||||
|
return table.toString();
|
||||||
|
}
|
||||||
|
|||||||
15
apps/cli/tsup.config.ts
Normal file
15
apps/cli/tsup.config.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { defineConfig } from 'tsup';
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
entry: ['src/index.ts'],
|
||||||
|
format: ['esm'],
|
||||||
|
target: 'node18',
|
||||||
|
splitting: false,
|
||||||
|
sourcemap: true,
|
||||||
|
clean: true,
|
||||||
|
dts: true,
|
||||||
|
shims: true,
|
||||||
|
esbuildOptions(options) {
|
||||||
|
options.platform = 'node';
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -5,9 +5,10 @@
|
|||||||
"description": "Task Master documentation powered by Mintlify",
|
"description": "Task Master documentation powered by Mintlify",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "mintlify dev",
|
"dev": "mintlify dev",
|
||||||
|
"build": "mintlify build",
|
||||||
"preview": "mintlify preview"
|
"preview": "mintlify preview"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"mintlify": "^4.2.111"
|
"mintlify": "^4.0.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -83,7 +83,6 @@ The extension automatically handles the Taskmaster MCP server connection:
|
|||||||
| **View Kanban Board** | `Ctrl/Cmd + Shift + P` → "Taskmaster: Show Board" |
|
| **View Kanban Board** | `Ctrl/Cmd + Shift + P` → "Taskmaster: Show Board" |
|
||||||
| **Change Task Status** | Drag task card to different column |
|
| **Change Task Status** | Drag task card to different column |
|
||||||
| **View Task Details** | Click on any task card |
|
| **View Task Details** | Click on any task card |
|
||||||
| **Start Working on Task** | Click task → Click "Start Task" button in properties panel |
|
|
||||||
| **Edit Task Content** | Click task → Use edit buttons in details panel |
|
| **Edit Task Content** | Click task → Use edit buttons in details panel |
|
||||||
| **Add Subtasks** | Click the + button on parent task cards |
|
| **Add Subtasks** | Click the + button on parent task cards |
|
||||||
| **Use AI Features** | Open task details → Click AI action buttons |
|
| **Use AI Features** | Open task details → Click AI action buttons |
|
||||||
@@ -96,14 +95,6 @@ The extension automatically handles the Taskmaster MCP server connection:
|
|||||||
- ✅ **Done** - Completed tasks
|
- ✅ **Done** - Completed tasks
|
||||||
- ⏸️ **Deferred** - Postponed for later
|
- ⏸️ **Deferred** - Postponed for later
|
||||||
|
|
||||||
### **Built-in Development Tools**
|
|
||||||
|
|
||||||
**Start Task Button** - Quickly begin working on any task:
|
|
||||||
- **One-Click Terminal** - Opens a new VS Code terminal named after your task
|
|
||||||
- **Context Awareness** - Terminal automatically opens in your workspace root
|
|
||||||
- **Smart State Management** - Button is disabled for completed tasks
|
|
||||||
- **Seamless Workflow** - Go from task planning to implementation instantly
|
|
||||||
|
|
||||||
### **AI-Powered Task Management**
|
### **AI-Powered Task Management**
|
||||||
|
|
||||||
The extension integrates seamlessly with Taskmaster AI via MCP to provide:
|
The extension integrates seamlessly with Taskmaster AI via MCP to provide:
|
||||||
|
|||||||
@@ -103,8 +103,8 @@ async function main() {
|
|||||||
// This prevents the multiple React instances issue
|
// This prevents the multiple React instances issue
|
||||||
// Ensure React is resolved from the workspace root to avoid duplicates
|
// Ensure React is resolved from the workspace root to avoid duplicates
|
||||||
alias: {
|
alias: {
|
||||||
react: path.resolve(__dirname, '../../node_modules/react'),
|
react: path.resolve(__dirname, 'node_modules/react'),
|
||||||
'react-dom': path.resolve(__dirname, '../../node_modules/react-dom')
|
'react-dom': path.resolve(__dirname, 'node_modules/react-dom')
|
||||||
},
|
},
|
||||||
define: {
|
define: {
|
||||||
'process.env.NODE_ENV': production ? '"production"' : '"development"',
|
'process.env.NODE_ENV': production ? '"production"' : '"development"',
|
||||||
@@ -135,8 +135,8 @@ async function main() {
|
|||||||
jsxImportSource: 'react',
|
jsxImportSource: 'react',
|
||||||
external: ['*.css'],
|
external: ['*.css'],
|
||||||
alias: {
|
alias: {
|
||||||
react: path.resolve(__dirname, '../../node_modules/react'),
|
react: path.resolve(__dirname, 'node_modules/react'),
|
||||||
'react-dom': path.resolve(__dirname, '../../node_modules/react-dom')
|
'react-dom': path.resolve(__dirname, 'node_modules/react-dom')
|
||||||
},
|
},
|
||||||
define: {
|
define: {
|
||||||
'process.env.NODE_ENV': production ? '"production"' : '"development"',
|
'process.env.NODE_ENV': production ? '"production"' : '"development"',
|
||||||
|
|||||||
@@ -229,7 +229,6 @@
|
|||||||
"build": "npm run build:js && npm run build:css",
|
"build": "npm run build:js && npm run build:css",
|
||||||
"build:js": "node ./esbuild.js --production",
|
"build:js": "node ./esbuild.js --production",
|
||||||
"build:css": "npx @tailwindcss/cli -i ./src/webview/index.css -o ./dist/index.css --minify",
|
"build:css": "npx @tailwindcss/cli -i ./src/webview/index.css -o ./dist/index.css --minify",
|
||||||
"dev": "npm run watch",
|
|
||||||
"package": "npm exec node ./package.mjs",
|
"package": "npm exec node ./package.mjs",
|
||||||
"package:direct": "node ./package.mjs",
|
"package:direct": "node ./package.mjs",
|
||||||
"debug:env": "node ./debug-env.mjs",
|
"debug:env": "node ./debug-env.mjs",
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
import type React from 'react';
|
import type React from 'react';
|
||||||
import { useState, useEffect } from 'react';
|
import { useState, useEffect } from 'react';
|
||||||
import { Button } from '@/components/ui/button';
|
import { Button } from '@/components/ui/button';
|
||||||
import { Loader2, Play } from 'lucide-react';
|
import { Loader2 } from 'lucide-react';
|
||||||
import { PriorityBadge } from './PriorityBadge';
|
import { PriorityBadge } from './PriorityBadge';
|
||||||
import type { TaskMasterTask } from '../../webview/types';
|
import type { TaskMasterTask } from '../../webview/types';
|
||||||
import { useVSCodeContext } from '../../webview/contexts/VSCodeContext';
|
|
||||||
|
|
||||||
interface TaskMetadataSidebarProps {
|
interface TaskMetadataSidebarProps {
|
||||||
currentTask: TaskMasterTask;
|
currentTask: TaskMasterTask;
|
||||||
@@ -29,12 +28,10 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
|
|||||||
isRegenerating = false,
|
isRegenerating = false,
|
||||||
isAppending = false
|
isAppending = false
|
||||||
}) => {
|
}) => {
|
||||||
const { vscode } = useVSCodeContext();
|
|
||||||
const [isLoadingComplexity, setIsLoadingComplexity] = useState(false);
|
const [isLoadingComplexity, setIsLoadingComplexity] = useState(false);
|
||||||
const [mcpComplexityScore, setMcpComplexityScore] = useState<
|
const [mcpComplexityScore, setMcpComplexityScore] = useState<
|
||||||
number | undefined
|
number | undefined
|
||||||
>(undefined);
|
>(undefined);
|
||||||
const [isStartingTask, setIsStartingTask] = useState(false);
|
|
||||||
|
|
||||||
// Get complexity score from task
|
// Get complexity score from task
|
||||||
const currentComplexityScore = complexity?.score;
|
const currentComplexityScore = complexity?.score;
|
||||||
@@ -100,29 +97,6 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Handle starting a task
|
|
||||||
const handleStartTask = () => {
|
|
||||||
if (!currentTask || isStartingTask) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
setIsStartingTask(true);
|
|
||||||
|
|
||||||
// Send message to extension to open terminal
|
|
||||||
if (vscode) {
|
|
||||||
vscode.postMessage({
|
|
||||||
type: 'openTerminal',
|
|
||||||
taskId: currentTask.id,
|
|
||||||
taskTitle: currentTask.title
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reset loading state after a short delay
|
|
||||||
setTimeout(() => {
|
|
||||||
setIsStartingTask(false);
|
|
||||||
}, 500);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Effect to handle complexity on task change
|
// Effect to handle complexity on task change
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (currentTask?.id) {
|
if (currentTask?.id) {
|
||||||
@@ -310,24 +284,6 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
|
|||||||
{currentTask.dependencies && currentTask.dependencies.length > 0 && (
|
{currentTask.dependencies && currentTask.dependencies.length > 0 && (
|
||||||
<div className="border-b border-textSeparator-foreground" />
|
<div className="border-b border-textSeparator-foreground" />
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Start Task Button */}
|
|
||||||
<div className="mt-4">
|
|
||||||
<Button
|
|
||||||
onClick={handleStartTask}
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
className="w-full text-xs"
|
|
||||||
disabled={isRegenerating || isAppending || isStartingTask || currentTask?.status === 'done'}
|
|
||||||
>
|
|
||||||
{isStartingTask ? (
|
|
||||||
<Loader2 className="w-4 h-4 mr-2 animate-spin" />
|
|
||||||
) : (
|
|
||||||
<Play className="w-4 h-4 mr-2" />
|
|
||||||
)}
|
|
||||||
{isStartingTask ? 'Starting...' : 'Start Task'}
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -202,16 +202,16 @@ export const TaskDetailsView: React.FC<TaskDetailsViewProps> = ({
|
|||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Right column - Metadata (1/3 width) */}
|
{/* Right column - Metadata (1/3 width) */}
|
||||||
<TaskMetadataSidebar
|
<TaskMetadataSidebar
|
||||||
currentTask={currentTask}
|
currentTask={currentTask}
|
||||||
tasks={allTasks}
|
tasks={allTasks}
|
||||||
complexity={complexity}
|
complexity={complexity}
|
||||||
isSubtask={isSubtask}
|
isSubtask={isSubtask}
|
||||||
sendMessage={sendMessage}
|
sendMessage={sendMessage}
|
||||||
onStatusChange={handleStatusChange}
|
onStatusChange={handleStatusChange}
|
||||||
onDependencyClick={handleDependencyClick}
|
onDependencyClick={handleDependencyClick}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -361,25 +361,6 @@ export class WebviewManager {
|
|||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case 'openTerminal':
|
|
||||||
// Open VS Code terminal for task execution
|
|
||||||
this.logger.info(`Opening terminal for task ${data.taskId}: ${data.taskTitle}`);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const terminal = vscode.window.createTerminal({
|
|
||||||
name: `Task ${data.taskId}: ${data.taskTitle}`,
|
|
||||||
cwd: this.workspaceRoot
|
|
||||||
});
|
|
||||||
terminal.show();
|
|
||||||
|
|
||||||
this.logger.info('Terminal created and shown successfully');
|
|
||||||
response = { success: true };
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Failed to create terminal:', error);
|
|
||||||
response = { success: false, error: error.message };
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new Error(`Unknown message type: ${type}`);
|
throw new Error(`Unknown message type: ${type}`);
|
||||||
}
|
}
|
||||||
|
|||||||
53
output.txt
53
output.txt
File diff suppressed because one or more lines are too long
15525
package-lock.json
generated
15525
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
43
package.json
43
package.json
@@ -11,36 +11,25 @@
|
|||||||
},
|
},
|
||||||
"workspaces": ["apps/*", "packages/*", "."],
|
"workspaces": ["apps/*", "packages/*", "."],
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "npm run build:build-config && tsup",
|
"build": "npm run build:packages && tsup",
|
||||||
"dev": "tsup --watch",
|
"dev": "npm run build:packages && npm link && (npm run dev:packages & tsup --watch --onSuccess 'echo Build complete && npm link')",
|
||||||
"turbo:dev": "turbo dev",
|
"dev:packages": "(cd packages/tm-core && npm run dev) & (cd packages/workflow-engine && npm run dev) & (cd apps/cli && npm run dev) & wait",
|
||||||
"turbo:build": "turbo build",
|
"dev:core": "cd packages/tm-core && npm run dev",
|
||||||
"dev:main": "tsup --watch --onSuccess 'echo \"📦 Main package built\" && npm link'",
|
"dev:workflow": "cd packages/workflow-engine && npm run dev",
|
||||||
"dev:legacy": "npm run build:build-config && concurrently -n \"core,cli,main\" -c \"blue,green,yellow\" \"npm run dev:core\" \"npm run dev:cli\" \"npm run dev:main\"",
|
"dev:cli": "cd apps/cli && npm run dev",
|
||||||
"dev:core": "npm run dev -w @tm/core",
|
"build:packages": "npm run build:core && npm run build:workflow && npm run build:cli",
|
||||||
"dev:cli": "npm run dev -w @tm/cli",
|
"build:core": "cd packages/tm-core && npm run build",
|
||||||
"build:packages": "turbo build --filter='./packages/*' --filter='./apps/*'",
|
"build:workflow": "cd packages/workflow-engine && npm run build",
|
||||||
"build:packages:parallel": "turbo build --filter='./packages/*' --filter='./apps/*'",
|
"build:cli": "cd apps/cli && npm run build",
|
||||||
"build:build-config": "npm run build -w @tm/build-config",
|
|
||||||
"build:core": "npm run build -w @tm/core",
|
|
||||||
"build:cli": "npm run build -w @tm/cli",
|
|
||||||
"typecheck": "turbo typecheck",
|
|
||||||
"typecheck:all": "turbo typecheck",
|
|
||||||
"typecheck:core": "npm run typecheck -w @tm/core",
|
|
||||||
"typecheck:cli": "npm run typecheck -w @tm/cli",
|
|
||||||
"test": "node --experimental-vm-modules node_modules/.bin/jest",
|
"test": "node --experimental-vm-modules node_modules/.bin/jest",
|
||||||
"test:unit": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=unit",
|
|
||||||
"test:integration": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=integration",
|
|
||||||
"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
|
"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
|
||||||
"test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
|
"test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
|
||||||
"test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
|
"test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
|
||||||
"test:ci": "node --experimental-vm-modules node_modules/.bin/jest --coverage --ci",
|
|
||||||
"test:e2e": "./tests/e2e/run_e2e.sh",
|
"test:e2e": "./tests/e2e/run_e2e.sh",
|
||||||
"test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
|
"test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
|
||||||
"postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
|
"postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
|
||||||
"changeset": "changeset",
|
"changeset": "changeset",
|
||||||
"release": "changeset publish",
|
"release": "changeset publish",
|
||||||
"publish-packages": "turbo run build lint test && changeset version && changeset publish",
|
|
||||||
"inspector": "npx @modelcontextprotocol/inspector node dist/mcp-server.js",
|
"inspector": "npx @modelcontextprotocol/inspector node dist/mcp-server.js",
|
||||||
"mcp-server": "node dist/mcp-server.js",
|
"mcp-server": "node dist/mcp-server.js",
|
||||||
"format-check": "biome format .",
|
"format-check": "biome format .",
|
||||||
@@ -62,7 +51,6 @@
|
|||||||
"license": "MIT WITH Commons-Clause",
|
"license": "MIT WITH Commons-Clause",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@ai-sdk/amazon-bedrock": "^2.2.9",
|
"@ai-sdk/amazon-bedrock": "^2.2.9",
|
||||||
"@tm/cli": "*",
|
|
||||||
"@ai-sdk/anthropic": "^1.2.10",
|
"@ai-sdk/anthropic": "^1.2.10",
|
||||||
"@ai-sdk/azure": "^1.3.17",
|
"@ai-sdk/azure": "^1.3.17",
|
||||||
"@ai-sdk/google": "^1.2.13",
|
"@ai-sdk/google": "^1.2.13",
|
||||||
@@ -81,7 +69,7 @@
|
|||||||
"ajv": "^8.17.1",
|
"ajv": "^8.17.1",
|
||||||
"ajv-formats": "^3.0.1",
|
"ajv-formats": "^3.0.1",
|
||||||
"boxen": "^8.0.1",
|
"boxen": "^8.0.1",
|
||||||
"chalk": "5.6.2",
|
"chalk": "^5.4.1",
|
||||||
"cli-highlight": "^2.1.11",
|
"cli-highlight": "^2.1.11",
|
||||||
"cli-progress": "^3.12.0",
|
"cli-progress": "^3.12.0",
|
||||||
"cli-table3": "^0.6.5",
|
"cli-table3": "^0.6.5",
|
||||||
@@ -115,7 +103,6 @@
|
|||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18.0.0"
|
"node": ">=18.0.0"
|
||||||
},
|
},
|
||||||
"packageManager": "npm@10.9.2",
|
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "git+https://github.com/eyaltoledano/claude-task-master.git"
|
"url": "git+https://github.com/eyaltoledano/claude-task-master.git"
|
||||||
@@ -131,13 +118,14 @@
|
|||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@biomejs/biome": "^1.9.4",
|
"@biomejs/biome": "^1.9.4",
|
||||||
|
|
||||||
"@changesets/changelog-github": "^0.5.1",
|
"@changesets/changelog-github": "^0.5.1",
|
||||||
"@changesets/cli": "^2.28.1",
|
"@changesets/cli": "^2.28.1",
|
||||||
"@types/jest": "^29.5.14",
|
|
||||||
"concurrently": "^9.2.1",
|
|
||||||
"cross-env": "^10.0.0",
|
|
||||||
"dotenv-mono": "^1.5.1",
|
"dotenv-mono": "^1.5.1",
|
||||||
|
|
||||||
|
"@types/jest": "^29.5.14",
|
||||||
"execa": "^8.0.1",
|
"execa": "^8.0.1",
|
||||||
|
"ink": "^5.0.1",
|
||||||
"jest": "^29.7.0",
|
"jest": "^29.7.0",
|
||||||
"jest-environment-node": "^29.7.0",
|
"jest-environment-node": "^29.7.0",
|
||||||
"mock-fs": "^5.5.0",
|
"mock-fs": "^5.5.0",
|
||||||
@@ -145,7 +133,6 @@
|
|||||||
"supertest": "^7.1.0",
|
"supertest": "^7.1.0",
|
||||||
"tsup": "^8.5.0",
|
"tsup": "^8.5.0",
|
||||||
"tsx": "^4.16.2",
|
"tsx": "^4.16.2",
|
||||||
"turbo": "^2.5.6",
|
|
||||||
"typescript": "^5.9.2"
|
"typescript": "^5.9.2"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,29 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@tm/build-config",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"description": "Shared build configuration for Task Master monorepo",
|
|
||||||
"type": "module",
|
|
||||||
"main": "./dist/tsup.base.js",
|
|
||||||
"types": "./dist/tsup.base.d.ts",
|
|
||||||
"exports": {
|
|
||||||
".": {
|
|
||||||
"types": "./dist/tsup.base.d.ts",
|
|
||||||
"import": "./dist/tsup.base.js"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"files": ["dist", "src"],
|
|
||||||
"keywords": ["build-config", "tsup", "monorepo"],
|
|
||||||
"author": "",
|
|
||||||
"license": "MIT",
|
|
||||||
"scripts": {
|
|
||||||
"build": "tsc",
|
|
||||||
"typecheck": "tsc --noEmit"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"dotenv-mono": "^1.5.1",
|
|
||||||
"typescript": "^5.7.3"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"tsup": "^8.5.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,123 +0,0 @@
|
|||||||
/**
|
|
||||||
* Base tsup configuration for Task Master monorepo
|
|
||||||
* Provides shared configuration that can be extended by individual packages
|
|
||||||
*/
|
|
||||||
import type { Options } from 'tsup';
|
|
||||||
import * as dotenv from 'dotenv-mono';
|
|
||||||
|
|
||||||
dotenv.load();
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
'TM_PUBLIC_BASE_DOMAIN:',
|
|
||||||
process.env.TM_PUBLIC_BASE_DOMAIN,
|
|
||||||
'TM_PUBLIC_SUPABASE_URL:',
|
|
||||||
process.env.TM_PUBLIC_SUPABASE_URL,
|
|
||||||
'TM_PUBLIC_SUPABASE_ANON_KEY:',
|
|
||||||
process.env.TM_PUBLIC_SUPABASE_ANON_KEY
|
|
||||||
);
|
|
||||||
|
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
|
||||||
const isDevelopment = !isProduction;
|
|
||||||
|
|
||||||
const envVariables = {
|
|
||||||
TM_PUBLIC_BASE_DOMAIN: process.env.TM_PUBLIC_BASE_DOMAIN ?? '',
|
|
||||||
TM_PUBLIC_SUPABASE_URL: process.env.TM_PUBLIC_SUPABASE_URL ?? '',
|
|
||||||
TM_PUBLIC_SUPABASE_ANON_KEY: process.env.TM_PUBLIC_SUPABASE_ANON_KEY ?? ''
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log('envVariables:', envVariables);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Environment helpers
|
|
||||||
*/
|
|
||||||
export const env = {
|
|
||||||
isProduction,
|
|
||||||
isDevelopment,
|
|
||||||
NODE_ENV: process.env.NODE_ENV || 'development',
|
|
||||||
...envVariables
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Base tsup configuration for all packages
|
|
||||||
* Since everything gets bundled into root dist/ anyway, use consistent settings
|
|
||||||
*/
|
|
||||||
export const baseConfig: Partial<Options> = {
|
|
||||||
format: ['esm'],
|
|
||||||
target: 'node18',
|
|
||||||
sourcemap: isDevelopment,
|
|
||||||
clean: true,
|
|
||||||
dts: false,
|
|
||||||
minify: isProduction,
|
|
||||||
treeshake: isProduction,
|
|
||||||
splitting: false,
|
|
||||||
// Don't bundle any other dependencies (auto-external all node_modules)
|
|
||||||
external: [/^[^./]/],
|
|
||||||
env: envVariables,
|
|
||||||
esbuildOptions(options) {
|
|
||||||
options.platform = 'node';
|
|
||||||
// Allow importing TypeScript from JavaScript
|
|
||||||
options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
|
|
||||||
// Better source mapping in development only
|
|
||||||
options.sourcesContent = isDevelopment;
|
|
||||||
// Keep original names for better debugging in development
|
|
||||||
options.keepNames = isDevelopment;
|
|
||||||
},
|
|
||||||
// Watch mode configuration for development
|
|
||||||
watch: false
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Legacy external modules list - kept for backwards compatibility
|
|
||||||
* Note: When using tsup-node, this is not needed as it automatically
|
|
||||||
* excludes dependencies and peerDependencies from package.json
|
|
||||||
*/
|
|
||||||
export const commonExternals = [
|
|
||||||
// Native Node.js modules (for cases where tsup is used instead of tsup-node)
|
|
||||||
'fs',
|
|
||||||
'path',
|
|
||||||
'child_process',
|
|
||||||
'crypto',
|
|
||||||
'os',
|
|
||||||
'url',
|
|
||||||
'util',
|
|
||||||
'stream',
|
|
||||||
'http',
|
|
||||||
'https',
|
|
||||||
'events',
|
|
||||||
'assert',
|
|
||||||
'buffer',
|
|
||||||
'querystring',
|
|
||||||
'readline',
|
|
||||||
'zlib',
|
|
||||||
'tty',
|
|
||||||
'net',
|
|
||||||
'dgram',
|
|
||||||
'dns',
|
|
||||||
'tls',
|
|
||||||
'cluster',
|
|
||||||
'process',
|
|
||||||
'module'
|
|
||||||
];
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Utility function to merge configurations
|
|
||||||
* Simplified for tsup-node usage
|
|
||||||
*/
|
|
||||||
export function mergeConfig(
|
|
||||||
baseConfig: Partial<Options>,
|
|
||||||
overrides: Partial<Options>
|
|
||||||
): Options {
|
|
||||||
return {
|
|
||||||
...baseConfig,
|
|
||||||
...overrides,
|
|
||||||
// Merge esbuildOptions
|
|
||||||
esbuildOptions(options, context) {
|
|
||||||
if (baseConfig.esbuildOptions) {
|
|
||||||
baseConfig.esbuildOptions(options, context);
|
|
||||||
}
|
|
||||||
if (overrides.esbuildOptions) {
|
|
||||||
overrides.esbuildOptions(options, context);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} as Options;
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "ES2022",
|
|
||||||
"lib": ["ES2022"],
|
|
||||||
"module": "ESNext",
|
|
||||||
"moduleResolution": "bundler",
|
|
||||||
"allowSyntheticDefaultImports": true,
|
|
||||||
"esModuleInterop": true,
|
|
||||||
"baseUrl": ".",
|
|
||||||
"outDir": "dist",
|
|
||||||
"allowJs": true,
|
|
||||||
"strict": true,
|
|
||||||
"resolveJsonModule": true,
|
|
||||||
"isolatedModules": true,
|
|
||||||
"declaration": true,
|
|
||||||
"skipLibCheck": true,
|
|
||||||
"forceConsistentCasingInFileNames": true
|
|
||||||
},
|
|
||||||
"include": ["src/**/*"],
|
|
||||||
"exclude": ["node_modules", "dist"]
|
|
||||||
}
|
|
||||||
@@ -1,60 +1,76 @@
|
|||||||
{
|
{
|
||||||
"name": "@tm/core",
|
"name": "@tm/core",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"private": true,
|
|
||||||
"description": "Core library for Task Master - TypeScript task management system",
|
"description": "Core library for Task Master - TypeScript task management system",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"types": "./src/index.ts",
|
"types": "./dist/index.d.ts",
|
||||||
"main": "./dist/index.js",
|
"main": "./dist/index.js",
|
||||||
"exports": {
|
"exports": {
|
||||||
".": {
|
".": {
|
||||||
"types": "./src/index.ts",
|
"types": "./src/index.ts",
|
||||||
"import": "./dist/index.js"
|
"import": "./dist/index.js",
|
||||||
|
"require": "./dist/index.js"
|
||||||
},
|
},
|
||||||
"./auth": {
|
"./auth": {
|
||||||
"types": "./src/auth/index.ts",
|
"types": "./src/auth/index.ts",
|
||||||
"import": "./dist/auth/index.js"
|
"import": "./dist/auth/index.js",
|
||||||
|
"require": "./dist/auth/index.js"
|
||||||
},
|
},
|
||||||
"./storage": {
|
"./storage": {
|
||||||
"types": "./src/storage/index.ts",
|
"types": "./src/storage/index.ts",
|
||||||
"import": "./dist/storage/index.js"
|
"import": "./dist/storage/index.js",
|
||||||
|
"require": "./dist/storage/index.js"
|
||||||
},
|
},
|
||||||
"./config": {
|
"./config": {
|
||||||
"types": "./src/config/index.ts",
|
"types": "./src/config/index.ts",
|
||||||
"import": "./dist/config/index.js"
|
"import": "./dist/config/index.js",
|
||||||
|
"require": "./dist/config/index.js"
|
||||||
},
|
},
|
||||||
"./providers": {
|
"./providers": {
|
||||||
"types": "./src/providers/index.ts",
|
"types": "./src/providers/index.ts",
|
||||||
"import": "./dist/providers/index.js"
|
"import": "./dist/providers/index.js",
|
||||||
|
"require": "./dist/providers/index.js"
|
||||||
},
|
},
|
||||||
"./services": {
|
"./services": {
|
||||||
"types": "./src/services/index.ts",
|
"types": "./src/services/index.ts",
|
||||||
"import": "./dist/services/index.js"
|
"import": "./dist/services/index.js",
|
||||||
|
"require": "./dist/services/index.js"
|
||||||
},
|
},
|
||||||
"./errors": {
|
"./errors": {
|
||||||
"types": "./src/errors/index.ts",
|
"types": "./src/errors/index.ts",
|
||||||
"import": "./dist/errors/index.js"
|
"import": "./dist/errors/index.js",
|
||||||
|
"require": "./dist/errors/index.js"
|
||||||
},
|
},
|
||||||
"./logger": {
|
"./logger": {
|
||||||
"types": "./src/logger/index.ts",
|
"types": "./src/logger/index.ts",
|
||||||
"import": "./dist/logger/index.js"
|
"import": "./dist/logger/index.js",
|
||||||
|
"require": "./dist/logger/index.js"
|
||||||
},
|
},
|
||||||
"./types": {
|
"./types": {
|
||||||
"types": "./src/types/index.ts",
|
"types": "./src/types/index.ts",
|
||||||
"import": "./dist/types/index.js"
|
"import": "./dist/types/index.js",
|
||||||
|
"require": "./dist/types/index.js"
|
||||||
},
|
},
|
||||||
"./interfaces": {
|
"./interfaces": {
|
||||||
"types": "./src/interfaces/index.ts",
|
"types": "./src/interfaces/index.ts",
|
||||||
"import": "./dist/interfaces/index.js"
|
"import": "./dist/interfaces/index.js",
|
||||||
|
"require": "./dist/interfaces/index.js"
|
||||||
},
|
},
|
||||||
"./utils": {
|
"./utils": {
|
||||||
"types": "./src/utils/index.ts",
|
"types": "./src/utils/index.ts",
|
||||||
"import": "./dist/utils/index.js"
|
"import": "./dist/utils/index.js",
|
||||||
}
|
"require": "./dist/utils/index.js"
|
||||||
|
},
|
||||||
|
"./workflow": {
|
||||||
|
"types": "./src/workflow/index.ts",
|
||||||
|
"import": "./dist/workflow/index.js",
|
||||||
|
"require": "./dist/workflow/index.js"
|
||||||
|
},
|
||||||
|
"./package.json": "./package.json"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsup",
|
||||||
"dev": "tsc --watch",
|
"dev": "tsup --watch",
|
||||||
"test": "vitest run",
|
"test": "vitest run",
|
||||||
"test:watch": "vitest",
|
"test:watch": "vitest",
|
||||||
"test:coverage": "vitest run --coverage",
|
"test:coverage": "vitest run --coverage",
|
||||||
@@ -66,21 +82,25 @@
|
|||||||
"typecheck": "tsc --noEmit"
|
"typecheck": "tsc --noEmit"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@supabase/supabase-js": "^2.57.4",
|
"@supabase/supabase-js": "^2.57.0",
|
||||||
|
"@tm/workflow-engine": "*",
|
||||||
|
"chalk": "^5.3.0",
|
||||||
"zod": "^3.22.4"
|
"zod": "^3.22.4"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@biomejs/biome": "^1.9.4",
|
"@biomejs/biome": "^1.9.4",
|
||||||
"@types/node": "^20.11.30",
|
"@types/node": "^20.11.30",
|
||||||
"@vitest/coverage-v8": "^2.0.5",
|
"@vitest/coverage-v8": "^2.0.5",
|
||||||
|
"dotenv-mono": "^1.5.1",
|
||||||
"ts-node": "^10.9.2",
|
"ts-node": "^10.9.2",
|
||||||
|
"tsup": "^8.0.2",
|
||||||
"typescript": "^5.4.3",
|
"typescript": "^5.4.3",
|
||||||
"vitest": "^2.0.5"
|
"vitest": "^2.0.5"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18.0.0"
|
"node": ">=18.0.0"
|
||||||
},
|
},
|
||||||
"files": ["src", "README.md", "CHANGELOG.md"],
|
"files": ["dist", "README.md", "CHANGELOG.md"],
|
||||||
"keywords": ["task-management", "typescript", "ai", "prd", "parser"],
|
"keywords": ["task-management", "typescript", "ai", "prd", "parser"],
|
||||||
"author": "Task Master AI",
|
"author": "Task Master AI",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
|
|||||||
@@ -6,18 +6,11 @@ import {
|
|||||||
AuthCredentials,
|
AuthCredentials,
|
||||||
OAuthFlowOptions,
|
OAuthFlowOptions,
|
||||||
AuthenticationError,
|
AuthenticationError,
|
||||||
AuthConfig,
|
AuthConfig
|
||||||
UserContext
|
|
||||||
} from './types.js';
|
} from './types.js';
|
||||||
import { CredentialStore } from './credential-store.js';
|
import { CredentialStore } from './credential-store.js';
|
||||||
import { OAuthService } from './oauth-service.js';
|
import { OAuthService } from './oauth-service.js';
|
||||||
import { SupabaseAuthClient } from '../clients/supabase-client.js';
|
import { SupabaseAuthClient } from '../clients/supabase-client.js';
|
||||||
import {
|
|
||||||
OrganizationService,
|
|
||||||
type Organization,
|
|
||||||
type Brief,
|
|
||||||
type RemoteTask
|
|
||||||
} from '../services/organization.service.js';
|
|
||||||
import { getLogger } from '../logger/index.js';
|
import { getLogger } from '../logger/index.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -28,28 +21,11 @@ export class AuthManager {
|
|||||||
private credentialStore: CredentialStore;
|
private credentialStore: CredentialStore;
|
||||||
private oauthService: OAuthService;
|
private oauthService: OAuthService;
|
||||||
private supabaseClient: SupabaseAuthClient;
|
private supabaseClient: SupabaseAuthClient;
|
||||||
private organizationService?: OrganizationService;
|
|
||||||
|
|
||||||
private constructor(config?: Partial<AuthConfig>) {
|
private constructor(config?: Partial<AuthConfig>) {
|
||||||
this.credentialStore = CredentialStore.getInstance(config);
|
this.credentialStore = new CredentialStore(config);
|
||||||
this.supabaseClient = new SupabaseAuthClient();
|
this.supabaseClient = new SupabaseAuthClient();
|
||||||
this.oauthService = new OAuthService(this.credentialStore, config);
|
this.oauthService = new OAuthService(this.credentialStore, config);
|
||||||
|
|
||||||
// Initialize Supabase client with session restoration
|
|
||||||
this.initializeSupabaseSession();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize Supabase session from stored credentials
|
|
||||||
*/
|
|
||||||
private async initializeSupabaseSession(): Promise<void> {
|
|
||||||
try {
|
|
||||||
await this.supabaseClient.initialize();
|
|
||||||
} catch (error) {
|
|
||||||
// Log but don't throw - session might not exist yet
|
|
||||||
const logger = getLogger('AuthManager');
|
|
||||||
logger.debug('No existing session to restore');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -73,7 +49,6 @@ export class AuthManager {
|
|||||||
*/
|
*/
|
||||||
static resetInstance(): void {
|
static resetInstance(): void {
|
||||||
AuthManager.instance = null;
|
AuthManager.instance = null;
|
||||||
CredentialStore.resetInstance();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -100,48 +75,39 @@ export class AuthManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Refresh authentication token using Supabase session
|
* Refresh authentication token
|
||||||
*/
|
*/
|
||||||
async refreshToken(): Promise<AuthCredentials> {
|
async refreshToken(): Promise<AuthCredentials> {
|
||||||
|
const authData = this.credentialStore.getCredentials({
|
||||||
|
allowExpired: true
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!authData || !authData.refreshToken) {
|
||||||
|
throw new AuthenticationError(
|
||||||
|
'No refresh token available',
|
||||||
|
'NO_REFRESH_TOKEN'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Use Supabase's built-in session refresh
|
// Use Supabase client to refresh the token
|
||||||
const session = await this.supabaseClient.refreshSession();
|
const response = await this.supabaseClient.refreshSession(
|
||||||
|
authData.refreshToken
|
||||||
|
);
|
||||||
|
|
||||||
if (!session) {
|
// Update authentication data
|
||||||
throw new AuthenticationError(
|
|
||||||
'Failed to refresh session',
|
|
||||||
'REFRESH_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get existing credentials to preserve context
|
|
||||||
const existingCredentials = this.credentialStore.getCredentials({
|
|
||||||
allowExpired: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// Update authentication data from session
|
|
||||||
const newAuthData: AuthCredentials = {
|
const newAuthData: AuthCredentials = {
|
||||||
token: session.access_token,
|
...authData,
|
||||||
refreshToken: session.refresh_token,
|
token: response.token,
|
||||||
userId: session.user.id,
|
refreshToken: response.refreshToken,
|
||||||
email: session.user.email,
|
expiresAt: response.expiresAt,
|
||||||
expiresAt: session.expires_at
|
savedAt: new Date().toISOString()
|
||||||
? new Date(session.expires_at * 1000).toISOString()
|
|
||||||
: undefined,
|
|
||||||
savedAt: new Date().toISOString(),
|
|
||||||
selectedContext: existingCredentials?.selectedContext
|
|
||||||
};
|
};
|
||||||
|
|
||||||
this.credentialStore.saveCredentials(newAuthData);
|
this.credentialStore.saveCredentials(newAuthData);
|
||||||
return newAuthData;
|
return newAuthData;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof AuthenticationError) {
|
throw error;
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Token refresh failed: ${(error as Error).message}`,
|
|
||||||
'REFRESH_FAILED'
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -167,114 +133,4 @@ export class AuthManager {
|
|||||||
isAuthenticated(): boolean {
|
isAuthenticated(): boolean {
|
||||||
return this.credentialStore.hasValidCredentials();
|
return this.credentialStore.hasValidCredentials();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current user context (org/brief selection)
|
|
||||||
*/
|
|
||||||
getContext(): UserContext | null {
|
|
||||||
const credentials = this.getCredentials();
|
|
||||||
return credentials?.selectedContext || null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update the user context (org/brief selection)
|
|
||||||
*/
|
|
||||||
async updateContext(context: Partial<UserContext>): Promise<void> {
|
|
||||||
const credentials = this.getCredentials();
|
|
||||||
if (!credentials) {
|
|
||||||
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge with existing context
|
|
||||||
const existingContext = credentials.selectedContext || {};
|
|
||||||
const newContext: UserContext = {
|
|
||||||
...existingContext,
|
|
||||||
...context,
|
|
||||||
updatedAt: new Date().toISOString()
|
|
||||||
};
|
|
||||||
|
|
||||||
// Save updated credentials with new context
|
|
||||||
const updatedCredentials: AuthCredentials = {
|
|
||||||
...credentials,
|
|
||||||
selectedContext: newContext
|
|
||||||
};
|
|
||||||
|
|
||||||
this.credentialStore.saveCredentials(updatedCredentials);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clear the user context
|
|
||||||
*/
|
|
||||||
async clearContext(): Promise<void> {
|
|
||||||
const credentials = this.getCredentials();
|
|
||||||
if (!credentials) {
|
|
||||||
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove context from credentials
|
|
||||||
const { selectedContext, ...credentialsWithoutContext } = credentials;
|
|
||||||
this.credentialStore.saveCredentials(credentialsWithoutContext);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the organization service instance
|
|
||||||
* Uses the Supabase client with the current session or token
|
|
||||||
*/
|
|
||||||
private async getOrganizationService(): Promise<OrganizationService> {
|
|
||||||
if (!this.organizationService) {
|
|
||||||
// First check if we have credentials with a token
|
|
||||||
const credentials = this.getCredentials();
|
|
||||||
if (!credentials || !credentials.token) {
|
|
||||||
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize session if needed (this will load from our storage adapter)
|
|
||||||
await this.supabaseClient.initialize();
|
|
||||||
|
|
||||||
// Use the SupabaseAuthClient which now has the session
|
|
||||||
const supabaseClient = this.supabaseClient.getClient();
|
|
||||||
this.organizationService = new OrganizationService(supabaseClient as any);
|
|
||||||
}
|
|
||||||
return this.organizationService;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all organizations for the authenticated user
|
|
||||||
*/
|
|
||||||
async getOrganizations(): Promise<Organization[]> {
|
|
||||||
const service = await this.getOrganizationService();
|
|
||||||
return service.getOrganizations();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all briefs for a specific organization
|
|
||||||
*/
|
|
||||||
async getBriefs(orgId: string): Promise<Brief[]> {
|
|
||||||
const service = await this.getOrganizationService();
|
|
||||||
return service.getBriefs(orgId);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a specific organization by ID
|
|
||||||
*/
|
|
||||||
async getOrganization(orgId: string): Promise<Organization | null> {
|
|
||||||
const service = await this.getOrganizationService();
|
|
||||||
return service.getOrganization(orgId);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a specific brief by ID
|
|
||||||
*/
|
|
||||||
async getBrief(briefId: string): Promise<Brief | null> {
|
|
||||||
const service = await this.getOrganizationService();
|
|
||||||
return service.getBrief(briefId);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all tasks for a specific brief
|
|
||||||
*/
|
|
||||||
async getTasks(briefId: string): Promise<RemoteTask[]> {
|
|
||||||
const service = await this.getOrganizationService();
|
|
||||||
return service.getTasks(briefId);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,39 +19,15 @@ import { getLogger } from '../logger/index.js';
|
|||||||
* human-readable persisted format in the auth.json file.
|
* human-readable persisted format in the auth.json file.
|
||||||
*/
|
*/
|
||||||
export class CredentialStore {
|
export class CredentialStore {
|
||||||
private static instance: CredentialStore | null = null;
|
|
||||||
private logger = getLogger('CredentialStore');
|
private logger = getLogger('CredentialStore');
|
||||||
private config: AuthConfig;
|
private config: AuthConfig;
|
||||||
// Clock skew tolerance for expiry checks (30 seconds)
|
// Clock skew tolerance for expiry checks (30 seconds)
|
||||||
private readonly CLOCK_SKEW_MS = 30_000;
|
private readonly CLOCK_SKEW_MS = 30_000;
|
||||||
|
|
||||||
private constructor(config?: Partial<AuthConfig>) {
|
constructor(config?: Partial<AuthConfig>) {
|
||||||
this.config = getAuthConfig(config);
|
this.config = getAuthConfig(config);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the singleton instance of CredentialStore
|
|
||||||
*/
|
|
||||||
static getInstance(config?: Partial<AuthConfig>): CredentialStore {
|
|
||||||
if (!CredentialStore.instance) {
|
|
||||||
CredentialStore.instance = new CredentialStore(config);
|
|
||||||
} else if (config) {
|
|
||||||
// Warn if config is provided after initialization
|
|
||||||
const logger = getLogger('CredentialStore');
|
|
||||||
logger.warn(
|
|
||||||
'getInstance called with config after initialization; config is ignored.'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return CredentialStore.instance;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reset the singleton instance (useful for testing)
|
|
||||||
*/
|
|
||||||
static resetInstance(): void {
|
|
||||||
CredentialStore.instance = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get stored authentication credentials
|
* Get stored authentication credentials
|
||||||
* @returns AuthCredentials with expiresAt as number (milliseconds) for runtime use
|
* @returns AuthCredentials with expiresAt as number (milliseconds) for runtime use
|
||||||
|
|||||||
@@ -5,19 +5,12 @@
|
|||||||
export { AuthManager } from './auth-manager.js';
|
export { AuthManager } from './auth-manager.js';
|
||||||
export { CredentialStore } from './credential-store.js';
|
export { CredentialStore } from './credential-store.js';
|
||||||
export { OAuthService } from './oauth-service.js';
|
export { OAuthService } from './oauth-service.js';
|
||||||
export { SupabaseSessionStorage } from './supabase-session-storage.js';
|
|
||||||
export type {
|
|
||||||
Organization,
|
|
||||||
Brief,
|
|
||||||
RemoteTask
|
|
||||||
} from '../services/organization.service.js';
|
|
||||||
|
|
||||||
export type {
|
export type {
|
||||||
AuthCredentials,
|
AuthCredentials,
|
||||||
OAuthFlowOptions,
|
OAuthFlowOptions,
|
||||||
AuthConfig,
|
AuthConfig,
|
||||||
CliData,
|
CliData
|
||||||
UserContext
|
|
||||||
} from './types.js';
|
} from './types.js';
|
||||||
|
|
||||||
export { AuthenticationError } from './types.js';
|
export { AuthenticationError } from './types.js';
|
||||||
|
|||||||
@@ -181,8 +181,8 @@ export class OAuthService {
|
|||||||
timestamp: Date.now()
|
timestamp: Date.now()
|
||||||
};
|
};
|
||||||
|
|
||||||
// Build authorization URL for CLI-specific sign-in page
|
// Build authorization URL for web app sign-in page
|
||||||
const authUrl = new URL(`${this.baseUrl}/auth/cli/sign-in`);
|
const authUrl = new URL(`${this.baseUrl}/auth/sign-in`);
|
||||||
|
|
||||||
// Encode CLI data as base64
|
// Encode CLI data as base64
|
||||||
const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
|
const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
|
||||||
@@ -272,49 +272,7 @@ export class OAuthService {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle authorization code for PKCE flow
|
// Handle direct token response from server
|
||||||
const code = url.searchParams.get('code');
|
|
||||||
if (code && type === 'pkce_callback') {
|
|
||||||
try {
|
|
||||||
this.logger.info('Received authorization code for PKCE flow');
|
|
||||||
|
|
||||||
// Exchange code for session using PKCE
|
|
||||||
const session = await this.supabaseClient.exchangeCodeForSession(code);
|
|
||||||
|
|
||||||
// Save authentication data
|
|
||||||
const authData: AuthCredentials = {
|
|
||||||
token: session.access_token,
|
|
||||||
refreshToken: session.refresh_token,
|
|
||||||
userId: session.user.id,
|
|
||||||
email: session.user.email,
|
|
||||||
expiresAt: session.expires_at
|
|
||||||
? new Date(session.expires_at * 1000).toISOString()
|
|
||||||
: undefined,
|
|
||||||
tokenType: 'standard',
|
|
||||||
savedAt: new Date().toISOString()
|
|
||||||
};
|
|
||||||
|
|
||||||
this.credentialStore.saveCredentials(authData);
|
|
||||||
|
|
||||||
if (server.listening) {
|
|
||||||
server.close();
|
|
||||||
}
|
|
||||||
// Clear timeout since authentication succeeded
|
|
||||||
if (timeoutId) {
|
|
||||||
clearTimeout(timeoutId);
|
|
||||||
}
|
|
||||||
resolve(authData);
|
|
||||||
return;
|
|
||||||
} catch (error) {
|
|
||||||
if (server.listening) {
|
|
||||||
server.close();
|
|
||||||
}
|
|
||||||
reject(error);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle direct token response from server (legacy flow)
|
|
||||||
if (
|
if (
|
||||||
accessToken &&
|
accessToken &&
|
||||||
(type === 'oauth_success' || type === 'session_transfer')
|
(type === 'oauth_success' || type === 'session_transfer')
|
||||||
@@ -322,23 +280,8 @@ export class OAuthService {
|
|||||||
try {
|
try {
|
||||||
this.logger.info(`Received tokens via ${type}`);
|
this.logger.info(`Received tokens via ${type}`);
|
||||||
|
|
||||||
// Create a session with the tokens and set it in Supabase client
|
// Get user info using the access token if possible
|
||||||
const session = {
|
const user = await this.supabaseClient.getUser(accessToken);
|
||||||
access_token: accessToken,
|
|
||||||
refresh_token: refreshToken || '',
|
|
||||||
expires_at: expiresIn
|
|
||||||
? Math.floor(Date.now() / 1000) + parseInt(expiresIn)
|
|
||||||
: undefined,
|
|
||||||
expires_in: expiresIn ? parseInt(expiresIn) : undefined,
|
|
||||||
token_type: 'bearer',
|
|
||||||
user: null as any // Will be populated by setSession
|
|
||||||
};
|
|
||||||
|
|
||||||
// Set the session in Supabase client
|
|
||||||
await this.supabaseClient.setSession(session as any);
|
|
||||||
|
|
||||||
// Get user info from the session
|
|
||||||
const user = await this.supabaseClient.getUser();
|
|
||||||
|
|
||||||
// Calculate expiration time
|
// Calculate expiration time
|
||||||
const expiresAt = expiresIn
|
const expiresAt = expiresIn
|
||||||
|
|||||||
@@ -1,155 +0,0 @@
|
|||||||
/**
|
|
||||||
* Custom storage adapter for Supabase Auth sessions in CLI environment
|
|
||||||
* Implements the SupportedStorage interface required by Supabase Auth
|
|
||||||
*
|
|
||||||
* This adapter bridges Supabase's session management with our existing
|
|
||||||
* auth.json credential storage, maintaining backward compatibility
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { SupportedStorage } from '@supabase/supabase-js';
|
|
||||||
import { CredentialStore } from './credential-store.js';
|
|
||||||
import { AuthCredentials } from './types.js';
|
|
||||||
import { getLogger } from '../logger/index.js';
|
|
||||||
|
|
||||||
const STORAGE_KEY = 'sb-taskmaster-auth-token';
|
|
||||||
|
|
||||||
export class SupabaseSessionStorage implements SupportedStorage {
|
|
||||||
private store: CredentialStore;
|
|
||||||
private logger = getLogger('SupabaseSessionStorage');
|
|
||||||
|
|
||||||
constructor(store: CredentialStore) {
|
|
||||||
this.store = store;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Build a Supabase session object from our credentials
|
|
||||||
*/
|
|
||||||
private buildSessionFromCredentials(credentials: AuthCredentials): any {
|
|
||||||
// Create a session object that Supabase expects
|
|
||||||
const session = {
|
|
||||||
access_token: credentials.token,
|
|
||||||
refresh_token: credentials.refreshToken || '',
|
|
||||||
expires_at: credentials.expiresAt
|
|
||||||
? Math.floor(new Date(credentials.expiresAt).getTime() / 1000)
|
|
||||||
: Math.floor(Date.now() / 1000) + 3600, // Default to 1 hour
|
|
||||||
token_type: 'bearer',
|
|
||||||
user: {
|
|
||||||
id: credentials.userId,
|
|
||||||
email: credentials.email || '',
|
|
||||||
aud: 'authenticated',
|
|
||||||
role: 'authenticated',
|
|
||||||
email_confirmed_at: new Date().toISOString(),
|
|
||||||
app_metadata: {},
|
|
||||||
user_metadata: {},
|
|
||||||
created_at: new Date().toISOString(),
|
|
||||||
updated_at: new Date().toISOString()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
return session;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse a Supabase session back to our credentials
|
|
||||||
*/
|
|
||||||
private parseSessionToCredentials(
|
|
||||||
sessionData: any
|
|
||||||
): Partial<AuthCredentials> {
|
|
||||||
try {
|
|
||||||
const session = JSON.parse(sessionData);
|
|
||||||
return {
|
|
||||||
token: session.access_token,
|
|
||||||
refreshToken: session.refresh_token,
|
|
||||||
userId: session.user?.id || 'unknown',
|
|
||||||
email: session.user?.email,
|
|
||||||
expiresAt: session.expires_at
|
|
||||||
? new Date(session.expires_at * 1000).toISOString()
|
|
||||||
: undefined
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Error parsing session:', error);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get item from storage - Supabase will request the session with a specific key
|
|
||||||
*/
|
|
||||||
getItem(key: string): string | null {
|
|
||||||
// Supabase uses a specific key pattern for sessions
|
|
||||||
if (key === STORAGE_KEY || key.includes('auth-token')) {
|
|
||||||
try {
|
|
||||||
const credentials = this.store.getCredentials({ allowExpired: true });
|
|
||||||
if (credentials && credentials.token) {
|
|
||||||
// Build and return a session object from our stored credentials
|
|
||||||
const session = this.buildSessionFromCredentials(credentials);
|
|
||||||
return JSON.stringify(session);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Error getting session:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set item in storage - Supabase will store the session with a specific key
|
|
||||||
*/
|
|
||||||
setItem(key: string, value: string): void {
|
|
||||||
// Only handle Supabase session keys
|
|
||||||
if (key === STORAGE_KEY || key.includes('auth-token')) {
|
|
||||||
try {
|
|
||||||
// Parse the session and update our credentials
|
|
||||||
const sessionUpdates = this.parseSessionToCredentials(value);
|
|
||||||
const existingCredentials = this.store.getCredentials({
|
|
||||||
allowExpired: true
|
|
||||||
});
|
|
||||||
|
|
||||||
if (sessionUpdates.token) {
|
|
||||||
const updatedCredentials: AuthCredentials = {
|
|
||||||
...existingCredentials,
|
|
||||||
...sessionUpdates,
|
|
||||||
savedAt: new Date().toISOString(),
|
|
||||||
selectedContext: existingCredentials?.selectedContext
|
|
||||||
} as AuthCredentials;
|
|
||||||
|
|
||||||
this.store.saveCredentials(updatedCredentials);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Error setting session:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Remove item from storage - Called when signing out
|
|
||||||
*/
|
|
||||||
removeItem(key: string): void {
|
|
||||||
if (key === STORAGE_KEY || key.includes('auth-token')) {
|
|
||||||
// Don't actually remove credentials, just clear the tokens
|
|
||||||
// This preserves other data like selectedContext
|
|
||||||
try {
|
|
||||||
const credentials = this.store.getCredentials({ allowExpired: true });
|
|
||||||
if (credentials) {
|
|
||||||
// Keep context but clear auth tokens
|
|
||||||
const clearedCredentials: AuthCredentials = {
|
|
||||||
...credentials,
|
|
||||||
token: '',
|
|
||||||
refreshToken: undefined,
|
|
||||||
expiresAt: undefined
|
|
||||||
} as AuthCredentials;
|
|
||||||
this.store.saveCredentials(clearedCredentials);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Error removing session:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clear all session data
|
|
||||||
*/
|
|
||||||
clear(): void {
|
|
||||||
// Clear auth tokens but preserve context
|
|
||||||
this.removeItem(STORAGE_KEY);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -10,15 +10,6 @@ export interface AuthCredentials {
|
|||||||
expiresAt?: string | number;
|
expiresAt?: string | number;
|
||||||
tokenType?: 'standard';
|
tokenType?: 'standard';
|
||||||
savedAt: string;
|
savedAt: string;
|
||||||
selectedContext?: UserContext;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface UserContext {
|
|
||||||
orgId?: string;
|
|
||||||
orgName?: string;
|
|
||||||
briefId?: string;
|
|
||||||
briefName?: string;
|
|
||||||
updatedAt: string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface OAuthFlowOptions {
|
export interface OAuthFlowOptions {
|
||||||
@@ -76,11 +67,7 @@ export type AuthErrorCode =
|
|||||||
| 'STORAGE_ERROR'
|
| 'STORAGE_ERROR'
|
||||||
| 'NOT_SUPPORTED'
|
| 'NOT_SUPPORTED'
|
||||||
| 'REFRESH_FAILED'
|
| 'REFRESH_FAILED'
|
||||||
| 'INVALID_RESPONSE'
|
| 'INVALID_RESPONSE';
|
||||||
| 'PKCE_INIT_FAILED'
|
|
||||||
| 'PKCE_FAILED'
|
|
||||||
| 'CODE_EXCHANGE_FAILED'
|
|
||||||
| 'SESSION_SET_FAILED';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Authentication error class
|
* Authentication error class
|
||||||
|
|||||||
@@ -1,32 +1,19 @@
|
|||||||
/**
|
/**
|
||||||
* Supabase authentication client for CLI auth flows
|
* Supabase client for authentication
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import {
|
import { createClient, SupabaseClient, User } from '@supabase/supabase-js';
|
||||||
createClient,
|
|
||||||
SupabaseClient as SupabaseJSClient,
|
|
||||||
User,
|
|
||||||
Session
|
|
||||||
} from '@supabase/supabase-js';
|
|
||||||
import { AuthenticationError } from '../auth/types.js';
|
import { AuthenticationError } from '../auth/types.js';
|
||||||
import { getLogger } from '../logger/index.js';
|
import { getLogger } from '../logger/index.js';
|
||||||
import { SupabaseSessionStorage } from '../auth/supabase-session-storage.js';
|
|
||||||
import { CredentialStore } from '../auth/credential-store.js';
|
|
||||||
|
|
||||||
export class SupabaseAuthClient {
|
export class SupabaseAuthClient {
|
||||||
private client: SupabaseJSClient | null = null;
|
private client: SupabaseClient | null = null;
|
||||||
private sessionStorage: SupabaseSessionStorage;
|
|
||||||
private logger = getLogger('SupabaseAuthClient');
|
private logger = getLogger('SupabaseAuthClient');
|
||||||
|
|
||||||
constructor() {
|
|
||||||
const credentialStore = CredentialStore.getInstance();
|
|
||||||
this.sessionStorage = new SupabaseSessionStorage(credentialStore);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get Supabase client with proper session management
|
* Initialize Supabase client
|
||||||
*/
|
*/
|
||||||
getClient(): SupabaseJSClient {
|
private getClient(): SupabaseClient {
|
||||||
if (!this.client) {
|
if (!this.client) {
|
||||||
// Get Supabase configuration from environment - using TM_PUBLIC prefix
|
// Get Supabase configuration from environment - using TM_PUBLIC prefix
|
||||||
const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
|
const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
|
||||||
@@ -39,12 +26,10 @@ export class SupabaseAuthClient {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create client with custom storage adapter (similar to React Native AsyncStorage)
|
|
||||||
this.client = createClient(supabaseUrl, supabaseAnonKey, {
|
this.client = createClient(supabaseUrl, supabaseAnonKey, {
|
||||||
auth: {
|
auth: {
|
||||||
storage: this.sessionStorage,
|
|
||||||
autoRefreshToken: true,
|
autoRefreshToken: true,
|
||||||
persistSession: true,
|
persistSession: false, // We handle persistence ourselves
|
||||||
detectSessionInUrl: false
|
detectSessionInUrl: false
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -54,159 +39,40 @@ export class SupabaseAuthClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Initialize the client and restore session if available
|
* Note: Code exchange is now handled server-side
|
||||||
|
* The server returns tokens directly to avoid PKCE issues
|
||||||
|
* This method is kept for potential future use
|
||||||
*/
|
*/
|
||||||
async initialize(): Promise<Session | null> {
|
async exchangeCodeForSession(_code: string): Promise<{
|
||||||
const client = this.getClient();
|
token: string;
|
||||||
|
refreshToken?: string;
|
||||||
try {
|
userId: string;
|
||||||
// Get the current session from storage
|
email?: string;
|
||||||
const {
|
expiresAt?: string;
|
||||||
data: { session },
|
}> {
|
||||||
error
|
throw new AuthenticationError(
|
||||||
} = await client.auth.getSession();
|
'Code exchange is handled server-side. CLI receives tokens directly.',
|
||||||
|
'NOT_SUPPORTED'
|
||||||
if (error) {
|
);
|
||||||
this.logger.warn('Failed to restore session:', error);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (session) {
|
|
||||||
this.logger.info('Session restored successfully');
|
|
||||||
}
|
|
||||||
|
|
||||||
return session;
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Error initializing session:', error);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Sign in with PKCE flow (for CLI auth)
|
* Refresh an access token
|
||||||
*/
|
*/
|
||||||
async signInWithPKCE(): Promise<{ url: string; codeVerifier: string }> {
|
async refreshSession(refreshToken: string): Promise<{
|
||||||
const client = this.getClient();
|
token: string;
|
||||||
|
refreshToken?: string;
|
||||||
|
expiresAt?: string;
|
||||||
|
}> {
|
||||||
try {
|
try {
|
||||||
// Generate PKCE challenge
|
const client = this.getClient();
|
||||||
const { data, error } = await client.auth.signInWithOAuth({
|
|
||||||
provider: 'github',
|
|
||||||
options: {
|
|
||||||
redirectTo:
|
|
||||||
process.env.TM_AUTH_CALLBACK_URL ||
|
|
||||||
'http://localhost:3421/auth/callback',
|
|
||||||
scopes: 'email'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Failed to initiate PKCE flow: ${error.message}`,
|
|
||||||
'PKCE_INIT_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data?.url) {
|
|
||||||
throw new AuthenticationError(
|
|
||||||
'No authorization URL returned',
|
|
||||||
'INVALID_RESPONSE'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract code_verifier from the URL or generate it
|
|
||||||
// Note: Supabase handles PKCE internally, we just need to handle the callback
|
|
||||||
return {
|
|
||||||
url: data.url,
|
|
||||||
codeVerifier: '' // Supabase manages this internally
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof AuthenticationError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Failed to start PKCE flow: ${(error as Error).message}`,
|
|
||||||
'PKCE_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Exchange authorization code for session (PKCE flow)
|
|
||||||
*/
|
|
||||||
async exchangeCodeForSession(code: string): Promise<Session> {
|
|
||||||
const client = this.getClient();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { data, error } = await client.auth.exchangeCodeForSession(code);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Failed to exchange code: ${error.message}`,
|
|
||||||
'CODE_EXCHANGE_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data?.session) {
|
|
||||||
throw new AuthenticationError(
|
|
||||||
'No session returned from code exchange',
|
|
||||||
'INVALID_RESPONSE'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.logger.info('Successfully exchanged code for session');
|
|
||||||
return data.session;
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof AuthenticationError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Code exchange failed: ${(error as Error).message}`,
|
|
||||||
'CODE_EXCHANGE_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current session
|
|
||||||
*/
|
|
||||||
async getSession(): Promise<Session | null> {
|
|
||||||
const client = this.getClient();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const {
|
|
||||||
data: { session },
|
|
||||||
error
|
|
||||||
} = await client.auth.getSession();
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
this.logger.warn('Failed to get session:', error);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return session;
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error('Error getting session:', error);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Refresh the current session
|
|
||||||
*/
|
|
||||||
async refreshSession(): Promise<Session | null> {
|
|
||||||
const client = this.getClient();
|
|
||||||
|
|
||||||
try {
|
|
||||||
this.logger.info('Refreshing session...');
|
this.logger.info('Refreshing session...');
|
||||||
|
|
||||||
// Supabase will automatically use the stored refresh token
|
// Set the session with refresh token
|
||||||
const {
|
const { data, error } = await client.auth.refreshSession({
|
||||||
data: { session },
|
refresh_token: refreshToken
|
||||||
error
|
});
|
||||||
} = await client.auth.refreshSession();
|
|
||||||
|
|
||||||
if (error) {
|
if (error) {
|
||||||
this.logger.error('Failed to refresh session:', error);
|
this.logger.error('Failed to refresh session:', error);
|
||||||
@@ -216,11 +82,22 @@ export class SupabaseAuthClient {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (session) {
|
if (!data.session) {
|
||||||
this.logger.info('Successfully refreshed session');
|
throw new AuthenticationError(
|
||||||
|
'No session data returned',
|
||||||
|
'INVALID_RESPONSE'
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return session;
|
this.logger.info('Successfully refreshed session');
|
||||||
|
|
||||||
|
return {
|
||||||
|
token: data.session.access_token,
|
||||||
|
refreshToken: data.session.refresh_token,
|
||||||
|
expiresAt: data.session.expires_at
|
||||||
|
? new Date(data.session.expires_at * 1000).toISOString()
|
||||||
|
: undefined
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof AuthenticationError) {
|
if (error instanceof AuthenticationError) {
|
||||||
throw error;
|
throw error;
|
||||||
@@ -234,23 +111,21 @@ export class SupabaseAuthClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get current user from session
|
* Get user details from token
|
||||||
*/
|
*/
|
||||||
async getUser(): Promise<User | null> {
|
async getUser(token: string): Promise<User | null> {
|
||||||
const client = this.getClient();
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const {
|
const client = this.getClient();
|
||||||
data: { user },
|
|
||||||
error
|
// Get user with the token
|
||||||
} = await client.auth.getUser();
|
const { data, error } = await client.auth.getUser(token);
|
||||||
|
|
||||||
if (error) {
|
if (error) {
|
||||||
this.logger.warn('Failed to get user:', error);
|
this.logger.warn('Failed to get user:', error);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
return user;
|
return data.user;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
this.logger.error('Error getting user:', error);
|
this.logger.error('Error getting user:', error);
|
||||||
return null;
|
return null;
|
||||||
@@ -258,55 +133,22 @@ export class SupabaseAuthClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Sign out and clear session
|
* Sign out (revoke tokens)
|
||||||
|
* Note: This requires the user to be authenticated with the current session.
|
||||||
|
* For remote token revocation, a server-side admin API with service_role key would be needed.
|
||||||
*/
|
*/
|
||||||
async signOut(): Promise<void> {
|
async signOut(): Promise<void> {
|
||||||
const client = this.getClient();
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Sign out with global scope to revoke all refresh tokens
|
const client = this.getClient();
|
||||||
|
|
||||||
|
// Sign out the current session with global scope to revoke all refresh tokens
|
||||||
const { error } = await client.auth.signOut({ scope: 'global' });
|
const { error } = await client.auth.signOut({ scope: 'global' });
|
||||||
|
|
||||||
if (error) {
|
if (error) {
|
||||||
this.logger.warn('Failed to sign out:', error);
|
this.logger.warn('Failed to sign out:', error);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clear cached session data
|
|
||||||
this.sessionStorage.clear();
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
this.logger.error('Error during sign out:', error);
|
this.logger.error('Error during sign out:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Set session from external auth (e.g., from server callback)
|
|
||||||
*/
|
|
||||||
async setSession(session: Session): Promise<void> {
|
|
||||||
const client = this.getClient();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { error } = await client.auth.setSession({
|
|
||||||
access_token: session.access_token,
|
|
||||||
refresh_token: session.refresh_token
|
|
||||||
});
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Failed to set session: ${error.message}`,
|
|
||||||
'SESSION_SET_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.logger.info('Session set successfully');
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof AuthenticationError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new AuthenticationError(
|
|
||||||
`Failed to set session: ${(error as Error).message}`,
|
|
||||||
'SESSION_SET_FAILED'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -177,7 +177,7 @@ describe('ConfigManager', () => {
|
|||||||
|
|
||||||
it('should return storage configuration', () => {
|
it('should return storage configuration', () => {
|
||||||
const storage = manager.getStorageConfig();
|
const storage = manager.getStorageConfig();
|
||||||
expect(storage).toEqual({ type: 'file' });
|
expect(storage).toEqual({ type: 'auto', apiConfigured: false });
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return API storage configuration when configured', async () => {
|
it('should return API storage configuration when configured', async () => {
|
||||||
@@ -206,7 +206,65 @@ describe('ConfigManager', () => {
|
|||||||
expect(storage).toEqual({
|
expect(storage).toEqual({
|
||||||
type: 'api',
|
type: 'api',
|
||||||
apiEndpoint: 'https://api.example.com',
|
apiEndpoint: 'https://api.example.com',
|
||||||
apiAccessToken: 'token123'
|
apiAccessToken: 'token123',
|
||||||
|
apiConfigured: true
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return auto storage configuration with apiConfigured flag', async () => {
|
||||||
|
// Create a new instance with auto storage config and partial API settings
|
||||||
|
vi.mocked(ConfigMerger).mockImplementationOnce(
|
||||||
|
() =>
|
||||||
|
({
|
||||||
|
addSource: vi.fn(),
|
||||||
|
clearSources: vi.fn(),
|
||||||
|
merge: vi.fn().mockReturnValue({
|
||||||
|
storage: {
|
||||||
|
type: 'auto',
|
||||||
|
apiEndpoint: 'https://api.example.com'
|
||||||
|
// No apiAccessToken - partial config
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
getSources: vi.fn().mockReturnValue([])
|
||||||
|
}) as any
|
||||||
|
);
|
||||||
|
|
||||||
|
const autoManager = await ConfigManager.create(testProjectRoot);
|
||||||
|
|
||||||
|
const storage = autoManager.getStorageConfig();
|
||||||
|
expect(storage).toEqual({
|
||||||
|
type: 'auto',
|
||||||
|
apiEndpoint: 'https://api.example.com',
|
||||||
|
apiAccessToken: undefined,
|
||||||
|
apiConfigured: true // true because apiEndpoint is provided
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return auto storage with apiConfigured false when no API settings', async () => {
|
||||||
|
// Create a new instance with auto storage but no API settings
|
||||||
|
vi.mocked(ConfigMerger).mockImplementationOnce(
|
||||||
|
() =>
|
||||||
|
({
|
||||||
|
addSource: vi.fn(),
|
||||||
|
clearSources: vi.fn(),
|
||||||
|
merge: vi.fn().mockReturnValue({
|
||||||
|
storage: {
|
||||||
|
type: 'auto'
|
||||||
|
// No API settings at all
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
getSources: vi.fn().mockReturnValue([])
|
||||||
|
}) as any
|
||||||
|
);
|
||||||
|
|
||||||
|
const autoManager = await ConfigManager.create(testProjectRoot);
|
||||||
|
|
||||||
|
const storage = autoManager.getStorageConfig();
|
||||||
|
expect(storage).toEqual({
|
||||||
|
type: 'auto',
|
||||||
|
apiEndpoint: undefined,
|
||||||
|
apiAccessToken: undefined,
|
||||||
|
apiConfigured: false // false because no API settings
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -85,6 +85,11 @@ describe('EnvironmentConfigProvider', () => {
|
|||||||
provider = new EnvironmentConfigProvider(); // Reset provider
|
provider = new EnvironmentConfigProvider(); // Reset provider
|
||||||
config = provider.loadConfig();
|
config = provider.loadConfig();
|
||||||
expect(config.storage?.type).toBe('api');
|
expect(config.storage?.type).toBe('api');
|
||||||
|
|
||||||
|
process.env.TASKMASTER_STORAGE_TYPE = 'auto';
|
||||||
|
provider = new EnvironmentConfigProvider(); // Reset provider
|
||||||
|
config = provider.loadConfig();
|
||||||
|
expect(config.storage?.type).toBe('auto');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle nested configuration paths', () => {
|
it('should handle nested configuration paths', () => {
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
|
|||||||
{
|
{
|
||||||
env: 'TASKMASTER_STORAGE_TYPE',
|
env: 'TASKMASTER_STORAGE_TYPE',
|
||||||
path: ['storage', 'type'],
|
path: ['storage', 'type'],
|
||||||
validate: (v: string) => ['file', 'api'].includes(v)
|
validate: (v: string) => ['file', 'api', 'auto'].includes(v)
|
||||||
},
|
},
|
||||||
{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
|
{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
|
||||||
{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },
|
{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },
|
||||||
|
|||||||
@@ -55,3 +55,7 @@ export {
|
|||||||
|
|
||||||
// Re-export logger
|
// Re-export logger
|
||||||
export { getLogger, createLogger, setGlobalLogger } from './logger/index.js';
|
export { getLogger, createLogger, setGlobalLogger } from './logger/index.js';
|
||||||
|
|
||||||
|
// Re-export workflow
|
||||||
|
export { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
|
||||||
|
export type * from './workflow/index.js';
|
||||||
|
|||||||
@@ -3,11 +3,7 @@
|
|||||||
* This file defines the contract for configuration management
|
* This file defines the contract for configuration management
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type {
|
import type { TaskComplexity, TaskPriority } from '../types/index.js';
|
||||||
TaskComplexity,
|
|
||||||
TaskPriority,
|
|
||||||
StorageType
|
|
||||||
} from '../types/index.js';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Model configuration for different AI roles
|
* Model configuration for different AI roles
|
||||||
@@ -77,6 +73,14 @@ export interface TagSettings {
|
|||||||
tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case';
|
tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Storage type options
|
||||||
|
* - 'file': Local file system storage
|
||||||
|
* - 'api': Remote API storage (Hamster integration)
|
||||||
|
* - 'auto': Automatically detect based on auth status
|
||||||
|
*/
|
||||||
|
export type StorageType = 'file' | 'api' | 'auto';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Runtime storage configuration used for storage backend selection
|
* Runtime storage configuration used for storage backend selection
|
||||||
* This is what getStorageConfig() returns and what StorageFactory expects
|
* This is what getStorageConfig() returns and what StorageFactory expects
|
||||||
|
|||||||
@@ -1,170 +0,0 @@
|
|||||||
import { Task, Subtask } from '../types/index.js';
|
|
||||||
import { Database, Tables } from '../types/database.types.js';
|
|
||||||
|
|
||||||
type TaskRow = Tables<'tasks'>;
|
|
||||||
type DependencyRow = Tables<'task_dependencies'>;
|
|
||||||
|
|
||||||
export class TaskMapper {
|
|
||||||
/**
|
|
||||||
* Maps database tasks to internal Task format
|
|
||||||
*/
|
|
||||||
static mapDatabaseTasksToTasks(
|
|
||||||
dbTasks: TaskRow[],
|
|
||||||
dbDependencies: DependencyRow[]
|
|
||||||
): Task[] {
|
|
||||||
if (!dbTasks || dbTasks.length === 0) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Group dependencies by task_id
|
|
||||||
const dependenciesByTaskId = this.groupDependenciesByTaskId(dbDependencies);
|
|
||||||
|
|
||||||
// Separate parent tasks and subtasks
|
|
||||||
const parentTasks = dbTasks.filter((t) => !t.parent_task_id);
|
|
||||||
const subtasksByParentId = this.groupSubtasksByParentId(dbTasks);
|
|
||||||
|
|
||||||
// Map parent tasks with their subtasks
|
|
||||||
return parentTasks.map((taskRow) =>
|
|
||||||
this.mapDatabaseTaskToTask(
|
|
||||||
taskRow,
|
|
||||||
subtasksByParentId.get(taskRow.id) || [],
|
|
||||||
dependenciesByTaskId
|
|
||||||
)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Maps a single database task to internal Task format
|
|
||||||
*/
|
|
||||||
static mapDatabaseTaskToTask(
|
|
||||||
dbTask: TaskRow,
|
|
||||||
dbSubtasks: TaskRow[],
|
|
||||||
dependenciesByTaskId: Map<string, string[]>
|
|
||||||
): Task {
|
|
||||||
// Map subtasks
|
|
||||||
const subtasks: Subtask[] = dbSubtasks.map((subtask, index) => ({
|
|
||||||
id: index + 1, // Use numeric ID for subtasks
|
|
||||||
parentId: dbTask.id,
|
|
||||||
title: subtask.title,
|
|
||||||
description: subtask.description || '',
|
|
||||||
status: this.mapStatus(subtask.status),
|
|
||||||
priority: this.mapPriority(subtask.priority),
|
|
||||||
dependencies: dependenciesByTaskId.get(subtask.id) || [],
|
|
||||||
details: (subtask.metadata as any)?.details || '',
|
|
||||||
testStrategy: (subtask.metadata as any)?.testStrategy || '',
|
|
||||||
createdAt: subtask.created_at,
|
|
||||||
updatedAt: subtask.updated_at,
|
|
||||||
assignee: subtask.assignee_id || undefined,
|
|
||||||
complexity: subtask.complexity
|
|
||||||
? this.mapComplexityToInternal(subtask.complexity)
|
|
||||||
: undefined
|
|
||||||
}));
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: dbTask.display_id || dbTask.id, // Use display_id if available
|
|
||||||
title: dbTask.title,
|
|
||||||
description: dbTask.description || '',
|
|
||||||
status: this.mapStatus(dbTask.status),
|
|
||||||
priority: this.mapPriority(dbTask.priority),
|
|
||||||
dependencies: dependenciesByTaskId.get(dbTask.id) || [],
|
|
||||||
details: (dbTask.metadata as any)?.details || '',
|
|
||||||
testStrategy: (dbTask.metadata as any)?.testStrategy || '',
|
|
||||||
subtasks,
|
|
||||||
createdAt: dbTask.created_at,
|
|
||||||
updatedAt: dbTask.updated_at,
|
|
||||||
assignee: dbTask.assignee_id || undefined,
|
|
||||||
complexity: dbTask.complexity
|
|
||||||
? this.mapComplexityToInternal(dbTask.complexity)
|
|
||||||
: undefined,
|
|
||||||
effort: dbTask.estimated_hours || undefined,
|
|
||||||
actualEffort: dbTask.actual_hours || undefined
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Groups dependencies by task ID
|
|
||||||
*/
|
|
||||||
private static groupDependenciesByTaskId(
|
|
||||||
dependencies: DependencyRow[]
|
|
||||||
): Map<string, string[]> {
|
|
||||||
const dependenciesByTaskId = new Map<string, string[]>();
|
|
||||||
|
|
||||||
if (dependencies) {
|
|
||||||
for (const dep of dependencies) {
|
|
||||||
const deps = dependenciesByTaskId.get(dep.task_id) || [];
|
|
||||||
deps.push(dep.depends_on_task_id);
|
|
||||||
dependenciesByTaskId.set(dep.task_id, deps);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return dependenciesByTaskId;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Groups subtasks by their parent ID
|
|
||||||
*/
|
|
||||||
private static groupSubtasksByParentId(
|
|
||||||
tasks: TaskRow[]
|
|
||||||
): Map<string, TaskRow[]> {
|
|
||||||
const subtasksByParentId = new Map<string, TaskRow[]>();
|
|
||||||
|
|
||||||
for (const task of tasks) {
|
|
||||||
if (task.parent_task_id) {
|
|
||||||
const subtasks = subtasksByParentId.get(task.parent_task_id) || [];
|
|
||||||
subtasks.push(task);
|
|
||||||
subtasksByParentId.set(task.parent_task_id, subtasks);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort subtasks by subtask_position for each parent
|
|
||||||
for (const subtasks of subtasksByParentId.values()) {
|
|
||||||
subtasks.sort((a, b) => a.subtask_position - b.subtask_position);
|
|
||||||
}
|
|
||||||
|
|
||||||
return subtasksByParentId;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Maps database status to internal status
|
|
||||||
*/
|
|
||||||
private static mapStatus(
|
|
||||||
status: Database['public']['Enums']['task_status']
|
|
||||||
): Task['status'] {
|
|
||||||
switch (status) {
|
|
||||||
case 'todo':
|
|
||||||
return 'pending';
|
|
||||||
case 'in_progress':
|
|
||||||
return 'in-progress';
|
|
||||||
case 'done':
|
|
||||||
return 'done';
|
|
||||||
default:
|
|
||||||
return 'pending';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Maps database priority to internal priority
|
|
||||||
*/
|
|
||||||
private static mapPriority(
|
|
||||||
priority: Database['public']['Enums']['task_priority']
|
|
||||||
): Task['priority'] {
|
|
||||||
switch (priority) {
|
|
||||||
case 'urgent':
|
|
||||||
return 'critical';
|
|
||||||
default:
|
|
||||||
return priority as Task['priority'];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Maps numeric complexity to descriptive complexity
|
|
||||||
*/
|
|
||||||
private static mapComplexityToInternal(
|
|
||||||
complexity: number
|
|
||||||
): Task['complexity'] {
|
|
||||||
if (complexity <= 2) return 'simple';
|
|
||||||
if (complexity <= 5) return 'moderate';
|
|
||||||
if (complexity <= 8) return 'complex';
|
|
||||||
return 'very-complex';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,110 +0,0 @@
|
|||||||
import { SupabaseClient } from '@supabase/supabase-js';
|
|
||||||
import { Task } from '../types/index.js';
|
|
||||||
import { Database } from '../types/database.types.js';
|
|
||||||
import { TaskMapper } from '../mappers/TaskMapper.js';
|
|
||||||
import { AuthManager } from '../auth/auth-manager.js';
|
|
||||||
|
|
||||||
export class SupabaseTaskRepository {
|
|
||||||
constructor(private supabase: SupabaseClient<Database>) {}
|
|
||||||
|
|
||||||
async getTasks(_projectId?: string): Promise<Task[]> {
|
|
||||||
// Get the current context to determine briefId
|
|
||||||
const authManager = AuthManager.getInstance();
|
|
||||||
const context = authManager.getContext();
|
|
||||||
|
|
||||||
if (!context || !context.briefId) {
|
|
||||||
throw new Error(
|
|
||||||
'No brief selected. Please select a brief first using: tm context brief'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get all tasks for the brief using the exact query structure
|
|
||||||
const { data: tasks, error } = await this.supabase
|
|
||||||
.from('tasks')
|
|
||||||
.select(`
|
|
||||||
*,
|
|
||||||
document:document_id (
|
|
||||||
id,
|
|
||||||
document_name,
|
|
||||||
title,
|
|
||||||
description
|
|
||||||
)
|
|
||||||
`)
|
|
||||||
.eq('brief_id', context.briefId)
|
|
||||||
.order('position', { ascending: true })
|
|
||||||
.order('subtask_position', { ascending: true })
|
|
||||||
.order('created_at', { ascending: true });
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new Error(`Failed to fetch tasks: ${error.message}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!tasks || tasks.length === 0) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get all dependencies for these tasks
|
|
||||||
const taskIds = tasks.map((t: any) => t.id);
|
|
||||||
const { data: depsData, error: depsError } = await this.supabase
|
|
||||||
.from('task_dependencies')
|
|
||||||
.select('*')
|
|
||||||
.in('task_id', taskIds);
|
|
||||||
|
|
||||||
if (depsError) {
|
|
||||||
throw new Error(
|
|
||||||
`Failed to fetch task dependencies: ${depsError.message}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use mapper to convert to internal format
|
|
||||||
return TaskMapper.mapDatabaseTasksToTasks(tasks, depsData || []);
|
|
||||||
}
|
|
||||||
|
|
||||||
async getTask(accountId: string, taskId: string): Promise<Task | null> {
|
|
||||||
const { data, error } = await this.supabase
|
|
||||||
.from('tasks')
|
|
||||||
.select('*')
|
|
||||||
.eq('account_id', accountId)
|
|
||||||
.eq('id', taskId)
|
|
||||||
.single();
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
if (error.code === 'PGRST116') {
|
|
||||||
return null; // Not found
|
|
||||||
}
|
|
||||||
throw new Error(`Failed to fetch task: ${error.message}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get dependencies for this task
|
|
||||||
const { data: depsData } = await this.supabase
|
|
||||||
.from('task_dependencies')
|
|
||||||
.select('*')
|
|
||||||
.eq('task_id', taskId);
|
|
||||||
|
|
||||||
// Get subtasks if this is a parent task
|
|
||||||
const { data: subtasksData } = await this.supabase
|
|
||||||
.from('tasks')
|
|
||||||
.select('*')
|
|
||||||
.eq('parent_task_id', taskId)
|
|
||||||
.order('subtask_position', { ascending: true });
|
|
||||||
|
|
||||||
// Create dependency map
|
|
||||||
const dependenciesByTaskId = new Map<string, string[]>();
|
|
||||||
if (depsData) {
|
|
||||||
dependenciesByTaskId.set(
|
|
||||||
taskId,
|
|
||||||
depsData.map(
|
|
||||||
(d: Database['public']['Tables']['task_dependencies']['Row']) =>
|
|
||||||
d.depends_on_task_id
|
|
||||||
)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use mapper to convert single task
|
|
||||||
return TaskMapper.mapDatabaseTaskToTask(
|
|
||||||
data,
|
|
||||||
subtasksData || [],
|
|
||||||
dependenciesByTaskId
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
import { Task, TaskTag } from '../types/index.js';
|
|
||||||
|
|
||||||
export interface TaskRepository {
|
|
||||||
// Task operations
|
|
||||||
getTasks(projectId: string): Promise<Task[]>;
|
|
||||||
getTask(projectId: string, taskId: string): Promise<Task | null>;
|
|
||||||
createTask(projectId: string, task: Omit<Task, 'id'>): Promise<Task>;
|
|
||||||
updateTask(
|
|
||||||
projectId: string,
|
|
||||||
taskId: string,
|
|
||||||
updates: Partial<Task>
|
|
||||||
): Promise<Task>;
|
|
||||||
deleteTask(projectId: string, taskId: string): Promise<void>;
|
|
||||||
|
|
||||||
// Tag operations
|
|
||||||
getTags(projectId: string): Promise<TaskTag[]>;
|
|
||||||
getTag(projectId: string, tagName: string): Promise<TaskTag | null>;
|
|
||||||
createTag(projectId: string, tag: TaskTag): Promise<TaskTag>;
|
|
||||||
updateTag(
|
|
||||||
projectId: string,
|
|
||||||
tagName: string,
|
|
||||||
updates: Partial<TaskTag>
|
|
||||||
): Promise<TaskTag>;
|
|
||||||
deleteTag(projectId: string, tagName: string): Promise<void>;
|
|
||||||
|
|
||||||
// Bulk operations
|
|
||||||
bulkCreateTasks(
|
|
||||||
projectId: string,
|
|
||||||
tasks: Omit<Task, 'id'>[]
|
|
||||||
): Promise<Task[]>;
|
|
||||||
bulkUpdateTasks(
|
|
||||||
projectId: string,
|
|
||||||
updates: Array<{ id: string; updates: Partial<Task> }>
|
|
||||||
): Promise<Task[]>;
|
|
||||||
bulkDeleteTasks(projectId: string, taskIds: string[]): Promise<void>;
|
|
||||||
}
|
|
||||||
@@ -4,5 +4,3 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
export { TaskService } from './task-service.js';
|
export { TaskService } from './task-service.js';
|
||||||
export { OrganizationService } from './organization.service.js';
|
|
||||||
export type { Organization, Brief } from './organization.service.js';
|
|
||||||
|
|||||||
@@ -1,363 +0,0 @@
|
|||||||
/**
|
|
||||||
* @fileoverview Organization and Brief management service
|
|
||||||
* Handles fetching and managing organizations and briefs from the API
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { SupabaseClient } from '@supabase/supabase-js';
|
|
||||||
import { Database } from '../types/database.types.js';
|
|
||||||
import { TaskMasterError, ERROR_CODES } from '../errors/task-master-error.js';
|
|
||||||
import { getLogger } from '../logger/index.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Organization data structure
|
|
||||||
*/
|
|
||||||
export interface Organization {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
slug: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Brief data structure
|
|
||||||
*/
|
|
||||||
export interface Brief {
|
|
||||||
id: string;
|
|
||||||
accountId: string;
|
|
||||||
documentId: string;
|
|
||||||
status: string;
|
|
||||||
createdAt: string;
|
|
||||||
updatedAt: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Task data structure from the remote database
|
|
||||||
*/
|
|
||||||
export interface RemoteTask {
|
|
||||||
id: string;
|
|
||||||
briefId: string;
|
|
||||||
documentId: string;
|
|
||||||
position: number | null;
|
|
||||||
subtaskPosition: number | null;
|
|
||||||
status: string;
|
|
||||||
createdAt: string;
|
|
||||||
updatedAt: string;
|
|
||||||
// Document details from join
|
|
||||||
document?: {
|
|
||||||
id: string;
|
|
||||||
document_name: string;
|
|
||||||
title: string;
|
|
||||||
description: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Service for managing organizations and briefs
|
|
||||||
*/
|
|
||||||
export class OrganizationService {
|
|
||||||
private logger = getLogger('OrganizationService');
|
|
||||||
|
|
||||||
constructor(private supabaseClient: SupabaseClient<Database>) {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all organizations for the authenticated user
|
|
||||||
*/
|
|
||||||
async getOrganizations(): Promise<Organization[]> {
|
|
||||||
try {
|
|
||||||
// The user is already authenticated via the Authorization header
|
|
||||||
// Query the user_accounts view/table (filtered by RLS for current user)
|
|
||||||
const { data, error } = await this.supabaseClient
|
|
||||||
.from('user_accounts')
|
|
||||||
.select(`
|
|
||||||
id,
|
|
||||||
name,
|
|
||||||
slug
|
|
||||||
`);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new TaskMasterError(
|
|
||||||
`Failed to fetch organizations: ${error.message}`,
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getOrganizations' },
|
|
||||||
error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data || data.length === 0) {
|
|
||||||
this.logger.debug('No organizations found for user');
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map to our Organization interface
|
|
||||||
return data.map((org) => ({
|
|
||||||
id: org.id ?? '',
|
|
||||||
name: org.name ?? '',
|
|
||||||
slug: org.slug ?? org.id ?? '' // Use ID as fallback if slug is null
|
|
||||||
}));
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof TaskMasterError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
'Failed to fetch organizations',
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getOrganizations' },
|
|
||||||
error as Error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a specific organization by ID
|
|
||||||
*/
|
|
||||||
async getOrganization(orgId: string): Promise<Organization | null> {
|
|
||||||
try {
|
|
||||||
const { data, error } = await this.supabaseClient
|
|
||||||
.from('accounts')
|
|
||||||
.select(`
|
|
||||||
id,
|
|
||||||
name,
|
|
||||||
slug
|
|
||||||
`)
|
|
||||||
.eq('id', orgId)
|
|
||||||
.single();
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
if (error.code === 'PGRST116') {
|
|
||||||
// No rows found
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
`Failed to fetch organization: ${error.message}`,
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getOrganization', orgId },
|
|
||||||
error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const accountData =
|
|
||||||
data as Database['public']['Tables']['accounts']['Row'];
|
|
||||||
return {
|
|
||||||
id: accountData.id,
|
|
||||||
name: accountData.name,
|
|
||||||
slug: accountData.slug || accountData.id
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof TaskMasterError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
'Failed to fetch organization',
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getOrganization', orgId },
|
|
||||||
error as Error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all briefs for a specific organization
|
|
||||||
*/
|
|
||||||
async getBriefs(orgId: string): Promise<Brief[]> {
|
|
||||||
try {
|
|
||||||
const { data, error } = await this.supabaseClient
|
|
||||||
.from('brief')
|
|
||||||
.select(`
|
|
||||||
id,
|
|
||||||
account_id,
|
|
||||||
document_id,
|
|
||||||
status,
|
|
||||||
created_at,
|
|
||||||
updated_at
|
|
||||||
`)
|
|
||||||
.eq('account_id', orgId);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new TaskMasterError(
|
|
||||||
`Failed to fetch briefs: ${error.message}`,
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getBriefs', orgId },
|
|
||||||
error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data || data.length === 0) {
|
|
||||||
this.logger.debug(`No briefs found for organization ${orgId}`);
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map to our Brief interface
|
|
||||||
return data.map((brief: any) => ({
|
|
||||||
id: brief.id,
|
|
||||||
accountId: brief.account_id,
|
|
||||||
documentId: brief.document_id,
|
|
||||||
status: brief.status,
|
|
||||||
createdAt: brief.created_at,
|
|
||||||
updatedAt: brief.updated_at
|
|
||||||
}));
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof TaskMasterError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
'Failed to fetch briefs',
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getBriefs', orgId },
|
|
||||||
error as Error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a specific brief by ID
|
|
||||||
*/
|
|
||||||
async getBrief(briefId: string): Promise<Brief | null> {
|
|
||||||
try {
|
|
||||||
const { data, error } = await this.supabaseClient
|
|
||||||
.from('brief')
|
|
||||||
.select(`
|
|
||||||
id,
|
|
||||||
account_id,
|
|
||||||
document_id,
|
|
||||||
status,
|
|
||||||
created_at,
|
|
||||||
updated_at
|
|
||||||
`)
|
|
||||||
.eq('id', briefId)
|
|
||||||
.single();
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
if (error.code === 'PGRST116') {
|
|
||||||
// No rows found
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
`Failed to fetch brief: ${error.message}`,
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getBrief', briefId },
|
|
||||||
error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const briefData = data as any;
|
|
||||||
return {
|
|
||||||
id: briefData.id,
|
|
||||||
accountId: briefData.account_id,
|
|
||||||
documentId: briefData.document_id,
|
|
||||||
status: briefData.status,
|
|
||||||
createdAt: briefData.created_at,
|
|
||||||
updatedAt: briefData.updated_at
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof TaskMasterError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
'Failed to fetch brief',
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getBrief', briefId },
|
|
||||||
error as Error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate that a user has access to an organization
|
|
||||||
*/
|
|
||||||
async validateOrgAccess(orgId: string): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
const org = await this.getOrganization(orgId);
|
|
||||||
return org !== null;
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error(`Failed to validate org access: ${error}`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate that a user has access to a brief
|
|
||||||
*/
|
|
||||||
async validateBriefAccess(briefId: string): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
const brief = await this.getBrief(briefId);
|
|
||||||
return brief !== null;
|
|
||||||
} catch (error) {
|
|
||||||
this.logger.error(`Failed to validate brief access: ${error}`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all tasks for a specific brief
|
|
||||||
*/
|
|
||||||
async getTasks(briefId: string): Promise<RemoteTask[]> {
|
|
||||||
try {
|
|
||||||
const { data, error } = await this.supabaseClient
|
|
||||||
.from('tasks')
|
|
||||||
.select(`
|
|
||||||
*,
|
|
||||||
document:document_id (
|
|
||||||
id,
|
|
||||||
document_name,
|
|
||||||
title,
|
|
||||||
description
|
|
||||||
)
|
|
||||||
`)
|
|
||||||
.eq('brief_id', briefId)
|
|
||||||
.order('position', { ascending: true })
|
|
||||||
.order('subtask_position', { ascending: true })
|
|
||||||
.order('created_at', { ascending: true });
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw new TaskMasterError(
|
|
||||||
`Failed to fetch tasks: ${error.message}`,
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getTasks', briefId },
|
|
||||||
error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data || data.length === 0) {
|
|
||||||
this.logger.debug(`No tasks found for brief ${briefId}`);
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map to our RemoteTask interface
|
|
||||||
return data.map((task: any) => ({
|
|
||||||
id: task.id,
|
|
||||||
briefId: task.brief_id,
|
|
||||||
documentId: task.document_id,
|
|
||||||
position: task.position,
|
|
||||||
subtaskPosition: task.subtask_position,
|
|
||||||
status: task.status,
|
|
||||||
createdAt: task.created_at,
|
|
||||||
updatedAt: task.updated_at,
|
|
||||||
document: task.document
|
|
||||||
? {
|
|
||||||
id: task.document.id,
|
|
||||||
document_name: task.document.document_name,
|
|
||||||
title: task.document.title,
|
|
||||||
description: task.document.description
|
|
||||||
}
|
|
||||||
: undefined
|
|
||||||
}));
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof TaskMasterError) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
throw new TaskMasterError(
|
|
||||||
'Failed to fetch tasks',
|
|
||||||
ERROR_CODES.API_ERROR,
|
|
||||||
{ operation: 'getTasks', briefId },
|
|
||||||
error as Error
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -3,12 +3,7 @@
|
|||||||
* Core service for task operations - handles business logic between storage and API
|
* Core service for task operations - handles business logic between storage and API
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type {
|
import type { Task, TaskFilter, TaskStatus } from '../types/index.js';
|
||||||
Task,
|
|
||||||
TaskFilter,
|
|
||||||
TaskStatus,
|
|
||||||
StorageType
|
|
||||||
} from '../types/index.js';
|
|
||||||
import type { IStorage } from '../interfaces/storage.interface.js';
|
import type { IStorage } from '../interfaces/storage.interface.js';
|
||||||
import { ConfigManager } from '../config/config-manager.js';
|
import { ConfigManager } from '../config/config-manager.js';
|
||||||
import { StorageFactory } from '../storage/storage-factory.js';
|
import { StorageFactory } from '../storage/storage-factory.js';
|
||||||
@@ -27,8 +22,8 @@ export interface TaskListResult {
|
|||||||
filtered: number;
|
filtered: number;
|
||||||
/** The tag these tasks belong to (only present if explicitly provided) */
|
/** The tag these tasks belong to (only present if explicitly provided) */
|
||||||
tag?: string;
|
tag?: string;
|
||||||
/** Storage type being used */
|
/** Storage type being used - includes 'auto' for automatic detection */
|
||||||
storageType: StorageType;
|
storageType: 'file' | 'api' | 'auto';
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -118,7 +113,7 @@ export class TaskService {
|
|||||||
total: rawTasks.length,
|
total: rawTasks.length,
|
||||||
filtered: filteredEntities.length,
|
filtered: filteredEntities.length,
|
||||||
tag: options.tag, // Only include tag if explicitly provided
|
tag: options.tag, // Only include tag if explicitly provided
|
||||||
storageType: this.getStorageType()
|
storageType: this.configManager.getStorageConfig().type
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
@@ -171,7 +166,7 @@ export class TaskService {
|
|||||||
byStatus: Record<TaskStatus, number>;
|
byStatus: Record<TaskStatus, number>;
|
||||||
withSubtasks: number;
|
withSubtasks: number;
|
||||||
blocked: number;
|
blocked: number;
|
||||||
storageType: StorageType;
|
storageType: 'file' | 'api' | 'auto';
|
||||||
}> {
|
}> {
|
||||||
const result = await this.getTaskList({
|
const result = await this.getTaskList({
|
||||||
tag,
|
tag,
|
||||||
@@ -339,12 +334,8 @@ export class TaskService {
|
|||||||
/**
|
/**
|
||||||
* Get current storage type
|
* Get current storage type
|
||||||
*/
|
*/
|
||||||
getStorageType(): StorageType {
|
getStorageType(): 'file' | 'api' | 'auto' {
|
||||||
// Prefer the runtime storage type if available to avoid exposing 'auto'
|
return this.configManager.getStorageConfig().type;
|
||||||
const s = this.storage as { getType?: () => 'file' | 'api' } | null;
|
|
||||||
const runtimeType = s?.getType?.();
|
|
||||||
return (runtimeType ??
|
|
||||||
this.configManager.getStorageConfig().type) as StorageType;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,29 +1,27 @@
|
|||||||
/**
|
/**
|
||||||
* @fileoverview API-based storage implementation using repository pattern
|
* @fileoverview API-based storage implementation for Hamster integration
|
||||||
* This provides storage via repository abstraction for flexibility
|
* This provides storage via REST API instead of local file system
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type {
|
import type {
|
||||||
IStorage,
|
IStorage,
|
||||||
StorageStats
|
StorageStats
|
||||||
} from '../interfaces/storage.interface.js';
|
} from '../interfaces/storage.interface.js';
|
||||||
import type { Task, TaskMetadata, TaskTag } from '../types/index.js';
|
import type { Task, TaskMetadata } from '../types/index.js';
|
||||||
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
||||||
import { TaskRepository } from '../repositories/task-repository.interface.js';
|
|
||||||
import { SupabaseTaskRepository } from '../repositories/supabase-task-repository.js';
|
|
||||||
import { SupabaseClient } from '@supabase/supabase-js';
|
|
||||||
import { AuthManager } from '../auth/auth-manager.js';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* API storage configuration
|
* API storage configuration
|
||||||
*/
|
*/
|
||||||
export interface ApiStorageConfig {
|
export interface ApiStorageConfig {
|
||||||
/** Supabase client instance */
|
/** API endpoint base URL */
|
||||||
supabaseClient?: SupabaseClient;
|
endpoint: string;
|
||||||
/** Custom repository implementation */
|
/** Access token for authentication */
|
||||||
repository?: TaskRepository;
|
accessToken: string;
|
||||||
/** Project ID for scoping */
|
/** Optional project ID */
|
||||||
projectId: string;
|
projectId?: string;
|
||||||
|
/** Request timeout in milliseconds */
|
||||||
|
timeout?: number;
|
||||||
/** Enable request retries */
|
/** Enable request retries */
|
||||||
enableRetry?: boolean;
|
enableRetry?: boolean;
|
||||||
/** Maximum retry attempts */
|
/** Maximum retry attempts */
|
||||||
@@ -31,58 +29,64 @@ export interface ApiStorageConfig {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* ApiStorage implementation using repository pattern
|
* API response wrapper
|
||||||
* Provides flexibility to swap between different backend implementations
|
*/
|
||||||
|
interface ApiResponse<T> {
|
||||||
|
success: boolean;
|
||||||
|
data?: T;
|
||||||
|
error?: string;
|
||||||
|
message?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ApiStorage implementation for Hamster integration
|
||||||
|
* Fetches and stores tasks via REST API
|
||||||
*/
|
*/
|
||||||
export class ApiStorage implements IStorage {
|
export class ApiStorage implements IStorage {
|
||||||
private readonly repository: TaskRepository;
|
private readonly config: Required<ApiStorageConfig>;
|
||||||
private readonly projectId: string;
|
|
||||||
private readonly enableRetry: boolean;
|
|
||||||
private readonly maxRetries: number;
|
|
||||||
private initialized = false;
|
private initialized = false;
|
||||||
private tagsCache: Map<string, TaskTag> = new Map();
|
|
||||||
|
|
||||||
constructor(config: ApiStorageConfig) {
|
constructor(config: ApiStorageConfig) {
|
||||||
this.validateConfig(config);
|
this.validateConfig(config);
|
||||||
|
|
||||||
// Use provided repository or create Supabase repository
|
this.config = {
|
||||||
if (config.repository) {
|
endpoint: config.endpoint.replace(/\/$/, ''), // Remove trailing slash
|
||||||
this.repository = config.repository;
|
accessToken: config.accessToken,
|
||||||
} else if (config.supabaseClient) {
|
projectId: config.projectId || 'default',
|
||||||
// TODO: SupabaseTaskRepository doesn't implement all TaskRepository methods yet
|
timeout: config.timeout || 30000,
|
||||||
// Cast for now until full implementation is complete
|
enableRetry: config.enableRetry ?? true,
|
||||||
this.repository = new SupabaseTaskRepository(
|
maxRetries: config.maxRetries || 3
|
||||||
config.supabaseClient
|
};
|
||||||
) as unknown as TaskRepository;
|
|
||||||
} else {
|
|
||||||
throw new TaskMasterError(
|
|
||||||
'Either repository or supabaseClient must be provided',
|
|
||||||
ERROR_CODES.MISSING_CONFIGURATION
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.projectId = config.projectId;
|
|
||||||
this.enableRetry = config.enableRetry ?? true;
|
|
||||||
this.maxRetries = config.maxRetries ?? 3;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validate API storage configuration
|
* Validate API storage configuration
|
||||||
*/
|
*/
|
||||||
private validateConfig(config: ApiStorageConfig): void {
|
private validateConfig(config: ApiStorageConfig): void {
|
||||||
if (!config.projectId) {
|
if (!config.endpoint) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Project ID is required for API storage',
|
'API endpoint is required for API storage',
|
||||||
ERROR_CODES.MISSING_CONFIGURATION
|
ERROR_CODES.MISSING_CONFIGURATION
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!config.repository && !config.supabaseClient) {
|
if (!config.accessToken) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Either repository or supabaseClient must be provided',
|
'Access token is required for API storage',
|
||||||
ERROR_CODES.MISSING_CONFIGURATION
|
ERROR_CODES.MISSING_CONFIGURATION
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Validate endpoint URL format
|
||||||
|
try {
|
||||||
|
new URL(config.endpoint);
|
||||||
|
} catch {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
'Invalid API endpoint URL',
|
||||||
|
ERROR_CODES.INVALID_INPUT,
|
||||||
|
{ endpoint: config.endpoint }
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -92,8 +96,8 @@ export class ApiStorage implements IStorage {
|
|||||||
if (this.initialized) return;
|
if (this.initialized) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Load initial tags
|
// Verify API connectivity
|
||||||
await this.loadTagsIntoCache();
|
await this.verifyConnection();
|
||||||
this.initialized = true;
|
this.initialized = true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
@@ -106,71 +110,39 @@ export class ApiStorage implements IStorage {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Load tags into cache
|
* Verify API connection
|
||||||
* In our API-based system, "tags" represent briefs
|
|
||||||
*/
|
*/
|
||||||
private async loadTagsIntoCache(): Promise<void> {
|
private async verifyConnection(): Promise<void> {
|
||||||
try {
|
const response = await this.makeRequest<{ status: string }>('/health');
|
||||||
const authManager = AuthManager.getInstance();
|
|
||||||
const context = authManager.getContext();
|
|
||||||
|
|
||||||
// If we have a selected brief, create a virtual "tag" for it
|
if (!response.success) {
|
||||||
if (context?.briefId) {
|
throw new Error(`API health check failed: ${response.error}`);
|
||||||
// Create a virtual tag representing the current brief
|
|
||||||
const briefTag: TaskTag = {
|
|
||||||
name: context.briefId,
|
|
||||||
tasks: [], // Will be populated when tasks are loaded
|
|
||||||
metadata: {
|
|
||||||
briefId: context.briefId,
|
|
||||||
briefName: context.briefName,
|
|
||||||
organizationId: context.orgId
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
this.tagsCache.clear();
|
|
||||||
this.tagsCache.set(context.briefId, briefTag);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// If no brief is selected, that's okay - user needs to select one first
|
|
||||||
console.debug('No brief selected, starting with empty cache');
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Load tasks from API
|
* Load tasks from API
|
||||||
* In our system, the tag parameter represents a brief ID
|
|
||||||
*/
|
*/
|
||||||
async loadTasks(tag?: string): Promise<Task[]> {
|
async loadTasks(tag?: string): Promise<Task[]> {
|
||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const authManager = AuthManager.getInstance();
|
const endpoint = tag
|
||||||
const context = authManager.getContext();
|
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
|
||||||
|
: `/projects/${this.config.projectId}/tasks`;
|
||||||
|
|
||||||
// If no brief is selected in context, throw an error
|
const response = await this.makeRequest<{ tasks: Task[] }>(endpoint);
|
||||||
if (!context?.briefId) {
|
|
||||||
throw new Error(
|
if (!response.success) {
|
||||||
'No brief selected. Please select a brief first using: tm context brief <brief-id>'
|
throw new Error(response.error || 'Failed to load tasks');
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Load tasks from the current brief context
|
return response.data?.tasks || [];
|
||||||
const tasks = await this.retryOperation(() =>
|
|
||||||
this.repository.getTasks(this.projectId)
|
|
||||||
);
|
|
||||||
|
|
||||||
// Update the tag cache with the loaded task IDs
|
|
||||||
const briefTag = this.tagsCache.get(context.briefId);
|
|
||||||
if (briefTag) {
|
|
||||||
briefTag.tasks = tasks.map((task) => task.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
return tasks;
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to load tasks from API',
|
'Failed to load tasks from API',
|
||||||
ERROR_CODES.STORAGE_ERROR,
|
ERROR_CODES.STORAGE_ERROR,
|
||||||
{ operation: 'loadTasks', tag, context: 'brief-based loading' },
|
{ operation: 'loadTasks', tag },
|
||||||
error as Error
|
error as Error
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -183,29 +155,15 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (tag) {
|
const endpoint = tag
|
||||||
// Update tag with task IDs
|
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
|
||||||
const tagData = this.tagsCache.get(tag) || {
|
: `/projects/${this.config.projectId}/tasks`;
|
||||||
name: tag,
|
|
||||||
tasks: [],
|
|
||||||
metadata: {}
|
|
||||||
};
|
|
||||||
tagData.tasks = tasks.map((t) => t.id);
|
|
||||||
|
|
||||||
// Save or update tag
|
const response = await this.makeRequest(endpoint, 'PUT', { tasks });
|
||||||
if (this.tagsCache.has(tag)) {
|
|
||||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
|
||||||
} else {
|
|
||||||
await this.repository.createTag(this.projectId, tagData);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.tagsCache.set(tag, tagData);
|
if (!response.success) {
|
||||||
|
throw new Error(response.error || 'Failed to save tasks');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save tasks using bulk operation
|
|
||||||
await this.retryOperation(() =>
|
|
||||||
this.repository.bulkCreateTasks(this.projectId, tasks)
|
|
||||||
);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to save tasks to API',
|
'Failed to save tasks to API',
|
||||||
@@ -223,17 +181,20 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (tag) {
|
const endpoint = tag
|
||||||
// Check if task is in tag
|
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
|
||||||
const tagData = this.tagsCache.get(tag);
|
: `/projects/${this.config.projectId}/tasks/${taskId}`;
|
||||||
if (!tagData || !tagData.tasks.includes(taskId)) {
|
|
||||||
|
const response = await this.makeRequest<{ task: Task }>(endpoint);
|
||||||
|
|
||||||
|
if (!response.success) {
|
||||||
|
if (response.error?.includes('not found')) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
throw new Error(response.error || 'Failed to load task');
|
||||||
}
|
}
|
||||||
|
|
||||||
return await this.retryOperation(() =>
|
return response.data?.task || null;
|
||||||
this.repository.getTask(this.projectId, taskId)
|
|
||||||
);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to load task from API',
|
'Failed to load task from API',
|
||||||
@@ -251,26 +212,14 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Check if task exists
|
const endpoint = tag
|
||||||
const existing = await this.repository.getTask(this.projectId, task.id);
|
? `/projects/${this.config.projectId}/tasks/${task.id}?tag=${encodeURIComponent(tag)}`
|
||||||
|
: `/projects/${this.config.projectId}/tasks/${task.id}`;
|
||||||
|
|
||||||
if (existing) {
|
const response = await this.makeRequest(endpoint, 'PUT', { task });
|
||||||
await this.retryOperation(() =>
|
|
||||||
this.repository.updateTask(this.projectId, task.id, task)
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
await this.retryOperation(() =>
|
|
||||||
this.repository.createTask(this.projectId, task)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update tag if specified
|
if (!response.success) {
|
||||||
if (tag) {
|
throw new Error(response.error || 'Failed to save task');
|
||||||
const tagData = this.tagsCache.get(tag);
|
|
||||||
if (tagData && !tagData.tasks.includes(task.id)) {
|
|
||||||
tagData.tasks.push(task.id);
|
|
||||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
@@ -289,17 +238,14 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await this.retryOperation(() =>
|
const endpoint = tag
|
||||||
this.repository.deleteTask(this.projectId, taskId)
|
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
|
||||||
);
|
: `/projects/${this.config.projectId}/tasks/${taskId}`;
|
||||||
|
|
||||||
// Remove from tag if specified
|
const response = await this.makeRequest(endpoint, 'DELETE');
|
||||||
if (tag) {
|
|
||||||
const tagData = this.tagsCache.get(tag);
|
if (!response.success) {
|
||||||
if (tagData) {
|
throw new Error(response.error || 'Failed to delete task');
|
||||||
tagData.tasks = tagData.tasks.filter((id) => id !== taskId);
|
|
||||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
@@ -312,24 +258,21 @@ export class ApiStorage implements IStorage {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* List available tags (briefs in our system)
|
* List available tags
|
||||||
*/
|
*/
|
||||||
async listTags(): Promise<string[]> {
|
async listTags(): Promise<string[]> {
|
||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const authManager = AuthManager.getInstance();
|
const response = await this.makeRequest<{ tags: string[] }>(
|
||||||
const context = authManager.getContext();
|
`/projects/${this.config.projectId}/tags`
|
||||||
|
);
|
||||||
|
|
||||||
// In our API-based system, we only have one "tag" at a time - the current brief
|
if (!response.success) {
|
||||||
if (context?.briefId) {
|
throw new Error(response.error || 'Failed to list tags');
|
||||||
// Ensure the current brief is in our cache
|
|
||||||
await this.loadTagsIntoCache();
|
|
||||||
return [context.briefId];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// No brief selected, return empty array
|
return response.data?.tags || [];
|
||||||
return [];
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to list tags from API',
|
'Failed to list tags from API',
|
||||||
@@ -347,15 +290,19 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (tag) {
|
const endpoint = tag
|
||||||
const tagData = this.tagsCache.get(tag);
|
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
|
||||||
return (tagData?.metadata as TaskMetadata) || null;
|
: `/projects/${this.config.projectId}/metadata`;
|
||||||
|
|
||||||
|
const response = await this.makeRequest<{ metadata: TaskMetadata }>(
|
||||||
|
endpoint
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.success) {
|
||||||
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return global metadata if no tag specified
|
return response.data?.metadata || null;
|
||||||
// This could be stored in a special system tag
|
|
||||||
const systemTag = await this.repository.getTag(this.projectId, '_system');
|
|
||||||
return (systemTag?.metadata as TaskMetadata) || null;
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to load metadata from API',
|
'Failed to load metadata from API',
|
||||||
@@ -373,38 +320,14 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (tag) {
|
const endpoint = tag
|
||||||
const tagData = this.tagsCache.get(tag) || {
|
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
|
||||||
name: tag,
|
: `/projects/${this.config.projectId}/metadata`;
|
||||||
tasks: [],
|
|
||||||
metadata: {}
|
|
||||||
};
|
|
||||||
tagData.metadata = metadata as any;
|
|
||||||
|
|
||||||
if (this.tagsCache.has(tag)) {
|
const response = await this.makeRequest(endpoint, 'PUT', { metadata });
|
||||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
|
||||||
} else {
|
|
||||||
await this.repository.createTag(this.projectId, tagData);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.tagsCache.set(tag, tagData);
|
if (!response.success) {
|
||||||
} else {
|
throw new Error(response.error || 'Failed to save metadata');
|
||||||
// Save to system tag
|
|
||||||
const systemTag: TaskTag = {
|
|
||||||
name: '_system',
|
|
||||||
tasks: [],
|
|
||||||
metadata: metadata as any
|
|
||||||
};
|
|
||||||
|
|
||||||
const existing = await this.repository.getTag(
|
|
||||||
this.projectId,
|
|
||||||
'_system'
|
|
||||||
);
|
|
||||||
if (existing) {
|
|
||||||
await this.repository.updateTag(this.projectId, '_system', systemTag);
|
|
||||||
} else {
|
|
||||||
await this.repository.createTag(this.projectId, systemTag);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
@@ -435,30 +358,14 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Use bulk create - repository should handle duplicates
|
// First load existing tasks
|
||||||
await this.retryOperation(() =>
|
const existingTasks = await this.loadTasks(tag);
|
||||||
this.repository.bulkCreateTasks(this.projectId, tasks)
|
|
||||||
);
|
|
||||||
|
|
||||||
// Update tag if specified
|
// Append new tasks
|
||||||
if (tag) {
|
const allTasks = [...existingTasks, ...tasks];
|
||||||
const tagData = this.tagsCache.get(tag) || {
|
|
||||||
name: tag,
|
|
||||||
tasks: [],
|
|
||||||
metadata: {}
|
|
||||||
};
|
|
||||||
|
|
||||||
const newTaskIds = tasks.map((t) => t.id);
|
// Save all tasks
|
||||||
tagData.tasks = [...new Set([...tagData.tasks, ...newTaskIds])];
|
await this.saveTasks(allTasks, tag);
|
||||||
|
|
||||||
if (this.tagsCache.has(tag)) {
|
|
||||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
|
||||||
} else {
|
|
||||||
await this.repository.createTag(this.projectId, tagData);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.tagsCache.set(tag, tagData);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to append tasks to API',
|
'Failed to append tasks to API',
|
||||||
@@ -480,9 +387,18 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await this.retryOperation(() =>
|
// Load the task
|
||||||
this.repository.updateTask(this.projectId, taskId, updates)
|
const task = await this.loadTask(taskId, tag);
|
||||||
);
|
|
||||||
|
if (!task) {
|
||||||
|
throw new Error(`Task ${taskId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge updates
|
||||||
|
const updatedTask = { ...task, ...updates, id: taskId };
|
||||||
|
|
||||||
|
// Save updated task
|
||||||
|
await this.saveTask(updatedTask, tag);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to update task via API',
|
'Failed to update task via API',
|
||||||
@@ -507,11 +423,14 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await this.retryOperation(() =>
|
const response = await this.makeRequest(
|
||||||
this.repository.deleteTag(this.projectId, tag)
|
`/projects/${this.config.projectId}/tags/${encodeURIComponent(tag)}`,
|
||||||
|
'DELETE'
|
||||||
);
|
);
|
||||||
|
|
||||||
this.tagsCache.delete(tag);
|
if (!response.success) {
|
||||||
|
throw new Error(response.error || 'Failed to delete tag');
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to delete tag via API',
|
'Failed to delete tag via API',
|
||||||
@@ -529,21 +448,15 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const tagData = this.tagsCache.get(oldTag);
|
const response = await this.makeRequest(
|
||||||
if (!tagData) {
|
`/projects/${this.config.projectId}/tags/${encodeURIComponent(oldTag)}/rename`,
|
||||||
throw new Error(`Tag ${oldTag} not found`);
|
'POST',
|
||||||
|
{ newTag }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.success) {
|
||||||
|
throw new Error(response.error || 'Failed to rename tag');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create new tag with same data
|
|
||||||
const newTagData = { ...tagData, name: newTag };
|
|
||||||
await this.repository.createTag(this.projectId, newTagData);
|
|
||||||
|
|
||||||
// Delete old tag
|
|
||||||
await this.repository.deleteTag(this.projectId, oldTag);
|
|
||||||
|
|
||||||
// Update cache
|
|
||||||
this.tagsCache.delete(oldTag);
|
|
||||||
this.tagsCache.set(newTag, newTagData);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to rename tag via API',
|
'Failed to rename tag via API',
|
||||||
@@ -561,17 +474,15 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const sourceData = this.tagsCache.get(sourceTag);
|
const response = await this.makeRequest(
|
||||||
if (!sourceData) {
|
`/projects/${this.config.projectId}/tags/${encodeURIComponent(sourceTag)}/copy`,
|
||||||
throw new Error(`Source tag ${sourceTag} not found`);
|
'POST',
|
||||||
|
{ targetTag }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.success) {
|
||||||
|
throw new Error(response.error || 'Failed to copy tag');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create new tag with copied data
|
|
||||||
const targetData = { ...sourceData, name: targetTag };
|
|
||||||
await this.repository.createTag(this.projectId, targetData);
|
|
||||||
|
|
||||||
// Update cache
|
|
||||||
this.tagsCache.set(targetTag, targetData);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to copy tag via API',
|
'Failed to copy tag via API',
|
||||||
@@ -589,22 +500,24 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const tasks = await this.repository.getTasks(this.projectId);
|
const response = await this.makeRequest<{
|
||||||
const tags = await this.repository.getTags(this.projectId);
|
stats: StorageStats;
|
||||||
|
}>(`/projects/${this.config.projectId}/stats`);
|
||||||
|
|
||||||
const tagStats = tags.map((tag) => ({
|
if (!response.success) {
|
||||||
tag: tag.name,
|
throw new Error(response.error || 'Failed to get stats');
|
||||||
taskCount: tag.tasks.length,
|
}
|
||||||
lastModified: new Date().toISOString() // TODO: Get actual last modified from tag data
|
|
||||||
}));
|
|
||||||
|
|
||||||
return {
|
// Return stats or default values
|
||||||
totalTasks: tasks.length,
|
return (
|
||||||
totalTags: tags.length,
|
response.data?.stats || {
|
||||||
storageSize: 0, // Not applicable for API storage
|
totalTasks: 0,
|
||||||
lastModified: new Date().toISOString(),
|
totalTags: 0,
|
||||||
tagStats
|
storageSize: 0,
|
||||||
};
|
lastModified: new Date().toISOString(),
|
||||||
|
tagStats: []
|
||||||
|
}
|
||||||
|
);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to get stats from API',
|
'Failed to get stats from API',
|
||||||
@@ -622,15 +535,16 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Export all data
|
const response = await this.makeRequest<{ backupId: string }>(
|
||||||
await this.repository.getTasks(this.projectId);
|
`/projects/${this.config.projectId}/backup`,
|
||||||
await this.repository.getTags(this.projectId);
|
'POST'
|
||||||
|
);
|
||||||
|
|
||||||
// TODO: In a real implementation, this would:
|
if (!response.success) {
|
||||||
// 1. Create backup data structure with tasks and tags
|
throw new Error(response.error || 'Failed to create backup');
|
||||||
// 2. Save the backup to a storage service
|
}
|
||||||
// For now, return a backup identifier
|
|
||||||
return `backup-${this.projectId}-${Date.now()}`;
|
return response.data?.backupId || 'unknown';
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to create backup via API',
|
'Failed to create backup via API',
|
||||||
@@ -644,16 +558,27 @@ export class ApiStorage implements IStorage {
|
|||||||
/**
|
/**
|
||||||
* Restore from backup
|
* Restore from backup
|
||||||
*/
|
*/
|
||||||
async restore(backupId: string): Promise<void> {
|
async restore(backupPath: string): Promise<void> {
|
||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
// This would restore from a backup service
|
try {
|
||||||
// Implementation depends on backup strategy
|
const response = await this.makeRequest(
|
||||||
throw new TaskMasterError(
|
`/projects/${this.config.projectId}/restore`,
|
||||||
'Restore not implemented for API storage',
|
'POST',
|
||||||
ERROR_CODES.NOT_IMPLEMENTED,
|
{ backupId: backupPath }
|
||||||
{ operation: 'restore', backupId }
|
);
|
||||||
);
|
|
||||||
|
if (!response.success) {
|
||||||
|
throw new Error(response.error || 'Failed to restore backup');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
'Failed to restore backup via API',
|
||||||
|
ERROR_CODES.STORAGE_ERROR,
|
||||||
|
{ operation: 'restore', backupPath },
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -663,23 +588,14 @@ export class ApiStorage implements IStorage {
|
|||||||
await this.ensureInitialized();
|
await this.ensureInitialized();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Delete all tasks
|
const response = await this.makeRequest(
|
||||||
const tasks = await this.repository.getTasks(this.projectId);
|
`/projects/${this.config.projectId}/clear`,
|
||||||
if (tasks.length > 0) {
|
'POST'
|
||||||
await this.repository.bulkDeleteTasks(
|
);
|
||||||
this.projectId,
|
|
||||||
tasks.map((t) => t.id)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete all tags
|
if (!response.success) {
|
||||||
const tags = await this.repository.getTags(this.projectId);
|
throw new Error(response.error || 'Failed to clear data');
|
||||||
for (const tag of tags) {
|
|
||||||
await this.repository.deleteTag(this.projectId, tag.name);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clear cache
|
|
||||||
this.tagsCache.clear();
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to clear data via API',
|
'Failed to clear data via API',
|
||||||
@@ -695,7 +611,6 @@ export class ApiStorage implements IStorage {
|
|||||||
*/
|
*/
|
||||||
async close(): Promise<void> {
|
async close(): Promise<void> {
|
||||||
this.initialized = false;
|
this.initialized = false;
|
||||||
this.tagsCache.clear();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -708,21 +623,102 @@ export class ApiStorage implements IStorage {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Retry an operation with exponential backoff
|
* Make HTTP request to API
|
||||||
*/
|
*/
|
||||||
private async retryOperation<T>(
|
private async makeRequest<T>(
|
||||||
operation: () => Promise<T>,
|
path: string,
|
||||||
attempt: number = 1
|
method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',
|
||||||
): Promise<T> {
|
body?: unknown
|
||||||
|
): Promise<ApiResponse<T>> {
|
||||||
|
const url = `${this.config.endpoint}${path}`;
|
||||||
|
const controller = new AbortController();
|
||||||
|
const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
return await operation();
|
const options: RequestInit = {
|
||||||
} catch (error) {
|
method,
|
||||||
if (this.enableRetry && attempt < this.maxRetries) {
|
headers: {
|
||||||
const delay = Math.pow(2, attempt) * 1000;
|
Authorization: `Bearer ${this.config.accessToken}`,
|
||||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
'Content-Type': 'application/json',
|
||||||
return this.retryOperation(operation, attempt + 1);
|
Accept: 'application/json'
|
||||||
|
},
|
||||||
|
signal: controller.signal
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body && (method === 'POST' || method === 'PUT')) {
|
||||||
|
options.body = JSON.stringify(body);
|
||||||
}
|
}
|
||||||
throw error;
|
|
||||||
|
let lastError: Error | null = null;
|
||||||
|
let attempt = 0;
|
||||||
|
|
||||||
|
while (attempt < this.config.maxRetries) {
|
||||||
|
attempt++;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, options);
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
return { success: true, data: data as T };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle specific error codes
|
||||||
|
if (response.status === 401) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Authentication failed - check access token'
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.status === 404) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Resource not found'
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.status === 429) {
|
||||||
|
// Rate limited - retry with backoff
|
||||||
|
if (this.config.enableRetry && attempt < this.config.maxRetries) {
|
||||||
|
await this.delay(Math.pow(2, attempt) * 1000);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const errorData = data as any;
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error:
|
||||||
|
errorData.error ||
|
||||||
|
errorData.message ||
|
||||||
|
`HTTP ${response.status}: ${response.statusText}`
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
lastError = error as Error;
|
||||||
|
|
||||||
|
// Retry on network errors
|
||||||
|
if (this.config.enableRetry && attempt < this.config.maxRetries) {
|
||||||
|
await this.delay(Math.pow(2, attempt) * 1000);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// All retries exhausted
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: lastError?.message || 'Request failed after retries'
|
||||||
|
};
|
||||||
|
} finally {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delay helper for retries
|
||||||
|
*/
|
||||||
|
private delay(ms: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ import { ApiStorage } from './api-storage.js';
|
|||||||
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
||||||
import { AuthManager } from '../auth/auth-manager.js';
|
import { AuthManager } from '../auth/auth-manager.js';
|
||||||
import { getLogger } from '../logger/index.js';
|
import { getLogger } from '../logger/index.js';
|
||||||
import { SupabaseAuthClient } from '../clients/supabase-client.js';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Factory for creating storage implementations based on configuration
|
* Factory for creating storage implementations based on configuration
|
||||||
@@ -149,13 +148,29 @@ export class StorageFactory {
|
|||||||
* Create API storage implementation
|
* Create API storage implementation
|
||||||
*/
|
*/
|
||||||
private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
|
private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
|
||||||
// Use our SupabaseAuthClient instead of creating a raw Supabase client
|
const { apiEndpoint, apiAccessToken } = config.storage || {};
|
||||||
const supabaseAuthClient = new SupabaseAuthClient();
|
|
||||||
const supabaseClient = supabaseAuthClient.getClient();
|
if (!apiEndpoint) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
'API endpoint is required for API storage',
|
||||||
|
ERROR_CODES.MISSING_CONFIGURATION,
|
||||||
|
{ storageType: 'api' }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!apiAccessToken) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
'API access token is required for API storage',
|
||||||
|
ERROR_CODES.MISSING_CONFIGURATION,
|
||||||
|
{ storageType: 'api' }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
return new ApiStorage({
|
return new ApiStorage({
|
||||||
supabaseClient,
|
endpoint: apiEndpoint,
|
||||||
projectId: config.projectPath || '',
|
accessToken: apiAccessToken,
|
||||||
|
projectId: config.projectPath,
|
||||||
|
timeout: config.retry?.requestTimeout,
|
||||||
enableRetry: config.retry?.retryOnNetworkError,
|
enableRetry: config.retry?.retryOnNetworkError,
|
||||||
maxRetries: config.retry?.retryAttempts
|
maxRetries: config.retry?.retryAttempts
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -10,12 +10,8 @@ import {
|
|||||||
} from './services/task-service.js';
|
} from './services/task-service.js';
|
||||||
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
|
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
|
||||||
import type { IConfiguration } from './interfaces/configuration.interface.js';
|
import type { IConfiguration } from './interfaces/configuration.interface.js';
|
||||||
import type {
|
import type { Task, TaskStatus, TaskFilter } from './types/index.js';
|
||||||
Task,
|
import { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
|
||||||
TaskStatus,
|
|
||||||
TaskFilter,
|
|
||||||
StorageType
|
|
||||||
} from './types/index.js';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Options for creating TaskMasterCore instance
|
* Options for creating TaskMasterCore instance
|
||||||
@@ -23,6 +19,7 @@ import type {
|
|||||||
export interface TaskMasterCoreOptions {
|
export interface TaskMasterCoreOptions {
|
||||||
projectPath: string;
|
projectPath: string;
|
||||||
configuration?: Partial<IConfiguration>;
|
configuration?: Partial<IConfiguration>;
|
||||||
|
workflow?: Partial<WorkflowServiceConfig>;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -38,6 +35,7 @@ export type { GetTaskListOptions } from './services/task-service.js';
|
|||||||
export class TaskMasterCore {
|
export class TaskMasterCore {
|
||||||
private configManager: ConfigManager;
|
private configManager: ConfigManager;
|
||||||
private taskService: TaskService;
|
private taskService: TaskService;
|
||||||
|
private workflowService: WorkflowService;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create and initialize a new TaskMasterCore instance
|
* Create and initialize a new TaskMasterCore instance
|
||||||
@@ -60,6 +58,7 @@ export class TaskMasterCore {
|
|||||||
// Services will be initialized in the initialize() method
|
// Services will be initialized in the initialize() method
|
||||||
this.configManager = null as any;
|
this.configManager = null as any;
|
||||||
this.taskService = null as any;
|
this.taskService = null as any;
|
||||||
|
this.workflowService = null as any;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -86,6 +85,28 @@ export class TaskMasterCore {
|
|||||||
// Create task service
|
// Create task service
|
||||||
this.taskService = new TaskService(this.configManager);
|
this.taskService = new TaskService(this.configManager);
|
||||||
await this.taskService.initialize();
|
await this.taskService.initialize();
|
||||||
|
|
||||||
|
// Create workflow service
|
||||||
|
const workflowConfig: WorkflowServiceConfig = {
|
||||||
|
projectRoot: options.projectPath,
|
||||||
|
...options.workflow
|
||||||
|
};
|
||||||
|
|
||||||
|
// Pass task retrieval function to workflow service
|
||||||
|
this.workflowService = new WorkflowService(
|
||||||
|
workflowConfig,
|
||||||
|
async (taskId: string) => {
|
||||||
|
const task = await this.getTask(taskId);
|
||||||
|
if (!task) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
`Task ${taskId} not found`,
|
||||||
|
ERROR_CODES.TASK_NOT_FOUND
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return task;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
await this.workflowService.initialize();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new TaskMasterError(
|
throw new TaskMasterError(
|
||||||
'Failed to initialize TaskMasterCore',
|
'Failed to initialize TaskMasterCore',
|
||||||
@@ -157,7 +178,7 @@ export class TaskMasterCore {
|
|||||||
/**
|
/**
|
||||||
* Get current storage type
|
* Get current storage type
|
||||||
*/
|
*/
|
||||||
getStorageType(): StorageType {
|
getStorageType(): 'file' | 'api' | 'auto' {
|
||||||
return this.taskService.getStorageType();
|
return this.taskService.getStorageType();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -175,11 +196,21 @@ export class TaskMasterCore {
|
|||||||
await this.configManager.setActiveTag(tag);
|
await this.configManager.setActiveTag(tag);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get workflow service for workflow operations
|
||||||
|
*/
|
||||||
|
get workflow(): WorkflowService {
|
||||||
|
return this.workflowService;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Close and cleanup resources
|
* Close and cleanup resources
|
||||||
*/
|
*/
|
||||||
async close(): Promise<void> {
|
async close(): Promise<void> {
|
||||||
// TaskService handles storage cleanup internally
|
// TaskService handles storage cleanup internally
|
||||||
|
if (this.workflowService) {
|
||||||
|
await this.workflowService.dispose();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,491 +0,0 @@
|
|||||||
export type Json =
|
|
||||||
| string
|
|
||||||
| number
|
|
||||||
| boolean
|
|
||||||
| null
|
|
||||||
| { [key: string]: Json | undefined }
|
|
||||||
| Json[];
|
|
||||||
|
|
||||||
export type Database = {
|
|
||||||
public: {
|
|
||||||
Tables: {
|
|
||||||
accounts: {
|
|
||||||
Row: {
|
|
||||||
created_at: string | null;
|
|
||||||
created_by: string | null;
|
|
||||||
email: string | null;
|
|
||||||
id: string;
|
|
||||||
is_personal_account: boolean;
|
|
||||||
name: string;
|
|
||||||
picture_url: string | null;
|
|
||||||
primary_owner_user_id: string;
|
|
||||||
public_data: Json;
|
|
||||||
slug: string | null;
|
|
||||||
updated_at: string | null;
|
|
||||||
updated_by: string | null;
|
|
||||||
};
|
|
||||||
Insert: {
|
|
||||||
created_at?: string | null;
|
|
||||||
created_by?: string | null;
|
|
||||||
email?: string | null;
|
|
||||||
id?: string;
|
|
||||||
is_personal_account?: boolean;
|
|
||||||
name: string;
|
|
||||||
picture_url?: string | null;
|
|
||||||
primary_owner_user_id?: string;
|
|
||||||
public_data?: Json;
|
|
||||||
slug?: string | null;
|
|
||||||
updated_at?: string | null;
|
|
||||||
updated_by?: string | null;
|
|
||||||
};
|
|
||||||
Update: {
|
|
||||||
created_at?: string | null;
|
|
||||||
created_by?: string | null;
|
|
||||||
email?: string | null;
|
|
||||||
id?: string;
|
|
||||||
is_personal_account?: boolean;
|
|
||||||
name?: string;
|
|
||||||
picture_url?: string | null;
|
|
||||||
primary_owner_user_id?: string;
|
|
||||||
public_data?: Json;
|
|
||||||
slug?: string | null;
|
|
||||||
updated_at?: string | null;
|
|
||||||
updated_by?: string | null;
|
|
||||||
};
|
|
||||||
Relationships: [];
|
|
||||||
};
|
|
||||||
brief: {
|
|
||||||
Row: {
|
|
||||||
account_id: string;
|
|
||||||
created_at: string;
|
|
||||||
created_by: string;
|
|
||||||
document_id: string;
|
|
||||||
id: string;
|
|
||||||
plan_generation_completed_at: string | null;
|
|
||||||
plan_generation_error: string | null;
|
|
||||||
plan_generation_started_at: string | null;
|
|
||||||
plan_generation_status: Database['public']['Enums']['plan_generation_status'];
|
|
||||||
status: Database['public']['Enums']['brief_status'];
|
|
||||||
updated_at: string;
|
|
||||||
};
|
|
||||||
Insert: {
|
|
||||||
account_id: string;
|
|
||||||
created_at?: string;
|
|
||||||
created_by: string;
|
|
||||||
document_id: string;
|
|
||||||
id?: string;
|
|
||||||
plan_generation_completed_at?: string | null;
|
|
||||||
plan_generation_error?: string | null;
|
|
||||||
plan_generation_started_at?: string | null;
|
|
||||||
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
|
|
||||||
status?: Database['public']['Enums']['brief_status'];
|
|
||||||
updated_at?: string;
|
|
||||||
};
|
|
||||||
Update: {
|
|
||||||
account_id?: string;
|
|
||||||
created_at?: string;
|
|
||||||
created_by?: string;
|
|
||||||
document_id?: string;
|
|
||||||
id?: string;
|
|
||||||
plan_generation_completed_at?: string | null;
|
|
||||||
plan_generation_error?: string | null;
|
|
||||||
plan_generation_started_at?: string | null;
|
|
||||||
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
|
|
||||||
status?: Database['public']['Enums']['brief_status'];
|
|
||||||
updated_at?: string;
|
|
||||||
};
|
|
||||||
Relationships: [
|
|
||||||
{
|
|
||||||
foreignKeyName: 'brief_account_id_fkey';
|
|
||||||
columns: ['account_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'accounts';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
},
|
|
||||||
{
|
|
||||||
foreignKeyName: 'brief_document_id_fkey';
|
|
||||||
columns: ['document_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'document';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
}
|
|
||||||
];
|
|
||||||
};
|
|
||||||
document: {
|
|
||||||
Row: {
|
|
||||||
account_id: string;
|
|
||||||
created_at: string;
|
|
||||||
created_by: string;
|
|
||||||
description: string | null;
|
|
||||||
document_name: string;
|
|
||||||
document_type: Database['public']['Enums']['document_type'];
|
|
||||||
file_path: string | null;
|
|
||||||
file_size: number | null;
|
|
||||||
id: string;
|
|
||||||
metadata: Json | null;
|
|
||||||
mime_type: string | null;
|
|
||||||
processed_at: string | null;
|
|
||||||
processing_error: string | null;
|
|
||||||
processing_status:
|
|
||||||
| Database['public']['Enums']['document_processing_status']
|
|
||||||
| null;
|
|
||||||
source_id: string | null;
|
|
||||||
source_type: string | null;
|
|
||||||
title: string;
|
|
||||||
updated_at: string;
|
|
||||||
};
|
|
||||||
Insert: {
|
|
||||||
account_id: string;
|
|
||||||
created_at?: string;
|
|
||||||
created_by: string;
|
|
||||||
description?: string | null;
|
|
||||||
document_name: string;
|
|
||||||
document_type?: Database['public']['Enums']['document_type'];
|
|
||||||
file_path?: string | null;
|
|
||||||
file_size?: number | null;
|
|
||||||
id?: string;
|
|
||||||
metadata?: Json | null;
|
|
||||||
mime_type?: string | null;
|
|
||||||
processed_at?: string | null;
|
|
||||||
processing_error?: string | null;
|
|
||||||
processing_status?:
|
|
||||||
| Database['public']['Enums']['document_processing_status']
|
|
||||||
| null;
|
|
||||||
source_id?: string | null;
|
|
||||||
source_type?: string | null;
|
|
||||||
title: string;
|
|
||||||
updated_at?: string;
|
|
||||||
};
|
|
||||||
Update: {
|
|
||||||
account_id?: string;
|
|
||||||
created_at?: string;
|
|
||||||
created_by?: string;
|
|
||||||
description?: string | null;
|
|
||||||
document_name?: string;
|
|
||||||
document_type?: Database['public']['Enums']['document_type'];
|
|
||||||
file_path?: string | null;
|
|
||||||
file_size?: number | null;
|
|
||||||
id?: string;
|
|
||||||
metadata?: Json | null;
|
|
||||||
mime_type?: string | null;
|
|
||||||
processed_at?: string | null;
|
|
||||||
processing_error?: string | null;
|
|
||||||
processing_status?:
|
|
||||||
| Database['public']['Enums']['document_processing_status']
|
|
||||||
| null;
|
|
||||||
source_id?: string | null;
|
|
||||||
source_type?: string | null;
|
|
||||||
title?: string;
|
|
||||||
updated_at?: string;
|
|
||||||
};
|
|
||||||
Relationships: [
|
|
||||||
{
|
|
||||||
foreignKeyName: 'document_account_id_fkey';
|
|
||||||
columns: ['account_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'accounts';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
}
|
|
||||||
];
|
|
||||||
};
|
|
||||||
tasks: {
|
|
||||||
Row: {
|
|
||||||
account_id: string;
|
|
||||||
actual_hours: number;
|
|
||||||
assignee_id: string | null;
|
|
||||||
brief_id: string | null;
|
|
||||||
completed_subtasks: number;
|
|
||||||
complexity: number | null;
|
|
||||||
created_at: string;
|
|
||||||
created_by: string;
|
|
||||||
description: string | null;
|
|
||||||
display_id: string | null;
|
|
||||||
document_id: string | null;
|
|
||||||
due_date: string | null;
|
|
||||||
estimated_hours: number | null;
|
|
||||||
id: string;
|
|
||||||
metadata: Json;
|
|
||||||
parent_task_id: string | null;
|
|
||||||
position: number;
|
|
||||||
priority: Database['public']['Enums']['task_priority'];
|
|
||||||
status: Database['public']['Enums']['task_status'];
|
|
||||||
subtask_position: number;
|
|
||||||
title: string;
|
|
||||||
total_subtasks: number;
|
|
||||||
updated_at: string;
|
|
||||||
updated_by: string;
|
|
||||||
};
|
|
||||||
Insert: {
|
|
||||||
account_id: string;
|
|
||||||
actual_hours?: number;
|
|
||||||
assignee_id?: string | null;
|
|
||||||
brief_id?: string | null;
|
|
||||||
completed_subtasks?: number;
|
|
||||||
complexity?: number | null;
|
|
||||||
created_at?: string;
|
|
||||||
created_by: string;
|
|
||||||
description?: string | null;
|
|
||||||
display_id?: string | null;
|
|
||||||
document_id?: string | null;
|
|
||||||
due_date?: string | null;
|
|
||||||
estimated_hours?: number | null;
|
|
||||||
id?: string;
|
|
||||||
metadata?: Json;
|
|
||||||
parent_task_id?: string | null;
|
|
||||||
position?: number;
|
|
||||||
priority?: Database['public']['Enums']['task_priority'];
|
|
||||||
status?: Database['public']['Enums']['task_status'];
|
|
||||||
subtask_position?: number;
|
|
||||||
title: string;
|
|
||||||
total_subtasks?: number;
|
|
||||||
updated_at?: string;
|
|
||||||
updated_by: string;
|
|
||||||
};
|
|
||||||
Update: {
|
|
||||||
account_id?: string;
|
|
||||||
actual_hours?: number;
|
|
||||||
assignee_id?: string | null;
|
|
||||||
brief_id?: string | null;
|
|
||||||
completed_subtasks?: number;
|
|
||||||
complexity?: number | null;
|
|
||||||
created_at?: string;
|
|
||||||
created_by?: string;
|
|
||||||
description?: string | null;
|
|
||||||
display_id?: string | null;
|
|
||||||
document_id?: string | null;
|
|
||||||
due_date?: string | null;
|
|
||||||
estimated_hours?: number | null;
|
|
||||||
id?: string;
|
|
||||||
metadata?: Json;
|
|
||||||
parent_task_id?: string | null;
|
|
||||||
position?: number;
|
|
||||||
priority?: Database['public']['Enums']['task_priority'];
|
|
||||||
status?: Database['public']['Enums']['task_status'];
|
|
||||||
subtask_position?: number;
|
|
||||||
title?: string;
|
|
||||||
total_subtasks?: number;
|
|
||||||
updated_at?: string;
|
|
||||||
updated_by?: string;
|
|
||||||
};
|
|
||||||
Relationships: [
|
|
||||||
{
|
|
||||||
foreignKeyName: 'tasks_account_id_fkey';
|
|
||||||
columns: ['account_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'accounts';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
},
|
|
||||||
{
|
|
||||||
foreignKeyName: 'tasks_brief_id_fkey';
|
|
||||||
columns: ['brief_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'brief';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
},
|
|
||||||
{
|
|
||||||
foreignKeyName: 'tasks_document_id_fkey';
|
|
||||||
columns: ['document_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'document';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
},
|
|
||||||
{
|
|
||||||
foreignKeyName: 'tasks_parent_task_id_fkey';
|
|
||||||
columns: ['parent_task_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'tasks';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
}
|
|
||||||
];
|
|
||||||
};
|
|
||||||
task_dependencies: {
|
|
||||||
Row: {
|
|
||||||
account_id: string;
|
|
||||||
created_at: string;
|
|
||||||
depends_on_task_id: string;
|
|
||||||
id: string;
|
|
||||||
task_id: string;
|
|
||||||
};
|
|
||||||
Insert: {
|
|
||||||
account_id: string;
|
|
||||||
created_at?: string;
|
|
||||||
depends_on_task_id: string;
|
|
||||||
id?: string;
|
|
||||||
task_id: string;
|
|
||||||
};
|
|
||||||
Update: {
|
|
||||||
account_id?: string;
|
|
||||||
created_at?: string;
|
|
||||||
depends_on_task_id?: string;
|
|
||||||
id?: string;
|
|
||||||
task_id?: string;
|
|
||||||
};
|
|
||||||
Relationships: [
|
|
||||||
{
|
|
||||||
foreignKeyName: 'task_dependencies_account_id_fkey';
|
|
||||||
columns: ['account_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'accounts';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
},
|
|
||||||
{
|
|
||||||
foreignKeyName: 'task_dependencies_depends_on_task_id_fkey';
|
|
||||||
columns: ['depends_on_task_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'tasks';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
},
|
|
||||||
{
|
|
||||||
foreignKeyName: 'task_dependencies_task_id_fkey';
|
|
||||||
columns: ['task_id'];
|
|
||||||
isOneToOne: false;
|
|
||||||
referencedRelation: 'tasks';
|
|
||||||
referencedColumns: ['id'];
|
|
||||||
}
|
|
||||||
];
|
|
||||||
};
|
|
||||||
user_accounts: {
|
|
||||||
Row: {
|
|
||||||
id: string | null;
|
|
||||||
name: string | null;
|
|
||||||
picture_url: string | null;
|
|
||||||
role: string | null;
|
|
||||||
slug: string | null;
|
|
||||||
};
|
|
||||||
Insert: {
|
|
||||||
id?: string | null;
|
|
||||||
name?: string | null;
|
|
||||||
picture_url?: string | null;
|
|
||||||
role?: string | null;
|
|
||||||
slug?: string | null;
|
|
||||||
};
|
|
||||||
Update: {
|
|
||||||
id?: string | null;
|
|
||||||
name?: string | null;
|
|
||||||
picture_url?: string | null;
|
|
||||||
role?: string | null;
|
|
||||||
slug?: string | null;
|
|
||||||
};
|
|
||||||
Relationships: [];
|
|
||||||
};
|
|
||||||
};
|
|
||||||
Views: {
|
|
||||||
[_ in never]: never;
|
|
||||||
};
|
|
||||||
Functions: {
|
|
||||||
[_ in never]: never;
|
|
||||||
};
|
|
||||||
Enums: {
|
|
||||||
brief_status:
|
|
||||||
| 'draft'
|
|
||||||
| 'refining'
|
|
||||||
| 'aligned'
|
|
||||||
| 'delivering'
|
|
||||||
| 'delivered'
|
|
||||||
| 'done'
|
|
||||||
| 'archived';
|
|
||||||
document_processing_status: 'pending' | 'processing' | 'ready' | 'failed';
|
|
||||||
document_type:
|
|
||||||
| 'brief'
|
|
||||||
| 'blueprint'
|
|
||||||
| 'file'
|
|
||||||
| 'note'
|
|
||||||
| 'transcript'
|
|
||||||
| 'generated_plan'
|
|
||||||
| 'generated_task'
|
|
||||||
| 'generated_summary'
|
|
||||||
| 'method'
|
|
||||||
| 'task';
|
|
||||||
plan_generation_status:
|
|
||||||
| 'not_started'
|
|
||||||
| 'generating'
|
|
||||||
| 'completed'
|
|
||||||
| 'failed';
|
|
||||||
task_priority: 'low' | 'medium' | 'high' | 'urgent';
|
|
||||||
task_status: 'todo' | 'in_progress' | 'done';
|
|
||||||
};
|
|
||||||
CompositeTypes: {
|
|
||||||
[_ in never]: never;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
export type Tables<
|
|
||||||
PublicTableNameOrOptions extends
|
|
||||||
| keyof (Database['public']['Tables'] & Database['public']['Views'])
|
|
||||||
| { schema: keyof Database },
|
|
||||||
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
|
|
||||||
? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
|
|
||||||
Database[PublicTableNameOrOptions['schema']]['Views'])
|
|
||||||
: never = never
|
|
||||||
> = PublicTableNameOrOptions extends { schema: keyof Database }
|
|
||||||
? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
|
|
||||||
Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
|
|
||||||
Row: infer R;
|
|
||||||
}
|
|
||||||
? R
|
|
||||||
: never
|
|
||||||
: PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
|
|
||||||
Database['public']['Views'])
|
|
||||||
? (Database['public']['Tables'] &
|
|
||||||
Database['public']['Views'])[PublicTableNameOrOptions] extends {
|
|
||||||
Row: infer R;
|
|
||||||
}
|
|
||||||
? R
|
|
||||||
: never
|
|
||||||
: never;
|
|
||||||
|
|
||||||
export type TablesInsert<
|
|
||||||
PublicTableNameOrOptions extends
|
|
||||||
| keyof Database['public']['Tables']
|
|
||||||
| { schema: keyof Database },
|
|
||||||
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
|
|
||||||
? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
|
|
||||||
: never = never
|
|
||||||
> = PublicTableNameOrOptions extends { schema: keyof Database }
|
|
||||||
? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
|
|
||||||
Insert: infer I;
|
|
||||||
}
|
|
||||||
? I
|
|
||||||
: never
|
|
||||||
: PublicTableNameOrOptions extends keyof Database['public']['Tables']
|
|
||||||
? Database['public']['Tables'][PublicTableNameOrOptions] extends {
|
|
||||||
Insert: infer I;
|
|
||||||
}
|
|
||||||
? I
|
|
||||||
: never
|
|
||||||
: never;
|
|
||||||
|
|
||||||
export type TablesUpdate<
|
|
||||||
PublicTableNameOrOptions extends
|
|
||||||
| keyof Database['public']['Tables']
|
|
||||||
| { schema: keyof Database },
|
|
||||||
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
|
|
||||||
? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
|
|
||||||
: never = never
|
|
||||||
> = PublicTableNameOrOptions extends { schema: keyof Database }
|
|
||||||
? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
|
|
||||||
Update: infer U;
|
|
||||||
}
|
|
||||||
? U
|
|
||||||
: never
|
|
||||||
: PublicTableNameOrOptions extends keyof Database['public']['Tables']
|
|
||||||
? Database['public']['Tables'][PublicTableNameOrOptions] extends {
|
|
||||||
Update: infer U;
|
|
||||||
}
|
|
||||||
? U
|
|
||||||
: never
|
|
||||||
: never;
|
|
||||||
|
|
||||||
export type Enums<
|
|
||||||
PublicEnumNameOrOptions extends
|
|
||||||
| keyof Database['public']['Enums']
|
|
||||||
| { schema: keyof Database },
|
|
||||||
EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
|
|
||||||
? keyof Database[PublicEnumNameOrOptions['schema']]['Enums']
|
|
||||||
: never = never
|
|
||||||
> = PublicEnumNameOrOptions extends { schema: keyof Database }
|
|
||||||
? Database[PublicEnumNameOrOptions['schema']]['Enums'][EnumName]
|
|
||||||
: PublicEnumNameOrOptions extends keyof Database['public']['Enums']
|
|
||||||
? Database['public']['Enums'][PublicEnumNameOrOptions]
|
|
||||||
: never;
|
|
||||||
@@ -2,14 +2,6 @@
|
|||||||
* Core type definitions for Task Master
|
* Core type definitions for Task Master
|
||||||
*/
|
*/
|
||||||
|
|
||||||
/**
|
|
||||||
* Storage type options
|
|
||||||
* - 'file': Local file system storage
|
|
||||||
* - 'api': Remote API storage (Hamster integration)
|
|
||||||
* - 'auto': Automatically detect based on auth status
|
|
||||||
*/
|
|
||||||
export type StorageType = 'file' | 'api' | 'auto';
|
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Type Literals
|
// Type Literals
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -104,15 +96,6 @@ export interface TaskCollection {
|
|||||||
metadata: TaskMetadata;
|
metadata: TaskMetadata;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Task tag for organizing tasks
|
|
||||||
*/
|
|
||||||
export interface TaskTag {
|
|
||||||
name: string;
|
|
||||||
tasks: string[]; // Task IDs belonging to this tag
|
|
||||||
metadata: Record<string, any>;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Utility Types
|
// Utility Types
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|||||||
17
packages/tm-core/src/workflow/index.ts
Normal file
17
packages/tm-core/src/workflow/index.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Module
|
||||||
|
* Public exports for workflow functionality
|
||||||
|
*/
|
||||||
|
|
||||||
|
export { WorkflowService, type WorkflowServiceConfig } from './workflow-service.js';
|
||||||
|
|
||||||
|
// Re-export workflow engine types for convenience
|
||||||
|
export type {
|
||||||
|
WorkflowExecutionContext,
|
||||||
|
WorkflowStatus,
|
||||||
|
WorkflowEvent,
|
||||||
|
WorkflowEventType,
|
||||||
|
WorkflowProcess,
|
||||||
|
ProcessStatus,
|
||||||
|
WorktreeInfo
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
218
packages/tm-core/src/workflow/workflow-service.ts
Normal file
218
packages/tm-core/src/workflow/workflow-service.ts
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Service
|
||||||
|
* Integrates workflow engine into Task Master Core
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
TaskExecutionManager,
|
||||||
|
type TaskExecutionManagerConfig,
|
||||||
|
type WorkflowExecutionContext
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
|
import type { Task } from '../types/index.js';
|
||||||
|
import { TaskMasterError } from '../errors/index.js';
|
||||||
|
|
||||||
|
export interface WorkflowServiceConfig {
|
||||||
|
/** Project root directory */
|
||||||
|
projectRoot: string;
|
||||||
|
/** Maximum number of concurrent workflows */
|
||||||
|
maxConcurrent?: number;
|
||||||
|
/** Default timeout for workflow execution (minutes) */
|
||||||
|
defaultTimeout?: number;
|
||||||
|
/** Base directory for worktrees */
|
||||||
|
worktreeBase?: string;
|
||||||
|
/** Claude Code executable path */
|
||||||
|
claudeExecutable?: string;
|
||||||
|
/** Enable debug logging */
|
||||||
|
debug?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowService provides Task Master workflow capabilities through core
|
||||||
|
*/
|
||||||
|
export class WorkflowService {
|
||||||
|
private workflowEngine: TaskExecutionManager;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
config: WorkflowServiceConfig,
|
||||||
|
private getTask: (taskId: string) => Promise<Task>
|
||||||
|
) {
|
||||||
|
|
||||||
|
const engineConfig: TaskExecutionManagerConfig = {
|
||||||
|
projectRoot: config.projectRoot,
|
||||||
|
maxConcurrent: config.maxConcurrent || 5,
|
||||||
|
defaultTimeout: config.defaultTimeout || 60,
|
||||||
|
worktreeBase:
|
||||||
|
config.worktreeBase ||
|
||||||
|
require('path').join(config.projectRoot, '..', 'task-worktrees'),
|
||||||
|
claudeExecutable: config.claudeExecutable || 'claude',
|
||||||
|
debug: config.debug || false
|
||||||
|
};
|
||||||
|
|
||||||
|
this.workflowEngine = new TaskExecutionManager(engineConfig);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the workflow service
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
await this.workflowEngine.initialize();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a workflow for a task
|
||||||
|
*/
|
||||||
|
async start(
|
||||||
|
taskId: string,
|
||||||
|
options?: {
|
||||||
|
branchName?: string;
|
||||||
|
timeout?: number;
|
||||||
|
env?: Record<string, string>;
|
||||||
|
}
|
||||||
|
): Promise<string> {
|
||||||
|
try {
|
||||||
|
// Get task from core
|
||||||
|
const task = await this.getTask(taskId);
|
||||||
|
|
||||||
|
// Start workflow using engine
|
||||||
|
return await this.workflowEngine.startTaskExecution(task, options);
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
`Failed to start workflow for task ${taskId}`,
|
||||||
|
'WORKFLOW_START_FAILED',
|
||||||
|
error instanceof Error ? error : undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop a workflow
|
||||||
|
*/
|
||||||
|
async stop(workflowId: string, force = false): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.workflowEngine.stopTaskExecution(workflowId, force);
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
`Failed to stop workflow ${workflowId}`,
|
||||||
|
'WORKFLOW_STOP_FAILED',
|
||||||
|
error instanceof Error ? error : undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pause a workflow
|
||||||
|
*/
|
||||||
|
async pause(workflowId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.workflowEngine.pauseTaskExecution(workflowId);
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
`Failed to pause workflow ${workflowId}`,
|
||||||
|
'WORKFLOW_PAUSE_FAILED',
|
||||||
|
error instanceof Error ? error : undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resume a paused workflow
|
||||||
|
*/
|
||||||
|
async resume(workflowId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.workflowEngine.resumeTaskExecution(workflowId);
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
`Failed to resume workflow ${workflowId}`,
|
||||||
|
'WORKFLOW_RESUME_FAILED',
|
||||||
|
error instanceof Error ? error : undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get workflow status
|
||||||
|
*/
|
||||||
|
getStatus(workflowId: string): WorkflowExecutionContext | undefined {
|
||||||
|
return this.workflowEngine.getWorkflowStatus(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get workflow by task ID
|
||||||
|
*/
|
||||||
|
getByTaskId(taskId: string): WorkflowExecutionContext | undefined {
|
||||||
|
return this.workflowEngine.getWorkflowByTaskId(taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all workflows
|
||||||
|
*/
|
||||||
|
list(): WorkflowExecutionContext[] {
|
||||||
|
return this.workflowEngine.listWorkflows();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List active workflows
|
||||||
|
*/
|
||||||
|
listActive(): WorkflowExecutionContext[] {
|
||||||
|
return this.workflowEngine.listActiveWorkflows();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send input to a running workflow
|
||||||
|
*/
|
||||||
|
async sendInput(workflowId: string, input: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.workflowEngine.sendInputToWorkflow(workflowId, input);
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
`Failed to send input to workflow ${workflowId}`,
|
||||||
|
'WORKFLOW_INPUT_FAILED',
|
||||||
|
error instanceof Error ? error : undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up all workflows
|
||||||
|
*/
|
||||||
|
async cleanup(force = false): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.workflowEngine.cleanup(force);
|
||||||
|
} catch (error) {
|
||||||
|
throw new TaskMasterError(
|
||||||
|
'Failed to cleanup workflows',
|
||||||
|
'WORKFLOW_CLEANUP_FAILED',
|
||||||
|
error instanceof Error ? error : undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Subscribe to workflow events
|
||||||
|
*/
|
||||||
|
on(event: string, listener: (...args: any[]) => void): void {
|
||||||
|
this.workflowEngine.on(event, listener);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unsubscribe from workflow events
|
||||||
|
*/
|
||||||
|
off(event: string, listener: (...args: any[]) => void): void {
|
||||||
|
this.workflowEngine.off(event, listener);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get workflow engine instance (for advanced usage)
|
||||||
|
*/
|
||||||
|
getEngine(): TaskExecutionManager {
|
||||||
|
return this.workflowEngine;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Dispose of the workflow service
|
||||||
|
*/
|
||||||
|
async dispose(): Promise<void> {
|
||||||
|
await this.cleanup(true);
|
||||||
|
this.workflowEngine.removeAllListeners();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,13 +1,12 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"target": "ES2022",
|
"target": "ES2022",
|
||||||
"module": "NodeNext",
|
"module": "ESNext",
|
||||||
"lib": ["ES2022"],
|
"lib": ["ES2022"],
|
||||||
"declaration": true,
|
"declaration": true,
|
||||||
"declarationMap": true,
|
"declarationMap": true,
|
||||||
"sourceMap": true,
|
"sourceMap": true,
|
||||||
"outDir": "./dist",
|
"outDir": "./dist",
|
||||||
"baseUrl": ".",
|
|
||||||
"rootDir": "./src",
|
"rootDir": "./src",
|
||||||
"strict": true,
|
"strict": true,
|
||||||
"noImplicitAny": true,
|
"noImplicitAny": true,
|
||||||
@@ -24,12 +23,25 @@
|
|||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
"forceConsistentCasingInFileNames": true,
|
"forceConsistentCasingInFileNames": true,
|
||||||
"moduleResolution": "NodeNext",
|
"moduleResolution": "bundler",
|
||||||
"moduleDetection": "force",
|
"moduleDetection": "force",
|
||||||
"types": ["node"],
|
"types": ["node"],
|
||||||
"resolveJsonModule": true,
|
"resolveJsonModule": true,
|
||||||
"isolatedModules": true,
|
"isolatedModules": true,
|
||||||
"allowImportingTsExtensions": false
|
"paths": {
|
||||||
|
"@/*": ["./src/*"],
|
||||||
|
"@/auth": ["./src/auth"],
|
||||||
|
"@/config": ["./src/config"],
|
||||||
|
"@/errors": ["./src/errors"],
|
||||||
|
"@/interfaces": ["./src/interfaces"],
|
||||||
|
"@/logger": ["./src/logger"],
|
||||||
|
"@/parser": ["./src/parser"],
|
||||||
|
"@/providers": ["./src/providers"],
|
||||||
|
"@/services": ["./src/services"],
|
||||||
|
"@/storage": ["./src/storage"],
|
||||||
|
"@/types": ["./src/types"],
|
||||||
|
"@/utils": ["./src/utils"]
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"include": ["src/**/*"],
|
"include": ["src/**/*"],
|
||||||
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]
|
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]
|
||||||
|
|||||||
53
packages/tm-core/tsup.config.ts
Normal file
53
packages/tm-core/tsup.config.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import { defineConfig } from 'tsup';
|
||||||
|
import { dotenvLoad } from 'dotenv-mono';
|
||||||
|
dotenvLoad();
|
||||||
|
|
||||||
|
// Get all TM_PUBLIC_* env variables for build-time injection
|
||||||
|
const getBuildTimeEnvs = () => {
|
||||||
|
const envs: Record<string, string> = {};
|
||||||
|
for (const [key, value] of Object.entries(process.env)) {
|
||||||
|
if (key.startsWith('TM_PUBLIC_')) {
|
||||||
|
// Return the actual value, not JSON.stringify'd
|
||||||
|
envs[key] = value || '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return envs;
|
||||||
|
};
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
entry: {
|
||||||
|
index: 'src/index.ts',
|
||||||
|
'auth/index': 'src/auth/index.ts',
|
||||||
|
'config/index': 'src/config/index.ts',
|
||||||
|
'errors/index': 'src/errors/index.ts',
|
||||||
|
'interfaces/index': 'src/interfaces/index.ts',
|
||||||
|
'logger/index': 'src/logger/index.ts',
|
||||||
|
'parser/index': 'src/parser/index.ts',
|
||||||
|
'providers/index': 'src/providers/index.ts',
|
||||||
|
'services/index': 'src/services/index.ts',
|
||||||
|
'storage/index': 'src/storage/index.ts',
|
||||||
|
'types/index': 'src/types/index.ts',
|
||||||
|
'utils/index': 'src/utils/index.ts',
|
||||||
|
'workflow/index': 'src/workflow/index.ts'
|
||||||
|
},
|
||||||
|
format: ['cjs', 'esm'],
|
||||||
|
dts: true,
|
||||||
|
sourcemap: true,
|
||||||
|
clean: true,
|
||||||
|
splitting: false,
|
||||||
|
treeshake: true,
|
||||||
|
minify: false,
|
||||||
|
target: 'es2022',
|
||||||
|
tsconfig: './tsconfig.json',
|
||||||
|
outDir: 'dist',
|
||||||
|
// Replace process.env.TM_PUBLIC_* with actual values at build time
|
||||||
|
env: getBuildTimeEnvs(),
|
||||||
|
// Auto-external all dependencies from package.json
|
||||||
|
external: [
|
||||||
|
// External all node_modules - everything not starting with . or /
|
||||||
|
/^[^./]/
|
||||||
|
],
|
||||||
|
esbuildOptions(options) {
|
||||||
|
options.conditions = ['module'];
|
||||||
|
}
|
||||||
|
});
|
||||||
371
packages/workflow-engine/README.md
Normal file
371
packages/workflow-engine/README.md
Normal file
@@ -0,0 +1,371 @@
|
|||||||
|
# @tm/workflow-engine
|
||||||
|
|
||||||
|
Enhanced Task Master workflow execution engine with git worktree isolation and Claude Code process management.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The Workflow Engine extends Task Master with advanced execution capabilities:
|
||||||
|
|
||||||
|
- **Git Worktree Isolation**: Each task runs in its own isolated worktree
|
||||||
|
- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
|
||||||
|
- **Real-time Monitoring**: Track workflow progress and process output
|
||||||
|
- **State Management**: Persistent workflow state across sessions
|
||||||
|
- **Parallel Execution**: Run multiple tasks concurrently with resource limits
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
TaskExecutionManager
|
||||||
|
├── WorktreeManager # Git worktree lifecycle
|
||||||
|
├── ProcessSandbox # Claude Code process management
|
||||||
|
└── WorkflowStateManager # Persistent state tracking
|
||||||
|
```
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { TaskExecutionManager } from '@tm/workflow-engine';
|
||||||
|
|
||||||
|
const manager = new TaskExecutionManager({
|
||||||
|
projectRoot: '/path/to/project',
|
||||||
|
worktreeBase: '/path/to/worktrees',
|
||||||
|
claudeExecutable: 'claude',
|
||||||
|
maxConcurrent: 3,
|
||||||
|
defaultTimeout: 60,
|
||||||
|
debug: true
|
||||||
|
});
|
||||||
|
|
||||||
|
await manager.initialize();
|
||||||
|
|
||||||
|
// Start task execution
|
||||||
|
const workflowId = await manager.startTaskExecution({
|
||||||
|
id: '1.2',
|
||||||
|
title: 'Implement authentication',
|
||||||
|
description: 'Add JWT-based auth system',
|
||||||
|
status: 'pending',
|
||||||
|
priority: 'high'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Monitor workflow
|
||||||
|
const workflow = manager.getWorkflowStatus(workflowId);
|
||||||
|
console.log(`Status: ${workflow.status}`);
|
||||||
|
|
||||||
|
// Stop when complete
|
||||||
|
await manager.stopTaskExecution(workflowId);
|
||||||
|
```
|
||||||
|
|
||||||
|
## CLI Integration
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start workflow
|
||||||
|
tm workflow start 1.2
|
||||||
|
|
||||||
|
# List active workflows
|
||||||
|
tm workflow list
|
||||||
|
|
||||||
|
# Check status
|
||||||
|
tm workflow status workflow-1.2-1234567890-abc123
|
||||||
|
|
||||||
|
# Stop workflow
|
||||||
|
tm workflow stop workflow-1.2-1234567890-abc123
|
||||||
|
```
|
||||||
|
|
||||||
|
## VS Code Extension
|
||||||
|
|
||||||
|
The workflow engine integrates with the Task Master VS Code extension to provide:
|
||||||
|
|
||||||
|
- **Workflow Tree View**: Visual workflow management
|
||||||
|
- **Process Monitoring**: Real-time output streaming
|
||||||
|
- **Worktree Navigation**: Quick access to isolated workspaces
|
||||||
|
- **Status Indicators**: Visual workflow state tracking
|
||||||
|
|
||||||
|
## Core Components
|
||||||
|
|
||||||
|
### TaskExecutionManager
|
||||||
|
|
||||||
|
Orchestrates complete workflow lifecycle:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Event-driven workflow management
|
||||||
|
manager.on('workflow.started', (event) => {
|
||||||
|
console.log(`Started: ${event.workflowId}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
manager.on('process.output', (event) => {
|
||||||
|
console.log(`[${event.data.stream}]: ${event.data.data}`);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### WorktreeManager
|
||||||
|
|
||||||
|
Manages git worktree operations:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { WorktreeManager } from '@tm/workflow-engine';
|
||||||
|
|
||||||
|
const manager = new WorktreeManager({
|
||||||
|
worktreeBase: './worktrees',
|
||||||
|
projectRoot: process.cwd(),
|
||||||
|
autoCleanup: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create isolated workspace
|
||||||
|
const worktree = await manager.createWorktree('task-1.2');
|
||||||
|
console.log(`Created: ${worktree.path}`);
|
||||||
|
|
||||||
|
// List all worktrees
|
||||||
|
const worktrees = await manager.listWorktrees();
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
await manager.removeWorktree('task-1.2');
|
||||||
|
```
|
||||||
|
|
||||||
|
### ProcessSandbox
|
||||||
|
|
||||||
|
Spawns and manages Claude Code processes:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { ProcessSandbox } from '@tm/workflow-engine';
|
||||||
|
|
||||||
|
const sandbox = new ProcessSandbox({
|
||||||
|
claudeExecutable: 'claude',
|
||||||
|
defaultTimeout: 30,
|
||||||
|
debug: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start isolated process
|
||||||
|
const process = await sandbox.startProcess(
|
||||||
|
'workflow-123',
|
||||||
|
'task-1.2',
|
||||||
|
'Implement user authentication with JWT tokens',
|
||||||
|
{ cwd: '/path/to/worktree' }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Send input
|
||||||
|
await sandbox.sendInput('workflow-123', 'npm test');
|
||||||
|
|
||||||
|
// Monitor output
|
||||||
|
sandbox.on('process.output', (event) => {
|
||||||
|
console.log(event.data.data);
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### WorkflowStateManager
|
||||||
|
|
||||||
|
Persistent workflow state management:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { WorkflowStateManager } from '@tm/workflow-engine';
|
||||||
|
|
||||||
|
const stateManager = new WorkflowStateManager({
|
||||||
|
projectRoot: process.cwd()
|
||||||
|
});
|
||||||
|
|
||||||
|
await stateManager.loadState();
|
||||||
|
|
||||||
|
// Register workflow
|
||||||
|
const workflowId = await stateManager.registerWorkflow({
|
||||||
|
taskId: '1.2',
|
||||||
|
taskTitle: 'Authentication',
|
||||||
|
// ... other context
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update status
|
||||||
|
await stateManager.updateWorkflowStatus(workflowId, 'running');
|
||||||
|
|
||||||
|
// Query workflows
|
||||||
|
const running = stateManager.listWorkflowsByStatus('running');
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
|
||||||
|
- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
|
||||||
|
- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
|
||||||
|
- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
|
||||||
|
|
||||||
|
### Config Object
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
interface TaskExecutionManagerConfig {
|
||||||
|
projectRoot: string; // Project root directory
|
||||||
|
worktreeBase: string; // Worktree base path
|
||||||
|
claudeExecutable: string; // Claude executable
|
||||||
|
maxConcurrent: number; // Concurrent limit
|
||||||
|
defaultTimeout: number; // Timeout (minutes)
|
||||||
|
debug: boolean; // Debug logging
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow States
|
||||||
|
|
||||||
|
| State | Description |
|
||||||
|
|-------|-------------|
|
||||||
|
| `pending` | Created but not started |
|
||||||
|
| `initializing` | Setting up worktree/process |
|
||||||
|
| `running` | Active execution |
|
||||||
|
| `paused` | Temporarily stopped |
|
||||||
|
| `completed` | Successfully finished |
|
||||||
|
| `failed` | Error occurred |
|
||||||
|
| `cancelled` | User cancelled |
|
||||||
|
| `timeout` | Exceeded time limit |
|
||||||
|
|
||||||
|
## Events
|
||||||
|
|
||||||
|
The workflow engine emits events for real-time monitoring:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Workflow lifecycle
|
||||||
|
manager.on('workflow.started', (event) => {});
|
||||||
|
manager.on('workflow.completed', (event) => {});
|
||||||
|
manager.on('workflow.failed', (event) => {});
|
||||||
|
|
||||||
|
// Process events
|
||||||
|
manager.on('process.started', (event) => {});
|
||||||
|
manager.on('process.output', (event) => {});
|
||||||
|
manager.on('process.stopped', (event) => {});
|
||||||
|
|
||||||
|
// Worktree events
|
||||||
|
manager.on('worktree.created', (event) => {});
|
||||||
|
manager.on('worktree.deleted', (event) => {});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
The workflow engine provides specialized error types:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
WorkflowError,
|
||||||
|
WorktreeError,
|
||||||
|
ProcessError,
|
||||||
|
MaxConcurrentWorkflowsError
|
||||||
|
} from '@tm/workflow-engine';
|
||||||
|
|
||||||
|
try {
|
||||||
|
await manager.startTaskExecution(task);
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof MaxConcurrentWorkflowsError) {
|
||||||
|
console.log('Too many concurrent workflows');
|
||||||
|
} else if (error instanceof WorktreeError) {
|
||||||
|
console.log('Worktree operation failed');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install dependencies
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Build package
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
npm test
|
||||||
|
|
||||||
|
# Development mode
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integration Examples
|
||||||
|
|
||||||
|
### With Task Master Core
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { createTaskMasterCore } from '@tm/core';
|
||||||
|
import { TaskExecutionManager } from '@tm/workflow-engine';
|
||||||
|
|
||||||
|
const core = await createTaskMasterCore({ projectPath: '.' });
|
||||||
|
const workflows = new TaskExecutionManager({ /*...*/ });
|
||||||
|
|
||||||
|
// Get task from core
|
||||||
|
const tasks = await core.getTaskList({});
|
||||||
|
const task = tasks.tasks.find(t => t.id === '1.2');
|
||||||
|
|
||||||
|
// Execute with workflow engine
|
||||||
|
if (task) {
|
||||||
|
const workflowId = await workflows.startTaskExecution(task);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### With VS Code Extension
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { WorkflowProvider } from './workflow-provider';
|
||||||
|
|
||||||
|
// Register tree view
|
||||||
|
const provider = new WorkflowProvider(context);
|
||||||
|
vscode.window.createTreeView('taskmaster.workflows', {
|
||||||
|
treeDataProvider: provider
|
||||||
|
});
|
||||||
|
|
||||||
|
// Register commands
|
||||||
|
vscode.commands.registerCommand('taskmaster.workflow.start',
|
||||||
|
async (taskId) => {
|
||||||
|
await provider.startWorkflow(taskId);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
1. **Worktree Creation Fails**
|
||||||
|
```bash
|
||||||
|
# Check git version (requires 2.5+)
|
||||||
|
git --version
|
||||||
|
|
||||||
|
# Verify project is git repository
|
||||||
|
git status
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Claude Code Not Found**
|
||||||
|
```bash
|
||||||
|
# Check Claude installation
|
||||||
|
which claude
|
||||||
|
|
||||||
|
# Set custom path
|
||||||
|
export TASKMASTER_CLAUDE_PATH=/path/to/claude
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Permission Errors**
|
||||||
|
```bash
|
||||||
|
# Check worktree directory permissions
|
||||||
|
chmod -R 755 ./worktrees
|
||||||
|
```
|
||||||
|
|
||||||
|
### Debug Mode
|
||||||
|
|
||||||
|
Enable debug logging for troubleshooting:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const manager = new TaskExecutionManager({
|
||||||
|
// ... other config
|
||||||
|
debug: true
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Or via environment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export TASKMASTER_WORKFLOW_DEBUG=true
|
||||||
|
tm workflow start 1.2
|
||||||
|
```
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
|
||||||
|
- [ ] Process resource monitoring (CPU, memory)
|
||||||
|
- [ ] Workflow templates and presets
|
||||||
|
- [ ] Integration with CI/CD pipelines
|
||||||
|
- [ ] Workflow scheduling and queueing
|
||||||
|
- [ ] Multi-machine workflow distribution
|
||||||
|
- [ ] Advanced debugging and profiling tools
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT WITH Commons-Clause
|
||||||
56
packages/workflow-engine/package.json
Normal file
56
packages/workflow-engine/package.json
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
{
|
||||||
|
"name": "@tm/workflow-engine",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"description": "Task Master workflow execution engine with git worktree and process management",
|
||||||
|
"type": "module",
|
||||||
|
"main": "dist/index.js",
|
||||||
|
"types": "dist/index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"import": "./dist/index.js",
|
||||||
|
"types": "./dist/index.d.ts"
|
||||||
|
},
|
||||||
|
"./task-execution": {
|
||||||
|
"import": "./dist/task-execution/index.js",
|
||||||
|
"types": "./dist/task-execution/index.d.ts"
|
||||||
|
},
|
||||||
|
"./worktree": {
|
||||||
|
"import": "./dist/worktree/index.js",
|
||||||
|
"types": "./dist/worktree/index.d.ts"
|
||||||
|
},
|
||||||
|
"./process": {
|
||||||
|
"import": "./dist/process/index.js",
|
||||||
|
"types": "./dist/process/index.d.ts"
|
||||||
|
},
|
||||||
|
"./state": {
|
||||||
|
"import": "./dist/state/index.js",
|
||||||
|
"types": "./dist/state/index.d.ts"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsup",
|
||||||
|
"dev": "tsup --watch",
|
||||||
|
"test": "vitest",
|
||||||
|
"test:watch": "vitest --watch",
|
||||||
|
"type-check": "tsc --noEmit"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@tm/core": "*"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"tsup": "^8.0.0",
|
||||||
|
"typescript": "^5.5.0",
|
||||||
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"files": ["dist"],
|
||||||
|
"keywords": [
|
||||||
|
"task-master",
|
||||||
|
"workflow",
|
||||||
|
"git-worktree",
|
||||||
|
"process-management",
|
||||||
|
"claude-code"
|
||||||
|
],
|
||||||
|
"author": "Task Master AI Team",
|
||||||
|
"license": "MIT"
|
||||||
|
}
|
||||||
6
packages/workflow-engine/src/errors/index.ts
Normal file
6
packages/workflow-engine/src/errors/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Engine Errors
|
||||||
|
* Public error exports
|
||||||
|
*/
|
||||||
|
|
||||||
|
export * from './workflow.errors.js';
|
||||||
59
packages/workflow-engine/src/errors/workflow.errors.ts
Normal file
59
packages/workflow-engine/src/errors/workflow.errors.ts
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Engine Errors
|
||||||
|
* Custom error classes for workflow operations
|
||||||
|
*/
|
||||||
|
|
||||||
|
export class WorkflowError extends Error {
|
||||||
|
constructor(
|
||||||
|
message: string,
|
||||||
|
public code: string,
|
||||||
|
public workflowId?: string,
|
||||||
|
public taskId?: string,
|
||||||
|
public cause?: Error
|
||||||
|
) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'WorkflowError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class WorktreeError extends WorkflowError {
|
||||||
|
constructor(message: string, public path?: string, cause?: Error) {
|
||||||
|
super(message, 'WORKTREE_ERROR', undefined, undefined, cause);
|
||||||
|
this.name = 'WorktreeError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class ProcessError extends WorkflowError {
|
||||||
|
constructor(message: string, public pid?: number, cause?: Error) {
|
||||||
|
super(message, 'PROCESS_ERROR', undefined, undefined, cause);
|
||||||
|
this.name = 'ProcessError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class WorkflowTimeoutError extends WorkflowError {
|
||||||
|
constructor(workflowId: string, timeoutMinutes: number) {
|
||||||
|
super(
|
||||||
|
`Workflow ${workflowId} timed out after ${timeoutMinutes} minutes`,
|
||||||
|
'WORKFLOW_TIMEOUT',
|
||||||
|
workflowId
|
||||||
|
);
|
||||||
|
this.name = 'WorkflowTimeoutError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class WorkflowNotFoundError extends WorkflowError {
|
||||||
|
constructor(workflowId: string) {
|
||||||
|
super(`Workflow ${workflowId} not found`, 'WORKFLOW_NOT_FOUND', workflowId);
|
||||||
|
this.name = 'WorkflowNotFoundError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class MaxConcurrentWorkflowsError extends WorkflowError {
|
||||||
|
constructor(maxConcurrent: number) {
|
||||||
|
super(
|
||||||
|
`Maximum concurrent workflows (${maxConcurrent}) reached`,
|
||||||
|
'MAX_CONCURRENT_WORKFLOWS'
|
||||||
|
);
|
||||||
|
this.name = 'MaxConcurrentWorkflowsError';
|
||||||
|
}
|
||||||
|
}
|
||||||
19
packages/workflow-engine/src/index.ts
Normal file
19
packages/workflow-engine/src/index.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Engine
|
||||||
|
* Main entry point for the Task Master workflow execution engine
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Core task execution
|
||||||
|
export * from './task-execution/index.js';
|
||||||
|
|
||||||
|
// Component managers
|
||||||
|
export * from './worktree/index.js';
|
||||||
|
export * from './process/index.js';
|
||||||
|
export * from './state/index.js';
|
||||||
|
|
||||||
|
// Types and errors
|
||||||
|
export * from './types/index.js';
|
||||||
|
export * from './errors/index.js';
|
||||||
|
|
||||||
|
// Convenience exports
|
||||||
|
export { TaskExecutionManager as WorkflowEngine } from './task-execution/index.js';
|
||||||
6
packages/workflow-engine/src/process/index.ts
Normal file
6
packages/workflow-engine/src/process/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Process Management
|
||||||
|
* Public exports for process operations
|
||||||
|
*/
|
||||||
|
|
||||||
|
export * from './process-sandbox.js';
|
||||||
378
packages/workflow-engine/src/process/process-sandbox.ts
Normal file
378
packages/workflow-engine/src/process/process-sandbox.ts
Normal file
@@ -0,0 +1,378 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Process Sandbox
|
||||||
|
* Manages Claude Code process execution in isolated environments
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { spawn, ChildProcess } from 'node:child_process';
|
||||||
|
import { EventEmitter } from 'node:events';
|
||||||
|
import type {
|
||||||
|
WorkflowProcess,
|
||||||
|
WorkflowEvent,
|
||||||
|
WorkflowEventType
|
||||||
|
} from '../types/workflow.types.js';
|
||||||
|
import { ProcessError } from '../errors/workflow.errors.js';
|
||||||
|
|
||||||
|
export interface ProcessSandboxConfig {
|
||||||
|
/** Claude Code executable path */
|
||||||
|
claudeExecutable: string;
|
||||||
|
/** Default timeout for processes (minutes) */
|
||||||
|
defaultTimeout: number;
|
||||||
|
/** Environment variables to pass to processes */
|
||||||
|
environment?: Record<string, string>;
|
||||||
|
/** Enable debug output */
|
||||||
|
debug: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ProcessOptions {
|
||||||
|
/** Working directory for the process */
|
||||||
|
cwd: string;
|
||||||
|
/** Environment variables (merged with config) */
|
||||||
|
env?: Record<string, string>;
|
||||||
|
/** Timeout in minutes (overrides default) */
|
||||||
|
timeout?: number;
|
||||||
|
/** Additional Claude Code arguments */
|
||||||
|
args?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ProcessSandbox manages Claude Code process lifecycle
|
||||||
|
* Single responsibility: Process spawning, monitoring, and cleanup
|
||||||
|
*/
|
||||||
|
export class ProcessSandbox extends EventEmitter {
|
||||||
|
private config: ProcessSandboxConfig;
|
||||||
|
private activeProcesses = new Map<string, WorkflowProcess>();
|
||||||
|
private childProcesses = new Map<string, ChildProcess>();
|
||||||
|
private timeouts = new Map<string, NodeJS.Timeout>();
|
||||||
|
|
||||||
|
constructor(config: ProcessSandboxConfig) {
|
||||||
|
super();
|
||||||
|
this.config = config;
|
||||||
|
this.setupCleanupHandlers();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a Claude Code process for task execution
|
||||||
|
*/
|
||||||
|
async startProcess(
|
||||||
|
workflowId: string,
|
||||||
|
taskId: string,
|
||||||
|
taskPrompt: string,
|
||||||
|
options: ProcessOptions
|
||||||
|
): Promise<WorkflowProcess> {
|
||||||
|
if (this.activeProcesses.has(workflowId)) {
|
||||||
|
throw new ProcessError(
|
||||||
|
`Process already running for workflow ${workflowId}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare command and arguments
|
||||||
|
const args = [
|
||||||
|
'-p', // Print mode for non-interactive execution
|
||||||
|
taskPrompt,
|
||||||
|
...(options.args || [])
|
||||||
|
];
|
||||||
|
|
||||||
|
// Prepare environment
|
||||||
|
const env = {
|
||||||
|
...process.env,
|
||||||
|
...this.config.environment,
|
||||||
|
...options.env,
|
||||||
|
// Ensure task context is available
|
||||||
|
TASKMASTER_WORKFLOW_ID: workflowId,
|
||||||
|
TASKMASTER_TASK_ID: taskId
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Spawn Claude Code process
|
||||||
|
const childProcess = spawn(this.config.claudeExecutable, args, {
|
||||||
|
cwd: options.cwd,
|
||||||
|
env,
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe']
|
||||||
|
});
|
||||||
|
|
||||||
|
const workflowProcess: WorkflowProcess = {
|
||||||
|
pid: childProcess.pid!,
|
||||||
|
command: this.config.claudeExecutable,
|
||||||
|
args,
|
||||||
|
cwd: options.cwd,
|
||||||
|
env,
|
||||||
|
startedAt: new Date(),
|
||||||
|
status: 'starting'
|
||||||
|
};
|
||||||
|
|
||||||
|
// Store process references
|
||||||
|
this.activeProcesses.set(workflowId, workflowProcess);
|
||||||
|
this.childProcesses.set(workflowId, childProcess);
|
||||||
|
|
||||||
|
// Setup process event handlers
|
||||||
|
this.setupProcessHandlers(workflowId, taskId, childProcess);
|
||||||
|
|
||||||
|
// Setup timeout if specified
|
||||||
|
const timeoutMinutes = options.timeout || this.config.defaultTimeout;
|
||||||
|
if (timeoutMinutes > 0) {
|
||||||
|
this.setupProcessTimeout(workflowId, timeoutMinutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit process started event
|
||||||
|
this.emitEvent('process.started', workflowId, taskId, {
|
||||||
|
pid: workflowProcess.pid,
|
||||||
|
command: workflowProcess.command
|
||||||
|
});
|
||||||
|
|
||||||
|
workflowProcess.status = 'running';
|
||||||
|
return workflowProcess;
|
||||||
|
} catch (error) {
|
||||||
|
throw new ProcessError(
|
||||||
|
`Failed to start process for workflow ${workflowId}`,
|
||||||
|
undefined,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop a running process
|
||||||
|
*/
|
||||||
|
async stopProcess(workflowId: string, force = false): Promise<void> {
|
||||||
|
const process = this.activeProcesses.get(workflowId);
|
||||||
|
const childProcess = this.childProcesses.get(workflowId);
|
||||||
|
|
||||||
|
if (!process || !childProcess) {
|
||||||
|
throw new ProcessError(
|
||||||
|
`No running process found for workflow ${workflowId}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Clear timeout
|
||||||
|
const timeout = this.timeouts.get(workflowId);
|
||||||
|
if (timeout) {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
this.timeouts.delete(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Kill the process
|
||||||
|
if (force) {
|
||||||
|
childProcess.kill('SIGKILL');
|
||||||
|
} else {
|
||||||
|
childProcess.kill('SIGTERM');
|
||||||
|
|
||||||
|
// Give it 5 seconds to gracefully exit, then force kill
|
||||||
|
setTimeout(() => {
|
||||||
|
if (!childProcess.killed) {
|
||||||
|
childProcess.kill('SIGKILL');
|
||||||
|
}
|
||||||
|
}, 5000);
|
||||||
|
}
|
||||||
|
|
||||||
|
process.status = 'stopped';
|
||||||
|
|
||||||
|
// Emit process stopped event
|
||||||
|
this.emitEvent('process.stopped', workflowId, process.pid.toString(), {
|
||||||
|
pid: process.pid,
|
||||||
|
forced: force
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
throw new ProcessError(
|
||||||
|
`Failed to stop process for workflow ${workflowId}`,
|
||||||
|
process.pid,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send input to a running process
|
||||||
|
*/
|
||||||
|
async sendInput(workflowId: string, input: string): Promise<void> {
|
||||||
|
const childProcess = this.childProcesses.get(workflowId);
|
||||||
|
if (!childProcess) {
|
||||||
|
throw new ProcessError(
|
||||||
|
`No running process found for workflow ${workflowId}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
childProcess.stdin?.write(input);
|
||||||
|
childProcess.stdin?.write('\n');
|
||||||
|
} catch (error) {
|
||||||
|
throw new ProcessError(
|
||||||
|
`Failed to send input to process for workflow ${workflowId}`,
|
||||||
|
childProcess.pid,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get process information
|
||||||
|
*/
|
||||||
|
getProcess(workflowId: string): WorkflowProcess | undefined {
|
||||||
|
return this.activeProcesses.get(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all active processes
|
||||||
|
*/
|
||||||
|
listProcesses(): WorkflowProcess[] {
|
||||||
|
return Array.from(this.activeProcesses.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a process is running
|
||||||
|
*/
|
||||||
|
isProcessRunning(workflowId: string): boolean {
|
||||||
|
const process = this.activeProcesses.get(workflowId);
|
||||||
|
return process?.status === 'running' || process?.status === 'starting';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up all processes
|
||||||
|
*/
|
||||||
|
async cleanupAll(force = false): Promise<void> {
|
||||||
|
const workflowIds = Array.from(this.activeProcesses.keys());
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
workflowIds.map(async (workflowId) => {
|
||||||
|
try {
|
||||||
|
await this.stopProcess(workflowId, force);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Failed to cleanup process for workflow ${workflowId}:`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup process event handlers
|
||||||
|
*/
|
||||||
|
private setupProcessHandlers(
|
||||||
|
workflowId: string,
|
||||||
|
taskId: string,
|
||||||
|
childProcess: ChildProcess
|
||||||
|
): void {
|
||||||
|
const process = this.activeProcesses.get(workflowId);
|
||||||
|
if (!process) return;
|
||||||
|
|
||||||
|
// Handle stdout
|
||||||
|
childProcess.stdout?.on('data', (data) => {
|
||||||
|
const output = data.toString();
|
||||||
|
if (this.config.debug) {
|
||||||
|
console.log(`[${workflowId}] STDOUT:`, output);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitEvent('process.output', workflowId, taskId, {
|
||||||
|
stream: 'stdout',
|
||||||
|
data: output
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle stderr
|
||||||
|
childProcess.stderr?.on('data', (data) => {
|
||||||
|
const output = data.toString();
|
||||||
|
if (this.config.debug) {
|
||||||
|
console.error(`[${workflowId}] STDERR:`, output);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitEvent('process.output', workflowId, taskId, {
|
||||||
|
stream: 'stderr',
|
||||||
|
data: output
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle process exit
|
||||||
|
childProcess.on('exit', (code, signal) => {
|
||||||
|
process.status = code === 0 ? 'stopped' : 'crashed';
|
||||||
|
|
||||||
|
this.emitEvent('process.stopped', workflowId, taskId, {
|
||||||
|
pid: process.pid,
|
||||||
|
exitCode: code,
|
||||||
|
signal
|
||||||
|
});
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
this.activeProcesses.delete(workflowId);
|
||||||
|
this.childProcesses.delete(workflowId);
|
||||||
|
|
||||||
|
const timeout = this.timeouts.get(workflowId);
|
||||||
|
if (timeout) {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
this.timeouts.delete(workflowId);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle process errors
|
||||||
|
childProcess.on('error', (error) => {
|
||||||
|
process.status = 'crashed';
|
||||||
|
|
||||||
|
this.emitEvent('process.error', workflowId, taskId, undefined, error);
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
this.activeProcesses.delete(workflowId);
|
||||||
|
this.childProcesses.delete(workflowId);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup process timeout
|
||||||
|
*/
|
||||||
|
private setupProcessTimeout(
|
||||||
|
workflowId: string,
|
||||||
|
timeoutMinutes: number
|
||||||
|
): void {
|
||||||
|
const timeout = setTimeout(
|
||||||
|
async () => {
|
||||||
|
console.warn(`Process timeout reached for workflow ${workflowId}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.stopProcess(workflowId, true);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to stop timed out process:', error);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
timeoutMinutes * 60 * 1000
|
||||||
|
);
|
||||||
|
|
||||||
|
this.timeouts.set(workflowId, timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Emit workflow event
|
||||||
|
*/
|
||||||
|
private emitEvent(
|
||||||
|
type: WorkflowEventType,
|
||||||
|
workflowId: string,
|
||||||
|
taskId: string,
|
||||||
|
data?: any,
|
||||||
|
error?: Error
|
||||||
|
): void {
|
||||||
|
const event: WorkflowEvent = {
|
||||||
|
type,
|
||||||
|
workflowId,
|
||||||
|
taskId,
|
||||||
|
timestamp: new Date(),
|
||||||
|
data,
|
||||||
|
error
|
||||||
|
};
|
||||||
|
|
||||||
|
this.emit('event', event);
|
||||||
|
this.emit(type, event);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup cleanup handlers for graceful shutdown
|
||||||
|
*/
|
||||||
|
private setupCleanupHandlers(): void {
|
||||||
|
const cleanup = () => {
|
||||||
|
console.log('Cleaning up processes...');
|
||||||
|
this.cleanupAll(true).catch(console.error);
|
||||||
|
};
|
||||||
|
|
||||||
|
process.on('SIGINT', cleanup);
|
||||||
|
process.on('SIGTERM', cleanup);
|
||||||
|
process.on('exit', cleanup);
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/workflow-engine/src/state/index.ts
Normal file
6
packages/workflow-engine/src/state/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
/**
 * @fileoverview State Management
 * Public exports for workflow state operations (workflow registry persistence).
 */

export * from './workflow-state-manager.js';
|
||||||
320
packages/workflow-engine/src/state/workflow-state-manager.ts
Normal file
320
packages/workflow-engine/src/state/workflow-state-manager.ts
Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow State Manager
|
||||||
|
* Extends tm-core RuntimeStateManager with workflow tracking capabilities
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { promises as fs } from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
import type {
|
||||||
|
WorkflowExecutionContext,
|
||||||
|
WorkflowStatus,
|
||||||
|
WorkflowEvent
|
||||||
|
} from '../types/workflow.types.js';
|
||||||
|
import { WorkflowError } from '../errors/workflow.errors.js';
|
||||||
|
|
||||||
|
export interface WorkflowStateConfig {
|
||||||
|
/** Project root directory */
|
||||||
|
projectRoot: string;
|
||||||
|
/** Custom state directory (defaults to .taskmaster) */
|
||||||
|
stateDir?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WorkflowRegistryEntry {
|
||||||
|
/** Workflow ID */
|
||||||
|
workflowId: string;
|
||||||
|
/** Task ID being executed */
|
||||||
|
taskId: string;
|
||||||
|
/** Workflow status */
|
||||||
|
status: WorkflowStatus;
|
||||||
|
/** Worktree path */
|
||||||
|
worktreePath: string;
|
||||||
|
/** Process ID if running */
|
||||||
|
processId?: number;
|
||||||
|
/** Start timestamp */
|
||||||
|
startedAt: string;
|
||||||
|
/** Last activity timestamp */
|
||||||
|
lastActivity: string;
|
||||||
|
/** Branch name */
|
||||||
|
branchName: string;
|
||||||
|
/** Additional metadata */
|
||||||
|
metadata?: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorkflowStateManager manages workflow execution state
|
||||||
|
* Extends the concept of RuntimeStateManager to track active workflows globally
|
||||||
|
*/
|
||||||
|
export class WorkflowStateManager {
|
||||||
|
private config: WorkflowStateConfig;
|
||||||
|
private stateFilePath: string;
|
||||||
|
private activeWorkflows = new Map<string, WorkflowExecutionContext>();
|
||||||
|
|
||||||
|
constructor(config: WorkflowStateConfig) {
|
||||||
|
this.config = config;
|
||||||
|
const stateDir = config.stateDir || '.taskmaster';
|
||||||
|
this.stateFilePath = path.join(config.projectRoot, stateDir, 'workflows.json');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load workflow state from disk
|
||||||
|
*/
|
||||||
|
async loadState(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const stateData = await fs.readFile(this.stateFilePath, 'utf-8');
|
||||||
|
const registry = JSON.parse(stateData) as Record<string, WorkflowRegistryEntry>;
|
||||||
|
|
||||||
|
// Convert registry entries to WorkflowExecutionContext
|
||||||
|
for (const [workflowId, entry] of Object.entries(registry)) {
|
||||||
|
const context: WorkflowExecutionContext = {
|
||||||
|
taskId: entry.taskId,
|
||||||
|
taskTitle: `Task ${entry.taskId}`, // Will be updated when task details are loaded
|
||||||
|
taskDescription: '',
|
||||||
|
projectRoot: this.config.projectRoot,
|
||||||
|
worktreePath: entry.worktreePath,
|
||||||
|
branchName: entry.branchName,
|
||||||
|
processId: entry.processId,
|
||||||
|
startedAt: new Date(entry.startedAt),
|
||||||
|
status: entry.status,
|
||||||
|
lastActivity: new Date(entry.lastActivity),
|
||||||
|
metadata: entry.metadata
|
||||||
|
};
|
||||||
|
|
||||||
|
this.activeWorkflows.set(workflowId, context);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code === 'ENOENT') {
|
||||||
|
// Workflows file doesn't exist, start with empty state
|
||||||
|
console.debug('No workflows.json found, starting with empty state');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn('Failed to load workflow state:', error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save workflow state to disk
|
||||||
|
*/
|
||||||
|
async saveState(): Promise<void> {
|
||||||
|
const stateDir = path.dirname(this.stateFilePath);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await fs.mkdir(stateDir, { recursive: true });
|
||||||
|
|
||||||
|
// Convert contexts to registry entries
|
||||||
|
const registry: Record<string, WorkflowRegistryEntry> = {};
|
||||||
|
|
||||||
|
for (const [workflowId, context] of this.activeWorkflows.entries()) {
|
||||||
|
registry[workflowId] = {
|
||||||
|
workflowId,
|
||||||
|
taskId: context.taskId,
|
||||||
|
status: context.status,
|
||||||
|
worktreePath: context.worktreePath,
|
||||||
|
processId: context.processId,
|
||||||
|
startedAt: context.startedAt.toISOString(),
|
||||||
|
lastActivity: context.lastActivity.toISOString(),
|
||||||
|
branchName: context.branchName,
|
||||||
|
metadata: context.metadata
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
await fs.writeFile(
|
||||||
|
this.stateFilePath,
|
||||||
|
JSON.stringify(registry, null, 2),
|
||||||
|
'utf-8'
|
||||||
|
);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
'Failed to save workflow state',
|
||||||
|
'WORKFLOW_STATE_SAVE_ERROR',
|
||||||
|
undefined,
|
||||||
|
undefined,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a new workflow
|
||||||
|
*/
|
||||||
|
async registerWorkflow(context: WorkflowExecutionContext): Promise<string> {
|
||||||
|
const workflowId = this.generateWorkflowId(context.taskId);
|
||||||
|
|
||||||
|
this.activeWorkflows.set(workflowId, {
|
||||||
|
...context,
|
||||||
|
lastActivity: new Date()
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.saveState();
|
||||||
|
return workflowId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update workflow context
|
||||||
|
*/
|
||||||
|
async updateWorkflow(
|
||||||
|
workflowId: string,
|
||||||
|
updates: Partial<WorkflowExecutionContext>
|
||||||
|
): Promise<void> {
|
||||||
|
const existing = this.activeWorkflows.get(workflowId);
|
||||||
|
if (!existing) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Workflow ${workflowId} not found`,
|
||||||
|
'WORKFLOW_NOT_FOUND',
|
||||||
|
workflowId
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const updated = {
|
||||||
|
...existing,
|
||||||
|
...updates,
|
||||||
|
lastActivity: new Date()
|
||||||
|
};
|
||||||
|
|
||||||
|
this.activeWorkflows.set(workflowId, updated);
|
||||||
|
await this.saveState();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update workflow status
|
||||||
|
*/
|
||||||
|
async updateWorkflowStatus(workflowId: string, status: WorkflowStatus): Promise<void> {
|
||||||
|
await this.updateWorkflow(workflowId, { status });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unregister a workflow (remove from state)
|
||||||
|
*/
|
||||||
|
async unregisterWorkflow(workflowId: string): Promise<void> {
|
||||||
|
if (!this.activeWorkflows.has(workflowId)) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Workflow ${workflowId} not found`,
|
||||||
|
'WORKFLOW_NOT_FOUND',
|
||||||
|
workflowId
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.activeWorkflows.delete(workflowId);
|
||||||
|
await this.saveState();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get workflow context by ID
|
||||||
|
*/
|
||||||
|
getWorkflow(workflowId: string): WorkflowExecutionContext | undefined {
|
||||||
|
return this.activeWorkflows.get(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get workflow by task ID
|
||||||
|
*/
|
||||||
|
getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
|
||||||
|
for (const context of this.activeWorkflows.values()) {
|
||||||
|
if (context.taskId === taskId) {
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all active workflows
|
||||||
|
*/
|
||||||
|
listWorkflows(): WorkflowExecutionContext[] {
|
||||||
|
return Array.from(this.activeWorkflows.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List workflows by status
|
||||||
|
*/
|
||||||
|
listWorkflowsByStatus(status: WorkflowStatus): WorkflowExecutionContext[] {
|
||||||
|
return this.listWorkflows().filter(w => w.status === status);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get running workflows count
|
||||||
|
*/
|
||||||
|
getRunningCount(): number {
|
||||||
|
return this.listWorkflowsByStatus('running').length;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a task has an active workflow
|
||||||
|
*/
|
||||||
|
hasActiveWorkflow(taskId: string): boolean {
|
||||||
|
return this.getWorkflowByTaskId(taskId) !== undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up completed/failed workflows older than specified time
|
||||||
|
*/
|
||||||
|
async cleanupOldWorkflows(olderThanHours = 24): Promise<number> {
|
||||||
|
const cutoffTime = new Date(Date.now() - (olderThanHours * 60 * 60 * 1000));
|
||||||
|
let cleaned = 0;
|
||||||
|
|
||||||
|
for (const [workflowId, context] of this.activeWorkflows.entries()) {
|
||||||
|
const isOld = context.lastActivity < cutoffTime;
|
||||||
|
const isFinished = ['completed', 'failed', 'cancelled', 'timeout'].includes(context.status);
|
||||||
|
|
||||||
|
if (isOld && isFinished) {
|
||||||
|
this.activeWorkflows.delete(workflowId);
|
||||||
|
cleaned++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cleaned > 0) {
|
||||||
|
await this.saveState();
|
||||||
|
}
|
||||||
|
|
||||||
|
return cleaned;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all workflow state
|
||||||
|
*/
|
||||||
|
async clearState(): Promise<void> {
|
||||||
|
try {
|
||||||
|
await fs.unlink(this.stateFilePath);
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code !== 'ENOENT') {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.activeWorkflows.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record workflow event (for audit trail)
|
||||||
|
*/
|
||||||
|
async recordEvent(event: WorkflowEvent): Promise<void> {
|
||||||
|
// Update workflow last activity
|
||||||
|
const workflow = this.activeWorkflows.get(event.workflowId);
|
||||||
|
if (workflow) {
|
||||||
|
workflow.lastActivity = event.timestamp;
|
||||||
|
await this.saveState();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Optional: Could extend to maintain event log file
|
||||||
|
if (process.env.TASKMASTER_DEBUG) {
|
||||||
|
console.log('Workflow Event:', {
|
||||||
|
type: event.type,
|
||||||
|
workflowId: event.workflowId,
|
||||||
|
taskId: event.taskId,
|
||||||
|
timestamp: event.timestamp.toISOString(),
|
||||||
|
data: event.data
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate unique workflow ID
|
||||||
|
*/
|
||||||
|
private generateWorkflowId(taskId: string): string {
|
||||||
|
const timestamp = Date.now();
|
||||||
|
const random = Math.random().toString(36).substring(2, 8);
|
||||||
|
return `workflow-${taskId}-${timestamp}-${random}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/workflow-engine/src/task-execution/index.ts
Normal file
6
packages/workflow-engine/src/task-execution/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
/**
 * @fileoverview Task Execution Management
 * Public exports for task execution operations (workflow orchestration).
 */

export * from './task-execution-manager.js';
|
||||||
@@ -0,0 +1,433 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Task Execution Manager
|
||||||
|
* Orchestrates the complete task execution workflow using worktrees and processes
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { EventEmitter } from 'node:events';
|
||||||
|
import path from 'node:path';
|
||||||
|
import type { Task } from '@tm/core';
|
||||||
|
import {
|
||||||
|
WorktreeManager,
|
||||||
|
type WorktreeManagerConfig
|
||||||
|
} from '../worktree/worktree-manager.js';
|
||||||
|
import {
|
||||||
|
ProcessSandbox,
|
||||||
|
type ProcessSandboxConfig
|
||||||
|
} from '../process/process-sandbox.js';
|
||||||
|
import {
|
||||||
|
WorkflowStateManager,
|
||||||
|
type WorkflowStateConfig
|
||||||
|
} from '../state/workflow-state-manager.js';
|
||||||
|
import type {
|
||||||
|
WorkflowConfig,
|
||||||
|
WorkflowExecutionContext,
|
||||||
|
WorkflowStatus,
|
||||||
|
WorkflowEvent
|
||||||
|
} from '../types/workflow.types.js';
|
||||||
|
import {
|
||||||
|
WorkflowError,
|
||||||
|
WorkflowNotFoundError,
|
||||||
|
MaxConcurrentWorkflowsError,
|
||||||
|
WorkflowTimeoutError
|
||||||
|
} from '../errors/workflow.errors.js';
|
||||||
|
|
||||||
|
export interface TaskExecutionManagerConfig extends WorkflowConfig {
|
||||||
|
/** Project root directory */
|
||||||
|
projectRoot: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* TaskExecutionManager orchestrates the complete task execution workflow
|
||||||
|
* Coordinates worktree creation, process spawning, and state management
|
||||||
|
*/
|
||||||
|
export class TaskExecutionManager extends EventEmitter {
|
||||||
|
private config: TaskExecutionManagerConfig;
|
||||||
|
private worktreeManager: WorktreeManager;
|
||||||
|
private processSandbox: ProcessSandbox;
|
||||||
|
private stateManager: WorkflowStateManager;
|
||||||
|
private initialized = false;
|
||||||
|
|
||||||
|
constructor(config: TaskExecutionManagerConfig) {
|
||||||
|
super();
|
||||||
|
this.config = config;
|
||||||
|
|
||||||
|
// Initialize component managers
|
||||||
|
const worktreeConfig: WorktreeManagerConfig = {
|
||||||
|
worktreeBase: config.worktreeBase,
|
||||||
|
projectRoot: config.projectRoot,
|
||||||
|
autoCleanup: true
|
||||||
|
};
|
||||||
|
|
||||||
|
const processConfig: ProcessSandboxConfig = {
|
||||||
|
claudeExecutable: config.claudeExecutable,
|
||||||
|
defaultTimeout: config.defaultTimeout,
|
||||||
|
debug: config.debug
|
||||||
|
};
|
||||||
|
|
||||||
|
const stateConfig: WorkflowStateConfig = {
|
||||||
|
projectRoot: config.projectRoot
|
||||||
|
};
|
||||||
|
|
||||||
|
this.worktreeManager = new WorktreeManager(worktreeConfig);
|
||||||
|
this.processSandbox = new ProcessSandbox(processConfig);
|
||||||
|
this.stateManager = new WorkflowStateManager(stateConfig);
|
||||||
|
|
||||||
|
// Forward events from components
|
||||||
|
this.processSandbox.on('event', (event: WorkflowEvent) => {
|
||||||
|
this.stateManager.recordEvent(event);
|
||||||
|
this.emit('event', event);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the task execution manager
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
if (this.initialized) return;
|
||||||
|
|
||||||
|
await this.stateManager.loadState();
|
||||||
|
|
||||||
|
// Clean up any stale workflows
|
||||||
|
await this.cleanupStaleWorkflows();
|
||||||
|
|
||||||
|
this.initialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start task execution workflow
|
||||||
|
*/
|
||||||
|
async startTaskExecution(
|
||||||
|
task: Task,
|
||||||
|
options?: {
|
||||||
|
branchName?: string;
|
||||||
|
timeout?: number;
|
||||||
|
env?: Record<string, string>;
|
||||||
|
}
|
||||||
|
): Promise<string> {
|
||||||
|
if (!this.initialized) {
|
||||||
|
await this.initialize();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check concurrent workflow limit
|
||||||
|
const runningCount = this.stateManager.getRunningCount();
|
||||||
|
if (runningCount >= this.config.maxConcurrent) {
|
||||||
|
throw new MaxConcurrentWorkflowsError(this.config.maxConcurrent);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if task already has an active workflow
|
||||||
|
if (this.stateManager.hasActiveWorkflow(task.id)) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Task ${task.id} already has an active workflow`,
|
||||||
|
'TASK_ALREADY_EXECUTING',
|
||||||
|
undefined,
|
||||||
|
task.id
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Create worktree
|
||||||
|
const worktreeInfo = await this.worktreeManager.createWorktree(
|
||||||
|
task.id,
|
||||||
|
options?.branchName
|
||||||
|
);
|
||||||
|
|
||||||
|
// Prepare task context
|
||||||
|
const context: WorkflowExecutionContext = {
|
||||||
|
taskId: task.id,
|
||||||
|
taskTitle: task.title,
|
||||||
|
taskDescription: task.description,
|
||||||
|
taskDetails: task.details,
|
||||||
|
projectRoot: this.config.projectRoot,
|
||||||
|
worktreePath: worktreeInfo.path,
|
||||||
|
branchName: worktreeInfo.branch,
|
||||||
|
startedAt: new Date(),
|
||||||
|
status: 'initializing',
|
||||||
|
lastActivity: new Date(),
|
||||||
|
metadata: {
|
||||||
|
priority: task.priority,
|
||||||
|
dependencies: task.dependencies
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Register workflow
|
||||||
|
const workflowId = await this.stateManager.registerWorkflow(context);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Prepare task prompt for Claude Code
|
||||||
|
const taskPrompt = this.generateTaskPrompt(task);
|
||||||
|
|
||||||
|
// Start Claude Code process
|
||||||
|
const process = await this.processSandbox.startProcess(
|
||||||
|
workflowId,
|
||||||
|
task.id,
|
||||||
|
taskPrompt,
|
||||||
|
{
|
||||||
|
cwd: worktreeInfo.path,
|
||||||
|
timeout: options?.timeout,
|
||||||
|
env: options?.env
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// Update workflow with process information
|
||||||
|
await this.stateManager.updateWorkflow(workflowId, {
|
||||||
|
processId: process.pid,
|
||||||
|
status: 'running'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Emit workflow started event
|
||||||
|
this.emitEvent('workflow.started', workflowId, task.id, {
|
||||||
|
worktreePath: worktreeInfo.path,
|
||||||
|
processId: process.pid
|
||||||
|
});
|
||||||
|
|
||||||
|
return workflowId;
|
||||||
|
} catch (error) {
|
||||||
|
// Clean up worktree if process failed to start
|
||||||
|
await this.worktreeManager.removeWorktree(task.id, true);
|
||||||
|
await this.stateManager.unregisterWorkflow(workflowId);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Failed to start task execution for ${task.id}`,
|
||||||
|
'TASK_EXECUTION_START_ERROR',
|
||||||
|
undefined,
|
||||||
|
task.id,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop task execution workflow
|
||||||
|
*/
|
||||||
|
async stopTaskExecution(workflowId: string, force = false): Promise<void> {
|
||||||
|
const workflow = this.stateManager.getWorkflow(workflowId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new WorkflowNotFoundError(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Stop the process if running
|
||||||
|
if (this.processSandbox.isProcessRunning(workflowId)) {
|
||||||
|
await this.processSandbox.stopProcess(workflowId, force);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update workflow status
|
||||||
|
const status: WorkflowStatus = force ? 'cancelled' : 'completed';
|
||||||
|
await this.stateManager.updateWorkflowStatus(workflowId, status);
|
||||||
|
|
||||||
|
// Clean up worktree
|
||||||
|
await this.worktreeManager.removeWorktree(workflow.taskId, force);
|
||||||
|
|
||||||
|
// Emit workflow stopped event
|
||||||
|
this.emitEvent('workflow.completed', workflowId, workflow.taskId, {
|
||||||
|
status,
|
||||||
|
forced: force
|
||||||
|
});
|
||||||
|
|
||||||
|
// Unregister workflow
|
||||||
|
await this.stateManager.unregisterWorkflow(workflowId);
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Failed to stop workflow ${workflowId}`,
|
||||||
|
'WORKFLOW_STOP_ERROR',
|
||||||
|
workflowId,
|
||||||
|
workflow.taskId,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pause task execution
|
||||||
|
*/
|
||||||
|
async pauseTaskExecution(workflowId: string): Promise<void> {
|
||||||
|
const workflow = this.stateManager.getWorkflow(workflowId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new WorkflowNotFoundError(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (workflow.status !== 'running') {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Cannot pause workflow ${workflowId} - not currently running`,
|
||||||
|
'WORKFLOW_NOT_RUNNING',
|
||||||
|
workflowId,
|
||||||
|
workflow.taskId
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// For now, we'll just mark as paused - in the future could implement
|
||||||
|
// process suspension or other pause mechanisms
|
||||||
|
await this.stateManager.updateWorkflowStatus(workflowId, 'paused');
|
||||||
|
|
||||||
|
this.emitEvent('workflow.paused', workflowId, workflow.taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resume paused task execution
|
||||||
|
*/
|
||||||
|
async resumeTaskExecution(workflowId: string): Promise<void> {
|
||||||
|
const workflow = this.stateManager.getWorkflow(workflowId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new WorkflowNotFoundError(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (workflow.status !== 'paused') {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Cannot resume workflow ${workflowId} - not currently paused`,
|
||||||
|
'WORKFLOW_NOT_PAUSED',
|
||||||
|
workflowId,
|
||||||
|
workflow.taskId
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.stateManager.updateWorkflowStatus(workflowId, 'running');
|
||||||
|
|
||||||
|
this.emitEvent('workflow.resumed', workflowId, workflow.taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Look up the execution context of a workflow by its id.
 *
 * @param workflowId - Identifier of the workflow
 * @returns The workflow's execution context, or undefined when not registered
 */
getWorkflowStatus(workflowId: string): WorkflowExecutionContext | undefined {
	return this.stateManager.getWorkflow(workflowId);
}
|
||||||
|
|
||||||
|
/**
 * Look up the execution context of the workflow bound to a given task.
 *
 * @param taskId - Task identifier
 * @returns The workflow's execution context, or undefined when no workflow
 *   exists for the task
 */
getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
	return this.stateManager.getWorkflowByTaskId(taskId);
}
|
||||||
|
|
||||||
|
/**
 * Enumerate every workflow known to the state manager, regardless of status.
 *
 * @returns All registered workflow execution contexts
 */
listWorkflows(): WorkflowExecutionContext[] {
	return this.stateManager.listWorkflows();
}
|
||||||
|
|
||||||
|
/**
 * Enumerate only the workflows whose status is 'running'.
 *
 * @returns Execution contexts of all currently running workflows
 */
listActiveWorkflows(): WorkflowExecutionContext[] {
	return this.stateManager.listWorkflowsByStatus('running');
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send input to a running workflow
|
||||||
|
*/
|
||||||
|
async sendInputToWorkflow(workflowId: string, input: string): Promise<void> {
|
||||||
|
const workflow = this.stateManager.getWorkflow(workflowId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new WorkflowNotFoundError(workflowId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.processSandbox.isProcessRunning(workflowId)) {
|
||||||
|
throw new WorkflowError(
|
||||||
|
`Cannot send input to workflow ${workflowId} - process not running`,
|
||||||
|
'PROCESS_NOT_RUNNING',
|
||||||
|
workflowId,
|
||||||
|
workflow.taskId
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.processSandbox.sendInput(workflowId, input);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up all workflows
|
||||||
|
*/
|
||||||
|
async cleanup(force = false): Promise<void> {
|
||||||
|
// Stop all processes
|
||||||
|
await this.processSandbox.cleanupAll(force);
|
||||||
|
|
||||||
|
// Clean up all worktrees
|
||||||
|
await this.worktreeManager.cleanupAll(force);
|
||||||
|
|
||||||
|
// Clear workflow state
|
||||||
|
await this.stateManager.clearState();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate task prompt for Claude Code
|
||||||
|
*/
|
||||||
|
private generateTaskPrompt(task: Task): string {
|
||||||
|
const prompt = [
|
||||||
|
`Work on Task ${task.id}: ${task.title}`,
|
||||||
|
'',
|
||||||
|
`Description: ${task.description}`
|
||||||
|
];
|
||||||
|
|
||||||
|
if (task.details) {
|
||||||
|
prompt.push('', `Details: ${task.details}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (task.testStrategy) {
|
||||||
|
prompt.push('', `Test Strategy: ${task.testStrategy}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (task.dependencies?.length) {
|
||||||
|
prompt.push('', `Dependencies: ${task.dependencies.join(', ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
prompt.push(
|
||||||
|
'',
|
||||||
|
'Please implement this task following the project conventions and best practices.',
|
||||||
|
'When complete, update the task status appropriately using the available Task Master commands.'
|
||||||
|
);
|
||||||
|
|
||||||
|
return prompt.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up stale workflows from previous sessions
|
||||||
|
*/
|
||||||
|
private async cleanupStaleWorkflows(): Promise<void> {
|
||||||
|
const workflows = this.stateManager.listWorkflows();
|
||||||
|
|
||||||
|
for (const workflow of workflows) {
|
||||||
|
const isStale =
|
||||||
|
workflow.status === 'running' &&
|
||||||
|
!this.processSandbox.isProcessRunning(`workflow-${workflow.taskId}`);
|
||||||
|
|
||||||
|
if (isStale) {
|
||||||
|
console.log(`Cleaning up stale workflow for task ${workflow.taskId}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.stateManager.updateWorkflowStatus(
|
||||||
|
`workflow-${workflow.taskId}`,
|
||||||
|
'failed'
|
||||||
|
);
|
||||||
|
|
||||||
|
// Try to clean up worktree
|
||||||
|
await this.worktreeManager.removeWorktree(workflow.taskId, true);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to cleanup stale workflow:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Emit workflow event
|
||||||
|
*/
|
||||||
|
private emitEvent(
|
||||||
|
type: string,
|
||||||
|
workflowId: string,
|
||||||
|
taskId: string,
|
||||||
|
data?: any
|
||||||
|
): void {
|
||||||
|
const event: WorkflowEvent = {
|
||||||
|
type: type as any,
|
||||||
|
workflowId,
|
||||||
|
taskId,
|
||||||
|
timestamp: new Date(),
|
||||||
|
data
|
||||||
|
};
|
||||||
|
|
||||||
|
this.emit('event', event);
|
||||||
|
this.emit(type, event);
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/workflow-engine/src/types/index.ts
Normal file
6
packages/workflow-engine/src/types/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Engine Types
|
||||||
|
* Public type exports
|
||||||
|
*/
|
||||||
|
|
||||||
|
export * from './workflow.types.js';
|
||||||
119
packages/workflow-engine/src/types/workflow.types.ts
Normal file
119
packages/workflow-engine/src/types/workflow.types.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Workflow Engine Types
|
||||||
|
* Core types for workflow execution system
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface WorkflowConfig {
|
||||||
|
/** Maximum number of concurrent workflows */
|
||||||
|
maxConcurrent: number;
|
||||||
|
/** Default timeout for workflow execution (minutes) */
|
||||||
|
defaultTimeout: number;
|
||||||
|
/** Base directory for worktrees */
|
||||||
|
worktreeBase: string;
|
||||||
|
/** Claude Code executable path */
|
||||||
|
claudeExecutable: string;
|
||||||
|
/** Enable debug logging */
|
||||||
|
debug: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WorkflowExecutionContext {
|
||||||
|
/** Task ID being executed */
|
||||||
|
taskId: string;
|
||||||
|
/** Task title for display */
|
||||||
|
taskTitle: string;
|
||||||
|
/** Full task description */
|
||||||
|
taskDescription: string;
|
||||||
|
/** Task implementation details */
|
||||||
|
taskDetails?: string;
|
||||||
|
/** Project root path */
|
||||||
|
projectRoot: string;
|
||||||
|
/** Worktree path */
|
||||||
|
worktreePath: string;
|
||||||
|
/** Branch name for this workflow */
|
||||||
|
branchName: string;
|
||||||
|
/** Process ID of running Claude Code */
|
||||||
|
processId?: number;
|
||||||
|
/** Workflow start time */
|
||||||
|
startedAt: Date;
|
||||||
|
/** Workflow status */
|
||||||
|
status: WorkflowStatus;
|
||||||
|
/** Last activity timestamp */
|
||||||
|
lastActivity: Date;
|
||||||
|
/** Execution metadata */
|
||||||
|
metadata?: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type WorkflowStatus =
|
||||||
|
| 'pending' // Created but not started
|
||||||
|
| 'initializing' // Setting up worktree/process
|
||||||
|
| 'running' // Active execution
|
||||||
|
| 'paused' // Temporarily stopped
|
||||||
|
| 'completed' // Successfully finished
|
||||||
|
| 'failed' // Error occurred
|
||||||
|
| 'cancelled' // User cancelled
|
||||||
|
| 'timeout'; // Exceeded time limit
|
||||||
|
|
||||||
|
export interface WorkflowEvent {
|
||||||
|
type: WorkflowEventType;
|
||||||
|
workflowId: string;
|
||||||
|
taskId: string;
|
||||||
|
timestamp: Date;
|
||||||
|
data?: any;
|
||||||
|
error?: Error;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type WorkflowEventType =
|
||||||
|
| 'workflow.created'
|
||||||
|
| 'workflow.started'
|
||||||
|
| 'workflow.paused'
|
||||||
|
| 'workflow.resumed'
|
||||||
|
| 'workflow.completed'
|
||||||
|
| 'workflow.failed'
|
||||||
|
| 'workflow.cancelled'
|
||||||
|
| 'worktree.created'
|
||||||
|
| 'worktree.deleted'
|
||||||
|
| 'process.started'
|
||||||
|
| 'process.stopped'
|
||||||
|
| 'process.output'
|
||||||
|
| 'process.error';
|
||||||
|
|
||||||
|
export interface WorkflowProcess {
|
||||||
|
/** Process ID */
|
||||||
|
pid: number;
|
||||||
|
/** Command that was executed */
|
||||||
|
command: string;
|
||||||
|
/** Command arguments */
|
||||||
|
args: string[];
|
||||||
|
/** Working directory */
|
||||||
|
cwd: string;
|
||||||
|
/** Environment variables */
|
||||||
|
env?: Record<string, string>;
|
||||||
|
/** Process start time */
|
||||||
|
startedAt: Date;
|
||||||
|
/** Process status */
|
||||||
|
status: ProcessStatus;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ProcessStatus =
|
||||||
|
| 'starting'
|
||||||
|
| 'running'
|
||||||
|
| 'stopped'
|
||||||
|
| 'crashed'
|
||||||
|
| 'killed';
|
||||||
|
|
||||||
|
export interface WorktreeInfo {
|
||||||
|
/** Worktree path */
|
||||||
|
path: string;
|
||||||
|
/** Branch name */
|
||||||
|
branch: string;
|
||||||
|
/** Creation timestamp */
|
||||||
|
createdAt: Date;
|
||||||
|
/** Associated task ID */
|
||||||
|
taskId: string;
|
||||||
|
/** Git commit hash */
|
||||||
|
commit?: string;
|
||||||
|
/** Worktree lock status */
|
||||||
|
locked: boolean;
|
||||||
|
/** Lock reason if applicable */
|
||||||
|
lockReason?: string;
|
||||||
|
}
|
||||||
6
packages/workflow-engine/src/worktree/index.ts
Normal file
6
packages/workflow-engine/src/worktree/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Worktree Management
|
||||||
|
* Public exports for worktree operations
|
||||||
|
*/
|
||||||
|
|
||||||
|
export * from './worktree-manager.js';
|
||||||
351
packages/workflow-engine/src/worktree/worktree-manager.ts
Normal file
351
packages/workflow-engine/src/worktree/worktree-manager.ts
Normal file
@@ -0,0 +1,351 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview Worktree Manager
|
||||||
|
* Manages git worktree lifecycle for task execution
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { spawn } from 'node:child_process';
|
||||||
|
import { promises as fs } from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
import type { WorktreeInfo } from '../types/workflow.types.js';
|
||||||
|
import { WorktreeError } from '../errors/workflow.errors.js';
|
||||||
|
|
||||||
|
export interface WorktreeManagerConfig {
|
||||||
|
/** Base directory for all worktrees */
|
||||||
|
worktreeBase: string;
|
||||||
|
/** Project root directory */
|
||||||
|
projectRoot: string;
|
||||||
|
/** Auto-cleanup on process exit */
|
||||||
|
autoCleanup: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* WorktreeManager handles git worktree operations
|
||||||
|
* Single responsibility: Git worktree lifecycle management
|
||||||
|
*/
|
||||||
|
export class WorktreeManager {
|
||||||
|
private config: WorktreeManagerConfig;
|
||||||
|
private activeWorktrees = new Map<string, WorktreeInfo>();
|
||||||
|
|
||||||
|
constructor(config: WorktreeManagerConfig) {
|
||||||
|
this.config = config;
|
||||||
|
|
||||||
|
if (config.autoCleanup) {
|
||||||
|
this.setupCleanupHandlers();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new worktree for task execution
|
||||||
|
*/
|
||||||
|
async createWorktree(taskId: string, branchName?: string): Promise<WorktreeInfo> {
|
||||||
|
const sanitizedTaskId = this.sanitizeTaskId(taskId);
|
||||||
|
const worktreePath = path.join(this.config.worktreeBase, `task-${sanitizedTaskId}`);
|
||||||
|
|
||||||
|
// Ensure base directory exists
|
||||||
|
await fs.mkdir(this.config.worktreeBase, { recursive: true });
|
||||||
|
|
||||||
|
// Generate unique branch name if not provided
|
||||||
|
const branch = branchName || `task/${sanitizedTaskId}-${Date.now()}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Check if worktree path already exists
|
||||||
|
if (await this.pathExists(worktreePath)) {
|
||||||
|
throw new WorktreeError(`Worktree path already exists: ${worktreePath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the worktree
|
||||||
|
await this.executeGitCommand(['worktree', 'add', '-b', branch, worktreePath], {
|
||||||
|
cwd: this.config.projectRoot
|
||||||
|
});
|
||||||
|
|
||||||
|
const worktreeInfo: WorktreeInfo = {
|
||||||
|
path: worktreePath,
|
||||||
|
branch,
|
||||||
|
createdAt: new Date(),
|
||||||
|
taskId,
|
||||||
|
locked: false
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get commit hash
|
||||||
|
try {
|
||||||
|
const commit = await this.executeGitCommand(['rev-parse', 'HEAD'], {
|
||||||
|
cwd: worktreePath
|
||||||
|
});
|
||||||
|
worktreeInfo.commit = commit.trim();
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to get commit hash for worktree:', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.activeWorktrees.set(taskId, worktreeInfo);
|
||||||
|
return worktreeInfo;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorktreeError(
|
||||||
|
`Failed to create worktree for task ${taskId}`,
|
||||||
|
worktreePath,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a worktree and clean up
|
||||||
|
*/
|
||||||
|
async removeWorktree(taskId: string, force = false): Promise<void> {
|
||||||
|
const worktreeInfo = this.activeWorktrees.get(taskId);
|
||||||
|
if (!worktreeInfo) {
|
||||||
|
throw new WorktreeError(`No active worktree found for task ${taskId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Remove the worktree
|
||||||
|
const args = ['worktree', 'remove', worktreeInfo.path];
|
||||||
|
if (force) {
|
||||||
|
args.push('--force');
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.executeGitCommand(args, {
|
||||||
|
cwd: this.config.projectRoot
|
||||||
|
});
|
||||||
|
|
||||||
|
// Remove branch if it's a task-specific branch
|
||||||
|
if (worktreeInfo.branch.startsWith('task/')) {
|
||||||
|
try {
|
||||||
|
await this.executeGitCommand(['branch', '-D', worktreeInfo.branch], {
|
||||||
|
cwd: this.config.projectRoot
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to delete branch ${worktreeInfo.branch}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.activeWorktrees.delete(taskId);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorktreeError(
|
||||||
|
`Failed to remove worktree for task ${taskId}`,
|
||||||
|
worktreeInfo.path,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all active worktrees for this project
|
||||||
|
*/
|
||||||
|
async listWorktrees(): Promise<WorktreeInfo[]> {
|
||||||
|
try {
|
||||||
|
const output = await this.executeGitCommand(['worktree', 'list', '--porcelain'], {
|
||||||
|
cwd: this.config.projectRoot
|
||||||
|
});
|
||||||
|
|
||||||
|
const worktrees: WorktreeInfo[] = [];
|
||||||
|
const lines = output.trim().split('\n');
|
||||||
|
|
||||||
|
let currentWorktree: Partial<WorktreeInfo> = {};
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith('worktree ')) {
|
||||||
|
if (currentWorktree.path) {
|
||||||
|
// Complete previous worktree
|
||||||
|
worktrees.push(this.completeWorktreeInfo(currentWorktree));
|
||||||
|
}
|
||||||
|
currentWorktree = { path: line.substring(9) };
|
||||||
|
} else if (line.startsWith('HEAD ')) {
|
||||||
|
currentWorktree.commit = line.substring(5);
|
||||||
|
} else if (line.startsWith('branch ')) {
|
||||||
|
currentWorktree.branch = line.substring(7).replace('refs/heads/', '');
|
||||||
|
} else if (line === 'locked') {
|
||||||
|
currentWorktree.locked = true;
|
||||||
|
} else if (line.startsWith('locked ')) {
|
||||||
|
currentWorktree.locked = true;
|
||||||
|
currentWorktree.lockReason = line.substring(7);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the last worktree
|
||||||
|
if (currentWorktree.path) {
|
||||||
|
worktrees.push(this.completeWorktreeInfo(currentWorktree));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter to only our task worktrees
|
||||||
|
return worktrees.filter(wt =>
|
||||||
|
wt.path.startsWith(this.config.worktreeBase) &&
|
||||||
|
wt.branch?.startsWith('task/')
|
||||||
|
);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorktreeError('Failed to list worktrees', undefined, error as Error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get worktree info for a specific task
|
||||||
|
*/
|
||||||
|
getWorktreeInfo(taskId: string): WorktreeInfo | undefined {
|
||||||
|
return this.activeWorktrees.get(taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lock a worktree to prevent cleanup
|
||||||
|
*/
|
||||||
|
async lockWorktree(taskId: string, reason?: string): Promise<void> {
|
||||||
|
const worktreeInfo = this.activeWorktrees.get(taskId);
|
||||||
|
if (!worktreeInfo) {
|
||||||
|
throw new WorktreeError(`No active worktree found for task ${taskId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const args = ['worktree', 'lock', worktreeInfo.path];
|
||||||
|
if (reason) {
|
||||||
|
args.push('--reason', reason);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.executeGitCommand(args, {
|
||||||
|
cwd: this.config.projectRoot
|
||||||
|
});
|
||||||
|
|
||||||
|
worktreeInfo.locked = true;
|
||||||
|
worktreeInfo.lockReason = reason;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorktreeError(
|
||||||
|
`Failed to lock worktree for task ${taskId}`,
|
||||||
|
worktreeInfo.path,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unlock a worktree
|
||||||
|
*/
|
||||||
|
async unlockWorktree(taskId: string): Promise<void> {
|
||||||
|
const worktreeInfo = this.activeWorktrees.get(taskId);
|
||||||
|
if (!worktreeInfo) {
|
||||||
|
throw new WorktreeError(`No active worktree found for task ${taskId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await this.executeGitCommand(['worktree', 'unlock', worktreeInfo.path], {
|
||||||
|
cwd: this.config.projectRoot
|
||||||
|
});
|
||||||
|
|
||||||
|
worktreeInfo.locked = false;
|
||||||
|
delete worktreeInfo.lockReason;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new WorktreeError(
|
||||||
|
`Failed to unlock worktree for task ${taskId}`,
|
||||||
|
worktreeInfo.path,
|
||||||
|
error as Error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up all task-related worktrees
|
||||||
|
*/
|
||||||
|
async cleanupAll(force = false): Promise<void> {
|
||||||
|
const worktrees = await this.listWorktrees();
|
||||||
|
|
||||||
|
for (const worktree of worktrees) {
|
||||||
|
if (worktree.taskId) {
|
||||||
|
try {
|
||||||
|
await this.removeWorktree(worktree.taskId, force);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to cleanup worktree for task ${worktree.taskId}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute git command and return output
|
||||||
|
*/
|
||||||
|
private async executeGitCommand(
|
||||||
|
args: string[],
|
||||||
|
options: { cwd: string }
|
||||||
|
): Promise<string> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const git = spawn('git', args, {
|
||||||
|
cwd: options.cwd,
|
||||||
|
stdio: ['ignore', 'pipe', 'pipe']
|
||||||
|
});
|
||||||
|
|
||||||
|
let stdout = '';
|
||||||
|
let stderr = '';
|
||||||
|
|
||||||
|
git.stdout.on('data', (data) => {
|
||||||
|
stdout += data.toString();
|
||||||
|
});
|
||||||
|
|
||||||
|
git.stderr.on('data', (data) => {
|
||||||
|
stderr += data.toString();
|
||||||
|
});
|
||||||
|
|
||||||
|
git.on('close', (code) => {
|
||||||
|
if (code === 0) {
|
||||||
|
resolve(stdout);
|
||||||
|
} else {
|
||||||
|
reject(new Error(`Git command failed (${code}): ${stderr || stdout}`));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
git.on('error', (error) => {
|
||||||
|
reject(error);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sanitize task ID for use in filesystem paths
|
||||||
|
*/
|
||||||
|
private sanitizeTaskId(taskId: string): string {
|
||||||
|
return taskId.replace(/[^a-zA-Z0-9.-]/g, '-');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if path exists
|
||||||
|
*/
|
||||||
|
private async pathExists(path: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await fs.access(path);
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Complete worktree info with defaults
|
||||||
|
*/
|
||||||
|
private completeWorktreeInfo(partial: Partial<WorktreeInfo>): WorktreeInfo {
|
||||||
|
const branch = partial.branch || 'unknown';
|
||||||
|
const taskIdMatch = branch.match(/^task\/(.+?)-/);
|
||||||
|
|
||||||
|
return {
|
||||||
|
path: partial.path || '',
|
||||||
|
branch,
|
||||||
|
createdAt: partial.createdAt || new Date(),
|
||||||
|
taskId: taskIdMatch?.[1] || partial.taskId || 'unknown',
|
||||||
|
commit: partial.commit,
|
||||||
|
locked: partial.locked || false,
|
||||||
|
lockReason: partial.lockReason
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup cleanup handlers for graceful shutdown
|
||||||
|
*/
|
||||||
|
private setupCleanupHandlers(): void {
|
||||||
|
const cleanup = () => {
|
||||||
|
console.log('Cleaning up worktrees...');
|
||||||
|
this.cleanupAll(true).catch(console.error);
|
||||||
|
};
|
||||||
|
|
||||||
|
process.on('SIGINT', cleanup);
|
||||||
|
process.on('SIGTERM', cleanup);
|
||||||
|
process.on('exit', cleanup);
|
||||||
|
}
|
||||||
|
}
|
||||||
19
packages/workflow-engine/tsconfig.json
Normal file
19
packages/workflow-engine/tsconfig.json
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
{
|
||||||
|
"extends": "../../tsconfig.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"outDir": "dist",
|
||||||
|
"rootDir": "src",
|
||||||
|
"declaration": true,
|
||||||
|
"declarationMap": true,
|
||||||
|
"sourceMap": true
|
||||||
|
},
|
||||||
|
"include": [
|
||||||
|
"src/**/*"
|
||||||
|
],
|
||||||
|
"exclude": [
|
||||||
|
"dist",
|
||||||
|
"node_modules",
|
||||||
|
"**/*.test.ts",
|
||||||
|
"**/*.spec.ts"
|
||||||
|
]
|
||||||
|
}
|
||||||
17
packages/workflow-engine/tsup.config.ts
Normal file
17
packages/workflow-engine/tsup.config.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import { defineConfig } from 'tsup';
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
entry: [
|
||||||
|
'src/index.ts',
|
||||||
|
'src/task-execution/index.ts',
|
||||||
|
'src/worktree/index.ts',
|
||||||
|
'src/process/index.ts',
|
||||||
|
'src/state/index.ts'
|
||||||
|
],
|
||||||
|
format: ['esm'],
|
||||||
|
dts: true,
|
||||||
|
sourcemap: true,
|
||||||
|
clean: true,
|
||||||
|
splitting: false,
|
||||||
|
treeshake: true
|
||||||
|
});
|
||||||
19
packages/workflow-engine/vitest.config.ts
Normal file
19
packages/workflow-engine/vitest.config.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { defineConfig } from 'vitest/config';
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
test: {
|
||||||
|
environment: 'node',
|
||||||
|
globals: true,
|
||||||
|
coverage: {
|
||||||
|
provider: 'v8',
|
||||||
|
reporter: ['text', 'json', 'html'],
|
||||||
|
exclude: [
|
||||||
|
'node_modules/',
|
||||||
|
'dist/',
|
||||||
|
'**/*.d.ts',
|
||||||
|
'**/*.test.ts',
|
||||||
|
'**/*.spec.ts'
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -15,13 +15,8 @@ import search from '@inquirer/search';
|
|||||||
import ora from 'ora'; // Import ora
|
import ora from 'ora'; // Import ora
|
||||||
|
|
||||||
import { log, readJSON } from './utils.js';
|
import { log, readJSON } from './utils.js';
|
||||||
// Import new commands from @tm/cli
|
// Import command registry from @tm/cli
|
||||||
import {
|
import { registerAllCommands } from '@tm/cli';
|
||||||
ListTasksCommand,
|
|
||||||
ShowCommand,
|
|
||||||
AuthCommand,
|
|
||||||
ContextCommand
|
|
||||||
} from '@tm/cli';
|
|
||||||
|
|
||||||
import {
|
import {
|
||||||
parsePRD,
|
parsePRD,
|
||||||
@@ -1742,21 +1737,9 @@ function registerCommands(programInstance) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// NEW: Register the new list command from @tm/cli
|
// Register all commands from @tm/cli using the command registry
|
||||||
// This command handles all its own configuration and logic
|
// This automatically registers ListTasksCommand, AuthCommand, and any future commands
|
||||||
ListTasksCommand.registerOn(programInstance);
|
registerAllCommands(programInstance);
|
||||||
|
|
||||||
// Register the auth command from @tm/cli
|
|
||||||
// Handles authentication with tryhamster.com
|
|
||||||
AuthCommand.registerOn(programInstance);
|
|
||||||
|
|
||||||
// Register the context command from @tm/cli
|
|
||||||
// Manages workspace context (org/brief selection)
|
|
||||||
ContextCommand.registerOn(programInstance);
|
|
||||||
|
|
||||||
// Register the show command from @tm/cli
|
|
||||||
// Displays detailed information about tasks
|
|
||||||
ShowCommand.registerOn(programInstance);
|
|
||||||
|
|
||||||
// expand command
|
// expand command
|
||||||
programInstance
|
programInstance
|
||||||
@@ -2576,6 +2559,80 @@ ${result.result}
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// show command
|
||||||
|
programInstance
|
||||||
|
.command('show')
|
||||||
|
.description(
|
||||||
|
`Display detailed information about one or more tasks${chalk.reset('')}`
|
||||||
|
)
|
||||||
|
.argument('[id]', 'Task ID(s) to show (comma-separated for multiple)')
|
||||||
|
.option(
|
||||||
|
'-i, --id <id>',
|
||||||
|
'Task ID(s) to show (comma-separated for multiple)'
|
||||||
|
)
|
||||||
|
.option('-s, --status <status>', 'Filter subtasks by status')
|
||||||
|
.option(
|
||||||
|
'-f, --file <file>',
|
||||||
|
'Path to the tasks file',
|
||||||
|
TASKMASTER_TASKS_FILE
|
||||||
|
)
|
||||||
|
.option(
|
||||||
|
'-r, --report <report>',
|
||||||
|
'Path to the complexity report file',
|
||||||
|
COMPLEXITY_REPORT_FILE
|
||||||
|
)
|
||||||
|
.option('--tag <tag>', 'Specify tag context for task operations')
|
||||||
|
.action(async (taskId, options) => {
|
||||||
|
// Initialize TaskMaster
|
||||||
|
const initOptions = {
|
||||||
|
tasksPath: options.file || true,
|
||||||
|
tag: options.tag
|
||||||
|
};
|
||||||
|
// Only pass complexityReportPath if user provided a custom path
|
||||||
|
if (options.report && options.report !== COMPLEXITY_REPORT_FILE) {
|
||||||
|
initOptions.complexityReportPath = options.report;
|
||||||
|
}
|
||||||
|
const taskMaster = initTaskMaster(initOptions);
|
||||||
|
|
||||||
|
const idArg = taskId || options.id;
|
||||||
|
const statusFilter = options.status;
|
||||||
|
const tag = taskMaster.getCurrentTag();
|
||||||
|
|
||||||
|
// Show current tag context
|
||||||
|
displayCurrentTagIndicator(tag);
|
||||||
|
|
||||||
|
if (!idArg) {
|
||||||
|
console.error(chalk.red('Error: Please provide a task ID'));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if multiple IDs are provided (comma-separated)
|
||||||
|
const taskIds = idArg
|
||||||
|
.split(',')
|
||||||
|
.map((id) => id.trim())
|
||||||
|
.filter((id) => id.length > 0);
|
||||||
|
|
||||||
|
if (taskIds.length > 1) {
|
||||||
|
// Multiple tasks - use compact summary view with interactive drill-down
|
||||||
|
await displayMultipleTasksSummary(
|
||||||
|
taskMaster.getTasksPath(),
|
||||||
|
taskIds,
|
||||||
|
taskMaster.getComplexityReportPath(),
|
||||||
|
statusFilter,
|
||||||
|
{ projectRoot: taskMaster.getProjectRoot(), tag }
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// Single task - use detailed view
|
||||||
|
await displayTaskById(
|
||||||
|
taskMaster.getTasksPath(),
|
||||||
|
taskIds[0],
|
||||||
|
taskMaster.getComplexityReportPath(),
|
||||||
|
statusFilter,
|
||||||
|
{ projectRoot: taskMaster.getProjectRoot(), tag }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// add-dependency command
|
// add-dependency command
|
||||||
programInstance
|
programInstance
|
||||||
.command('add-dependency')
|
.command('add-dependency')
|
||||||
|
|||||||
@@ -198,13 +198,11 @@ jest.unstable_mockModule('fs', () => ({
|
|||||||
default: {
|
default: {
|
||||||
existsSync: jest.fn(() => false),
|
existsSync: jest.fn(() => false),
|
||||||
readFileSync: jest.fn(),
|
readFileSync: jest.fn(),
|
||||||
writeFileSync: mockWriteFileSync,
|
writeFileSync: mockWriteFileSync
|
||||||
unlinkSync: jest.fn()
|
|
||||||
},
|
},
|
||||||
existsSync: jest.fn(() => false),
|
existsSync: jest.fn(() => false),
|
||||||
readFileSync: jest.fn(),
|
readFileSync: jest.fn(),
|
||||||
writeFileSync: mockWriteFileSync,
|
writeFileSync: mockWriteFileSync
|
||||||
unlinkSync: jest.fn()
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
jest.unstable_mockModule(
|
jest.unstable_mockModule(
|
||||||
|
|||||||
@@ -1,26 +1,51 @@
|
|||||||
import { defineConfig } from 'tsup';
|
import { defineConfig } from 'tsup';
|
||||||
import { baseConfig, mergeConfig } from '@tm/build-config';
|
import { dotenvLoad } from 'dotenv-mono';
|
||||||
|
|
||||||
export default defineConfig(
|
// Load .env from root level (monorepo support)
|
||||||
mergeConfig(baseConfig, {
|
dotenvLoad();
|
||||||
entry: {
|
|
||||||
'task-master': 'bin/task-master.js',
|
// Get all TM_PUBLIC_* env variables for build-time injection
|
||||||
'mcp-server': 'mcp-server/server.js'
|
const getBuildTimeEnvs = () => {
|
||||||
},
|
const envs: Record<string, string> = {};
|
||||||
outDir: 'dist',
|
for (const [key, value] of Object.entries(process.env)) {
|
||||||
publicDir: 'public',
|
if (key.startsWith('TM_PUBLIC_')) {
|
||||||
// Bundle our monorepo packages but keep node_modules external
|
// Return the actual value, not JSON.stringify'd
|
||||||
noExternal: [/@tm\/.*/],
|
envs[key] = value || '';
|
||||||
// Ensure no code splitting
|
}
|
||||||
splitting: false,
|
}
|
||||||
// Better watch configuration
|
return envs;
|
||||||
ignoreWatch: [
|
};
|
||||||
'dist',
|
|
||||||
'node_modules',
|
export default defineConfig({
|
||||||
'.git',
|
entry: {
|
||||||
'tests',
|
'task-master': 'bin/task-master.js',
|
||||||
'*.test.*',
|
'mcp-server': 'mcp-server/server.js'
|
||||||
'*.spec.*'
|
},
|
||||||
]
|
format: ['esm'],
|
||||||
})
|
target: 'node18',
|
||||||
);
|
splitting: false,
|
||||||
|
sourcemap: true,
|
||||||
|
clean: true,
|
||||||
|
bundle: true, // Bundle everything into one file
|
||||||
|
outDir: 'dist',
|
||||||
|
publicDir: 'public',
|
||||||
|
// Handle TypeScript imports transparently
|
||||||
|
loader: {
|
||||||
|
'.js': 'jsx',
|
||||||
|
'.ts': 'ts'
|
||||||
|
},
|
||||||
|
// Replace process.env.TM_PUBLIC_* with actual values at build time
|
||||||
|
env: getBuildTimeEnvs(),
|
||||||
|
esbuildOptions(options) {
|
||||||
|
options.platform = 'node';
|
||||||
|
// Allow importing TypeScript from JavaScript
|
||||||
|
options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
|
||||||
|
},
|
||||||
|
// Bundle our monorepo packages but keep node_modules external
|
||||||
|
noExternal: [/@tm\/.*/],
|
||||||
|
// Don't bundle any other dependencies (auto-external all node_modules)
|
||||||
|
// This regex matches anything that doesn't start with . or /
|
||||||
|
external: [/^[^./]/],
|
||||||
|
// Add success message for debugging
|
||||||
|
onSuccess: 'echo "✅ Build completed successfully"'
|
||||||
|
});
|
||||||
|
|||||||
45
turbo.json
45
turbo.json
@@ -1,45 +0,0 @@
|
|||||||
{
|
|
||||||
"$schema": "https://turbo.build/schema.json",
|
|
||||||
"extends": ["//"],
|
|
||||||
"tasks": {
|
|
||||||
"build": {
|
|
||||||
"dependsOn": ["^build"],
|
|
||||||
"outputs": ["dist/**"],
|
|
||||||
"outputLogs": "new-only"
|
|
||||||
},
|
|
||||||
"dev": {
|
|
||||||
"cache": false,
|
|
||||||
"persistent": true,
|
|
||||||
"dependsOn": ["^build"],
|
|
||||||
"inputs": [
|
|
||||||
"$TURBO_DEFAULT$",
|
|
||||||
"!{packages,apps}/**/dist/**",
|
|
||||||
"!{packages,apps}/**/node_modules/**"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"test:watch": {
|
|
||||||
"cache": false,
|
|
||||||
"persistent": true,
|
|
||||||
"dependsOn": ["^build"]
|
|
||||||
},
|
|
||||||
"lint": {
|
|
||||||
"dependsOn": ["^build"],
|
|
||||||
"inputs": [
|
|
||||||
"$TURBO_DEFAULT$",
|
|
||||||
"!{packages,apps}/**/dist/**",
|
|
||||||
"!{packages,apps}/**/node_modules/**"
|
|
||||||
],
|
|
||||||
"outputLogs": "new-only"
|
|
||||||
},
|
|
||||||
"typecheck": {
|
|
||||||
"dependsOn": ["^build"],
|
|
||||||
"inputs": [
|
|
||||||
"$TURBO_DEFAULT$",
|
|
||||||
"!{packages,apps}/**/dist/**",
|
|
||||||
"!{packages,apps}/**/node_modules/**"
|
|
||||||
],
|
|
||||||
"outputLogs": "new-only"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"globalDependencies": ["turbo.json", "tsconfig.json", ".env*"]
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user