Compare commits
32 Commits
ralph/feat...docs/auto-
| SHA1 |
|---|
| 80929711be |
| 1a3a528bf7 |
| c164adc6ff |
| 9d61e0447d |
| ee11b735b3 |
| 6d978228d9 |
| ea9341e7af |
| 4296e383ea |
| 97b2781709 |
| 96553e4a5f |
| 7582219365 |
| 84baedc3d2 |
| 78da39edff |
| 4d1416b175 |
| dc811eb45e |
| 3c41a113fe |
| 0e8c42c7cb |
| 799d1d2cce |
| 83af314879 |
| dd03374496 |
| 4ab0affba7 |
| 77e1ddc237 |
| 3eeb19590a |
| 587745046f |
| c61c73f827 |
| 15900d9fd5 |
| 7cf4004038 |
| 0f3ab00f26 |
| e81040def5 |
| 597f6b03b4 |
| a7ad4c8e92 |
| 0d54747894 |
@@ -1,5 +0,0 @@
---
"task-master-ai": patch
---

docs(move): clarify cross-tag move docs; deprecate "force"; add explicit --with-dependencies/--ignore-dependencies examples

@@ -6,7 +6,7 @@
      "repo": "eyaltoledano/claude-task-master"
    }
  ],
  "commit": false,
  "commit": true,
  "fixed": [],
  "linked": [],
  "access": "public",

@@ -1,5 +0,0 @@
---
"task-master-ai": minor
---

Restore Taskmaster claude-code commands and move clear commands under /remove to avoid collision with the claude-code /clear command.

@@ -1,9 +0,0 @@
---
"task-master-ai": minor
---

Enhanced Gemini CLI provider with codebase-aware task generation

Added automatic codebase analysis for the Gemini CLI provider to the parse-prd, analyze-complexity, add-task, update-task, update, and update-subtask commands.
When using Gemini CLI as the AI provider, Task Master now instructs the AI to analyze the project structure, existing implementations, and patterns before generating tasks or subtasks.
Tasks and subtasks generated by Claude Code are now informed by actual codebase analysis, resulting in more accurate and contextual outputs.
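Both codebase-analysis changesets (Gemini CLI here, Claude Code further below) describe the same mechanism: when analysis is enabled and the provider supports it, an instruction to inspect the repository is added to the prompt before tasks are generated or updated. A minimal sketch of that idea, using hypothetical names (`buildPrompt`, `PromptOptions`) that are not taken from this diff:

```ts
// Sketch only: prepend a codebase-analysis instruction to a task-generation
// prompt when the selected provider supports it. Names are illustrative.
interface PromptOptions {
  provider: 'claude-code' | 'gemini-cli' | string;
  codebaseAnalysisEnabled: boolean;
  userRequest: string;
}

const ANALYSIS_PREAMBLE = [
  'Before generating or updating tasks, analyze the project structure,',
  'existing implementations, and established patterns in this repository.',
  'Base implementation details on what the codebase actually contains.'
].join(' ');

export function buildPrompt(opts: PromptOptions): string {
  const supportsAnalysis =
    opts.provider === 'claude-code' || opts.provider === 'gemini-cli';

  if (supportsAnalysis && opts.codebaseAnalysisEnabled) {
    return `${ANALYSIS_PREAMBLE}\n\n${opts.userRequest}`;
  }
  return opts.userRequest;
}
```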
@@ -1,16 +0,0 @@
{
  "mode": "pre",
  "tag": "rc",
  "initialVersions": {
    "task-master-ai": "0.25.1",
    "docs": "0.0.1",
    "extension": "0.24.1"
  },
  "changesets": [
    "clarify-force-move-docs",
    "curvy-moons-dig",
    "sour-coins-lay",
    "strong-eagles-vanish",
    "wet-candies-accept"
  ]
}

@@ -1,11 +0,0 @@
---
"task-master-ai": minor
---

Add configurable codebase analysis feature flag with multiple configuration sources

Users can now control whether codebase analysis features (Claude Code and Gemini CLI integration) are enabled through environment variables, MCP configuration, or project config files.

Priority order: .env > MCP session env > .taskmaster/config.json.

Set `TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false` in `.env` to disable codebase analysis prompts and tool integration.
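A minimal sketch of the resolution order described in this changeset (.env > MCP session env > .taskmaster/config.json). The helper and field names are illustrative, and the default of "enabled" when nothing is set is an assumption, not something stated in the diff:

```ts
// Sketch only: resolve the codebase-analysis flag using the documented
// priority order: .env > MCP session env > .taskmaster/config.json.
interface FlagSources {
  dotEnv?: Record<string, string>; // values loaded from .env
  mcpSessionEnv?: Record<string, string>; // env passed by the MCP session
  configFile?: { enableCodebaseAnalysis?: boolean }; // .taskmaster/config.json
}

function parseBool(value: string | undefined): boolean | undefined {
  if (value === undefined) return undefined;
  return !['false', '0', 'off', ''].includes(value.toLowerCase());
}

export function resolveCodebaseAnalysisFlag(sources: FlagSources): boolean {
  const fromDotEnv = parseBool(
    sources.dotEnv?.TASKMASTER_ENABLE_CODEBASE_ANALYSIS
  );
  if (fromDotEnv !== undefined) return fromDotEnv;

  const fromMcp = parseBool(
    sources.mcpSessionEnv?.TASKMASTER_ENABLE_CODEBASE_ANALYSIS
  );
  if (fromMcp !== undefined) return fromMcp;

  // Fall back to the project config file; assumed default is "enabled".
  return sources.configFile?.enableCodebaseAnalysis ?? true;
}
```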
@@ -1,12 +0,0 @@
---
"task-master-ai": minor
---

feat(move): improve cross-tag move UX and safety

- CLI: print "Next Steps" tips after cross-tag moves that used --ignore-dependencies (validate/fix guidance)
- CLI: show dedicated help block on ID collisions (destination tag already has the ID)
- Core: add structured suggestions to TASK_ALREADY_EXISTS errors
- MCP: map ID collision errors to TASK_ALREADY_EXISTS and include suggestions
- Tests: cover MCP options, error suggestions, CLI tips printing, and integration error payload suggestions
---
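The "structured suggestions" mentioned for TASK_ALREADY_EXISTS could be carried on the error object itself, so the CLI help block and the MCP error mapper render the same guidance. A sketch under that assumption; the class and field names are hypothetical and not taken from the diff:

```ts
// Sketch only: an ID-collision error that carries structured suggestions
// which both the CLI and the MCP payload can surface to the user.
export interface MoveSuggestion {
  action: string; // short machine-readable label for the remediation
  description: string; // human-readable guidance
}

export class TaskAlreadyExistsError extends Error {
  readonly code = 'TASK_ALREADY_EXISTS';
  readonly suggestions: MoveSuggestion[];

  constructor(taskId: string, targetTag: string) {
    super(`Task ${taskId} already exists in tag "${targetTag}"`);
    this.suggestions = [
      {
        action: 'use-different-id',
        description: `Pick an ID that is not already taken in "${targetTag}" and retry the move`
      },
      {
        action: 'inspect-existing-task',
        description: `Review the existing task ${taskId} in "${targetTag}" before overwriting or merging`
      }
    ];
  }
}
```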
@@ -1,14 +0,0 @@
---
"task-master-ai": minor
---

Enhanced Claude Code and Google CLI integration with automatic codebase analysis for task operations

When using Claude Code as the AI provider, task management commands now automatically analyze your codebase before generating or updating tasks. This provides more accurate, context-aware implementation details that align with your project's existing architecture and patterns.

Commands contextualised:

- add-task
- update-subtask
- update-task
- update

5 .changeset/wild-ears-look.md (new file)
@@ -0,0 +1,5 @@
---
"extension": minor
---

Added a Start Build button to the VS Code Task Properties Right Panel
122 .github/workflows/ci.yml (vendored)
@@ -9,70 +9,121 @@ on:
|
||||
branches:
|
||||
- main
|
||||
- next
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
DO_NOT_TRACK: 1
|
||||
NODE_ENV: development
|
||||
|
||||
jobs:
|
||||
setup:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install Dependencies
|
||||
id: install
|
||||
run: npm ci
|
||||
timeout-minutes: 2
|
||||
|
||||
- name: Cache node_modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
|
||||
|
||||
# Fast checks that can run in parallel
|
||||
format-check:
|
||||
needs: setup
|
||||
name: Format Check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
|
||||
- name: Restore node_modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
|
||||
- name: Install dependencies
|
||||
run: npm install --frozen-lockfile --prefer-offline
|
||||
timeout-minutes: 5
|
||||
|
||||
- name: Format Check
|
||||
run: npm run format-check
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
|
||||
test:
|
||||
needs: setup
|
||||
typecheck:
|
||||
name: Typecheck
|
||||
timeout-minutes: 10
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
|
||||
- name: Restore node_modules
|
||||
uses: actions/cache@v4
|
||||
- name: Install dependencies
|
||||
run: npm install --frozen-lockfile --prefer-offline
|
||||
timeout-minutes: 5
|
||||
|
||||
- name: Typecheck
|
||||
run: npm run turbo:typecheck
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
|
||||
# Build job to ensure everything compiles
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
|
||||
fetch-depth: 2
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install --frozen-lockfile --prefer-offline
|
||||
timeout-minutes: 5
|
||||
|
||||
- name: Build
|
||||
run: npm run turbo:build
|
||||
env:
|
||||
NODE_ENV: production
|
||||
FORCE_COLOR: 1
|
||||
|
||||
- name: Upload build artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-artifacts
|
||||
path: dist/
|
||||
retention-days: 1
|
||||
|
||||
test:
|
||||
name: Test
|
||||
timeout-minutes: 15
|
||||
runs-on: ubuntu-latest
|
||||
needs: [format-check, typecheck, build]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install --frozen-lockfile --prefer-offline
|
||||
timeout-minutes: 5
|
||||
|
||||
- name: Download build artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: build-artifacts
|
||||
path: dist/
|
||||
|
||||
- name: Run Tests
|
||||
run: |
|
||||
@@ -81,7 +132,6 @@ jobs:
|
||||
NODE_ENV: test
|
||||
CI: true
|
||||
FORCE_COLOR: 1
|
||||
timeout-minutes: 10
|
||||
|
||||
- name: Upload Test Results
|
||||
if: always()
|
||||
|
||||
57 .github/workflows/claude-docs-trigger.yml (vendored, new file)
@@ -0,0 +1,57 @@
|
||||
name: Trigger Claude Documentation Update
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- next
|
||||
paths-ignore:
|
||||
- "apps/docs/**"
|
||||
- "*.md"
|
||||
- ".github/workflows/**"
|
||||
|
||||
jobs:
|
||||
trigger-docs-update:
|
||||
# Only run if changes were merged (not direct pushes from bots)
|
||||
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2 # Need previous commit for comparison
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
echo "Changed files in this push:"
|
||||
git diff --name-only HEAD^ HEAD | tee changed_files.txt
|
||||
|
||||
# Store changed files for Claude to analyze (escaped for JSON)
|
||||
CHANGED_FILES=$(git diff --name-only HEAD^ HEAD | jq -Rs .)
|
||||
echo "changed_files=$CHANGED_FILES" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get the commit message (escaped for JSON)
|
||||
COMMIT_MSG=$(git log -1 --pretty=%B | jq -Rs .)
|
||||
echo "commit_message=$COMMIT_MSG" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get diff for documentation context (escaped for JSON)
|
||||
COMMIT_DIFF=$(git diff HEAD^ HEAD --stat | jq -Rs .)
|
||||
echo "commit_diff=$COMMIT_DIFF" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get commit SHA
|
||||
echo "commit_sha=${{ github.sha }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Trigger Claude workflow
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Trigger the Claude docs updater workflow with the change information
|
||||
gh workflow run claude-docs-updater.yml \
|
||||
--ref next \
|
||||
-f commit_sha="${{ steps.changed-files.outputs.commit_sha }}" \
|
||||
-f commit_message=${{ steps.changed-files.outputs.commit_message }} \
|
||||
-f changed_files=${{ steps.changed-files.outputs.changed_files }} \
|
||||
-f commit_diff=${{ steps.changed-files.outputs.commit_diff }}
|
||||
65 .github/workflows/claude-docs-updater.yml (vendored)
@@ -1,18 +1,27 @@
|
||||
name: Claude Documentation Updater
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- next
|
||||
paths-ignore:
|
||||
- "apps/docs/**"
|
||||
- "*.md"
|
||||
- ".github/workflows/**"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
commit_sha:
|
||||
description: 'The commit SHA that triggered this update'
|
||||
required: true
|
||||
type: string
|
||||
commit_message:
|
||||
description: 'The commit message'
|
||||
required: true
|
||||
type: string
|
||||
changed_files:
|
||||
description: 'List of changed files'
|
||||
required: true
|
||||
type: string
|
||||
commit_diff:
|
||||
description: 'Diff summary of changes'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
update-docs:
|
||||
# Only run if changes were merged (not direct pushes from bots)
|
||||
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -22,28 +31,8 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2 # Need previous commit for comparison
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
echo "Changed files in this push:"
|
||||
git diff --name-only HEAD^ HEAD | tee changed_files.txt
|
||||
|
||||
# Store changed files for Claude to analyze
|
||||
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
|
||||
git diff --name-only HEAD^ HEAD >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get the commit message and changes summary
|
||||
echo "commit_message<<EOF" >> $GITHUB_OUTPUT
|
||||
git log -1 --pretty=%B >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get diff for documentation context
|
||||
echo "commit_diff<<EOF" >> $GITHUB_OUTPUT
|
||||
git diff HEAD^ HEAD --stat >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
ref: next
|
||||
fetch-depth: 0 # Need full history to checkout specific commit
|
||||
|
||||
- name: Create docs update branch
|
||||
id: create-branch
|
||||
@@ -71,12 +60,12 @@ jobs:
|
||||
You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.
|
||||
|
||||
Recent changes:
|
||||
- Commit: ${{ steps.changed-files.outputs.commit_message }}
|
||||
- Commit: ${{ inputs.commit_message }}
|
||||
- Changed files:
|
||||
${{ steps.changed-files.outputs.changed_files }}
|
||||
${{ inputs.changed_files }}
|
||||
|
||||
- Changes summary:
|
||||
${{ steps.changed-files.outputs.commit_diff }}
|
||||
${{ inputs.commit_diff }}
|
||||
|
||||
Your task:
|
||||
1. Analyze the changes to understand what functionality was added, modified, or removed
|
||||
@@ -113,7 +102,7 @@ jobs:
|
||||
|
||||
This PR was automatically generated to update documentation based on recent changes.
|
||||
|
||||
Original commit: ${{ steps.changed-files.outputs.commit_message }}
|
||||
Original commit: ${{ inputs.commit_message }}
|
||||
|
||||
Co-authored-by: Claude <claude-assistant@anthropic.com>"
|
||||
fi
|
||||
@@ -133,12 +122,12 @@ jobs:
|
||||
This PR automatically updates documentation based on recent changes merged to the \`next\` branch.
|
||||
|
||||
### Original Changes
|
||||
**Commit:** ${{ github.sha }}
|
||||
**Message:** ${{ steps.changed-files.outputs.commit_message }}
|
||||
**Commit:** ${{ inputs.commit_sha }}
|
||||
**Message:** ${{ inputs.commit_message }}
|
||||
|
||||
### Changed Files in Original Commit
|
||||
\`\`\`
|
||||
${{ steps.changed-files.outputs.changed_files }}
|
||||
${{ inputs.changed_files }}
|
||||
\`\`\`
|
||||
|
||||
### Documentation Updates
|
||||
|
||||
5 .gitignore (vendored)
@@ -93,4 +93,7 @@ dev-debug.log
|
||||
apps/extension/.vscode-test/
|
||||
|
||||
# apps/extension
|
||||
apps/extension/vsix-build/
|
||||
apps/extension/vsix-build/
|
||||
|
||||
# turbo
|
||||
.turbo
|
||||
43 CHANGELOG.md
@@ -1,5 +1,48 @@
|
||||
# task-master-ai
|
||||
|
||||
## 0.26.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- [#1133](https://github.com/eyaltoledano/claude-task-master/pull/1133) [`df26c65`](https://github.com/eyaltoledano/claude-task-master/commit/df26c65632000874a73504963b08f18c46283144) Thanks [@neonwatty](https://github.com/neonwatty)! - Restore Taskmaster claude-code commands and move clear commands under /remove to avoid collision with the claude-code /clear command.
|
||||
|
||||
- [#1163](https://github.com/eyaltoledano/claude-task-master/pull/1163) [`37af0f1`](https://github.com/eyaltoledano/claude-task-master/commit/37af0f191227a68d119b7f89a377bf932ee3ac66) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Enhanced Gemini CLI provider with codebase-aware task generation
|
||||
|
||||
Added automatic codebase analysis for the Gemini CLI provider to the parse-prd, analyze-complexity, add-task, update-task, update, and update-subtask commands
|
||||
When using Gemini CLI as the AI provider, Task Master now instructs the AI to analyze the project structure, existing implementations, and patterns before generating tasks or subtasks
|
||||
Tasks and subtasks generated by Claude Code are now informed by actual codebase analysis, resulting in more accurate and contextual outputs
|
||||
|
||||
- [#1165](https://github.com/eyaltoledano/claude-task-master/pull/1165) [`c4f92f6`](https://github.com/eyaltoledano/claude-task-master/commit/c4f92f6a0aee3435c56eb8d27d9aa9204284833e) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Add configurable codebase analysis feature flag with multiple configuration sources
|
||||
|
||||
Users can now control whether codebase analysis features (Claude Code and Gemini CLI integration) are enabled through environment variables, MCP configuration, or project config files.
|
||||
|
||||
Priority order: .env > MCP session env > .taskmaster/config.json.
|
||||
|
||||
Set `TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false` in `.env` to disable codebase analysis prompts and tool integration.
|
||||
|
||||
- [#1135](https://github.com/eyaltoledano/claude-task-master/pull/1135) [`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f) Thanks [@mm-parthy](https://github.com/mm-parthy)! - feat(move): improve cross-tag move UX and safety
|
||||
- CLI: print "Next Steps" tips after cross-tag moves that used --ignore-dependencies (validate/fix guidance)
|
||||
- CLI: show dedicated help block on ID collisions (destination tag already has the ID)
|
||||
- Core: add structured suggestions to TASK_ALREADY_EXISTS errors
|
||||
- MCP: map ID collision errors to TASK_ALREADY_EXISTS and include suggestions
|
||||
- Tests: cover MCP options, error suggestions, CLI tips printing, and integration error payload suggestions
|
||||
|
||||
***
|
||||
|
||||
- [#1162](https://github.com/eyaltoledano/claude-task-master/pull/1162) [`4dad2fd`](https://github.com/eyaltoledano/claude-task-master/commit/4dad2fd613ceac56a65ae9d3c1c03092b8860ac9) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Enhanced Claude Code and Google CLI integration with automatic codebase analysis for task operations
|
||||
|
||||
When using Claude Code as the AI provider, task management commands now automatically analyze your codebase before generating or updating tasks. This provides more accurate, context-aware implementation details that align with your project's existing architecture and patterns.
|
||||
|
||||
Commands contextualised:
|
||||
- add-task
|
||||
- update-subtask
|
||||
- update-task
|
||||
- update
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [#1135](https://github.com/eyaltoledano/claude-task-master/pull/1135) [`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f) Thanks [@mm-parthy](https://github.com/mm-parthy)! - docs(move): clarify cross-tag move docs; deprecate "force"; add explicit --with-dependencies/--ignore-dependencies examples
|
||||
|
||||
## 0.26.0-rc.1
|
||||
|
||||
### Minor Changes
|
||||
|
||||
@@ -4,40 +4,36 @@
|
||||
"description": "Task Master CLI - Command line interface for task management",
|
||||
"type": "module",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"types": "./src/index.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./src/index.ts",
|
||||
"import": "./dist/index.js",
|
||||
"require": "./dist/index.js"
|
||||
}
|
||||
".": "./src/index.ts"
|
||||
},
|
||||
"files": ["dist", "README.md"],
|
||||
"scripts": {
|
||||
"build": "tsup",
|
||||
"dev": "tsup --watch",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"lint": "biome check src",
|
||||
"format": "biome format --write src",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
"test:watch": "vitest",
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"test:unit": "vitest run -t unit",
|
||||
"test:integration": "vitest run -t integration",
|
||||
"test:e2e": "vitest run --dir tests/e2e",
|
||||
"test:ci": "vitest run --coverage --reporter=dot"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tm/core": "*",
|
||||
"@tm/workflow-engine": "*",
|
||||
"boxen": "^7.1.1",
|
||||
"chalk": "^5.3.0",
|
||||
"chalk": "5.6.2",
|
||||
"cli-table3": "^0.6.5",
|
||||
"commander": "^12.1.0",
|
||||
"inquirer": "^9.2.10",
|
||||
"open": "^10.2.0",
|
||||
"ora": "^8.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^1.9.4",
|
||||
"@types/inquirer": "^9.0.3",
|
||||
"@types/node": "^22.10.5",
|
||||
"tsup": "^8.3.0",
|
||||
"tsx": "^4.20.4",
|
||||
"typescript": "^5.7.3",
|
||||
"vitest": "^2.1.8"
|
||||
@@ -47,5 +43,10 @@
|
||||
},
|
||||
"keywords": ["task-master", "cli", "task-management", "productivity"],
|
||||
"author": "",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"typesVersions": {
|
||||
"*": {
|
||||
"*": ["src/*"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -494,6 +494,17 @@ export class AuthCommand extends Command {
|
||||
|
||||
/**
|
||||
* Static method to register this command on an existing program
|
||||
* This is for gradual migration - allows commands.js to use this
|
||||
*/
|
||||
static registerOn(program: Command): Command {
|
||||
const authCommand = new AuthCommand();
|
||||
program.addCommand(authCommand);
|
||||
return authCommand;
|
||||
}
|
||||
|
||||
/**
|
||||
* Alternative registration that returns the command for chaining
|
||||
* Can also configure the command name if needed
|
||||
*/
|
||||
static register(program: Command, name?: string): AuthCommand {
|
||||
const authCommand = new AuthCommand(name);
|
||||
|
||||
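The registerOn/register statics above exist so the legacy commands.js entry point can adopt the class-based commands gradually. A short usage sketch, assuming only what the diff shows (a Commander program and the two static helpers on AuthCommand):

```ts
// Sketch only: attaching a class-based command to an existing Commander
// program during the gradual migration described in the comments above.
import { Command } from 'commander';
import { AuthCommand } from './auth.command.js';

const program = new Command('task-master');

// Attach the auth command using the static helper from the diff.
// AuthCommand.register(program, 'auth') would do the same while also
// returning the typed instance and allowing a custom command name.
AuthCommand.registerOn(program);

program.parse(process.argv);
```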
570 apps/cli/src/commands/context.command.ts (new file)
@@ -0,0 +1,570 @@
|
||||
/**
|
||||
* @fileoverview Context command for managing org/brief selection
|
||||
* Provides a clean interface for workspace context management
|
||||
*/
|
||||
|
||||
import { Command } from 'commander';
|
||||
import chalk from 'chalk';
|
||||
import inquirer from 'inquirer';
|
||||
import ora from 'ora';
|
||||
import {
|
||||
AuthManager,
|
||||
AuthenticationError,
|
||||
type UserContext
|
||||
} from '@tm/core/auth';
|
||||
import * as ui from '../utils/ui.js';
|
||||
|
||||
/**
|
||||
* Result type from context command
|
||||
*/
|
||||
export interface ContextResult {
|
||||
success: boolean;
|
||||
action: 'show' | 'select-org' | 'select-brief' | 'clear' | 'set';
|
||||
context?: UserContext;
|
||||
message?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* ContextCommand extending Commander's Command class
|
||||
* Manages user's workspace context (org/brief selection)
|
||||
*/
|
||||
export class ContextCommand extends Command {
|
||||
private authManager: AuthManager;
|
||||
private lastResult?: ContextResult;
|
||||
|
||||
constructor(name?: string) {
|
||||
super(name || 'context');
|
||||
|
||||
// Initialize auth manager
|
||||
this.authManager = AuthManager.getInstance();
|
||||
|
||||
// Configure the command
|
||||
this.description(
|
||||
'Manage workspace context (organization and brief selection)'
|
||||
);
|
||||
|
||||
// Add subcommands
|
||||
this.addOrgCommand();
|
||||
this.addBriefCommand();
|
||||
this.addClearCommand();
|
||||
this.addSetCommand();
|
||||
|
||||
// Default action shows current context
|
||||
this.action(async () => {
|
||||
await this.executeShow();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add org selection subcommand
|
||||
*/
|
||||
private addOrgCommand(): void {
|
||||
this.command('org')
|
||||
.description('Select an organization')
|
||||
.action(async () => {
|
||||
await this.executeSelectOrg();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add brief selection subcommand
|
||||
*/
|
||||
private addBriefCommand(): void {
|
||||
this.command('brief')
|
||||
.description('Select a brief within the current organization')
|
||||
.action(async () => {
|
||||
await this.executeSelectBrief();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add clear subcommand
|
||||
*/
|
||||
private addClearCommand(): void {
|
||||
this.command('clear')
|
||||
.description('Clear all context selections')
|
||||
.action(async () => {
|
||||
await this.executeClear();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add set subcommand for direct context setting
|
||||
*/
|
||||
private addSetCommand(): void {
|
||||
this.command('set')
|
||||
.description('Set context directly')
|
||||
.option('--org <id>', 'Organization ID')
|
||||
.option('--org-name <name>', 'Organization name')
|
||||
.option('--brief <id>', 'Brief ID')
|
||||
.option('--brief-name <name>', 'Brief name')
|
||||
.action(async (options) => {
|
||||
await this.executeSet(options);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute show current context
|
||||
*/
|
||||
private async executeShow(): Promise<void> {
|
||||
try {
|
||||
const result = this.displayContext();
|
||||
this.setLastResult(result);
|
||||
} catch (error: any) {
|
||||
this.handleError(error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Display current context
|
||||
*/
|
||||
private displayContext(): ContextResult {
|
||||
// Check authentication first
|
||||
if (!this.authManager.isAuthenticated()) {
|
||||
console.log(chalk.yellow('✗ Not authenticated'));
|
||||
console.log(chalk.gray('\n Run "tm auth login" to authenticate first'));
|
||||
|
||||
return {
|
||||
success: false,
|
||||
action: 'show',
|
||||
message: 'Not authenticated'
|
||||
};
|
||||
}
|
||||
|
||||
const context = this.authManager.getContext();
|
||||
|
||||
console.log(chalk.cyan('\n🌍 Workspace Context\n'));
|
||||
|
||||
if (context && (context.orgId || context.briefId)) {
|
||||
if (context.orgName || context.orgId) {
|
||||
console.log(chalk.green('✓ Organization'));
|
||||
if (context.orgName) {
|
||||
console.log(chalk.white(` ${context.orgName}`));
|
||||
}
|
||||
if (context.orgId) {
|
||||
console.log(chalk.gray(` ID: ${context.orgId}`));
|
||||
}
|
||||
}
|
||||
|
||||
if (context.briefName || context.briefId) {
|
||||
console.log(chalk.green('\n✓ Brief'));
|
||||
if (context.briefName) {
|
||||
console.log(chalk.white(` ${context.briefName}`));
|
||||
}
|
||||
if (context.briefId) {
|
||||
console.log(chalk.gray(` ID: ${context.briefId}`));
|
||||
}
|
||||
}
|
||||
|
||||
if (context.updatedAt) {
|
||||
console.log(
|
||||
chalk.gray(
|
||||
`\n Last updated: ${new Date(context.updatedAt).toLocaleString()}`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'show',
|
||||
context,
|
||||
message: 'Context loaded'
|
||||
};
|
||||
} else {
|
||||
console.log(chalk.yellow('✗ No context selected'));
|
||||
console.log(
|
||||
chalk.gray('\n Run "tm context org" to select an organization')
|
||||
);
|
||||
console.log(chalk.gray(' Run "tm context brief" to select a brief'));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'show',
|
||||
message: 'No context selected'
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute org selection
|
||||
*/
|
||||
private async executeSelectOrg(): Promise<void> {
|
||||
try {
|
||||
// Check authentication
|
||||
if (!this.authManager.isAuthenticated()) {
|
||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const result = await this.selectOrganization();
|
||||
this.setLastResult(result);
|
||||
|
||||
if (!result.success) {
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error: any) {
|
||||
this.handleError(error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Select an organization interactively
|
||||
*/
|
||||
private async selectOrganization(): Promise<ContextResult> {
|
||||
const spinner = ora('Fetching organizations...').start();
|
||||
|
||||
try {
|
||||
// Fetch organizations from API
|
||||
const organizations = await this.authManager.getOrganizations();
|
||||
spinner.stop();
|
||||
|
||||
if (organizations.length === 0) {
|
||||
ui.displayWarning('No organizations available');
|
||||
return {
|
||||
success: false,
|
||||
action: 'select-org',
|
||||
message: 'No organizations available'
|
||||
};
|
||||
}
|
||||
|
||||
// Prompt for selection
|
||||
const { selectedOrg } = await inquirer.prompt([
|
||||
{
|
||||
type: 'list',
|
||||
name: 'selectedOrg',
|
||||
message: 'Select an organization:',
|
||||
choices: organizations.map((org) => ({
|
||||
name: org.name,
|
||||
value: org
|
||||
}))
|
||||
}
|
||||
]);
|
||||
|
||||
// Update context
|
||||
await this.authManager.updateContext({
|
||||
orgId: selectedOrg.id,
|
||||
orgName: selectedOrg.name,
|
||||
// Clear brief when changing org
|
||||
briefId: undefined,
|
||||
briefName: undefined
|
||||
});
|
||||
|
||||
ui.displaySuccess(`Selected organization: ${selectedOrg.name}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'select-org',
|
||||
context: this.authManager.getContext() || undefined,
|
||||
message: `Selected organization: ${selectedOrg.name}`
|
||||
};
|
||||
} catch (error) {
|
||||
spinner.fail('Failed to fetch organizations');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute brief selection
|
||||
*/
|
||||
private async executeSelectBrief(): Promise<void> {
|
||||
try {
|
||||
// Check authentication
|
||||
if (!this.authManager.isAuthenticated()) {
|
||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check if org is selected
|
||||
const context = this.authManager.getContext();
|
||||
if (!context?.orgId) {
|
||||
ui.displayError(
|
||||
'No organization selected. Run "tm context org" first.'
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const result = await this.selectBrief(context.orgId);
|
||||
this.setLastResult(result);
|
||||
|
||||
if (!result.success) {
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error: any) {
|
||||
this.handleError(error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Select a brief within the current organization
|
||||
*/
|
||||
private async selectBrief(orgId: string): Promise<ContextResult> {
|
||||
const spinner = ora('Fetching briefs...').start();
|
||||
|
||||
try {
|
||||
// Fetch briefs from API
|
||||
const briefs = await this.authManager.getBriefs(orgId);
|
||||
spinner.stop();
|
||||
|
||||
if (briefs.length === 0) {
|
||||
ui.displayWarning('No briefs available in this organization');
|
||||
return {
|
||||
success: false,
|
||||
action: 'select-brief',
|
||||
message: 'No briefs available'
|
||||
};
|
||||
}
|
||||
|
||||
// Prompt for selection
|
||||
const { selectedBrief } = await inquirer.prompt([
|
||||
{
|
||||
type: 'list',
|
||||
name: 'selectedBrief',
|
||||
message: 'Select a brief:',
|
||||
choices: [
|
||||
{ name: '(No brief - organization level)', value: null },
|
||||
...briefs.map((brief) => ({
|
||||
name: `Brief ${brief.id.slice(0, 8)} (${new Date(brief.createdAt).toLocaleDateString()})`,
|
||||
value: brief
|
||||
}))
|
||||
]
|
||||
}
|
||||
]);
|
||||
|
||||
if (selectedBrief) {
|
||||
// Update context with brief
|
||||
const briefName = `Brief ${selectedBrief.id.slice(0, 8)}`;
|
||||
await this.authManager.updateContext({
|
||||
briefId: selectedBrief.id,
|
||||
briefName: briefName
|
||||
});
|
||||
|
||||
ui.displaySuccess(`Selected brief: ${briefName}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'select-brief',
|
||||
context: this.authManager.getContext() || undefined,
|
||||
message: `Selected brief: ${selectedBrief.name}`
|
||||
};
|
||||
} else {
|
||||
// Clear brief selection
|
||||
await this.authManager.updateContext({
|
||||
briefId: undefined,
|
||||
briefName: undefined
|
||||
});
|
||||
|
||||
ui.displaySuccess('Cleared brief selection (organization level)');
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'select-brief',
|
||||
context: this.authManager.getContext() || undefined,
|
||||
message: 'Cleared brief selection'
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
spinner.fail('Failed to fetch briefs');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute clear context
|
||||
*/
|
||||
private async executeClear(): Promise<void> {
|
||||
try {
|
||||
// Check authentication
|
||||
if (!this.authManager.isAuthenticated()) {
|
||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const result = await this.clearContext();
|
||||
this.setLastResult(result);
|
||||
|
||||
if (!result.success) {
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error: any) {
|
||||
this.handleError(error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all context selections
|
||||
*/
|
||||
private async clearContext(): Promise<ContextResult> {
|
||||
try {
|
||||
await this.authManager.clearContext();
|
||||
ui.displaySuccess('Context cleared');
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'clear',
|
||||
message: 'Context cleared'
|
||||
};
|
||||
} catch (error) {
|
||||
ui.displayError(`Failed to clear context: ${(error as Error).message}`);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
action: 'clear',
|
||||
message: `Failed to clear context: ${(error as Error).message}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute set context with options
|
||||
*/
|
||||
private async executeSet(options: any): Promise<void> {
|
||||
try {
|
||||
// Check authentication
|
||||
if (!this.authManager.isAuthenticated()) {
|
||||
ui.displayError('Not authenticated. Run "tm auth login" first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const result = await this.setContext(options);
|
||||
this.setLastResult(result);
|
||||
|
||||
if (!result.success) {
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error: any) {
|
||||
this.handleError(error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set context directly from options
|
||||
*/
|
||||
private async setContext(options: any): Promise<ContextResult> {
|
||||
try {
|
||||
const context: Partial<UserContext> = {};
|
||||
|
||||
if (options.org) {
|
||||
context.orgId = options.org;
|
||||
}
|
||||
if (options.orgName) {
|
||||
context.orgName = options.orgName;
|
||||
}
|
||||
if (options.brief) {
|
||||
context.briefId = options.brief;
|
||||
}
|
||||
if (options.briefName) {
|
||||
context.briefName = options.briefName;
|
||||
}
|
||||
|
||||
if (Object.keys(context).length === 0) {
|
||||
ui.displayWarning('No context options provided');
|
||||
return {
|
||||
success: false,
|
||||
action: 'set',
|
||||
message: 'No context options provided'
|
||||
};
|
||||
}
|
||||
|
||||
await this.authManager.updateContext(context);
|
||||
ui.displaySuccess('Context updated');
|
||||
|
||||
// Display what was set
|
||||
if (context.orgName || context.orgId) {
|
||||
console.log(
|
||||
chalk.gray(` Organization: ${context.orgName || context.orgId}`)
|
||||
);
|
||||
}
|
||||
if (context.briefName || context.briefId) {
|
||||
console.log(
|
||||
chalk.gray(` Brief: ${context.briefName || context.briefId}`)
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
action: 'set',
|
||||
context: this.authManager.getContext() || undefined,
|
||||
message: 'Context updated'
|
||||
};
|
||||
} catch (error) {
|
||||
ui.displayError(`Failed to set context: ${(error as Error).message}`);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
action: 'set',
|
||||
message: `Failed to set context: ${(error as Error).message}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle errors
|
||||
*/
|
||||
private handleError(error: any): void {
|
||||
if (error instanceof AuthenticationError) {
|
||||
console.error(chalk.red(`\n✗ ${error.message}`));
|
||||
|
||||
if (error.code === 'NOT_AUTHENTICATED') {
|
||||
ui.displayWarning('Please authenticate first: tm auth login');
|
||||
}
|
||||
} else {
|
||||
const msg = error?.message ?? String(error);
|
||||
console.error(chalk.red(`Error: ${msg}`));
|
||||
|
||||
if (error.stack && process.env.DEBUG) {
|
||||
console.error(chalk.gray(error.stack));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the last result for programmatic access
|
||||
*/
|
||||
private setLastResult(result: ContextResult): void {
|
||||
this.lastResult = result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the last result (for programmatic usage)
|
||||
*/
|
||||
getLastResult(): ContextResult | undefined {
|
||||
return this.lastResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current context (for programmatic usage)
|
||||
*/
|
||||
getContext(): UserContext | null {
|
||||
return this.authManager.getContext();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up resources
|
||||
*/
|
||||
async cleanup(): Promise<void> {
|
||||
// No resources to clean up for context command
|
||||
}
|
||||
|
||||
/**
|
||||
* Static method to register this command on an existing program
|
||||
*/
|
||||
static registerOn(program: Command): Command {
|
||||
const contextCommand = new ContextCommand();
|
||||
program.addCommand(contextCommand);
|
||||
return contextCommand;
|
||||
}
|
||||
|
||||
/**
|
||||
* Alternative registration that returns the command for chaining
|
||||
*/
|
||||
static register(program: Command, name?: string): ContextCommand {
|
||||
const contextCommand = new ContextCommand(name);
|
||||
program.addCommand(contextCommand);
|
||||
return contextCommand;
|
||||
}
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
/**
|
||||
* Command registry - exports all CLI commands for central registration
|
||||
*/
|
||||
|
||||
import type { Command } from 'commander';
|
||||
import { ListTasksCommand } from './list.command.js';
|
||||
import { AuthCommand } from './auth.command.js';
|
||||
import WorkflowCommand from './workflow.command.js';
|
||||
|
||||
// Define interface for command classes that can register themselves
|
||||
export interface CommandRegistrar {
|
||||
register(program: Command, name?: string): any;
|
||||
}
|
||||
|
||||
// Future commands can be added here as they're created
|
||||
// The pattern is: each command exports a class with a static register(program: Command, name?: string) method
|
||||
|
||||
/**
|
||||
* Auto-register all exported commands that implement the CommandRegistrar interface
|
||||
*/
|
||||
export function registerAllCommands(program: Command): void {
|
||||
// Get all exports from this module
|
||||
const commands = [
|
||||
ListTasksCommand,
|
||||
AuthCommand,
|
||||
WorkflowCommand
|
||||
// Add new commands here as they're imported above
|
||||
];
|
||||
|
||||
commands.forEach((CommandClass) => {
|
||||
if (
|
||||
'register' in CommandClass &&
|
||||
typeof CommandClass.register === 'function'
|
||||
) {
|
||||
CommandClass.register(program);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -15,7 +15,20 @@ import {
|
||||
STATUS_ICONS,
|
||||
type OutputFormat
|
||||
} from '@tm/core';
|
||||
import type { StorageType } from '@tm/core/types';
|
||||
import * as ui from '../utils/ui.js';
|
||||
import {
|
||||
displayHeader,
|
||||
displayDashboards,
|
||||
calculateTaskStatistics,
|
||||
calculateSubtaskStatistics,
|
||||
calculateDependencyStatistics,
|
||||
getPriorityBreakdown,
|
||||
displayRecommendedNextTask,
|
||||
getTaskDescription,
|
||||
displaySuggestedNextSteps,
|
||||
type NextTaskInfo
|
||||
} from '../ui/index.js';
|
||||
|
||||
/**
|
||||
* Options interface for the list command
|
||||
@@ -37,7 +50,7 @@ export interface ListTasksResult {
|
||||
total: number;
|
||||
filtered: number;
|
||||
tag?: string;
|
||||
storageType: 'file' | 'api';
|
||||
storageType: Exclude<StorageType, 'auto'>;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -244,19 +257,16 @@ export class ListTasksCommand extends Command {
|
||||
* Display in text format with tables
|
||||
*/
|
||||
private displayText(data: ListTasksResult, withSubtasks?: boolean): void {
|
||||
const { tasks, total, filtered, tag, storageType } = data;
|
||||
const { tasks, tag } = data;
|
||||
|
||||
// Header
|
||||
ui.displayBanner(`Task List${tag ? ` (${tag})` : ''}`);
|
||||
// Get file path for display
|
||||
const filePath = this.tmCore ? `.taskmaster/tasks/tasks.json` : undefined;
|
||||
|
||||
// Statistics
|
||||
console.log(chalk.blue.bold('\n📊 Statistics:\n'));
|
||||
console.log(` Total tasks: ${chalk.cyan(total)}`);
|
||||
console.log(` Filtered: ${chalk.cyan(filtered)}`);
|
||||
if (tag) {
|
||||
console.log(` Tag: ${chalk.cyan(tag)}`);
|
||||
}
|
||||
console.log(` Storage: ${chalk.cyan(storageType)}`);
|
||||
// Display header without banner (banner already shown by main CLI)
|
||||
displayHeader({
|
||||
tag: tag || 'master',
|
||||
filePath: filePath
|
||||
});
|
||||
|
||||
// No tasks message
|
||||
if (tasks.length === 0) {
|
||||
@@ -264,21 +274,50 @@ export class ListTasksCommand extends Command {
|
||||
return;
|
||||
}
|
||||
|
||||
// Task table
|
||||
console.log(chalk.blue.bold(`\n📋 Tasks (${tasks.length}):\n`));
|
||||
// Calculate statistics
|
||||
const taskStats = calculateTaskStatistics(tasks);
|
||||
const subtaskStats = calculateSubtaskStatistics(tasks);
|
||||
const depStats = calculateDependencyStatistics(tasks);
|
||||
const priorityBreakdown = getPriorityBreakdown(tasks);
|
||||
|
||||
// Find next task following the same logic as findNextTask
|
||||
const nextTask = this.findNextTask(tasks);
|
||||
|
||||
// Display dashboard boxes
|
||||
displayDashboards(
|
||||
taskStats,
|
||||
subtaskStats,
|
||||
priorityBreakdown,
|
||||
depStats,
|
||||
nextTask
|
||||
);
|
||||
|
||||
// Task table - no title, just show the table directly
|
||||
console.log(
|
||||
ui.createTaskTable(tasks, {
|
||||
showSubtasks: withSubtasks,
|
||||
showDependencies: true
|
||||
showDependencies: true,
|
||||
showComplexity: true // Enable complexity column
|
||||
})
|
||||
);
|
||||
|
||||
// Progress bar
|
||||
const completedCount = tasks.filter(
|
||||
(t: Task) => t.status === 'done'
|
||||
).length;
|
||||
console.log(chalk.blue.bold('\n📊 Overall Progress:\n'));
|
||||
console.log(` ${ui.createProgressBar(completedCount, tasks.length)}`);
|
||||
// Display recommended next task section immediately after table
|
||||
if (nextTask) {
|
||||
// Find the full task object to get description
|
||||
const fullTask = tasks.find((t) => String(t.id) === String(nextTask.id));
|
||||
const description = fullTask ? getTaskDescription(fullTask) : undefined;
|
||||
|
||||
displayRecommendedNextTask({
|
||||
...nextTask,
|
||||
status: 'pending', // Next task is typically pending
|
||||
description
|
||||
});
|
||||
} else {
|
||||
displayRecommendedNextTask(undefined);
|
||||
}
|
||||
|
||||
// Display suggested next steps at the end
|
||||
displaySuggestedNextSteps();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -288,6 +327,128 @@ export class ListTasksCommand extends Command {
|
||||
this.lastResult = result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the next task to work on
|
||||
* Implements the same logic as scripts/modules/task-manager/find-next-task.js
|
||||
*/
|
||||
private findNextTask(tasks: Task[]): NextTaskInfo | undefined {
|
||||
const priorityValues: Record<string, number> = {
|
||||
critical: 4,
|
||||
high: 3,
|
||||
medium: 2,
|
||||
low: 1
|
||||
};
|
||||
|
||||
// Build set of completed task IDs (including subtasks)
|
||||
const completedIds = new Set<string>();
|
||||
tasks.forEach((t) => {
|
||||
if (t.status === 'done' || t.status === 'completed') {
|
||||
completedIds.add(String(t.id));
|
||||
}
|
||||
if (t.subtasks) {
|
||||
t.subtasks.forEach((st) => {
|
||||
if (st.status === 'done' || st.status === 'completed') {
|
||||
completedIds.add(`${t.id}.${st.id}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// First, look for eligible subtasks in in-progress parent tasks
|
||||
const candidateSubtasks: NextTaskInfo[] = [];
|
||||
|
||||
tasks
|
||||
.filter(
|
||||
(t) => t.status === 'in-progress' && t.subtasks && t.subtasks.length > 0
|
||||
)
|
||||
.forEach((parent) => {
|
||||
parent.subtasks!.forEach((st) => {
|
||||
const stStatus = (st.status || 'pending').toLowerCase();
|
||||
if (stStatus !== 'pending' && stStatus !== 'in-progress') return;
|
||||
|
||||
// Check if dependencies are satisfied
|
||||
const fullDeps =
|
||||
st.dependencies?.map((d) => {
|
||||
// Handle both numeric and string IDs
|
||||
if (typeof d === 'string' && d.includes('.')) {
|
||||
return d;
|
||||
}
|
||||
return `${parent.id}.${d}`;
|
||||
}) ?? [];
|
||||
|
||||
const depsSatisfied =
|
||||
fullDeps.length === 0 ||
|
||||
fullDeps.every((depId) => completedIds.has(String(depId)));
|
||||
|
||||
if (depsSatisfied) {
|
||||
candidateSubtasks.push({
|
||||
id: `${parent.id}.${st.id}`,
|
||||
title: st.title || `Subtask ${st.id}`,
|
||||
priority: st.priority || parent.priority || 'medium',
|
||||
dependencies: fullDeps.map((d) => String(d))
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if (candidateSubtasks.length > 0) {
|
||||
// Sort by priority, then by dependencies count, then by ID
|
||||
candidateSubtasks.sort((a, b) => {
|
||||
const pa = priorityValues[a.priority || 'medium'] ?? 2;
|
||||
const pb = priorityValues[b.priority || 'medium'] ?? 2;
|
||||
if (pb !== pa) return pb - pa;
|
||||
|
||||
const depCountA = a.dependencies?.length || 0;
|
||||
const depCountB = b.dependencies?.length || 0;
|
||||
if (depCountA !== depCountB) return depCountA - depCountB;
|
||||
|
||||
return String(a.id).localeCompare(String(b.id));
|
||||
});
|
||||
return candidateSubtasks[0];
|
||||
}
|
||||
|
||||
// Fall back to finding eligible top-level tasks
|
||||
const eligibleTasks = tasks.filter((task) => {
|
||||
// Skip non-eligible statuses
|
||||
const status = (task.status || 'pending').toLowerCase();
|
||||
if (status !== 'pending' && status !== 'in-progress') return false;
|
||||
|
||||
// Check dependencies
|
||||
const deps = task.dependencies || [];
|
||||
const depsSatisfied =
|
||||
deps.length === 0 ||
|
||||
deps.every((depId) => completedIds.has(String(depId)));
|
||||
|
||||
return depsSatisfied;
|
||||
});
|
||||
|
||||
if (eligibleTasks.length === 0) return undefined;
|
||||
|
||||
// Sort eligible tasks
|
||||
eligibleTasks.sort((a, b) => {
|
||||
// Priority (higher first)
|
||||
const pa = priorityValues[a.priority || 'medium'] ?? 2;
|
||||
const pb = priorityValues[b.priority || 'medium'] ?? 2;
|
||||
if (pb !== pa) return pb - pa;
|
||||
|
||||
// Dependencies count (fewer first)
|
||||
const depCountA = a.dependencies?.length || 0;
|
||||
const depCountB = b.dependencies?.length || 0;
|
||||
if (depCountA !== depCountB) return depCountA - depCountB;
|
||||
|
||||
// ID (lower first)
|
||||
return Number(a.id) - Number(b.id);
|
||||
});
|
||||
|
||||
const nextTask = eligibleTasks[0];
|
||||
return {
|
||||
id: nextTask.id,
|
||||
title: nextTask.title,
|
||||
priority: nextTask.priority,
|
||||
dependencies: nextTask.dependencies?.map((d) => String(d))
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the last result (for programmatic usage)
|
||||
*/
|
||||
@@ -307,6 +468,17 @@ export class ListTasksCommand extends Command {
|
||||
|
||||
/**
|
||||
* Static method to register this command on an existing program
|
||||
* This is for gradual migration - allows commands.js to use this
|
||||
*/
|
||||
static registerOn(program: Command): Command {
|
||||
const listCommand = new ListTasksCommand();
|
||||
program.addCommand(listCommand);
|
||||
return listCommand;
|
||||
}
|
||||
|
||||
/**
|
||||
* Alternative registration that returns the command for chaining
|
||||
* Can also configure the command name if needed
|
||||
*/
|
||||
static register(program: Command, name?: string): ListTasksCommand {
|
||||
const listCommand = new ListTasksCommand(name);
|
||||
|
||||
383 apps/cli/src/commands/show.command.ts (new file)
@@ -0,0 +1,383 @@
|
||||
/**
|
||||
* @fileoverview ShowCommand using Commander's native class pattern
|
||||
* Extends Commander.Command for better integration with the framework
|
||||
*/
|
||||
|
||||
import { Command } from 'commander';
|
||||
import chalk from 'chalk';
|
||||
import boxen from 'boxen';
|
||||
import { createTaskMasterCore, type Task, type TaskMasterCore } from '@tm/core';
|
||||
import type { StorageType } from '@tm/core/types';
|
||||
import * as ui from '../utils/ui.js';
|
||||
import {
|
||||
displayTaskHeader,
|
||||
displayTaskProperties,
|
||||
displayImplementationDetails,
|
||||
displayTestStrategy,
|
||||
displaySubtasks,
|
||||
displaySuggestedActions
|
||||
} from '../ui/components/task-detail.component.js';
|
||||
|
||||
/**
|
||||
* Options interface for the show command
|
||||
*/
|
||||
export interface ShowCommandOptions {
|
||||
id?: string;
|
||||
status?: string;
|
||||
format?: 'text' | 'json';
|
||||
silent?: boolean;
|
||||
project?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type from show command
|
||||
*/
|
||||
export interface ShowTaskResult {
|
||||
task: Task | null;
|
||||
found: boolean;
|
||||
storageType: Exclude<StorageType, 'auto'>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type for multiple tasks
|
||||
*/
|
||||
export interface ShowMultipleTasksResult {
|
||||
tasks: Task[];
|
||||
notFound: string[];
|
||||
storageType: Exclude<StorageType, 'auto'>;
|
||||
}
|
||||
|
||||
/**
|
||||
* ShowCommand extending Commander's Command class
|
||||
* This is a thin presentation layer over @tm/core
|
||||
*/
|
||||
export class ShowCommand extends Command {
|
||||
private tmCore?: TaskMasterCore;
|
||||
private lastResult?: ShowTaskResult | ShowMultipleTasksResult;
|
||||
|
||||
constructor(name?: string) {
|
||||
super(name || 'show');
|
||||
|
||||
// Configure the command
|
||||
this.description('Display detailed information about one or more tasks')
|
||||
.argument('[id]', 'Task ID(s) to show (comma-separated for multiple)')
|
||||
.option(
|
||||
'-i, --id <id>',
|
||||
'Task ID(s) to show (comma-separated for multiple)'
|
||||
)
|
||||
.option('-s, --status <status>', 'Filter subtasks by status')
|
||||
.option('-f, --format <format>', 'Output format (text, json)', 'text')
|
||||
.option('--silent', 'Suppress output (useful for programmatic usage)')
|
||||
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||
.action(
|
||||
async (taskId: string | undefined, options: ShowCommandOptions) => {
|
||||
await this.executeCommand(taskId, options);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the show command
|
||||
*/
|
||||
private async executeCommand(
|
||||
taskId: string | undefined,
|
||||
options: ShowCommandOptions
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Validate options
|
||||
if (!this.validateOptions(options)) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Initialize tm-core
|
||||
await this.initializeCore(options.project || process.cwd());
|
||||
|
||||
// Get the task ID from argument or option
|
||||
const idArg = taskId || options.id;
|
||||
if (!idArg) {
|
||||
console.error(chalk.red('Error: Please provide a task ID'));
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check if multiple IDs are provided (comma-separated)
|
||||
const taskIds = idArg
|
||||
.split(',')
|
||||
.map((id) => id.trim())
|
||||
.filter((id) => id.length > 0);
|
||||
|
||||
// Get tasks from core
|
||||
const result =
|
||||
taskIds.length > 1
|
||||
? await this.getMultipleTasks(taskIds, options)
|
||||
: await this.getSingleTask(taskIds[0], options);
|
||||
|
||||
// Store result for programmatic access
|
||||
this.setLastResult(result);
|
||||
|
||||
// Display results
|
||||
if (!options.silent) {
|
||||
this.displayResults(result, options);
|
||||
}
|
||||
} catch (error: any) {
|
||||
const msg = error?.getSanitizedDetails?.() ?? {
|
||||
message: error?.message ?? String(error)
|
||||
};
|
||||
console.error(chalk.red(`Error: ${msg.message || 'Unexpected error'}`));
|
||||
if (error.stack && process.env.DEBUG) {
|
||||
console.error(chalk.gray(error.stack));
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate command options
|
||||
*/
|
||||
private validateOptions(options: ShowCommandOptions): boolean {
|
||||
// Validate format
|
||||
if (options.format && !['text', 'json'].includes(options.format)) {
|
||||
console.error(chalk.red(`Invalid format: ${options.format}`));
|
||||
console.error(chalk.gray(`Valid formats: text, json`));
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize TaskMasterCore
|
||||
*/
|
||||
private async initializeCore(projectRoot: string): Promise<void> {
|
||||
if (!this.tmCore) {
|
||||
this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single task from tm-core
|
||||
*/
|
||||
private async getSingleTask(
|
||||
taskId: string,
|
||||
_options: ShowCommandOptions
|
||||
): Promise<ShowTaskResult> {
|
||||
if (!this.tmCore) {
|
||||
throw new Error('TaskMasterCore not initialized');
|
||||
}
|
||||
|
||||
// Get the task
|
||||
const task = await this.tmCore.getTask(taskId);
|
||||
|
||||
// Get storage type
|
||||
const storageType = this.tmCore.getStorageType();
|
||||
|
||||
return {
|
||||
task,
|
||||
found: task !== null,
|
||||
storageType: storageType as Exclude<StorageType, 'auto'>
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
   * Get multiple tasks from tm-core
   */
  private async getMultipleTasks(
    taskIds: string[],
    _options: ShowCommandOptions
  ): Promise<ShowMultipleTasksResult> {
    if (!this.tmCore) {
      throw new Error('TaskMasterCore not initialized');
    }

    const tasks: Task[] = [];
    const notFound: string[] = [];

    // Get each task individually
    for (const taskId of taskIds) {
      const task = await this.tmCore.getTask(taskId);
      if (task) {
        tasks.push(task);
      } else {
        notFound.push(taskId);
      }
    }

    // Get storage type
    const storageType = this.tmCore.getStorageType();

    return {
      tasks,
      notFound,
      storageType: storageType as Exclude<StorageType, 'auto'>
    };
  }

  /**
   * Display results based on format
   */
  private displayResults(
    result: ShowTaskResult | ShowMultipleTasksResult,
    options: ShowCommandOptions
  ): void {
    const format = options.format || 'text';

    switch (format) {
      case 'json':
        this.displayJson(result);
        break;

      case 'text':
      default:
        if ('task' in result) {
          // Single task result
          this.displaySingleTask(result, options);
        } else {
          // Multiple tasks result
          this.displayMultipleTasks(result, options);
        }
        break;
    }
  }

  /**
   * Display in JSON format
   */
  private displayJson(result: ShowTaskResult | ShowMultipleTasksResult): void {
    console.log(JSON.stringify(result, null, 2));
  }

  /**
   * Display a single task in text format
   */
  private displaySingleTask(
    result: ShowTaskResult,
    options: ShowCommandOptions
  ): void {
    if (!result.found || !result.task) {
      console.log(
        boxen(chalk.yellow(`Task not found!`), {
          padding: { top: 0, bottom: 0, left: 1, right: 1 },
          borderColor: 'yellow',
          borderStyle: 'round',
          margin: { top: 1 }
        })
      );
      return;
    }

    const task = result.task;

    // Display header with tag
    displayTaskHeader(task.id, task.title);

    // Display task properties in table format
    displayTaskProperties(task);

    // Display implementation details if available
    if (task.details) {
      console.log(); // Empty line for spacing
      displayImplementationDetails(task.details);
    }

    // Display test strategy if available
    if ('testStrategy' in task && task.testStrategy) {
      console.log(); // Empty line for spacing
      displayTestStrategy(task.testStrategy as string);
    }

    // Display subtasks if available
    if (task.subtasks && task.subtasks.length > 0) {
      // Filter subtasks by status if provided
      const filteredSubtasks = options.status
        ? task.subtasks.filter((sub) => sub.status === options.status)
        : task.subtasks;

      if (filteredSubtasks.length === 0 && options.status) {
        console.log(
          chalk.gray(` No subtasks with status '${options.status}'`)
        );
      } else {
        displaySubtasks(filteredSubtasks, task.id);
      }
    }

    // Display suggested actions
    displaySuggestedActions(task.id);
  }

  /**
   * Display multiple tasks in text format
   */
  private displayMultipleTasks(
    result: ShowMultipleTasksResult,
    _options: ShowCommandOptions
  ): void {
    // Header
    ui.displayBanner(`Tasks (${result.tasks.length} found)`);

    if (result.notFound.length > 0) {
      console.log(chalk.yellow(`\n⚠ Not found: ${result.notFound.join(', ')}`));
    }

    if (result.tasks.length === 0) {
      ui.displayWarning('No tasks found matching the criteria.');
      return;
    }

    // Task table
    console.log(chalk.blue.bold(`\n📋 Tasks:\n`));
    console.log(
      ui.createTaskTable(result.tasks, {
        showSubtasks: true,
        showDependencies: true
      })
    );

    console.log(`\n${chalk.gray('Storage: ' + result.storageType)}`);
  }

  /**
   * Set the last result for programmatic access
   */
  private setLastResult(
    result: ShowTaskResult | ShowMultipleTasksResult
  ): void {
    this.lastResult = result;
  }

  /**
   * Get the last result (for programmatic usage)
   */
  getLastResult(): ShowTaskResult | ShowMultipleTasksResult | undefined {
    return this.lastResult;
  }

  /**
   * Clean up resources
   */
  async cleanup(): Promise<void> {
    if (this.tmCore) {
      await this.tmCore.close();
      this.tmCore = undefined;
    }
  }

  /**
   * Static method to register this command on an existing program
   * This is for gradual migration - allows commands.js to use this
   */
  static registerOn(program: Command): Command {
    const showCommand = new ShowCommand();
    program.addCommand(showCommand);
    return showCommand;
  }

  /**
   * Alternative registration that returns the command for chaining
   * Can also configure the command name if needed
   */
  static register(program: Command, name?: string): ShowCommand {
    const showCommand = new ShowCommand(name);
    program.addCommand(showCommand);
    return showCommand;
  }
}
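For readers following the gradual-migration note above, here is a minimal sketch of how these static registration helpers are typically consumed from a Commander program. The `tm` program name and the import path are illustrative assumptions, not taken from this diff.

```ts
import { Command } from 'commander';
import { ShowCommand } from './commands/show.command.js'; // assumed path

// Register the subcommand on an existing program and keep the instance
// so callers can read the parsed result programmatically afterwards.
const program = new Command('tm');
const show = ShowCommand.register(program, 'show');

await program.parseAsync(process.argv);
console.log(show.getLastResult()); // same data the command just displayed
```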
@@ -1,58 +0,0 @@
/**
 * @fileoverview Workflow Command
 * Main workflow command with subcommands
 */

import { Command } from 'commander';
import {
  WorkflowStartCommand,
  WorkflowListCommand,
  WorkflowStopCommand,
  WorkflowStatusCommand
} from './workflow/index.js';

/**
 * WorkflowCommand - Main workflow command with subcommands
 */
export class WorkflowCommand extends Command {
  constructor(name?: string) {
    super(name || 'workflow');

    this.description('Manage task execution workflows with git worktrees and Claude Code')
      .alias('wf');

    // Register subcommands
    this.addSubcommands();
  }

  private addSubcommands(): void {
    // Start workflow
    WorkflowStartCommand.register(this);

    // List workflows
    WorkflowListCommand.register(this);

    // Stop workflow
    WorkflowStopCommand.register(this);

    // Show workflow status
    WorkflowStatusCommand.register(this);

    // Alias commands for convenience
    this.addCommand(new WorkflowStartCommand('run')); // tm workflow run <task-id>
    this.addCommand(new WorkflowStopCommand('kill')); // tm workflow kill <workflow-id>
    this.addCommand(new WorkflowStatusCommand('info')); // tm workflow info <workflow-id>
  }

  /**
   * Static method to register this command on an existing program
   */
  static register(program: Command, name?: string): WorkflowCommand {
    const workflowCommand = new WorkflowCommand(name);
    program.addCommand(workflowCommand);
    return workflowCommand;
  }
}

export default WorkflowCommand;
@@ -1,9 +0,0 @@
/**
 * @fileoverview Workflow Commands
 * Exports for all workflow-related CLI commands
 */

export * from './workflow-start.command.js';
export * from './workflow-list.command.js';
export * from './workflow-stop.command.js';
export * from './workflow-status.command.js';
@@ -1,253 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow List Command
|
||||
* List active and recent workflow executions
|
||||
*/
|
||||
|
||||
import { Command } from 'commander';
|
||||
import chalk from 'chalk';
|
||||
import path from 'node:path';
|
||||
import {
|
||||
TaskExecutionManager,
|
||||
type TaskExecutionManagerConfig,
|
||||
type WorkflowExecutionContext
|
||||
} from '@tm/workflow-engine';
|
||||
import * as ui from '../../utils/ui.js';
|
||||
|
||||
export interface WorkflowListOptions {
|
||||
project?: string;
|
||||
status?: string;
|
||||
format?: 'text' | 'json' | 'compact';
|
||||
worktreeBase?: string;
|
||||
claude?: string;
|
||||
all?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowListCommand - List workflow executions
|
||||
*/
|
||||
export class WorkflowListCommand extends Command {
|
||||
private workflowManager?: TaskExecutionManager;
|
||||
|
||||
constructor(name?: string) {
|
||||
super(name || 'list');
|
||||
|
||||
this.description('List active and recent workflow executions')
|
||||
.alias('ls')
|
||||
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||
.option('-s, --status <status>', 'Filter by status (running, completed, failed, etc.)')
|
||||
.option('-f, --format <format>', 'Output format (text, json, compact)', 'text')
|
||||
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
|
||||
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||
.option('--all', 'Show all workflows including completed ones')
|
||||
.action(async (options: WorkflowListOptions) => {
|
||||
await this.executeCommand(options);
|
||||
});
|
||||
}
|
||||
|
||||
private async executeCommand(options: WorkflowListOptions): Promise<void> {
|
||||
try {
|
||||
// Initialize workflow manager
|
||||
await this.initializeWorkflowManager(options);
|
||||
|
||||
// Get workflows
|
||||
let workflows = this.workflowManager!.listWorkflows();
|
||||
|
||||
// Apply status filter
|
||||
if (options.status) {
|
||||
workflows = workflows.filter(w => w.status === options.status);
|
||||
}
|
||||
|
||||
// Apply active filter (default behavior)
|
||||
if (!options.all) {
|
||||
workflows = workflows.filter(w =>
|
||||
['pending', 'initializing', 'running', 'paused'].includes(w.status)
|
||||
);
|
||||
}
|
||||
|
||||
// Display results
|
||||
this.displayResults(workflows, options);
|
||||
|
||||
} catch (error: any) {
|
||||
ui.displayError(error.message || 'Failed to list workflows');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private async initializeWorkflowManager(options: WorkflowListOptions): Promise<void> {
|
||||
if (!this.workflowManager) {
|
||||
const projectRoot = options.project || process.cwd();
|
||||
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
|
||||
|
||||
const config: TaskExecutionManagerConfig = {
|
||||
projectRoot,
|
||||
maxConcurrent: 5,
|
||||
defaultTimeout: 60,
|
||||
worktreeBase,
|
||||
claudeExecutable: options.claude || 'claude',
|
||||
debug: false
|
||||
};
|
||||
|
||||
this.workflowManager = new TaskExecutionManager(config);
|
||||
await this.workflowManager.initialize();
|
||||
}
|
||||
}
|
||||
|
||||
private displayResults(workflows: WorkflowExecutionContext[], options: WorkflowListOptions): void {
|
||||
switch (options.format) {
|
||||
case 'json':
|
||||
this.displayJson(workflows);
|
||||
break;
|
||||
case 'compact':
|
||||
this.displayCompact(workflows);
|
||||
break;
|
||||
case 'text':
|
||||
default:
|
||||
this.displayText(workflows);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private displayJson(workflows: WorkflowExecutionContext[]): void {
|
||||
console.log(JSON.stringify({
|
||||
workflows: workflows.map(w => ({
|
||||
workflowId: `workflow-${w.taskId}`,
|
||||
taskId: w.taskId,
|
||||
taskTitle: w.taskTitle,
|
||||
status: w.status,
|
||||
worktreePath: w.worktreePath,
|
||||
branchName: w.branchName,
|
||||
processId: w.processId,
|
||||
startedAt: w.startedAt,
|
||||
lastActivity: w.lastActivity,
|
||||
metadata: w.metadata
|
||||
})),
|
||||
total: workflows.length,
|
||||
timestamp: new Date().toISOString()
|
||||
}, null, 2));
|
||||
}
|
||||
|
||||
private displayCompact(workflows: WorkflowExecutionContext[]): void {
|
||||
if (workflows.length === 0) {
|
||||
console.log(chalk.gray('No workflows found'));
|
||||
return;
|
||||
}
|
||||
|
||||
workflows.forEach(workflow => {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
const statusDisplay = this.getStatusDisplay(workflow.status);
|
||||
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
|
||||
|
||||
console.log(
|
||||
`${chalk.cyan(workflowId)} ${statusDisplay} ${workflow.taskTitle} ${chalk.gray(`(${duration})`)}`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
private displayText(workflows: WorkflowExecutionContext[]): void {
|
||||
ui.displayBanner('Active Workflows');
|
||||
|
||||
if (workflows.length === 0) {
|
||||
ui.displayWarning('No workflows found');
|
||||
console.log();
|
||||
console.log(chalk.blue('💡 Start a new workflow with:'));
|
||||
console.log(` ${chalk.cyan('tm workflow start <task-id>')}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Statistics
|
||||
console.log(chalk.blue.bold('\n📊 Statistics:\n'));
|
||||
const statusCounts = this.getStatusCounts(workflows);
|
||||
Object.entries(statusCounts).forEach(([status, count]) => {
|
||||
console.log(` ${this.getStatusDisplay(status)}: ${chalk.cyan(count)}`);
|
||||
});
|
||||
|
||||
// Workflows table
|
||||
console.log(chalk.blue.bold(`\n🔄 Workflows (${workflows.length}):\n`));
|
||||
|
||||
const tableData = workflows.map(workflow => {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
|
||||
|
||||
return [
|
||||
chalk.cyan(workflowId),
|
||||
chalk.yellow(workflow.taskId),
|
||||
workflow.taskTitle.substring(0, 30) + (workflow.taskTitle.length > 30 ? '...' : ''),
|
||||
this.getStatusDisplay(workflow.status),
|
||||
workflow.processId ? chalk.green(workflow.processId.toString()) : chalk.gray('N/A'),
|
||||
chalk.gray(duration),
|
||||
chalk.gray(path.basename(workflow.worktreePath))
|
||||
];
|
||||
});
|
||||
|
||||
console.log(ui.createTable(
|
||||
['Workflow ID', 'Task ID', 'Task Title', 'Status', 'PID', 'Duration', 'Worktree'],
|
||||
tableData
|
||||
));
|
||||
|
||||
// Running workflows actions
|
||||
const runningWorkflows = workflows.filter(w => w.status === 'running');
|
||||
if (runningWorkflows.length > 0) {
|
||||
console.log(chalk.blue.bold('\n🚀 Quick Actions:\n'));
|
||||
runningWorkflows.slice(0, 3).forEach(workflow => {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
console.log(` • Attach to ${chalk.cyan(workflowId)}: ${chalk.gray(`tm workflow attach ${workflowId}`)}`);
|
||||
});
|
||||
|
||||
if (runningWorkflows.length > 3) {
|
||||
console.log(` ${chalk.gray(`... and ${runningWorkflows.length - 3} more`)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private getStatusDisplay(status: string): string {
|
||||
const statusMap = {
|
||||
pending: { icon: '⏳', color: chalk.yellow },
|
||||
initializing: { icon: '🔄', color: chalk.blue },
|
||||
running: { icon: '🚀', color: chalk.green },
|
||||
paused: { icon: '⏸️', color: chalk.hex('#FFA500') }, // chalk has no .orange helper
|
||||
completed: { icon: '✅', color: chalk.green },
|
||||
failed: { icon: '❌', color: chalk.red },
|
||||
cancelled: { icon: '🛑', color: chalk.gray },
|
||||
timeout: { icon: '⏰', color: chalk.red }
|
||||
};
|
||||
|
||||
const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
|
||||
return `${statusInfo.icon} ${statusInfo.color(status)}`;
|
||||
}
|
||||
|
||||
private getStatusCounts(workflows: WorkflowExecutionContext[]): Record<string, number> {
|
||||
const counts: Record<string, number> = {};
|
||||
|
||||
workflows.forEach(workflow => {
|
||||
counts[workflow.status] = (counts[workflow.status] || 0) + 1;
|
||||
});
|
||||
|
||||
return counts;
|
||||
}
|
||||
|
||||
private formatDuration(start: Date, end: Date): string {
|
||||
const diff = end.getTime() - start.getTime();
|
||||
const minutes = Math.floor(diff / (1000 * 60));
|
||||
const hours = Math.floor(minutes / 60);
|
||||
|
||||
if (hours > 0) {
|
||||
return `${hours}h ${minutes % 60}m`;
|
||||
} else if (minutes > 0) {
|
||||
return `${minutes}m`;
|
||||
} else {
|
||||
return '<1m';
|
||||
}
|
||||
}
|
||||
|
||||
async cleanup(): Promise<void> {
|
||||
if (this.workflowManager) {
|
||||
this.workflowManager.removeAllListeners();
|
||||
}
|
||||
}
|
||||
|
||||
static register(program: Command, name?: string): WorkflowListCommand {
|
||||
const command = new WorkflowListCommand(name);
|
||||
program.addCommand(command);
|
||||
return command;
|
||||
}
|
||||
}
|
||||
@@ -1,239 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow Start Command
|
||||
* Start task execution in isolated worktree with Claude Code process
|
||||
*/
|
||||
|
||||
import { Command } from 'commander';
|
||||
import chalk from 'chalk';
|
||||
import path from 'node:path';
|
||||
import {
|
||||
createTaskMasterCore,
|
||||
type TaskMasterCore
|
||||
} from '@tm/core';
|
||||
import {
|
||||
TaskExecutionManager,
|
||||
type TaskExecutionManagerConfig
|
||||
} from '@tm/workflow-engine';
|
||||
import * as ui from '../../utils/ui.js';
|
||||
|
||||
export interface WorkflowStartOptions {
|
||||
project?: string;
|
||||
branch?: string;
|
||||
timeout?: string; // commander passes option values through as strings
|
||||
worktreeBase?: string;
|
||||
claude?: string;
|
||||
debug?: boolean;
|
||||
env?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowStartCommand - Start task execution workflow
|
||||
*/
|
||||
export class WorkflowStartCommand extends Command {
|
||||
private tmCore?: TaskMasterCore;
|
||||
private workflowManager?: TaskExecutionManager;
|
||||
|
||||
constructor(name?: string) {
|
||||
super(name || 'start');
|
||||
|
||||
this.description('Start task execution in isolated worktree')
|
||||
.argument('<task-id>', 'Task ID to execute')
|
||||
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||
.option('-b, --branch <name>', 'Custom branch name for worktree')
|
||||
.option('-t, --timeout <minutes>', 'Execution timeout in minutes', '60')
|
||||
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
|
||||
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||
.option('--debug', 'Enable debug logging')
|
||||
.option('--env <vars>', 'Environment variables (KEY=VALUE,KEY2=VALUE2)')
|
||||
.action(async (taskId: string, options: WorkflowStartOptions) => {
|
||||
await this.executeCommand(taskId, options);
|
||||
});
|
||||
}
|
||||
|
||||
private async executeCommand(taskId: string, options: WorkflowStartOptions): Promise<void> {
|
||||
try {
|
||||
// Initialize components
|
||||
await this.initializeCore(options.project || process.cwd());
|
||||
await this.initializeWorkflowManager(options);
|
||||
|
||||
// Get task details
|
||||
const task = await this.getTask(taskId);
|
||||
if (!task) {
|
||||
throw new Error(`Task ${taskId} not found`);
|
||||
}
|
||||
|
||||
// Check if task already has active workflow
|
||||
const existingWorkflow = this.workflowManager!.getWorkflowByTaskId(taskId);
|
||||
if (existingWorkflow) {
|
||||
ui.displayWarning(`Task ${taskId} already has an active workflow`);
|
||||
console.log(`Workflow ID: ${chalk.cyan('workflow-' + taskId)}`);
|
||||
console.log(`Status: ${this.getStatusDisplay(existingWorkflow.status)}`);
|
||||
console.log(`Worktree: ${chalk.gray(existingWorkflow.worktreePath)}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse environment variables
|
||||
const env = this.parseEnvironmentVariables(options.env);
|
||||
|
||||
// Display task info
|
||||
ui.displayBanner(`Starting Workflow for Task ${taskId}`);
|
||||
console.log(`${chalk.blue('Task:')} ${task.title}`);
|
||||
console.log(`${chalk.blue('Description:')} ${task.description}`);
|
||||
|
||||
if (task.dependencies?.length) {
|
||||
console.log(`${chalk.blue('Dependencies:')} ${task.dependencies.join(', ')}`);
|
||||
}
|
||||
|
||||
console.log(`${chalk.blue('Priority:')} ${task.priority || 'normal'}`);
|
||||
console.log();
|
||||
|
||||
// Start workflow
|
||||
ui.displaySpinner('Creating worktree and starting Claude Code process...');
|
||||
|
||||
const workflowId = await this.workflowManager!.startTaskExecution(task, {
|
||||
branchName: options.branch,
|
||||
timeout: parseInt(options.timeout || '60'),
|
||||
env
|
||||
});
|
||||
|
||||
const workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||
|
||||
ui.displaySuccess('Workflow started successfully!');
|
||||
console.log();
|
||||
console.log(`${chalk.green('✓')} Workflow ID: ${chalk.cyan(workflowId)}`);
|
||||
console.log(`${chalk.green('✓')} Worktree: ${chalk.gray(workflow?.worktreePath)}`);
|
||||
console.log(`${chalk.green('✓')} Branch: ${chalk.gray(workflow?.branchName)}`);
|
||||
console.log(`${chalk.green('✓')} Process ID: ${chalk.gray(workflow?.processId)}`);
|
||||
console.log();
|
||||
|
||||
// Display next steps
|
||||
console.log(chalk.blue.bold('📋 Next Steps:'));
|
||||
console.log(` • Monitor: ${chalk.cyan(`tm workflow status ${workflowId}`)}`);
|
||||
console.log(` • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
|
||||
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||
console.log();
|
||||
|
||||
// Setup event listeners for real-time updates
|
||||
this.setupEventListeners();
|
||||
|
||||
} catch (error: any) {
|
||||
ui.displayError(error.message || 'Failed to start workflow');
|
||||
|
||||
if (options.debug && error.stack) {
|
||||
console.error(chalk.gray(error.stack));
|
||||
}
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private async initializeCore(projectRoot: string): Promise<void> {
|
||||
if (!this.tmCore) {
|
||||
this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
|
||||
}
|
||||
}
|
||||
|
||||
private async initializeWorkflowManager(options: WorkflowStartOptions): Promise<void> {
|
||||
if (!this.workflowManager) {
|
||||
const projectRoot = options.project || process.cwd();
|
||||
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
|
||||
|
||||
const config: TaskExecutionManagerConfig = {
|
||||
projectRoot,
|
||||
maxConcurrent: 5,
|
||||
defaultTimeout: parseInt(options.timeout || '60'),
|
||||
worktreeBase,
|
||||
claudeExecutable: options.claude || 'claude',
|
||||
debug: options.debug || false
|
||||
};
|
||||
|
||||
this.workflowManager = new TaskExecutionManager(config);
|
||||
await this.workflowManager.initialize();
|
||||
}
|
||||
}
|
||||
|
||||
private async getTask(taskId: string) {
|
||||
if (!this.tmCore) {
|
||||
throw new Error('TaskMasterCore not initialized');
|
||||
}
|
||||
|
||||
const result = await this.tmCore.getTaskList({});
|
||||
return result.tasks.find(task => task.id === taskId);
|
||||
}
|
||||
|
||||
private parseEnvironmentVariables(envString?: string): Record<string, string> | undefined {
|
||||
if (!envString) return undefined;
|
||||
|
||||
const env: Record<string, string> = {};
|
||||
|
||||
for (const pair of envString.split(',')) {
|
||||
const [key, ...valueParts] = pair.trim().split('=');
|
||||
if (key && valueParts.length > 0) {
|
||||
env[key] = valueParts.join('=');
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(env).length > 0 ? env : undefined;
|
||||
}
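For clarity, a small standalone mirror of the `--env` parsing rule implemented above; the sample string is illustrative.

```ts
// Mirror of parseEnvironmentVariables: split pairs on ',', then split
// each pair on the FIRST '=' only, so values may themselves contain '='.
const raw = 'API_KEY=abc,DEBUG=true,EXTRA=a=b';
const env: Record<string, string> = {};
for (const pair of raw.split(',')) {
  const [key, ...valueParts] = pair.trim().split('=');
  if (key && valueParts.length > 0) {
    env[key] = valueParts.join('=');
  }
}
console.log(env); // { API_KEY: 'abc', DEBUG: 'true', EXTRA: 'a=b' }
```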
|
||||
|
||||
private getStatusDisplay(status: string): string {
|
||||
const colors = {
|
||||
pending: chalk.yellow,
|
||||
initializing: chalk.blue,
|
||||
running: chalk.green,
|
||||
paused: chalk.hex('#FFA500'), // chalk has no .orange helper
|
||||
completed: chalk.green,
|
||||
failed: chalk.red,
|
||||
cancelled: chalk.gray,
|
||||
timeout: chalk.red
|
||||
};
|
||||
|
||||
const color = colors[status as keyof typeof colors] || chalk.white;
|
||||
return color(status);
|
||||
}
|
||||
|
||||
private setupEventListeners(): void {
|
||||
if (!this.workflowManager) return;
|
||||
|
||||
this.workflowManager.on('workflow.started', (event) => {
|
||||
console.log(`${chalk.green('🚀')} Workflow started: ${event.workflowId}`);
|
||||
});
|
||||
|
||||
this.workflowManager.on('process.output', (event) => {
|
||||
if (event.data?.stream === 'stdout') {
|
||||
console.log(`${chalk.blue('[OUT]')} ${event.data.data.trim()}`);
|
||||
} else if (event.data?.stream === 'stderr') {
|
||||
console.log(`${chalk.red('[ERR]')} ${event.data.data.trim()}`);
|
||||
}
|
||||
});
|
||||
|
||||
this.workflowManager.on('workflow.completed', (event) => {
|
||||
console.log(`${chalk.green('✅')} Workflow completed: ${event.workflowId}`);
|
||||
});
|
||||
|
||||
this.workflowManager.on('workflow.failed', (event) => {
|
||||
console.log(`${chalk.red('❌')} Workflow failed: ${event.workflowId}`);
|
||||
if (event.error) {
|
||||
console.log(`${chalk.red('Error:')} ${event.error.message}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async cleanup(): Promise<void> {
|
||||
if (this.workflowManager) {
|
||||
// Don't cleanup workflows, just disconnect
|
||||
this.workflowManager.removeAllListeners();
|
||||
}
|
||||
|
||||
if (this.tmCore) {
|
||||
await this.tmCore.close();
|
||||
this.tmCore = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
static register(program: Command, name?: string): WorkflowStartCommand {
|
||||
const command = new WorkflowStartCommand(name);
|
||||
program.addCommand(command);
|
||||
return command;
|
||||
}
|
||||
}
|
||||
@@ -1,339 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow Status Command
|
||||
* Show detailed status of a specific workflow
|
||||
*/
|
||||
|
||||
import { Command } from 'commander';
|
||||
import chalk from 'chalk';
|
||||
import path from 'node:path';
|
||||
import {
|
||||
TaskExecutionManager,
|
||||
type TaskExecutionManagerConfig
|
||||
} from '@tm/workflow-engine';
|
||||
import * as ui from '../../utils/ui.js';
|
||||
|
||||
export interface WorkflowStatusOptions {
|
||||
project?: string;
|
||||
worktreeBase?: string;
|
||||
claude?: string;
|
||||
watch?: boolean;
|
||||
format?: 'text' | 'json';
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowStatusCommand - Show workflow execution status
|
||||
*/
|
||||
export class WorkflowStatusCommand extends Command {
|
||||
private workflowManager?: TaskExecutionManager;
|
||||
|
||||
constructor(name?: string) {
|
||||
super(name || 'status');
|
||||
|
||||
this.description('Show detailed status of a workflow execution')
|
||||
.argument('<workflow-id>', 'Workflow ID or task ID to check')
|
||||
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||
.option('--worktree-base <path>', 'Base directory for worktrees', '../task-worktrees')
|
||||
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||
.option('-w, --watch', 'Watch for status changes (refresh every 2 seconds)')
|
||||
.option('-f, --format <format>', 'Output format (text, json)', 'text')
|
||||
.action(async (workflowId: string, options: WorkflowStatusOptions) => {
|
||||
await this.executeCommand(workflowId, options);
|
||||
});
|
||||
}
|
||||
|
||||
private async executeCommand(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
|
||||
try {
|
||||
// Initialize workflow manager
|
||||
await this.initializeWorkflowManager(options);
|
||||
|
||||
if (options.watch) {
|
||||
await this.watchWorkflowStatus(workflowId, options);
|
||||
} else {
|
||||
await this.showWorkflowStatus(workflowId, options);
|
||||
}
|
||||
|
||||
} catch (error: any) {
|
||||
ui.displayError(error.message || 'Failed to get workflow status');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private async initializeWorkflowManager(options: WorkflowStatusOptions): Promise<void> {
|
||||
if (!this.workflowManager) {
|
||||
const projectRoot = options.project || process.cwd();
|
||||
const worktreeBase = path.resolve(projectRoot, options.worktreeBase || '../task-worktrees');
|
||||
|
||||
const config: TaskExecutionManagerConfig = {
|
||||
projectRoot,
|
||||
maxConcurrent: 5,
|
||||
defaultTimeout: 60,
|
||||
worktreeBase,
|
||||
claudeExecutable: options.claude || 'claude',
|
||||
debug: false
|
||||
};
|
||||
|
||||
this.workflowManager = new TaskExecutionManager(config);
|
||||
await this.workflowManager.initialize();
|
||||
}
|
||||
}
|
||||
|
||||
private async showWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
|
||||
// Try to find workflow by ID or task ID
|
||||
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||
|
||||
if (!workflow) {
|
||||
// Try as task ID
|
||||
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
|
||||
}
|
||||
|
||||
if (!workflow) {
|
||||
throw new Error(`Workflow not found: ${workflowId}`);
|
||||
}
|
||||
|
||||
if (options.format === 'json') {
|
||||
this.displayJsonStatus(workflow);
|
||||
} else {
|
||||
this.displayTextStatus(workflow);
|
||||
}
|
||||
}
|
||||
|
||||
private async watchWorkflowStatus(workflowId: string, options: WorkflowStatusOptions): Promise<void> {
|
||||
console.log(chalk.blue.bold('👀 Watching workflow status (Press Ctrl+C to exit)\n'));
|
||||
|
||||
let lastStatus = '';
|
||||
let updateCount = 0;
|
||||
|
||||
const updateStatus = async () => {
|
||||
try {
|
||||
// Clear screen and move cursor to top
|
||||
if (updateCount > 0) {
|
||||
process.stdout.write('\x1b[2J\x1b[0f');
|
||||
}
|
||||
|
||||
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||
|
||||
if (!workflow) {
|
||||
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
|
||||
}
|
||||
|
||||
if (!workflow) {
|
||||
console.log(chalk.red(`Workflow not found: ${workflowId}`));
|
||||
return;
|
||||
}
|
||||
|
||||
// Display header with timestamp
|
||||
console.log(chalk.blue.bold('👀 Watching Workflow Status'));
|
||||
console.log(chalk.gray(`Last updated: ${new Date().toLocaleTimeString()}\n`));
|
||||
|
||||
this.displayTextStatus(workflow);
|
||||
|
||||
// Check if workflow has ended
|
||||
if (['completed', 'failed', 'cancelled', 'timeout'].includes(workflow.status)) {
|
||||
console.log(chalk.yellow('\n⚠️ Workflow has ended. Stopping watch mode.'));
|
||||
return;
|
||||
}
|
||||
|
||||
updateCount++;
|
||||
|
||||
} catch (error) {
|
||||
console.error(chalk.red('Error updating status:'), error);
|
||||
}
|
||||
};
|
||||
|
||||
// Initial display
|
||||
await updateStatus();
|
||||
|
||||
// Setup interval for updates
|
||||
const interval = setInterval(updateStatus, 2000);
|
||||
|
||||
// Handle Ctrl+C
|
||||
process.on('SIGINT', () => {
|
||||
clearInterval(interval);
|
||||
console.log(chalk.yellow('\n👋 Stopped watching workflow status'));
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
// Keep the process alive
|
||||
await new Promise(() => {});
|
||||
}
|
||||
|
||||
private displayJsonStatus(workflow: any): void {
|
||||
const status = {
|
||||
workflowId: `workflow-${workflow.taskId}`,
|
||||
taskId: workflow.taskId,
|
||||
taskTitle: workflow.taskTitle,
|
||||
taskDescription: workflow.taskDescription,
|
||||
status: workflow.status,
|
||||
worktreePath: workflow.worktreePath,
|
||||
branchName: workflow.branchName,
|
||||
processId: workflow.processId,
|
||||
startedAt: workflow.startedAt,
|
||||
lastActivity: workflow.lastActivity,
|
||||
duration: this.calculateDuration(workflow.startedAt, workflow.lastActivity),
|
||||
metadata: workflow.metadata
|
||||
};
|
||||
|
||||
console.log(JSON.stringify(status, null, 2));
|
||||
}
|
||||
|
||||
private displayTextStatus(workflow: any): void {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
const duration = this.formatDuration(workflow.startedAt, workflow.lastActivity);
|
||||
|
||||
ui.displayBanner(`Workflow Status: ${workflowId}`);
|
||||
|
||||
// Basic information
|
||||
console.log(chalk.blue.bold('\n📋 Basic Information:\n'));
|
||||
console.log(` Workflow ID: ${chalk.cyan(workflowId)}`);
|
||||
console.log(` Task ID: ${chalk.cyan(workflow.taskId)}`);
|
||||
console.log(` Task Title: ${workflow.taskTitle}`);
|
||||
console.log(` Status: ${this.getStatusDisplay(workflow.status)}`);
|
||||
console.log(` Duration: ${chalk.gray(duration)}`);
|
||||
|
||||
// Task details
|
||||
if (workflow.taskDescription) {
|
||||
console.log(chalk.blue.bold('\n📝 Task Details:\n'));
|
||||
console.log(` ${workflow.taskDescription}`);
|
||||
}
|
||||
|
||||
// Process information
|
||||
console.log(chalk.blue.bold('\n⚙️ Process Information:\n'));
|
||||
console.log(` Process ID: ${workflow.processId ? chalk.green(workflow.processId) : chalk.gray('N/A')}`);
|
||||
console.log(` Worktree: ${chalk.gray(workflow.worktreePath)}`);
|
||||
console.log(` Branch: ${chalk.gray(workflow.branchName)}`);
|
||||
|
||||
// Timing information
|
||||
console.log(chalk.blue.bold('\n⏰ Timing:\n'));
|
||||
console.log(` Started: ${chalk.gray(workflow.startedAt.toLocaleString())}`);
|
||||
console.log(` Last Activity: ${chalk.gray(workflow.lastActivity.toLocaleString())}`);
|
||||
|
||||
// Metadata
|
||||
if (workflow.metadata && Object.keys(workflow.metadata).length > 0) {
|
||||
console.log(chalk.blue.bold('\n🔖 Metadata:\n'));
|
||||
Object.entries(workflow.metadata).forEach(([key, value]) => {
|
||||
console.log(` ${key}: ${chalk.gray(String(value))}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Status-specific information
|
||||
this.displayStatusSpecificInfo(workflow);
|
||||
|
||||
// Actions
|
||||
this.displayAvailableActions(workflow);
|
||||
}
|
||||
|
||||
private displayStatusSpecificInfo(workflow: any): void {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
|
||||
switch (workflow.status) {
|
||||
case 'running':
|
||||
console.log(chalk.blue.bold('\n🚀 Running Status:\n'));
|
||||
console.log(` ${chalk.green('●')} Process is actively executing`);
|
||||
console.log(` ${chalk.blue('ℹ')} Monitor output with: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
|
||||
break;
|
||||
|
||||
case 'paused':
|
||||
console.log(chalk.blue.bold('\n⏸️ Paused Status:\n'));
|
||||
console.log(` ${chalk.yellow('●')} Workflow is paused`);
|
||||
console.log(` ${chalk.blue('ℹ')} Resume with: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
|
||||
break;
|
||||
|
||||
case 'completed':
|
||||
console.log(chalk.blue.bold('\n✅ Completed Status:\n'));
|
||||
console.log(` ${chalk.green('●')} Workflow completed successfully`);
|
||||
console.log(` ${chalk.blue('ℹ')} Resources have been cleaned up`);
|
||||
break;
|
||||
|
||||
case 'failed':
|
||||
console.log(chalk.blue.bold('\n❌ Failed Status:\n'));
|
||||
console.log(` ${chalk.red('●')} Workflow execution failed`);
|
||||
console.log(` ${chalk.blue('ℹ')} Check logs for error details`);
|
||||
break;
|
||||
|
||||
case 'initializing':
|
||||
console.log(chalk.blue.bold('\n🔄 Initializing Status:\n'));
|
||||
console.log(` ${chalk.blue('●')} Setting up worktree and process`);
|
||||
console.log(` ${chalk.blue('ℹ')} This should complete shortly`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private displayAvailableActions(workflow: any): void {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
console.log(chalk.blue.bold('\n🎯 Available Actions:\n'));
|
||||
|
||||
switch (workflow.status) {
|
||||
case 'running':
|
||||
console.log(` • Attach: ${chalk.cyan(`tm workflow attach ${workflowId}`)}`);
|
||||
console.log(` • Pause: ${chalk.cyan(`tm workflow pause ${workflowId}`)}`);
|
||||
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||
break;
|
||||
|
||||
case 'paused':
|
||||
console.log(` • Resume: ${chalk.cyan(`tm workflow resume ${workflowId}`)}`);
|
||||
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||
break;
|
||||
|
||||
case 'pending':
|
||||
case 'initializing':
|
||||
console.log(` • Stop: ${chalk.cyan(`tm workflow stop ${workflowId}`)}`);
|
||||
break;
|
||||
|
||||
case 'completed':
|
||||
case 'failed':
|
||||
case 'cancelled':
|
||||
console.log(` • View logs: ${chalk.cyan(`tm workflow logs ${workflowId}`)}`);
|
||||
console.log(` • Start new: ${chalk.cyan(`tm workflow start ${workflow.taskId}`)}`);
|
||||
break;
|
||||
}
|
||||
|
||||
console.log(` • List all: ${chalk.cyan('tm workflow list')}`);
|
||||
}
|
||||
|
||||
private getStatusDisplay(status: string): string {
|
||||
const statusMap = {
|
||||
pending: { icon: '⏳', color: chalk.yellow },
|
||||
initializing: { icon: '🔄', color: chalk.blue },
|
||||
running: { icon: '🚀', color: chalk.green },
|
||||
paused: { icon: '⏸️', color: chalk.hex('#FFA500') }, // chalk has no .orange helper
|
||||
completed: { icon: '✅', color: chalk.green },
|
||||
failed: { icon: '❌', color: chalk.red },
|
||||
cancelled: { icon: '🛑', color: chalk.gray },
|
||||
timeout: { icon: '⏰', color: chalk.red }
|
||||
};
|
||||
|
||||
const statusInfo = statusMap[status as keyof typeof statusMap] || { icon: '❓', color: chalk.white };
|
||||
return `${statusInfo.icon} ${statusInfo.color(status)}`;
|
||||
}
|
||||
|
||||
private formatDuration(start: Date, end: Date): string {
|
||||
const diff = end.getTime() - start.getTime();
|
||||
const minutes = Math.floor(diff / (1000 * 60));
|
||||
const hours = Math.floor(minutes / 60);
|
||||
const seconds = Math.floor((diff % (1000 * 60)) / 1000);
|
||||
|
||||
if (hours > 0) {
|
||||
return `${hours}h ${minutes % 60}m ${seconds}s`;
|
||||
} else if (minutes > 0) {
|
||||
return `${minutes}m ${seconds}s`;
|
||||
} else {
|
||||
return `${seconds}s`;
|
||||
}
|
||||
}
|
||||
|
||||
private calculateDuration(start: Date, end: Date): number {
|
||||
return Math.floor((end.getTime() - start.getTime()) / 1000);
|
||||
}
|
||||
|
||||
async cleanup(): Promise<void> {
|
||||
if (this.workflowManager) {
|
||||
this.workflowManager.removeAllListeners();
|
||||
}
|
||||
}
|
||||
|
||||
static register(program: Command, name?: string): WorkflowStatusCommand {
|
||||
const command = new WorkflowStatusCommand(name);
|
||||
program.addCommand(command);
|
||||
return command;
|
||||
}
|
||||
}
|
||||
@@ -1,260 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow Stop Command
|
||||
* Stop and clean up workflow execution
|
||||
*/
|
||||
|
||||
import { Command } from 'commander';
|
||||
import chalk from 'chalk';
|
||||
import path from 'node:path';
|
||||
import {
|
||||
TaskExecutionManager,
|
||||
type TaskExecutionManagerConfig
|
||||
} from '@tm/workflow-engine';
|
||||
import * as ui from '../../utils/ui.js';
|
||||
|
||||
export interface WorkflowStopOptions {
|
||||
project?: string;
|
||||
worktreeBase?: string;
|
||||
claude?: string;
|
||||
force?: boolean;
|
||||
all?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowStopCommand - Stop workflow execution
|
||||
*/
|
||||
export class WorkflowStopCommand extends Command {
|
||||
private workflowManager?: TaskExecutionManager;
|
||||
|
||||
constructor(name?: string) {
|
||||
super(name || 'stop');
|
||||
|
||||
this.description('Stop workflow execution and clean up resources')
|
||||
.argument('[workflow-id]', 'Workflow ID to stop (or task ID)')
|
||||
.option('-p, --project <path>', 'Project root directory', process.cwd())
|
||||
.option(
|
||||
'--worktree-base <path>',
|
||||
'Base directory for worktrees',
|
||||
'../task-worktrees'
|
||||
)
|
||||
.option('--claude <path>', 'Claude Code executable path', 'claude')
|
||||
.option('-f, --force', 'Force stop (kill process immediately)')
|
||||
.option('--all', 'Stop all running workflows')
|
||||
.action(
|
||||
async (
|
||||
workflowId: string | undefined,
|
||||
options: WorkflowStopOptions
|
||||
) => {
|
||||
await this.executeCommand(workflowId, options);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
private async executeCommand(
|
||||
workflowId: string | undefined,
|
||||
options: WorkflowStopOptions
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Initialize workflow manager
|
||||
await this.initializeWorkflowManager(options);
|
||||
|
||||
if (options.all) {
|
||||
await this.stopAllWorkflows(options);
|
||||
} else if (workflowId) {
|
||||
await this.stopSingleWorkflow(workflowId, options);
|
||||
} else {
|
||||
ui.displayError('Please specify a workflow ID or use --all flag');
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error: any) {
|
||||
ui.displayError(error.message || 'Failed to stop workflow');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private async initializeWorkflowManager(
|
||||
options: WorkflowStopOptions
|
||||
): Promise<void> {
|
||||
if (!this.workflowManager) {
|
||||
const projectRoot = options.project || process.cwd();
|
||||
const worktreeBase = path.resolve(
|
||||
projectRoot,
|
||||
options.worktreeBase || '../task-worktrees'
|
||||
);
|
||||
|
||||
const config: TaskExecutionManagerConfig = {
|
||||
projectRoot,
|
||||
maxConcurrent: 5,
|
||||
defaultTimeout: 60,
|
||||
worktreeBase,
|
||||
claudeExecutable: options.claude || 'claude',
|
||||
debug: false
|
||||
};
|
||||
|
||||
this.workflowManager = new TaskExecutionManager(config);
|
||||
await this.workflowManager.initialize();
|
||||
}
|
||||
}
|
||||
|
||||
private async stopSingleWorkflow(
|
||||
workflowId: string,
|
||||
options: WorkflowStopOptions
|
||||
): Promise<void> {
|
||||
// Try to find workflow by ID or task ID
|
||||
let workflow = this.workflowManager!.getWorkflowStatus(workflowId);
|
||||
|
||||
if (!workflow) {
|
||||
// Try as task ID
|
||||
workflow = this.workflowManager!.getWorkflowByTaskId(workflowId);
|
||||
}
|
||||
|
||||
if (!workflow) {
|
||||
throw new Error(`Workflow not found: ${workflowId}`);
|
||||
}
|
||||
|
||||
const actualWorkflowId = `workflow-${workflow.taskId}`;
|
||||
|
||||
// Display workflow info
|
||||
console.log(chalk.blue.bold(`🛑 Stopping Workflow: ${actualWorkflowId}`));
|
||||
console.log(`${chalk.blue('Task:')} ${workflow.taskTitle}`);
|
||||
console.log(
|
||||
`${chalk.blue('Status:')} ${this.getStatusDisplay(workflow.status)}`
|
||||
);
|
||||
console.log(
|
||||
`${chalk.blue('Worktree:')} ${chalk.gray(workflow.worktreePath)}`
|
||||
);
|
||||
|
||||
if (workflow.processId) {
|
||||
console.log(
|
||||
`${chalk.blue('Process ID:')} ${chalk.gray(workflow.processId)}`
|
||||
);
|
||||
}
|
||||
|
||||
console.log();
|
||||
|
||||
// Confirm if not forced
|
||||
if (!options.force && ['running', 'paused'].includes(workflow.status)) {
|
||||
const shouldProceed = await ui.confirm(
|
||||
`Are you sure you want to stop this ${workflow.status} workflow?`
|
||||
);
|
||||
|
||||
if (!shouldProceed) {
|
||||
console.log(chalk.gray('Operation cancelled'));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Stop the workflow
|
||||
ui.displaySpinner('Stopping workflow and cleaning up resources...');
|
||||
|
||||
await this.workflowManager!.stopTaskExecution(
|
||||
actualWorkflowId,
|
||||
options.force
|
||||
);
|
||||
|
||||
ui.displaySuccess('Workflow stopped successfully!');
|
||||
console.log();
|
||||
console.log(`${chalk.green('✓')} Process terminated`);
|
||||
console.log(`${chalk.green('✓')} Worktree cleaned up`);
|
||||
console.log(`${chalk.green('✓')} State updated`);
|
||||
}
|
||||
|
||||
private async stopAllWorkflows(options: WorkflowStopOptions): Promise<void> {
|
||||
const workflows = this.workflowManager!.listWorkflows();
|
||||
const activeWorkflows = workflows.filter((w) =>
|
||||
['pending', 'initializing', 'running', 'paused'].includes(w.status)
|
||||
);
|
||||
|
||||
if (activeWorkflows.length === 0) {
|
||||
ui.displayWarning('No active workflows to stop');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
chalk.blue.bold(`🛑 Stopping ${activeWorkflows.length} Active Workflows`)
|
||||
);
|
||||
console.log();
|
||||
|
||||
// List workflows to be stopped
|
||||
activeWorkflows.forEach((workflow) => {
|
||||
console.log(
|
||||
` • ${chalk.cyan(`workflow-${workflow.taskId}`)} - ${workflow.taskTitle} ${this.getStatusDisplay(workflow.status)}`
|
||||
);
|
||||
});
|
||||
console.log();
|
||||
|
||||
// Confirm if not forced
|
||||
if (!options.force) {
|
||||
const shouldProceed = await ui.confirm(
|
||||
`Are you sure you want to stop all ${activeWorkflows.length} active workflows?`
|
||||
);
|
||||
|
||||
if (!shouldProceed) {
|
||||
console.log(chalk.gray('Operation cancelled'));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Stop all workflows
|
||||
ui.displaySpinner('Stopping all workflows...');
|
||||
|
||||
let stopped = 0;
|
||||
let failed = 0;
|
||||
|
||||
for (const workflow of activeWorkflows) {
|
||||
try {
|
||||
const workflowId = `workflow-${workflow.taskId}`;
|
||||
await this.workflowManager!.stopTaskExecution(
|
||||
workflowId,
|
||||
options.force
|
||||
);
|
||||
stopped++;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`${chalk.red('✗')} Failed to stop workflow ${workflow.taskId}: ${error}`
|
||||
);
|
||||
failed++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log();
|
||||
if (stopped > 0) {
|
||||
ui.displaySuccess(`Successfully stopped ${stopped} workflows`);
|
||||
}
|
||||
|
||||
if (failed > 0) {
|
||||
ui.displayWarning(`Failed to stop ${failed} workflows`);
|
||||
}
|
||||
}
|
||||
|
||||
private getStatusDisplay(status: string): string {
|
||||
const statusMap = {
|
||||
pending: { icon: '⏳', color: chalk.yellow },
|
||||
initializing: { icon: '🔄', color: chalk.blue },
|
||||
running: { icon: '🚀', color: chalk.green },
|
||||
paused: { icon: '⏸️', color: chalk.hex('#FFA500') },
|
||||
completed: { icon: '✅', color: chalk.green },
|
||||
failed: { icon: '❌', color: chalk.red },
|
||||
cancelled: { icon: '🛑', color: chalk.gray },
|
||||
timeout: { icon: '⏰', color: chalk.red }
|
||||
};
|
||||
|
||||
const statusInfo = statusMap[status as keyof typeof statusMap] || {
|
||||
icon: '❓',
|
||||
color: chalk.white
|
||||
};
|
||||
return `${statusInfo.icon} ${statusInfo.color(status)}`;
|
||||
}
|
||||
|
||||
async cleanup(): Promise<void> {
|
||||
if (this.workflowManager) {
|
||||
this.workflowManager.removeAllListeners();
|
||||
}
|
||||
}
|
||||
|
||||
static register(program: Command, name?: string): WorkflowStopCommand {
|
||||
const command = new WorkflowStopCommand(name);
|
||||
program.addCommand(command);
|
||||
return command;
|
||||
}
|
||||
}
|
||||
@@ -5,11 +5,9 @@

// Commands
export { ListTasksCommand } from './commands/list.command.js';
export { ShowCommand } from './commands/show.command.js';
export { AuthCommand } from './commands/auth.command.js';
export { WorkflowCommand } from './commands/workflow.command.js';

// Command registry
export { registerAllCommands } from './commands/index.js';
export { ContextCommand } from './commands/context.command.js';

// UI utilities (for other commands to use)
export * as ui from './utils/ui.js';
567 apps/cli/src/ui/components/dashboard.component.ts Normal file
@@ -0,0 +1,567 @@
|
||||
/**
|
||||
* @fileoverview Dashboard components for Task Master CLI
|
||||
* Displays project statistics and dependency information
|
||||
*/
|
||||
|
||||
import chalk from 'chalk';
|
||||
import boxen from 'boxen';
|
||||
import type { Task, TaskPriority } from '@tm/core/types';
|
||||
|
||||
/**
|
||||
* Statistics for task collection
|
||||
*/
|
||||
export interface TaskStatistics {
|
||||
total: number;
|
||||
done: number;
|
||||
inProgress: number;
|
||||
pending: number;
|
||||
blocked: number;
|
||||
deferred: number;
|
||||
cancelled: number;
|
||||
review?: number;
|
||||
completionPercentage: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Statistics for dependencies
|
||||
*/
|
||||
export interface DependencyStatistics {
|
||||
tasksWithNoDeps: number;
|
||||
tasksReadyToWork: number;
|
||||
tasksBlockedByDeps: number;
|
||||
mostDependedOnTaskId?: number;
|
||||
mostDependedOnCount?: number;
|
||||
avgDependenciesPerTask: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Next task information
|
||||
*/
|
||||
export interface NextTaskInfo {
|
||||
id: string | number;
|
||||
title: string;
|
||||
priority?: TaskPriority;
|
||||
dependencies?: (string | number)[];
|
||||
complexity?: number | string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Status breakdown for progress bars
|
||||
*/
|
||||
export interface StatusBreakdown {
|
||||
'in-progress'?: number;
|
||||
pending?: number;
|
||||
blocked?: number;
|
||||
deferred?: number;
|
||||
cancelled?: number;
|
||||
review?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a progress bar with color-coded status segments
|
||||
*/
|
||||
function createProgressBar(
|
||||
completionPercentage: number,
|
||||
width: number = 30,
|
||||
statusBreakdown?: StatusBreakdown
|
||||
): string {
|
||||
// If no breakdown provided, use simple green bar
|
||||
if (!statusBreakdown) {
|
||||
const filled = Math.round((completionPercentage / 100) * width);
|
||||
const empty = width - filled;
|
||||
return chalk.green('█').repeat(filled) + chalk.gray('░').repeat(empty);
|
||||
}
|
||||
|
||||
// Build the bar with different colored sections
|
||||
// Order matches the status display: Done, Cancelled, Deferred, In Progress, Review, Pending, Blocked
|
||||
let bar = '';
|
||||
let charsUsed = 0;
|
||||
|
||||
// 1. Green filled blocks for completed tasks (done)
|
||||
const completedChars = Math.round((completionPercentage / 100) * width);
|
||||
if (completedChars > 0) {
|
||||
bar += chalk.green('█').repeat(completedChars);
|
||||
charsUsed += completedChars;
|
||||
}
|
||||
|
||||
// 2. Gray filled blocks for cancelled (won't be done)
|
||||
if (statusBreakdown.cancelled && charsUsed < width) {
|
||||
const cancelledChars = Math.round(
|
||||
(statusBreakdown.cancelled / 100) * width
|
||||
);
|
||||
const actualChars = Math.min(cancelledChars, width - charsUsed);
|
||||
if (actualChars > 0) {
|
||||
bar += chalk.gray('█').repeat(actualChars);
|
||||
charsUsed += actualChars;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Gray filled blocks for deferred (won't be done now)
|
||||
if (statusBreakdown.deferred && charsUsed < width) {
|
||||
const deferredChars = Math.round((statusBreakdown.deferred / 100) * width);
|
||||
const actualChars = Math.min(deferredChars, width - charsUsed);
|
||||
if (actualChars > 0) {
|
||||
bar += chalk.gray('█').repeat(actualChars);
|
||||
charsUsed += actualChars;
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Blue filled blocks for in-progress (actively working)
|
||||
if (statusBreakdown['in-progress'] && charsUsed < width) {
|
||||
const inProgressChars = Math.round(
|
||||
(statusBreakdown['in-progress'] / 100) * width
|
||||
);
|
||||
const actualChars = Math.min(inProgressChars, width - charsUsed);
|
||||
if (actualChars > 0) {
|
||||
bar += chalk.blue('█').repeat(actualChars);
|
||||
charsUsed += actualChars;
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Magenta empty blocks for review (almost done)
|
||||
if (statusBreakdown.review && charsUsed < width) {
|
||||
const reviewChars = Math.round((statusBreakdown.review / 100) * width);
|
||||
const actualChars = Math.min(reviewChars, width - charsUsed);
|
||||
if (actualChars > 0) {
|
||||
bar += chalk.magenta('░').repeat(actualChars);
|
||||
charsUsed += actualChars;
|
||||
}
|
||||
}
|
||||
|
||||
// 6. Yellow empty blocks for pending (ready to start)
|
||||
if (statusBreakdown.pending && charsUsed < width) {
|
||||
const pendingChars = Math.round((statusBreakdown.pending / 100) * width);
|
||||
const actualChars = Math.min(pendingChars, width - charsUsed);
|
||||
if (actualChars > 0) {
|
||||
bar += chalk.yellow('░').repeat(actualChars);
|
||||
charsUsed += actualChars;
|
||||
}
|
||||
}
|
||||
|
||||
// 7. Red empty blocks for blocked (can't start yet)
|
||||
if (statusBreakdown.blocked && charsUsed < width) {
|
||||
const blockedChars = Math.round((statusBreakdown.blocked / 100) * width);
|
||||
const actualChars = Math.min(blockedChars, width - charsUsed);
|
||||
if (actualChars > 0) {
|
||||
bar += chalk.red('░').repeat(actualChars);
|
||||
charsUsed += actualChars;
|
||||
}
|
||||
}
|
||||
|
||||
// Fill any remaining space with yellow empty blocks (counted as pending)
|
||||
if (charsUsed < width) {
|
||||
bar += chalk.yellow('░').repeat(width - charsUsed);
|
||||
}
|
||||
|
||||
return bar;
|
||||
}
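As an illustration of how the segments above compose, the call below assumes `createProgressBar` were exported; as written it is module-private, so this is a sketch only.

```ts
// 50% done, 20% in progress, 30% pending on a 30-character bar.
const bar = createProgressBar(50, 30, { 'in-progress': 20, pending: 30 });
// Segments round to 15 green '█', 6 blue '█', and 9 yellow '░' — 30 chars total.
console.log(bar);
```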
|
||||
|
||||
/**
|
||||
* Calculate task statistics from a list of tasks
|
||||
*/
|
||||
export function calculateTaskStatistics(tasks: Task[]): TaskStatistics {
|
||||
const stats: TaskStatistics = {
|
||||
total: tasks.length,
|
||||
done: 0,
|
||||
inProgress: 0,
|
||||
pending: 0,
|
||||
blocked: 0,
|
||||
deferred: 0,
|
||||
cancelled: 0,
|
||||
review: 0,
|
||||
completionPercentage: 0
|
||||
};
|
||||
|
||||
tasks.forEach((task) => {
|
||||
switch (task.status) {
|
||||
case 'done':
|
||||
stats.done++;
|
||||
break;
|
||||
case 'in-progress':
|
||||
stats.inProgress++;
|
||||
break;
|
||||
case 'pending':
|
||||
stats.pending++;
|
||||
break;
|
||||
case 'blocked':
|
||||
stats.blocked++;
|
||||
break;
|
||||
case 'deferred':
|
||||
stats.deferred++;
|
||||
break;
|
||||
case 'cancelled':
|
||||
stats.cancelled++;
|
||||
break;
|
||||
case 'review':
|
||||
stats.review = (stats.review || 0) + 1;
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
stats.completionPercentage =
|
||||
stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0;
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate subtask statistics from tasks
|
||||
*/
|
||||
export function calculateSubtaskStatistics(tasks: Task[]): TaskStatistics {
|
||||
const stats: TaskStatistics = {
|
||||
total: 0,
|
||||
done: 0,
|
||||
inProgress: 0,
|
||||
pending: 0,
|
||||
blocked: 0,
|
||||
deferred: 0,
|
||||
cancelled: 0,
|
||||
review: 0,
|
||||
completionPercentage: 0
|
||||
};
|
||||
|
||||
tasks.forEach((task) => {
|
||||
if (task.subtasks && task.subtasks.length > 0) {
|
||||
task.subtasks.forEach((subtask) => {
|
||||
stats.total++;
|
||||
switch (subtask.status) {
|
||||
case 'done':
|
||||
stats.done++;
|
||||
break;
|
||||
case 'in-progress':
|
||||
stats.inProgress++;
|
||||
break;
|
||||
case 'pending':
|
||||
stats.pending++;
|
||||
break;
|
||||
case 'blocked':
|
||||
stats.blocked++;
|
||||
break;
|
||||
case 'deferred':
|
||||
stats.deferred++;
|
||||
break;
|
||||
case 'cancelled':
|
||||
stats.cancelled++;
|
||||
break;
|
||||
case 'review':
|
||||
stats.review = (stats.review || 0) + 1;
|
||||
break;
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
stats.completionPercentage =
|
||||
stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0;
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate dependency statistics
|
||||
*/
|
||||
export function calculateDependencyStatistics(
|
||||
tasks: Task[]
|
||||
): DependencyStatistics {
|
||||
const completedTaskIds = new Set(
|
||||
tasks.filter((t) => t.status === 'done').map((t) => t.id)
|
||||
);
|
||||
|
||||
const tasksWithNoDeps = tasks.filter(
|
||||
(t) =>
|
||||
t.status !== 'done' && (!t.dependencies || t.dependencies.length === 0)
|
||||
).length;
|
||||
|
||||
const tasksWithAllDepsSatisfied = tasks.filter(
|
||||
(t) =>
|
||||
t.status !== 'done' &&
|
||||
t.dependencies &&
|
||||
t.dependencies.length > 0 &&
|
||||
t.dependencies.every((depId) => completedTaskIds.has(depId))
|
||||
).length;
|
||||
|
||||
const tasksBlockedByDeps = tasks.filter(
|
||||
(t) =>
|
||||
t.status !== 'done' &&
|
||||
t.dependencies &&
|
||||
t.dependencies.length > 0 &&
|
||||
!t.dependencies.every((depId) => completedTaskIds.has(depId))
|
||||
).length;
|
||||
|
||||
// Calculate most depended-on task
|
||||
const dependencyCount: Record<string, number> = {};
|
||||
tasks.forEach((task) => {
|
||||
if (task.dependencies && task.dependencies.length > 0) {
|
||||
task.dependencies.forEach((depId) => {
|
||||
const key = String(depId);
|
||||
dependencyCount[key] = (dependencyCount[key] || 0) + 1;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
let mostDependedOnTaskId: number | undefined;
|
||||
let mostDependedOnCount = 0;
|
||||
|
||||
for (const [taskId, count] of Object.entries(dependencyCount)) {
|
||||
if (count > mostDependedOnCount) {
|
||||
mostDependedOnCount = count;
|
||||
mostDependedOnTaskId = parseInt(taskId);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate average dependencies
|
||||
const totalDependencies = tasks.reduce(
|
||||
(sum, task) => sum + (task.dependencies ? task.dependencies.length : 0),
|
||||
0
|
||||
);
|
||||
const avgDependenciesPerTask =
|
||||
tasks.length > 0 ? totalDependencies / tasks.length : 0;
|
||||
|
||||
return {
|
||||
tasksWithNoDeps,
|
||||
tasksReadyToWork: tasksWithNoDeps + tasksWithAllDepsSatisfied,
|
||||
tasksBlockedByDeps,
|
||||
mostDependedOnTaskId,
|
||||
mostDependedOnCount,
|
||||
avgDependenciesPerTask
|
||||
};
|
||||
}
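A quick worked example of the dependency metrics above; the task fields are trimmed to what the calculation reads, and the import path is an assumption for this sketch.

```ts
import type { Task } from '@tm/core/types';
import { calculateDependencyStatistics } from './dashboard.component.js'; // assumed path

// Hypothetical three-task list: task 1 is done, task 2 depends on it,
// task 3 depends on the unfinished task 2. The cast is for illustration only.
const tasks = [
  { id: 1, status: 'done', dependencies: [] },
  { id: 2, status: 'pending', dependencies: [1] },
  { id: 3, status: 'pending', dependencies: [2] }
] as unknown as Task[];

const depStats = calculateDependencyStatistics(tasks);
// tasksReadyToWork === 1   (task 2: its only dependency is done)
// tasksBlockedByDeps === 1 (task 3 waits on unfinished task 2)
// mostDependedOnTaskId === 1, avgDependenciesPerTask ≈ 0.67
```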
|
||||
|
||||
/**
|
||||
* Get priority counts
|
||||
*/
|
||||
export function getPriorityBreakdown(
|
||||
tasks: Task[]
|
||||
): Record<TaskPriority, number> {
	const breakdown: Record<TaskPriority, number> = {
		critical: 0,
		high: 0,
		medium: 0,
		low: 0
	};

	tasks.forEach((task) => {
		const priority = task.priority || 'medium';
		breakdown[priority]++;
	});

	return breakdown;
}

/**
 * Calculate status breakdown as percentages
 */
function calculateStatusBreakdown(stats: TaskStatistics): StatusBreakdown {
	if (stats.total === 0) return {};

	return {
		'in-progress': (stats.inProgress / stats.total) * 100,
		pending: (stats.pending / stats.total) * 100,
		blocked: (stats.blocked / stats.total) * 100,
		deferred: (stats.deferred / stats.total) * 100,
		cancelled: (stats.cancelled / stats.total) * 100,
		review: ((stats.review || 0) / stats.total) * 100
	};
}

/**
 * Format status counts in the correct order with colors
 * @param stats - The statistics object containing counts
 * @param isSubtask - Whether this is for subtasks (affects "Done" vs "Completed" label)
 */
function formatStatusLine(
	stats: TaskStatistics,
	isSubtask: boolean = false
): string {
	const parts: string[] = [];

	// Order: Done, Cancelled, Deferred, In Progress, Review, Pending, Blocked
	if (isSubtask) {
		parts.push(`Completed: ${chalk.green(`${stats.done}/${stats.total}`)}`);
	} else {
		parts.push(`Done: ${chalk.green(stats.done)}`);
	}

	parts.push(`Cancelled: ${chalk.gray(stats.cancelled)}`);
	parts.push(`Deferred: ${chalk.gray(stats.deferred)}`);

	// Add line break for second row
	const firstLine = parts.join(' ');
	parts.length = 0;

	parts.push(`In Progress: ${chalk.blue(stats.inProgress)}`);
	parts.push(`Review: ${chalk.magenta(stats.review || 0)}`);
	parts.push(`Pending: ${chalk.yellow(stats.pending)}`);
	parts.push(`Blocked: ${chalk.red(stats.blocked)}`);

	const secondLine = parts.join(' ');

	return firstLine + '\n' + secondLine;
}

/**
 * Display the project dashboard box
 */
export function displayProjectDashboard(
	taskStats: TaskStatistics,
	subtaskStats: TaskStatistics,
	priorityBreakdown: Record<TaskPriority, number>
): string {
	// Calculate status breakdowns using the helper function
	const taskStatusBreakdown = calculateStatusBreakdown(taskStats);
	const subtaskStatusBreakdown = calculateStatusBreakdown(subtaskStats);

	// Create progress bars with the breakdowns
	const taskProgressBar = createProgressBar(
		taskStats.completionPercentage,
		30,
		taskStatusBreakdown
	);
	const subtaskProgressBar = createProgressBar(
		subtaskStats.completionPercentage,
		30,
		subtaskStatusBreakdown
	);

	const taskPercentage = `${taskStats.completionPercentage}% ${taskStats.done}/${taskStats.total}`;
	const subtaskPercentage = `${subtaskStats.completionPercentage}% ${subtaskStats.done}/${subtaskStats.total}`;

	const content =
		chalk.white.bold('Project Dashboard') +
		'\n' +
		`Tasks Progress: ${taskProgressBar} ${chalk.yellow(taskPercentage)}\n` +
		formatStatusLine(taskStats, false) +
		'\n\n' +
		`Subtasks Progress: ${subtaskProgressBar} ${chalk.cyan(subtaskPercentage)}\n` +
		formatStatusLine(subtaskStats, true) +
		'\n\n' +
		chalk.cyan.bold('Priority Breakdown:') +
		'\n' +
		`${chalk.red('•')} ${chalk.white('High priority:')} ${priorityBreakdown.high}\n` +
		`${chalk.yellow('•')} ${chalk.white('Medium priority:')} ${priorityBreakdown.medium}\n` +
		`${chalk.green('•')} ${chalk.white('Low priority:')} ${priorityBreakdown.low}`;

	return content;
}

/**
 * Display the dependency dashboard box
 */
export function displayDependencyDashboard(
	depStats: DependencyStatistics,
	nextTask?: NextTaskInfo
): string {
	const content =
		chalk.white.bold('Dependency Status & Next Task') +
		'\n' +
		chalk.cyan.bold('Dependency Metrics:') +
		'\n' +
		`${chalk.green('•')} ${chalk.white('Tasks with no dependencies:')} ${depStats.tasksWithNoDeps}\n` +
		`${chalk.green('•')} ${chalk.white('Tasks ready to work on:')} ${depStats.tasksReadyToWork}\n` +
		`${chalk.yellow('•')} ${chalk.white('Tasks blocked by dependencies:')} ${depStats.tasksBlockedByDeps}\n` +
		`${chalk.magenta('•')} ${chalk.white('Most depended-on task:')} ${
			depStats.mostDependedOnTaskId
				? chalk.cyan(
						`#${depStats.mostDependedOnTaskId} (${depStats.mostDependedOnCount} dependents)`
					)
				: chalk.gray('None')
		}\n` +
		`${chalk.blue('•')} ${chalk.white('Avg dependencies per task:')} ${depStats.avgDependenciesPerTask.toFixed(1)}\n\n` +
		chalk.cyan.bold('Next Task to Work On:') +
		'\n' +
		`ID: ${nextTask ? chalk.cyan(String(nextTask.id)) : chalk.gray('N/A')} - ${
			nextTask
				? chalk.white.bold(nextTask.title)
				: chalk.yellow('No task available')
		}\n` +
		`Priority: ${nextTask?.priority || chalk.gray('N/A')} Dependencies: ${
			nextTask?.dependencies?.length
				? chalk.cyan(nextTask.dependencies.join(', '))
				: chalk.gray('None')
		}\n` +
		`Complexity: ${nextTask?.complexity || chalk.gray('N/A')}`;

	return content;
}

/**
 * Display dashboard boxes side by side or stacked
 */
export function displayDashboards(
	taskStats: TaskStatistics,
	subtaskStats: TaskStatistics,
	priorityBreakdown: Record<TaskPriority, number>,
	depStats: DependencyStatistics,
	nextTask?: NextTaskInfo
): void {
	const projectDashboardContent = displayProjectDashboard(
		taskStats,
		subtaskStats,
		priorityBreakdown
	);
	const dependencyDashboardContent = displayDependencyDashboard(
		depStats,
		nextTask
	);

	// Get terminal width
	const terminalWidth = process.stdout.columns || 80;
	const minDashboardWidth = 50;
	const minDependencyWidth = 50;
	const totalMinWidth = minDashboardWidth + minDependencyWidth + 4;

	// If terminal is wide enough, show side by side
	if (terminalWidth >= totalMinWidth) {
		const halfWidth = Math.floor(terminalWidth / 2);
		const boxContentWidth = halfWidth - 4;

		const dashboardBox = boxen(projectDashboardContent, {
			padding: 1,
			borderColor: 'blue',
			borderStyle: 'round',
			width: boxContentWidth,
			dimBorder: false
		});

		const dependencyBox = boxen(dependencyDashboardContent, {
			padding: 1,
			borderColor: 'magenta',
			borderStyle: 'round',
			width: boxContentWidth,
			dimBorder: false
		});

		// Create side-by-side layout
		const dashboardLines = dashboardBox.split('\n');
		const dependencyLines = dependencyBox.split('\n');
		const maxHeight = Math.max(dashboardLines.length, dependencyLines.length);

		const combinedLines = [];
		for (let i = 0; i < maxHeight; i++) {
			const dashLine = i < dashboardLines.length ? dashboardLines[i] : '';
			const depLine = i < dependencyLines.length ? dependencyLines[i] : '';
			const paddedDashLine = dashLine.padEnd(halfWidth, ' ');
			combinedLines.push(paddedDashLine + depLine);
		}

		console.log(combinedLines.join('\n'));
	} else {
		// Show stacked vertically
		const dashboardBox = boxen(projectDashboardContent, {
			padding: 1,
			borderColor: 'blue',
			borderStyle: 'round',
			margin: { top: 0, bottom: 1 }
		});

		const dependencyBox = boxen(dependencyDashboardContent, {
			padding: 1,
			borderColor: 'magenta',
			borderStyle: 'round',
			margin: { top: 0, bottom: 1 }
		});

		console.log(dashboardBox);
		console.log(dependencyBox);
	}
}
45 apps/cli/src/ui/components/header.component.ts Normal file

@@ -0,0 +1,45 @@
/**
 * @fileoverview Task Master header component
 * Displays the banner, version, project info, and file path
 */

import chalk from 'chalk';

/**
 * Header configuration options
 */
export interface HeaderOptions {
	title?: string;
	tag?: string;
	filePath?: string;
}

/**
 * Display the Task Master header with project info
 */
export function displayHeader(options: HeaderOptions = {}): void {
	const { filePath, tag } = options;

	// Display tag and file path info
	if (tag) {
		let tagInfo = '';

		if (tag && tag !== 'master') {
			tagInfo = `🏷 tag: ${chalk.cyan(tag)}`;
		} else {
			tagInfo = `🏷 tag: ${chalk.cyan('master')}`;
		}

		console.log(tagInfo);

		if (filePath) {
			// Convert to absolute path if it's relative
			const absolutePath = filePath.startsWith('/')
				? filePath
				: `${process.cwd()}/${filePath}`;
			console.log(`Listing tasks from: ${chalk.dim(absolutePath)}`);
		}

		console.log(); // Empty line for spacing
	}
}
9 apps/cli/src/ui/components/index.ts Normal file

@@ -0,0 +1,9 @@
/**
 * @fileoverview UI components exports
 */

export * from './header.component.js';
export * from './dashboard.component.js';
export * from './next-task.component.js';
export * from './suggested-steps.component.js';
export * from './task-detail.component.js';
134
apps/cli/src/ui/components/next-task.component.ts
Normal file
134
apps/cli/src/ui/components/next-task.component.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
/**
|
||||
* @fileoverview Next task recommendation component
|
||||
* Displays detailed information about the recommended next task
|
||||
*/
|
||||
|
||||
import chalk from 'chalk';
|
||||
import boxen from 'boxen';
|
||||
import type { Task } from '@tm/core/types';
|
||||
|
||||
/**
|
||||
* Next task display options
|
||||
*/
|
||||
export interface NextTaskDisplayOptions {
|
||||
id: string | number;
|
||||
title: string;
|
||||
priority?: string;
|
||||
status?: string;
|
||||
dependencies?: (string | number)[];
|
||||
description?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Display the recommended next task section
|
||||
*/
|
||||
export function displayRecommendedNextTask(
|
||||
task: NextTaskDisplayOptions | undefined
|
||||
): void {
|
||||
if (!task) {
|
||||
// If no task available, show a message
|
||||
console.log(
|
||||
boxen(
|
||||
chalk.yellow(
|
||||
'No tasks available to work on. All tasks are either completed, blocked by dependencies, or in progress.'
|
||||
),
|
||||
{
|
||||
padding: 1,
|
||||
borderStyle: 'round',
|
||||
borderColor: 'yellow',
|
||||
title: '⚠ NO TASKS AVAILABLE ⚠',
|
||||
titleAlignment: 'center'
|
||||
}
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Build the content for the next task box
|
||||
const content = [];
|
||||
|
||||
// Task header with ID and title
|
||||
content.push(
|
||||
`🔥 ${chalk.hex('#FF8800').bold('Next Task to Work On:')} ${chalk.yellow(`#${task.id}`)}${chalk.hex('#FF8800').bold(` - ${task.title}`)}`
|
||||
);
|
||||
content.push('');
|
||||
|
||||
// Priority and Status line
|
||||
const statusLine = [];
|
||||
if (task.priority) {
|
||||
const priorityColor =
|
||||
task.priority === 'high'
|
||||
? chalk.red
|
||||
: task.priority === 'medium'
|
||||
? chalk.yellow
|
||||
: chalk.gray;
|
||||
statusLine.push(`Priority: ${priorityColor.bold(task.priority)}`);
|
||||
}
|
||||
if (task.status) {
|
||||
const statusDisplay =
|
||||
task.status === 'pending'
|
||||
? chalk.yellow('○ pending')
|
||||
: task.status === 'in-progress'
|
||||
? chalk.blue('▶ in-progress')
|
||||
: chalk.gray(task.status);
|
||||
statusLine.push(`Status: ${statusDisplay}`);
|
||||
}
|
||||
content.push(statusLine.join(' '));
|
||||
|
||||
// Dependencies
|
||||
const depsDisplay =
|
||||
!task.dependencies || task.dependencies.length === 0
|
||||
? chalk.gray('None')
|
||||
: chalk.cyan(task.dependencies.join(', '));
|
||||
content.push(`Dependencies: ${depsDisplay}`);
|
||||
|
||||
// Description if available
|
||||
if (task.description) {
|
||||
content.push('');
|
||||
content.push(`Description: ${chalk.white(task.description)}`);
|
||||
}
|
||||
|
||||
// Action commands
|
||||
content.push('');
|
||||
content.push(
|
||||
`${chalk.cyan('Start working:')} ${chalk.yellow(`task-master set-status --id=${task.id} --status=in-progress`)}`
|
||||
);
|
||||
content.push(
|
||||
`${chalk.cyan('View details:')} ${chalk.yellow(`task-master show ${task.id}`)}`
|
||||
);
|
||||
|
||||
// Display in a styled box with orange border
|
||||
console.log(
|
||||
boxen(content.join('\n'), {
|
||||
padding: 1,
|
||||
margin: { top: 1, bottom: 1 },
|
||||
borderStyle: 'round',
|
||||
borderColor: '#FFA500', // Orange color
|
||||
title: chalk.hex('#FFA500')('⚡ RECOMMENDED NEXT TASK ⚡'),
|
||||
titleAlignment: 'center',
|
||||
width: process.stdout.columns * 0.97,
|
||||
fullscreen: false
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get task description from the full task object
|
||||
*/
|
||||
export function getTaskDescription(task: Task): string | undefined {
|
||||
// Try to get description from the task
|
||||
// This could be from task.description or the first line of task.details
|
||||
if ('description' in task && task.description) {
|
||||
return task.description as string;
|
||||
}
|
||||
|
||||
if ('details' in task && task.details) {
|
||||
// Take first sentence or line from details
|
||||
const details = task.details as string;
|
||||
const firstLine = details.split('\n')[0];
|
||||
const firstSentence = firstLine.split('.')[0];
|
||||
return firstSentence;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
31 apps/cli/src/ui/components/suggested-steps.component.ts Normal file

@@ -0,0 +1,31 @@
/**
 * @fileoverview Suggested next steps component
 * Displays helpful command suggestions at the end of the list
 */

import chalk from 'chalk';
import boxen from 'boxen';

/**
 * Display suggested next steps section
 */
export function displaySuggestedNextSteps(): void {
	const steps = [
		`${chalk.cyan('1.')} Run ${chalk.yellow('task-master next')} to see what to work on next`,
		`${chalk.cyan('2.')} Run ${chalk.yellow('task-master expand --id=<id>')} to break down a task into subtasks`,
		`${chalk.cyan('3.')} Run ${chalk.yellow('task-master set-status --id=<id> --status=done')} to mark a task as complete`
	];

	console.log(
		boxen(
			chalk.white.bold('Suggested Next Steps:') + '\n\n' + steps.join('\n'),
			{
				padding: 1,
				margin: { top: 0, bottom: 1 },
				borderStyle: 'round',
				borderColor: 'gray',
				width: process.stdout.columns * 0.97
			}
		)
	);
}
264
apps/cli/src/ui/components/task-detail.component.ts
Normal file
264
apps/cli/src/ui/components/task-detail.component.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
/**
|
||||
* @fileoverview Task detail component for show command
|
||||
* Displays detailed task information in a structured format
|
||||
*/
|
||||
|
||||
import chalk from 'chalk';
|
||||
import boxen from 'boxen';
|
||||
import Table from 'cli-table3';
|
||||
import { marked, MarkedExtension } from 'marked';
|
||||
import { markedTerminal } from 'marked-terminal';
|
||||
import type { Task } from '@tm/core/types';
|
||||
import { getStatusWithColor, getPriorityWithColor } from '../../utils/ui.js';
|
||||
|
||||
// Configure marked to use terminal renderer with subtle colors
|
||||
marked.use(
|
||||
markedTerminal({
|
||||
// More subtle colors that match the overall design
|
||||
code: (code: string) => {
|
||||
// Custom code block handler to preserve formatting
|
||||
return code
|
||||
.split('\n')
|
||||
.map((line) => ' ' + chalk.cyan(line))
|
||||
.join('\n');
|
||||
},
|
||||
blockquote: chalk.gray.italic,
|
||||
html: chalk.gray,
|
||||
heading: chalk.white.bold, // White bold for headings
|
||||
hr: chalk.gray,
|
||||
listitem: chalk.white, // White for list items
|
||||
paragraph: chalk.white, // White for paragraphs (default text color)
|
||||
strong: chalk.white.bold, // White bold for strong text
|
||||
em: chalk.white.italic, // White italic for emphasis
|
||||
codespan: chalk.cyan, // Cyan for inline code (no background)
|
||||
del: chalk.dim.strikethrough,
|
||||
link: chalk.blue,
|
||||
href: chalk.blue.underline,
|
||||
// Add more explicit code block handling
|
||||
showSectionPrefix: false,
|
||||
unescape: true,
|
||||
emoji: false,
|
||||
// Try to preserve whitespace in code blocks
|
||||
tab: 4,
|
||||
width: 120
|
||||
}) as MarkedExtension
|
||||
);
|
||||
|
||||
// Also set marked options to preserve whitespace
|
||||
marked.setOptions({
|
||||
breaks: true,
|
||||
gfm: true
|
||||
});
|
||||
|
||||
/**
|
||||
* Display the task header with tag
|
||||
*/
|
||||
export function displayTaskHeader(
|
||||
taskId: string | number,
|
||||
title: string
|
||||
): void {
|
||||
// Display task header box
|
||||
console.log(
|
||||
boxen(chalk.white.bold(`Task: #${taskId} - ${title}`), {
|
||||
padding: { top: 0, bottom: 0, left: 1, right: 1 },
|
||||
borderColor: 'blue',
|
||||
borderStyle: 'round'
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Display task properties in a table format
|
||||
*/
|
||||
export function displayTaskProperties(task: Task): void {
|
||||
const terminalWidth = process.stdout.columns * 0.95 || 100;
|
||||
// Create table for task properties - simple 2-column layout
|
||||
const table = new Table({
|
||||
head: [],
|
||||
style: {
|
||||
head: [],
|
||||
border: ['grey']
|
||||
},
|
||||
colWidths: [
|
||||
Math.floor(terminalWidth * 0.2),
|
||||
Math.floor(terminalWidth * 0.8)
|
||||
],
|
||||
wordWrap: true
|
||||
});
|
||||
|
||||
const deps =
|
||||
task.dependencies && task.dependencies.length > 0
|
||||
? task.dependencies.map((d) => String(d)).join(', ')
|
||||
: 'None';
|
||||
|
||||
// Build the left column (labels) and right column (values)
|
||||
const labels = [
|
||||
chalk.cyan('ID:'),
|
||||
chalk.cyan('Title:'),
|
||||
chalk.cyan('Status:'),
|
||||
chalk.cyan('Priority:'),
|
||||
chalk.cyan('Dependencies:'),
|
||||
chalk.cyan('Complexity:'),
|
||||
chalk.cyan('Description:')
|
||||
].join('\n');
|
||||
|
||||
const values = [
|
||||
String(task.id),
|
||||
task.title,
|
||||
getStatusWithColor(task.status),
|
||||
getPriorityWithColor(task.priority),
|
||||
deps,
|
||||
'N/A',
|
||||
task.description || ''
|
||||
].join('\n');
|
||||
|
||||
table.push([labels, values]);
|
||||
|
||||
console.log(table.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Display implementation details in a box
|
||||
*/
|
||||
export function displayImplementationDetails(details: string): void {
|
||||
// Handle all escaped characters properly
|
||||
const cleanDetails = details
|
||||
.replace(/\\n/g, '\n') // Convert \n to actual newlines
|
||||
.replace(/\\t/g, '\t') // Convert \t to actual tabs
|
||||
.replace(/\\"/g, '"') // Convert \" to actual quotes
|
||||
.replace(/\\\\/g, '\\'); // Convert \\ to single backslash
|
||||
|
||||
const terminalWidth = process.stdout.columns * 0.95 || 100;
|
||||
|
||||
// Parse markdown to terminal-friendly format
|
||||
const markdownResult = marked(cleanDetails);
|
||||
const formattedDetails =
|
||||
typeof markdownResult === 'string' ? markdownResult.trim() : cleanDetails; // Fallback to original if Promise
|
||||
|
||||
console.log(
|
||||
boxen(
|
||||
chalk.white.bold('Implementation Details:') + '\n\n' + formattedDetails,
|
||||
{
|
||||
padding: 1,
|
||||
borderStyle: 'round',
|
||||
borderColor: 'cyan', // Changed to cyan to match the original
|
||||
width: terminalWidth // Fixed width to match the original
|
||||
}
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Display test strategy in a box
|
||||
*/
|
||||
export function displayTestStrategy(testStrategy: string): void {
|
||||
// Handle all escaped characters properly (same as implementation details)
|
||||
const cleanStrategy = testStrategy
|
||||
.replace(/\\n/g, '\n') // Convert \n to actual newlines
|
||||
.replace(/\\t/g, '\t') // Convert \t to actual tabs
|
||||
.replace(/\\"/g, '"') // Convert \" to actual quotes
|
||||
.replace(/\\\\/g, '\\'); // Convert \\ to single backslash
|
||||
|
||||
const terminalWidth = process.stdout.columns * 0.95 || 100;
|
||||
|
||||
// Parse markdown to terminal-friendly format (same as implementation details)
|
||||
const markdownResult = marked(cleanStrategy);
|
||||
const formattedStrategy =
|
||||
typeof markdownResult === 'string' ? markdownResult.trim() : cleanStrategy; // Fallback to original if Promise
|
||||
|
||||
console.log(
|
||||
boxen(chalk.white.bold('Test Strategy:') + '\n\n' + formattedStrategy, {
|
||||
padding: 1,
|
||||
borderStyle: 'round',
|
||||
borderColor: 'cyan', // Changed to cyan to match implementation details
|
||||
width: terminalWidth
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Display subtasks in a table format
|
||||
*/
|
||||
export function displaySubtasks(
|
||||
subtasks: Array<{
|
||||
id: string | number;
|
||||
title: string;
|
||||
status: any;
|
||||
description?: string;
|
||||
dependencies?: string[];
|
||||
}>,
|
||||
parentId: string | number
|
||||
): void {
|
||||
const terminalWidth = process.stdout.columns * 0.95 || 100;
|
||||
// Display subtasks header
|
||||
console.log(
|
||||
boxen(chalk.magenta.bold('Subtasks'), {
|
||||
padding: { top: 0, bottom: 0, left: 1, right: 1 },
|
||||
borderColor: 'magenta',
|
||||
borderStyle: 'round',
|
||||
margin: { top: 1, bottom: 0 }
|
||||
})
|
||||
);
|
||||
|
||||
// Create subtasks table
|
||||
const table = new Table({
|
||||
head: [
|
||||
chalk.magenta.bold('ID'),
|
||||
chalk.magenta.bold('Status'),
|
||||
chalk.magenta.bold('Title'),
|
||||
chalk.magenta.bold('Deps')
|
||||
],
|
||||
style: {
|
||||
head: [],
|
||||
border: ['grey']
|
||||
},
|
||||
colWidths: [
|
||||
Math.floor(terminalWidth * 0.1),
|
||||
Math.floor(terminalWidth * 0.15),
|
||||
Math.floor(terminalWidth * 0.6),
|
||||
Math.floor(terminalWidth * 0.15)
|
||||
],
|
||||
wordWrap: true
|
||||
});
|
||||
|
||||
subtasks.forEach((subtask) => {
|
||||
const subtaskId = `${parentId}.${subtask.id}`;
|
||||
|
||||
// Format dependencies
|
||||
const deps =
|
||||
subtask.dependencies && subtask.dependencies.length > 0
|
||||
? subtask.dependencies.join(', ')
|
||||
: 'None';
|
||||
|
||||
table.push([
|
||||
subtaskId,
|
||||
getStatusWithColor(subtask.status),
|
||||
subtask.title,
|
||||
deps
|
||||
]);
|
||||
});
|
||||
|
||||
console.log(table.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Display suggested actions
|
||||
*/
|
||||
export function displaySuggestedActions(taskId: string | number): void {
|
||||
console.log(
|
||||
boxen(
|
||||
chalk.white.bold('Suggested Actions:') +
|
||||
'\n\n' +
|
||||
`${chalk.cyan('1.')} Run ${chalk.yellow(`task-master set-status --id=${taskId} --status=in-progress`)} to start working\n` +
|
||||
`${chalk.cyan('2.')} Run ${chalk.yellow(`task-master expand --id=${taskId}`)} to break down into subtasks\n` +
|
||||
`${chalk.cyan('3.')} Run ${chalk.yellow(`task-master update-task --id=${taskId} --prompt="..."`)} to update details`,
|
||||
{
|
||||
padding: 1,
|
||||
margin: { top: 1 },
|
||||
borderStyle: 'round',
|
||||
borderColor: 'green',
|
||||
width: process.stdout.columns * 0.95 || 100
|
||||
}
|
||||
)
|
||||
);
|
||||
}
|
||||
9 apps/cli/src/ui/index.ts Normal file

@@ -0,0 +1,9 @@
/**
 * @fileoverview Main UI exports
 */

// Export all components
export * from './components/index.js';

// Re-export existing UI utilities
export * from '../utils/ui.js';
@@ -6,7 +6,7 @@
import chalk from 'chalk';
import boxen from 'boxen';
import Table from 'cli-table3';
import type { Task, TaskStatus, TaskPriority } from '@tm/core';
import type { Task, TaskStatus, TaskPriority } from '@tm/core/types';

/**
|
||||
* Get colored status display with ASCII icons (matches scripts/modules/ui.js style)
|
||||
@@ -18,19 +18,44 @@ export function getStatusWithColor(
|
||||
const statusConfig = {
|
||||
done: {
|
||||
color: chalk.green,
|
||||
icon: String.fromCharCode(8730),
|
||||
tableIcon: String.fromCharCode(8730)
|
||||
}, // √
|
||||
pending: { color: chalk.yellow, icon: 'o', tableIcon: 'o' },
|
||||
icon: '✓',
|
||||
tableIcon: '✓'
|
||||
},
|
||||
pending: {
|
||||
color: chalk.yellow,
|
||||
icon: '○',
|
||||
tableIcon: '○'
|
||||
},
|
||||
'in-progress': {
|
||||
color: chalk.hex('#FFA500'),
|
||||
icon: String.fromCharCode(9654),
|
||||
tableIcon: '>'
|
||||
}, // ▶
|
||||
deferred: { color: chalk.gray, icon: 'x', tableIcon: 'x' },
|
||||
blocked: { color: chalk.red, icon: '!', tableIcon: '!' },
|
||||
review: { color: chalk.magenta, icon: '?', tableIcon: '?' },
|
||||
cancelled: { color: chalk.gray, icon: 'X', tableIcon: 'X' }
|
||||
icon: '▶',
|
||||
tableIcon: '▶'
|
||||
},
|
||||
deferred: {
|
||||
color: chalk.gray,
|
||||
icon: 'x',
|
||||
tableIcon: 'x'
|
||||
},
|
||||
review: {
|
||||
color: chalk.magenta,
|
||||
icon: '?',
|
||||
tableIcon: '?'
|
||||
},
|
||||
cancelled: {
|
||||
color: chalk.gray,
|
||||
icon: 'x',
|
||||
tableIcon: 'x'
|
||||
},
|
||||
blocked: {
|
||||
color: chalk.red,
|
||||
icon: '!',
|
||||
tableIcon: '!'
|
||||
},
|
||||
completed: {
|
||||
color: chalk.green,
|
||||
icon: '✓',
|
||||
tableIcon: '✓'
|
||||
}
|
||||
};
|
||||
|
||||
const config = statusConfig[status] || {
|
||||
@@ -39,18 +64,7 @@ export function getStatusWithColor(
|
||||
tableIcon: 'X'
|
||||
};
|
||||
|
||||
// Use simple ASCII characters for stable display
|
||||
const simpleIcons = {
|
||||
done: String.fromCharCode(8730), // √
|
||||
pending: 'o',
|
||||
'in-progress': '>',
|
||||
deferred: 'x',
|
||||
blocked: '!',
|
||||
review: '?',
|
||||
cancelled: 'X'
|
||||
};
|
||||
|
||||
const icon = forTable ? simpleIcons[status] || 'X' : config.icon;
|
||||
const icon = forTable ? config.tableIcon : config.icon;
|
||||
return config.color(`${icon} ${status}`);
|
||||
}
|
||||
|
||||
@@ -245,10 +259,24 @@ export function createTaskTable(
|
||||
} = options || {};
|
||||
|
||||
// Calculate dynamic column widths based on terminal width
|
||||
const terminalWidth = process.stdout.columns || 100;
|
||||
const terminalWidth = process.stdout.columns * 0.9 || 100;
|
||||
// Adjust column widths to better match the original layout
|
||||
const baseColWidths = showComplexity
|
||||
? [8, Math.floor(terminalWidth * 0.35), 18, 12, 15, 12] // ID, Title, Status, Priority, Dependencies, Complexity
|
||||
: [8, Math.floor(terminalWidth * 0.4), 18, 12, 20]; // ID, Title, Status, Priority, Dependencies
|
||||
? [
|
||||
Math.floor(terminalWidth * 0.06),
|
||||
Math.floor(terminalWidth * 0.4),
|
||||
Math.floor(terminalWidth * 0.15),
|
||||
Math.floor(terminalWidth * 0.12),
|
||||
Math.floor(terminalWidth * 0.2),
|
||||
Math.floor(terminalWidth * 0.12)
|
||||
] // ID, Title, Status, Priority, Dependencies, Complexity
|
||||
: [
|
||||
Math.floor(terminalWidth * 0.08),
|
||||
Math.floor(terminalWidth * 0.4),
|
||||
Math.floor(terminalWidth * 0.18),
|
||||
Math.floor(terminalWidth * 0.12),
|
||||
Math.floor(terminalWidth * 0.2)
|
||||
]; // ID, Title, Status, Priority, Dependencies
|
||||
|
||||
const headers = [
|
||||
chalk.blue.bold('ID'),
|
||||
@@ -284,11 +312,19 @@ export function createTaskTable(
|
||||
];
|
||||
|
||||
if (showDependencies) {
|
||||
row.push(formatDependenciesWithStatus(task.dependencies, tasks));
|
||||
// For table display, show simple format without status icons
|
||||
if (!task.dependencies || task.dependencies.length === 0) {
|
||||
row.push(chalk.gray('None'));
|
||||
} else {
|
||||
row.push(
|
||||
chalk.cyan(task.dependencies.map((d) => String(d)).join(', '))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (showComplexity && 'complexity' in task) {
|
||||
row.push(getComplexityWithColor(task.complexity as number | string));
|
||||
if (showComplexity) {
|
||||
// Show N/A if no complexity score
|
||||
row.push(chalk.gray('N/A'));
|
||||
}
|
||||
|
||||
table.push(row);
|
||||
@@ -324,61 +360,3 @@ export function createTaskTable(
|
||||
|
||||
return table.toString();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Display a spinner with message (mock implementation)
|
||||
*/
|
||||
export function displaySpinner(message: string): void {
|
||||
console.log(chalk.blue('◐'), chalk.gray(message));
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple confirmation prompt
|
||||
*/
|
||||
export async function confirm(message: string): Promise<boolean> {
|
||||
// For now, return true. In a real implementation, use inquirer
|
||||
console.log(chalk.yellow('?'), chalk.white(message), chalk.gray('(y/n)'));
|
||||
|
||||
// Mock implementation - in production this would use inquirer
|
||||
return new Promise((resolve) => {
|
||||
process.stdin.once('data', (data) => {
|
||||
const answer = data.toString().trim().toLowerCase();
|
||||
resolve(answer === 'y' || answer === 'yes');
|
||||
});
|
||||
process.stdin.resume();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a generic table
|
||||
*/
|
||||
export function createTable(headers: string[], rows: string[][]): string {
|
||||
const table = new Table({
|
||||
head: headers.map(h => chalk.blue.bold(h)),
|
||||
style: {
|
||||
head: [],
|
||||
border: ['gray']
|
||||
},
|
||||
chars: {
|
||||
'top': '─',
|
||||
'top-mid': '┬',
|
||||
'top-left': '┌',
|
||||
'top-right': '┐',
|
||||
'bottom': '─',
|
||||
'bottom-mid': '┴',
|
||||
'bottom-left': '└',
|
||||
'bottom-right': '┘',
|
||||
'left': '│',
|
||||
'left-mid': '├',
|
||||
'mid': '─',
|
||||
'mid-mid': '┼',
|
||||
'right': '│',
|
||||
'right-mid': '┤',
|
||||
'middle': '│'
|
||||
}
|
||||
});
|
||||
|
||||
rows.forEach(row => table.push(row));
|
||||
return table.toString();
|
||||
}
|
||||
|
||||
@@ -1,27 +1,36 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "ESNext",
|
||||
"module": "NodeNext",
|
||||
"lib": ["ES2022"],
|
||||
"moduleResolution": "bundler",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"esModuleInterop": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
"baseUrl": ".",
|
||||
"rootDir": "./src",
|
||||
"resolveJsonModule": true,
|
||||
"allowJs": false,
|
||||
"strict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"strictFunctionTypes": true,
|
||||
"strictBindCallApply": true,
|
||||
"strictPropertyInitialization": true,
|
||||
"noImplicitThis": true,
|
||||
"alwaysStrict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noImplicitReturns": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"types": ["node"]
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "NodeNext",
|
||||
"moduleDetection": "force",
|
||||
"types": ["node"],
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"allowImportingTsExtensions": false
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist", "tests"]
|
||||
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]
|
||||
}
|
||||
|
||||
@@ -1,15 +0,0 @@
import { defineConfig } from 'tsup';

export default defineConfig({
	entry: ['src/index.ts'],
	format: ['esm'],
	target: 'node18',
	splitting: false,
	sourcemap: true,
	clean: true,
	dts: true,
	shims: true,
	esbuildOptions(options) {
		options.platform = 'node';
	}
});
@@ -1,3 +1,5 @@
# docs

## 0.0.2

## 0.0.1

@@ -128,6 +128,23 @@ You can reorganize tasks in various ways:
- Moving a task to a new ID position: `--from=5 --to=25` (even if task 25 doesn't exist yet)
- Moving multiple tasks at once: `--from=10,11,12 --to=16,17,18` (the lists must contain the same number of IDs; Taskmaster maps each source ID to the destination ID in the same position)

#### Cross-Tag Move Safety (v0.26.0+)

For moves between different tags, use the enhanced dependency handling:

```bash
# Move with all dependencies (safest)
task-master move --from=5 --to=3 --with-dependencies --tag-from=feature --tag-to=main

# Move ignoring dependencies (shows safety tips)
task-master move --from=5 --to=3 --ignore-dependencies --tag-from=feature --tag-to=main
```

**Enhanced Safety Features:**
- **Next Steps Guidance**: After cross-tag moves with `--ignore-dependencies`, the CLI prints "Next Steps" tips with validation and fix guidance (see the example after this list)
- **ID Collision Help**: A dedicated help block appears when the destination tag already contains the target ID, with structured suggestions
- **Better Error Messages**: Task collision errors now include actionable suggestions for resolution

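One possible follow-up after a move that ignored dependencies is sketched below, assuming the destination tag from the example above (`main`); the exact tips the CLI prints may differ between versions:

```bash
# Switch to the destination tag, then check and repair dependencies
task-master use-tag main
task-master validate-dependencies   # report dependency references that are now invalid
task-master fix-dependencies        # remove or repair broken dependency references
```
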
When moving tasks to new IDs:

- The system automatically creates placeholder tasks for non-existent destination IDs

@@ -83,9 +83,40 @@ Taskmaster uses two primary methods for configuration:
- `VERTEX_PROJECT_ID`: Your Google Cloud project ID for Vertex AI. Required when using the 'vertex' provider.
- `VERTEX_LOCATION`: Google Cloud region for Vertex AI (e.g., 'us-central1'). Default is 'us-central1'.
- `GOOGLE_APPLICATION_CREDENTIALS`: Path to service account credentials JSON file for Google Cloud auth (alternative to API key for Vertex AI).
- `TASKMASTER_ENABLE_CODEBASE_ANALYSIS`: Control codebase analysis features (Claude Code and Gemini CLI integration). Set to 'false' to disable. Default: enabled.

**Important:** Settings like model ID selections (`main`, `research`, `fallback`), `maxTokens`, `temperature`, `logLevel`, `defaultSubtasks`, `defaultPriority`, and `projectName` are **managed in `.taskmaster/config.json`** (or `.taskmasterconfig` for unmigrated projects), not environment variables.

## Codebase Analysis Configuration

Task Master includes advanced codebase analysis features that enhance task generation by analyzing your project structure, existing implementations, and patterns. This is particularly useful when integrated with Claude Code or when using the Gemini CLI provider.

### Configuration Options

**Environment Variable Control (Priority Order: `.env` > MCP session env > `.taskmaster/config.json`)**

```bash
# In .env file
TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false # Disable codebase analysis features
```

When enabled, the following commands benefit from codebase analysis (a per-invocation override is sketched after this list):

- `parse-prd`: Generates tasks informed by project structure
- `analyze-complexity`: Considers existing codebase complexity
- `add-task`: Creates tasks aligned with project patterns
- `update-task`: Updates tasks with relevant codebase context
- `update-subtask`: Enhances subtasks based on implementation details
- `expand`: Generates subtasks informed by existing code patterns

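Because the flag is read from the environment, you can also flip it for a single run with ordinary shell syntax. This is a minimal sketch using standard shell behavior rather than a Task Master-specific feature, and it assumes the variable is not already pinned in your `.env`, which takes precedence per the priority order above; the PRD path and prompt are illustrative only:

```bash
# Disable codebase analysis for just this invocation
TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false task-master add-task --prompt="Add a health-check endpoint"

# Explicitly enable it for a one-off PRD parse
TASKMASTER_ENABLE_CODEBASE_ANALYSIS=true task-master parse-prd .taskmaster/docs/prd.txt
```
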
### Integration with Claude Code

When Task Master detects it's running within Claude Code, it automatically provides codebase context to generate more accurate and relevant task details. This feature can be disabled using the environment variable above.

### Gemini CLI Provider Enhancement

The Gemini CLI provider includes automatic codebase analysis when generating or updating tasks, resulting in more contextually accurate outputs aligned with your project's architecture.

## Tagged Task Lists Configuration (v0.17+)

Taskmaster includes a tagged task lists system for multi-context task management.
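A few representative tag commands are sketched below. They illustrate the tagged-lists workflow at a high level; the command and flag names are an assumption here, so check `task-master --help` for the exact set in your installed version:

```bash
# Create a tag for a feature branch and switch the active context to it
task-master add-tag feature-auth
task-master use-tag feature-auth

# List tasks in the active tag, or in an explicit one
task-master list
task-master list --tag=master
```
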
@@ -147,6 +178,9 @@ PERPLEXITY_API_KEY=pplx-your-key-here

# Google Vertex AI Configuration (Required if using 'vertex' provider)
# VERTEX_PROJECT_ID=your-gcp-project-id

# Feature Control
# TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false # Disable codebase analysis features
```

## Troubleshooting

@@ -206,4 +206,25 @@ sidebarTitle: "CLI Commands"
task-master init
```
</Accordion>

<Accordion title="Move Tasks">
```bash
# Move a task or subtask to a new position
task-master move --from=<id> --to=<id>

# Move multiple tasks at once (same number of IDs required)
task-master move --from=1,2,3 --to=7,8,9

# Cross-tag moves with dependency handling
task-master move --from=5 --to=3 --with-dependencies --tag-from=feature --tag-to=main
task-master move --from=5 --to=3 --ignore-dependencies --tag-from=feature --tag-to=main

# Note: --force flag is deprecated, use --with-dependencies or --ignore-dependencies instead
```

**Cross-tag Move Safety Features:**
- CLI shows "Next Steps" guidance after moves that ignore dependencies
- Dedicated help is displayed for ID collisions with structured suggestions
- Better error handling with actionable suggestions for resolution
</Accordion>
</AccordionGroup>

@@ -1,14 +1,13 @@
{
	"name": "docs",
	"version": "0.0.1",
	"version": "0.0.2",
	"private": true,
	"description": "Task Master documentation powered by Mintlify",
	"scripts": {
		"dev": "mintlify dev",
		"build": "mintlify build",
		"preview": "mintlify preview"
	},
	"devDependencies": {
		"mintlify": "^4.0.0"
		"mintlify": "^4.2.111"
	}
}

@@ -1,5 +1,12 @@
# Change Log

## 0.24.2

### Patch Changes

- Updated dependencies [[`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f), [`df26c65`](https://github.com/eyaltoledano/claude-task-master/commit/df26c65632000874a73504963b08f18c46283144), [`37af0f1`](https://github.com/eyaltoledano/claude-task-master/commit/37af0f191227a68d119b7f89a377bf932ee3ac66), [`c4f92f6`](https://github.com/eyaltoledano/claude-task-master/commit/c4f92f6a0aee3435c56eb8d27d9aa9204284833e), [`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f), [`4dad2fd`](https://github.com/eyaltoledano/claude-task-master/commit/4dad2fd613ceac56a65ae9d3c1c03092b8860ac9)]:
  - task-master-ai@0.26.0

## 0.24.2-rc.1

### Patch Changes

@@ -103,8 +103,8 @@ async function main() {
			// This prevents the multiple React instances issue
			// Ensure React is resolved from the workspace root to avoid duplicates
			alias: {
				react: path.resolve(__dirname, 'node_modules/react'),
				'react-dom': path.resolve(__dirname, 'node_modules/react-dom')
				react: path.resolve(__dirname, '../../node_modules/react'),
				'react-dom': path.resolve(__dirname, '../../node_modules/react-dom')
			},
			define: {
				'process.env.NODE_ENV': production ? '"production"' : '"development"',
@@ -135,8 +135,8 @@ async function main() {
			jsxImportSource: 'react',
			external: ['*.css'],
			alias: {
				react: path.resolve(__dirname, 'node_modules/react'),
				'react-dom': path.resolve(__dirname, 'node_modules/react-dom')
				react: path.resolve(__dirname, '../../node_modules/react'),
				'react-dom': path.resolve(__dirname, '../../node_modules/react-dom')
			},
			define: {
				'process.env.NODE_ENV': production ? '"production"' : '"development"',

@@ -3,7 +3,7 @@
	"private": true,
	"displayName": "TaskMaster",
	"description": "A visual Kanban board interface for TaskMaster projects in VS Code",
	"version": "0.24.2-rc.1",
	"version": "0.24.2",
	"publisher": "Hamster",
	"icon": "assets/icon.png",
	"engines": {
@@ -229,6 +229,7 @@
		"build": "npm run build:js && npm run build:css",
		"build:js": "node ./esbuild.js --production",
		"build:css": "npx @tailwindcss/cli -i ./src/webview/index.css -o ./dist/index.css --minify",
		"dev": "npm run watch",
		"package": "npm exec node ./package.mjs",
		"package:direct": "node ./package.mjs",
		"debug:env": "node ./debug-env.mjs",
@@ -239,7 +240,7 @@
		"check-types": "tsc --noEmit"
	},
	"dependencies": {
		"task-master-ai": "0.26.0-rc.1"
		"task-master-ai": "0.26.0"
	},
	"devDependencies": {
		"@dnd-kit/core": "^6.3.1",

@@ -1,9 +1,10 @@
|
||||
import type React from 'react';
|
||||
import { useState, useEffect } from 'react';
|
||||
import { Button } from '@/components/ui/button';
|
||||
import { Loader2 } from 'lucide-react';
|
||||
import { Loader2, Play } from 'lucide-react';
|
||||
import { PriorityBadge } from './PriorityBadge';
|
||||
import type { TaskMasterTask } from '../../webview/types';
|
||||
import { useVSCodeContext } from '../../webview/contexts/VSCodeContext';
|
||||
|
||||
interface TaskMetadataSidebarProps {
|
||||
currentTask: TaskMasterTask;
|
||||
@@ -28,10 +29,12 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
|
||||
isRegenerating = false,
|
||||
isAppending = false
|
||||
}) => {
|
||||
const { vscode } = useVSCodeContext();
|
||||
const [isLoadingComplexity, setIsLoadingComplexity] = useState(false);
|
||||
const [mcpComplexityScore, setMcpComplexityScore] = useState<
|
||||
number | undefined
|
||||
>(undefined);
|
||||
const [isStartingTask, setIsStartingTask] = useState(false);
|
||||
|
||||
// Get complexity score from task
|
||||
const currentComplexityScore = complexity?.score;
|
||||
@@ -97,6 +100,29 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
|
||||
}
|
||||
};
|
||||
|
||||
// Handle starting a task
|
||||
const handleStartTask = () => {
|
||||
if (!currentTask || isStartingTask) {
|
||||
return;
|
||||
}
|
||||
|
||||
setIsStartingTask(true);
|
||||
|
||||
// Send message to extension to open terminal
|
||||
if (vscode) {
|
||||
vscode.postMessage({
|
||||
type: 'openTerminal',
|
||||
taskId: currentTask.id,
|
||||
taskTitle: currentTask.title
|
||||
});
|
||||
}
|
||||
|
||||
// Reset loading state after a short delay
|
||||
setTimeout(() => {
|
||||
setIsStartingTask(false);
|
||||
}, 500);
|
||||
};
|
||||
|
||||
// Effect to handle complexity on task change
|
||||
useEffect(() => {
|
||||
if (currentTask?.id) {
|
||||
@@ -284,6 +310,30 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
|
||||
{currentTask.dependencies && currentTask.dependencies.length > 0 && (
|
||||
<div className="border-b border-textSeparator-foreground" />
|
||||
)}
|
||||
|
||||
{/* Start Task Button */}
|
||||
<div className="mt-4">
|
||||
<Button
|
||||
onClick={handleStartTask}
|
||||
variant="default"
|
||||
size="sm"
|
||||
className="w-full text-xs"
|
||||
disabled={
|
||||
isRegenerating ||
|
||||
isAppending ||
|
||||
isStartingTask ||
|
||||
currentTask?.status === 'done' ||
|
||||
currentTask?.status === 'in-progress'
|
||||
}
|
||||
>
|
||||
{isStartingTask ? (
|
||||
<Loader2 className="w-4 h-4 mr-2 animate-spin" />
|
||||
) : (
|
||||
<Play className="w-4 h-4 mr-2" />
|
||||
)}
|
||||
{isStartingTask ? 'Starting...' : 'Start Task'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -361,6 +361,30 @@ export class WebviewManager {
|
||||
}
|
||||
return;
|
||||
|
||||
case 'openTerminal':
|
||||
// Open VS Code terminal for task execution
|
||||
this.logger.log(
|
||||
`Opening terminal for task ${data.taskId}: ${data.taskTitle}`
|
||||
);
|
||||
|
||||
try {
|
||||
const terminal = vscode.window.createTerminal({
|
||||
name: `Task ${data.taskId}: ${data.taskTitle}`,
|
||||
cwd: vscode.workspace.workspaceFolders?.[0]?.uri.fsPath
|
||||
});
|
||||
terminal.show();
|
||||
|
||||
this.logger.log('Terminal created and shown successfully');
|
||||
response = { success: true };
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to create terminal:', error);
|
||||
response = {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
};
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown message type: ${type}`);
|
||||
}
|
||||
|
||||
@@ -20,357 +20,8 @@
|
||||
* Main entry point for globally installed package
|
||||
*/
|
||||
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, resolve } from 'path';
|
||||
import { createRequire } from 'module';
|
||||
import { spawn } from 'child_process';
|
||||
import { Command } from 'commander';
|
||||
import { displayHelp, displayBanner } from '../scripts/modules/ui.js';
|
||||
import { registerCommands } from '../scripts/modules/commands.js';
|
||||
import { detectCamelCaseFlags } from '../scripts/modules/utils.js';
|
||||
import chalk from 'chalk';
|
||||
// Direct imports instead of spawning child processes
|
||||
import { runCLI } from '../scripts/modules/commands.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const require = createRequire(import.meta.url);
|
||||
|
||||
// Get package information
|
||||
const packageJson = require('../package.json');
|
||||
const version = packageJson.version;
|
||||
|
||||
// Get paths to script files
|
||||
const devScriptPath = resolve(__dirname, '../scripts/dev.js');
|
||||
const initScriptPath = resolve(__dirname, '../scripts/init.js');
|
||||
|
||||
// Helper function to run dev.js with arguments
|
||||
function runDevScript(args) {
|
||||
// Debug: Show the transformed arguments when DEBUG=1 is set
|
||||
if (process.env.DEBUG === '1') {
|
||||
console.error('\nDEBUG - CLI Wrapper Analysis:');
|
||||
console.error('- Original command: ' + process.argv.join(' '));
|
||||
console.error('- Transformed args: ' + args.join(' '));
|
||||
console.error(
|
||||
'- dev.js will receive: node ' +
|
||||
devScriptPath +
|
||||
' ' +
|
||||
args.join(' ') +
|
||||
'\n'
|
||||
);
|
||||
}
|
||||
|
||||
// For testing: If TEST_MODE is set, just print args and exit
|
||||
if (process.env.TEST_MODE === '1') {
|
||||
console.log('Would execute:');
|
||||
console.log(`node ${devScriptPath} ${args.join(' ')}`);
|
||||
process.exit(0);
|
||||
return;
|
||||
}
|
||||
|
||||
const child = spawn('node', [devScriptPath, ...args], {
|
||||
stdio: 'inherit',
|
||||
cwd: process.cwd()
|
||||
});
|
||||
|
||||
child.on('close', (code) => {
|
||||
process.exit(code);
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to detect camelCase and convert to kebab-case
|
||||
const toKebabCase = (str) => str.replace(/([A-Z])/g, '-$1').toLowerCase();
|
||||
|
||||
/**
|
||||
* Create a wrapper action that passes the command to dev.js
|
||||
* @param {string} commandName - The name of the command
|
||||
* @returns {Function} Wrapper action function
|
||||
*/
|
||||
function createDevScriptAction(commandName) {
|
||||
return (options, cmd) => {
|
||||
// Check for camelCase flags and error out with helpful message
|
||||
const camelCaseFlags = detectCamelCaseFlags(process.argv);
|
||||
|
||||
// If camelCase flags were found, show error and exit
|
||||
if (camelCaseFlags.length > 0) {
|
||||
console.error('\nError: Please use kebab-case for CLI flags:');
|
||||
camelCaseFlags.forEach((flag) => {
|
||||
console.error(` Instead of: --${flag.original}`);
|
||||
console.error(` Use: --${flag.kebabCase}`);
|
||||
});
|
||||
console.error(
|
||||
'\nExample: task-master parse-prd --num-tasks=5 instead of --numTasks=5\n'
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Since we've ensured no camelCase flags, we can now just:
|
||||
// 1. Start with the command name
|
||||
const args = [commandName];
|
||||
|
||||
// 3. Get positional arguments and explicit flags from the command line
|
||||
const commandArgs = [];
|
||||
const positionals = new Set(); // Track positional args we've seen
|
||||
|
||||
// Find the command in raw process.argv to extract args
|
||||
const commandIndex = process.argv.indexOf(commandName);
|
||||
if (commandIndex !== -1) {
|
||||
// Process all args after the command name
|
||||
for (let i = commandIndex + 1; i < process.argv.length; i++) {
|
||||
const arg = process.argv[i];
|
||||
|
||||
if (arg.startsWith('--')) {
|
||||
// It's a flag - pass through as is
|
||||
commandArgs.push(arg);
|
||||
// Skip the next arg if this is a flag with a value (not --flag=value format)
|
||||
if (
|
||||
!arg.includes('=') &&
|
||||
i + 1 < process.argv.length &&
|
||||
!process.argv[i + 1].startsWith('--')
|
||||
) {
|
||||
commandArgs.push(process.argv[++i]);
|
||||
}
|
||||
} else if (!positionals.has(arg)) {
|
||||
// It's a positional argument we haven't seen
|
||||
commandArgs.push(arg);
|
||||
positionals.add(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add all command line args we collected
|
||||
args.push(...commandArgs);
|
||||
|
||||
// 4. Add default options from Commander if not specified on command line
|
||||
// Track which options we've seen on the command line
|
||||
const userOptions = new Set();
|
||||
for (const arg of commandArgs) {
|
||||
if (arg.startsWith('--')) {
|
||||
// Extract option name (without -- and value)
|
||||
const name = arg.split('=')[0].slice(2);
|
||||
userOptions.add(name);
|
||||
|
||||
// Add the kebab-case version too, to prevent duplicates
|
||||
const kebabName = name.replace(/([A-Z])/g, '-$1').toLowerCase();
|
||||
userOptions.add(kebabName);
|
||||
|
||||
// Add the camelCase version as well
|
||||
const camelName = kebabName.replace(/-([a-z])/g, (_, letter) =>
|
||||
letter.toUpperCase()
|
||||
);
|
||||
userOptions.add(camelName);
|
||||
}
|
||||
}
|
||||
|
||||
// Add Commander-provided defaults for options not specified by user
|
||||
Object.entries(options).forEach(([key, value]) => {
|
||||
// Debug output to see what keys we're getting
|
||||
if (process.env.DEBUG === '1') {
|
||||
console.error(`DEBUG - Processing option: ${key} = ${value}`);
|
||||
}
|
||||
|
||||
// Special case for numTasks > num-tasks (a known problem case)
|
||||
if (key === 'numTasks') {
|
||||
if (process.env.DEBUG === '1') {
|
||||
console.error('DEBUG - Converting numTasks to num-tasks');
|
||||
}
|
||||
if (!userOptions.has('num-tasks') && !userOptions.has('numTasks')) {
|
||||
args.push(`--num-tasks=${value}`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip built-in Commander properties and options the user provided
|
||||
if (
|
||||
['parent', 'commands', 'options', 'rawArgs'].includes(key) ||
|
||||
userOptions.has(key)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Also check the kebab-case version of this key
|
||||
const kebabKey = key.replace(/([A-Z])/g, '-$1').toLowerCase();
|
||||
if (userOptions.has(kebabKey)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Add default values, using kebab-case for the parameter name
|
||||
if (value !== undefined) {
|
||||
if (typeof value === 'boolean') {
|
||||
if (value === true) {
|
||||
args.push(`--${kebabKey}`);
|
||||
} else if (value === false && key === 'generate') {
|
||||
args.push('--skip-generate');
|
||||
}
|
||||
} else {
|
||||
// Always use kebab-case for option names
|
||||
args.push(`--${kebabKey}=${value}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Special handling for parent parameter (uses -p)
|
||||
if (options.parent && !args.includes('-p') && !userOptions.has('parent')) {
|
||||
args.push('-p', options.parent);
|
||||
}
|
||||
|
||||
// Debug output for troubleshooting
|
||||
if (process.env.DEBUG === '1') {
|
||||
console.error('DEBUG - Command args:', commandArgs);
|
||||
console.error('DEBUG - User options:', Array.from(userOptions));
|
||||
console.error('DEBUG - Commander options:', options);
|
||||
console.error('DEBUG - Final args:', args);
|
||||
}
|
||||
|
||||
// Run the script with our processed args
|
||||
runDevScript(args);
|
||||
};
|
||||
}
|
||||
|
||||
// // Special case for the 'init' command which uses a different script
|
||||
// function registerInitCommand(program) {
|
||||
// program
|
||||
// .command('init')
|
||||
// .description('Initialize a new project')
|
||||
// .option('-y, --yes', 'Skip prompts and use default values')
|
||||
// .option('-n, --name <name>', 'Project name')
|
||||
// .option('-d, --description <description>', 'Project description')
|
||||
// .option('-v, --version <version>', 'Project version')
|
||||
// .option('-a, --author <author>', 'Author name')
|
||||
// .option('--skip-install', 'Skip installing dependencies')
|
||||
// .option('--dry-run', 'Show what would be done without making changes')
|
||||
// .action((options) => {
|
||||
// // Pass through any options to the init script
|
||||
// const args = [
|
||||
// '--yes',
|
||||
// 'name',
|
||||
// 'description',
|
||||
// 'version',
|
||||
// 'author',
|
||||
// 'skip-install',
|
||||
// 'dry-run'
|
||||
// ]
|
||||
// .filter((opt) => options[opt])
|
||||
// .map((opt) => {
|
||||
// if (opt === 'yes' || opt === 'skip-install' || opt === 'dry-run') {
|
||||
// return `--${opt}`;
|
||||
// }
|
||||
// return `--${opt}=${options[opt]}`;
|
||||
// });
|
||||
|
||||
// const child = spawn('node', [initScriptPath, ...args], {
|
||||
// stdio: 'inherit',
|
||||
// cwd: process.cwd()
|
||||
// });
|
||||
|
||||
// child.on('close', (code) => {
|
||||
// process.exit(code);
|
||||
// });
|
||||
// });
|
||||
// }
|
||||
|
||||
// Set up the command-line interface
|
||||
const program = new Command();
|
||||
|
||||
program
|
||||
.name('task-master')
|
||||
.description('Claude Task Master CLI')
|
||||
.version(version)
|
||||
.addHelpText('afterAll', () => {
|
||||
// Use the same help display function as dev.js for consistency
|
||||
displayHelp();
|
||||
return ''; // Return empty string to prevent commander's default help
|
||||
});
|
||||
|
||||
// Add custom help option to directly call our help display
|
||||
program.helpOption('-h, --help', 'Display help information');
|
||||
program.on('--help', () => {
|
||||
displayHelp();
|
||||
});
|
||||
|
||||
// // Add special case commands
|
||||
// registerInitCommand(program);
|
||||
|
||||
program
|
||||
.command('dev')
|
||||
.description('Run the dev.js script')
|
||||
.action(() => {
|
||||
const args = process.argv.slice(process.argv.indexOf('dev') + 1);
|
||||
runDevScript(args);
|
||||
});
|
||||
|
||||
// Use a temporary Command instance to get all command definitions
|
||||
const tempProgram = new Command();
|
||||
registerCommands(tempProgram);
|
||||
|
||||
// For each command in the temp instance, add a modified version to our actual program
|
||||
tempProgram.commands.forEach((cmd) => {
|
||||
if (['dev'].includes(cmd.name())) {
|
||||
// Skip commands we've already defined specially
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a new command with the same name and description
|
||||
const newCmd = program.command(cmd.name()).description(cmd.description());
|
||||
|
||||
// Copy all options
|
||||
cmd.options.forEach((opt) => {
|
||||
newCmd.option(opt.flags, opt.description, opt.defaultValue);
|
||||
});
|
||||
|
||||
// Set the action to proxy to dev.js
|
||||
newCmd.action(createDevScriptAction(cmd.name()));
|
||||
});
|
||||
|
||||
// Parse the command line arguments
|
||||
program.parse(process.argv);
|
||||
|
||||
// Add global error handling for unknown commands and options
|
||||
process.on('uncaughtException', (err) => {
|
||||
// Check if this is a commander.js unknown option error
|
||||
if (err.code === 'commander.unknownOption') {
|
||||
const option = err.message.match(/'([^']+)'/)?.[1];
|
||||
const commandArg = process.argv.find(
|
||||
(arg) =>
|
||||
!arg.startsWith('-') &&
|
||||
arg !== 'task-master' &&
|
||||
!arg.includes('/') &&
|
||||
arg !== 'node'
|
||||
);
|
||||
const command = commandArg || 'unknown';
|
||||
|
||||
console.error(chalk.red(`Error: Unknown option '${option}'`));
|
||||
console.error(
|
||||
chalk.yellow(
|
||||
`Run 'task-master ${command} --help' to see available options for this command`
|
||||
)
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check if this is a commander.js unknown command error
|
||||
if (err.code === 'commander.unknownCommand') {
|
||||
const command = err.message.match(/'([^']+)'/)?.[1];
|
||||
|
||||
console.error(chalk.red(`Error: Unknown command '${command}'`));
|
||||
console.error(
|
||||
chalk.yellow(`Run 'task-master --help' to see available commands`)
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Handle other uncaught exceptions
|
||||
console.error(chalk.red(`Error: ${err.message}`));
|
||||
if (process.env.DEBUG === '1') {
|
||||
console.error(err);
|
||||
}
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Show help if no command was provided (just 'task-master' with no args)
|
||||
if (process.argv.length <= 2) {
|
||||
displayBanner();
|
||||
displayHelp();
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Add exports at the end of the file
|
||||
export { detectCamelCaseFlags };
|
||||
// Simply run the CLI directly
|
||||
runCLI();
|
||||
|
||||
@@ -18,7 +18,17 @@ export default {
	testMatch: ['**/__tests__/**/*.js', '**/?(*.)+(spec|test).js'],

	// Transform files
	transform: {},
	preset: 'ts-jest/presets/default-esm',
	extensionsToTreatAsEsm: ['.ts'],
	moduleFileExtensions: ['js', 'ts', 'json', 'node'],
	transform: {
		'^.+\\.ts$': [
			'ts-jest',
			{
				useESM: true
			}
		]
	},

	// Disable transformations for node_modules
	transformIgnorePatterns: ['/node_modules/'],
@@ -27,6 +37,7 @@ export default {
	moduleNameMapper: {
		'^@/(.*)$': '<rootDir>/$1'
	},
	resolver: '<rootDir>/jest.resolver.cjs',

	// Setup module aliases
	moduleDirectories: ['node_modules', '<rootDir>'],

19 jest.resolver.cjs Normal file

@@ -0,0 +1,19 @@
const { defaultResolver } = require('jest-resolve');
module.exports = function customResolver(request, options) {
	const resolve = options.defaultResolver || defaultResolver;

	try {
		return resolve(request, options);
	} catch (error) {
		if (request.startsWith('.') && request.endsWith('.js')) {
			try {
				return resolve(request.replace(/\.js$/, '.ts'), options);
			} catch (tsError) {
				tsError.cause = tsError.cause ?? error;
				throw tsError;
			}
		}

		throw error;
	}
};
73 output.txt Normal file
File diff suppressed because one or more lines are too long

10922 package-lock.json (generated)
File diff suppressed because it is too large

40 package.json
@@ -1,6 +1,6 @@
{
  "name": "task-master-ai",
  "version": "0.26.0-rc.1",
  "version": "0.26.0",
  "description": "A task management system for ambitious AI-driven development that doesn't overwhelm and confuse Cursor.",
  "main": "index.js",
  "type": "module",
@@ -11,25 +11,25 @@
  },
  "workspaces": ["apps/*", "packages/*", "."],
  "scripts": {
    "build": "npm run build:packages && tsup",
    "dev": "npm run build:packages && npm link && (npm run dev:packages & tsup --watch --onSuccess 'echo Build complete && npm link')",
    "dev:packages": "(cd packages/tm-core && npm run dev) & (cd packages/workflow-engine && npm run dev) & (cd apps/cli && npm run dev) & wait",
    "dev:core": "cd packages/tm-core && npm run dev",
    "dev:workflow": "cd packages/workflow-engine && npm run dev",
    "dev:cli": "cd apps/cli && npm run dev",
    "build:packages": "npm run build:core && npm run build:workflow && npm run build:cli",
    "build:core": "cd packages/tm-core && npm run build",
    "build:workflow": "cd packages/workflow-engine && npm run build",
    "build:cli": "cd apps/cli && npm run build",
    "build": "npm run build:build-config && cross-env NODE_ENV=production tsdown",
    "dev": "tsdown --watch='packages/*/src/**/*' --watch='apps/cli/src/**/*' --watch='bin/**/*' --watch='mcp-server/**/*'",
    "turbo:dev": "turbo dev",
    "turbo:build": "turbo build",
    "turbo:typecheck": "turbo typecheck",
    "build:build-config": "npm run build -w @tm/build-config",
    "test": "node --experimental-vm-modules node_modules/.bin/jest",
    "test:unit": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=unit",
    "test:integration": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=integration",
    "test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
    "test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
    "test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
    "test:ci": "node --experimental-vm-modules node_modules/.bin/jest --coverage --ci",
    "test:e2e": "./tests/e2e/run_e2e.sh",
    "test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
    "postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
    "changeset": "changeset",
    "release": "changeset publish",
    "publish-packages": "turbo run build lint test && changeset version && changeset publish",
    "inspector": "npx @modelcontextprotocol/inspector node dist/mcp-server.js",
    "mcp-server": "node dist/mcp-server.js",
    "format-check": "biome format .",
@@ -65,11 +65,12 @@
    "@inquirer/search": "^3.0.15",
    "@openrouter/ai-sdk-provider": "^0.4.5",
    "@streamparser/json": "^0.0.22",
    "@tm/cli": "*",
    "ai": "^4.3.10",
    "ajv": "^8.17.1",
    "ajv-formats": "^3.0.1",
    "boxen": "^8.0.1",
    "chalk": "^5.4.1",
    "chalk": "5.6.2",
    "cli-highlight": "^2.1.11",
    "cli-progress": "^3.12.0",
    "cli-table3": "^0.6.5",
@@ -88,6 +89,8 @@
    "jsonrepair": "^3.13.0",
    "jsonwebtoken": "^9.0.2",
    "lru-cache": "^10.2.0",
    "marked": "^15.0.12",
    "marked-terminal": "^7.3.0",
    "ollama-ai-provider": "^1.2.0",
    "openai": "^4.89.0",
    "ora": "^8.2.0",
@@ -103,6 +106,7 @@
  "engines": {
    "node": ">=18.0.0"
  },
  "packageManager": "npm@10.9.2",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/eyaltoledano/claude-task-master.git"
@@ -118,21 +122,23 @@
  },
  "devDependencies": {
    "@biomejs/biome": "^1.9.4",

    "@changesets/changelog-github": "^0.5.1",
    "@changesets/cli": "^2.28.1",
    "dotenv-mono": "^1.5.1",

    "@types/jest": "^29.5.14",
    "@types/marked-terminal": "^6.1.1",
    "concurrently": "^9.2.1",
    "cross-env": "^10.0.0",
    "dotenv-mono": "^1.5.1",
    "execa": "^8.0.1",
    "ink": "^5.0.1",
    "jest": "^29.7.0",
    "jest-environment-node": "^29.7.0",
    "mock-fs": "^5.5.0",
    "prettier": "^3.5.3",
    "supertest": "^7.1.0",
    "tsup": "^8.5.0",
    "ts-jest": "^29.4.2",
    "tsdown": "^0.15.2",
    "tsx": "^4.16.2",
    "turbo": "^2.5.6",
    "typescript": "^5.9.2"
  }
}
29 packages/build-config/package.json Normal file
@@ -0,0 +1,29 @@
{
  "name": "@tm/build-config",
  "version": "1.0.0",
  "description": "Shared build configuration for Task Master monorepo",
  "type": "module",
  "main": "./dist/tsdown.base.js",
  "types": "./src/tsdown.base.ts",
  "exports": {
    ".": {
      "types": "./src/tsdown.base.ts",
      "import": "./dist/tsdown.base.js"
    }
  },
  "files": ["dist", "src"],
  "keywords": ["build-config", "tsup", "monorepo"],
  "author": "",
  "license": "MIT",
  "scripts": {
    "build": "tsc",
    "typecheck": "tsc --noEmit"
  },
  "devDependencies": {
    "dotenv-mono": "^1.5.1",
    "typescript": "^5.7.3"
  },
  "dependencies": {
    "tsup": "^8.5.0"
  }
}
46 packages/build-config/src/tsdown.base.ts Normal file
@@ -0,0 +1,46 @@
/**
 * Base tsdown configuration for Task Master monorepo
 * Provides shared configuration that can be extended by individual packages
 */
import type { UserConfig } from 'tsdown';

const isProduction = process.env.NODE_ENV === 'production';
const isDevelopment = !isProduction;

/**
 * Environment helpers
 */
export const env = {
  isProduction,
  isDevelopment,
  NODE_ENV: process.env.NODE_ENV || 'development'
};

/**
 * Base tsdown configuration for all packages
 * Since everything gets bundled into root dist/ anyway, use consistent settings
 */
export const baseConfig: Partial<UserConfig> = {
  sourcemap: isDevelopment,
  format: 'esm',
  platform: 'node',
  dts: isDevelopment,
  minify: isProduction,
  treeshake: isProduction,
  // Keep all npm dependencies external (available via node_modules)
  external: [/^[^@./]/, /^@(?!tm\/)/]
};

/**
 * Utility function to merge configurations
 * Simplified for tsdown usage
 */
export function mergeConfig(
  base: Partial<UserConfig>,
  overrides: Partial<UserConfig>
): Partial<UserConfig> {
  return {
    ...base,
    ...overrides
  };
}
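A minimal sketch of how an individual package might consume this shared config (the config file location and entry path are hypothetical; the per-package tsdown configs are not shown in this diff):

  // packages/some-package/tsdown.config.ts (hypothetical location)
  import { baseConfig, mergeConfig, env } from '@tm/build-config';
  import type { UserConfig } from 'tsdown';

  // Extend the shared settings with package-specific entry points;
  // mergeConfig is a shallow spread, so later keys win.
  const config: Partial<UserConfig> = mergeConfig(baseConfig, {
    entry: ['src/index.ts'],
    // Example of a conditional override using the shared env helper
    sourcemap: env.isDevelopment
  });

  export default config;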
21 packages/build-config/tsconfig.json Normal file
@@ -0,0 +1,21 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "lib": ["ES2022"],
    "module": "ESNext",
    "moduleResolution": "bundler",
    "allowSyntheticDefaultImports": true,
    "esModuleInterop": true,
    "baseUrl": ".",
    "outDir": "dist",
    "allowJs": true,
    "strict": true,
    "resolveJsonModule": true,
    "isolatedModules": true,
    "declaration": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "dist"]
}
@@ -1,76 +1,25 @@
{
  "name": "@tm/core",
  "version": "1.0.0",
  "private": true,
  "description": "Core library for Task Master - TypeScript task management system",
  "type": "module",
  "types": "./dist/index.d.ts",
  "types": "./src/index.ts",
  "main": "./dist/index.js",
  "exports": {
    ".": {
      "types": "./src/index.ts",
      "import": "./dist/index.js",
      "require": "./dist/index.js"
    },
    "./auth": {
      "types": "./src/auth/index.ts",
      "import": "./dist/auth/index.js",
      "require": "./dist/auth/index.js"
    },
    "./storage": {
      "types": "./src/storage/index.ts",
      "import": "./dist/storage/index.js",
      "require": "./dist/storage/index.js"
    },
    "./config": {
      "types": "./src/config/index.ts",
      "import": "./dist/config/index.js",
      "require": "./dist/config/index.js"
    },
    "./providers": {
      "types": "./src/providers/index.ts",
      "import": "./dist/providers/index.js",
      "require": "./dist/providers/index.js"
    },
    "./services": {
      "types": "./src/services/index.ts",
      "import": "./dist/services/index.js",
      "require": "./dist/services/index.js"
    },
    "./errors": {
      "types": "./src/errors/index.ts",
      "import": "./dist/errors/index.js",
      "require": "./dist/errors/index.js"
    },
    "./logger": {
      "types": "./src/logger/index.ts",
      "import": "./dist/logger/index.js",
      "require": "./dist/logger/index.js"
    },
    "./types": {
      "types": "./src/types/index.ts",
      "import": "./dist/types/index.js",
      "require": "./dist/types/index.js"
    },
    "./interfaces": {
      "types": "./src/interfaces/index.ts",
      "import": "./dist/interfaces/index.js",
      "require": "./dist/interfaces/index.js"
    },
    "./utils": {
      "types": "./src/utils/index.ts",
      "import": "./dist/utils/index.js",
      "require": "./dist/utils/index.js"
    },
    "./workflow": {
      "types": "./src/workflow/index.ts",
      "import": "./dist/workflow/index.js",
      "require": "./dist/workflow/index.js"
    },
    "./package.json": "./package.json"
    ".": "./src/index.ts",
    "./auth": "./src/auth/index.ts",
    "./storage": "./src/storage/index.ts",
    "./config": "./src/config/index.ts",
    "./providers": "./src/providers/index.ts",
    "./services": "./src/services/index.ts",
    "./errors": "./src/errors/index.ts",
    "./logger": "./src/logger/index.ts",
    "./types": "./src/types/index.ts",
    "./interfaces": "./src/interfaces/index.ts",
    "./utils": "./src/utils/index.ts"
  },
  "scripts": {
    "build": "tsup",
    "dev": "tsup --watch",
    "test": "vitest run",
    "test:watch": "vitest",
    "test:coverage": "vitest run --coverage",
@@ -82,25 +31,24 @@
    "typecheck": "tsc --noEmit"
  },
  "dependencies": {
    "@supabase/supabase-js": "^2.57.0",
    "@tm/workflow-engine": "*",
    "chalk": "^5.3.0",
    "@supabase/supabase-js": "^2.57.4",
    "zod": "^3.22.4"
  },
  "devDependencies": {
    "@biomejs/biome": "^1.9.4",
    "@tm/build-config": "*",
    "@types/node": "^20.11.30",
    "@vitest/coverage-v8": "^2.0.5",
    "dotenv-mono": "^1.5.1",
    "dotenv-mono": "^1.3.14",
    "ts-node": "^10.9.2",
    "tsup": "^8.0.2",
    "tsup": "^8.5.0",
    "typescript": "^5.4.3",
    "vitest": "^2.0.5"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "files": ["dist", "README.md", "CHANGELOG.md"],
  "files": ["src", "README.md", "CHANGELOG.md"],
  "keywords": ["task-management", "typescript", "ai", "prd", "parser"],
  "author": "Task Master AI",
  "license": "MIT"
@@ -6,11 +6,18 @@ import {
  AuthCredentials,
  OAuthFlowOptions,
  AuthenticationError,
  AuthConfig
  AuthConfig,
  UserContext
} from './types.js';
import { CredentialStore } from './credential-store.js';
import { OAuthService } from './oauth-service.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
import {
  OrganizationService,
  type Organization,
  type Brief,
  type RemoteTask
} from '../services/organization.service.js';
import { getLogger } from '../logger/index.js';

/**
@@ -21,11 +28,28 @@ export class AuthManager {
  private credentialStore: CredentialStore;
  private oauthService: OAuthService;
  private supabaseClient: SupabaseAuthClient;
  private organizationService?: OrganizationService;

  private constructor(config?: Partial<AuthConfig>) {
    this.credentialStore = new CredentialStore(config);
    this.credentialStore = CredentialStore.getInstance(config);
    this.supabaseClient = new SupabaseAuthClient();
    this.oauthService = new OAuthService(this.credentialStore, config);

    // Initialize Supabase client with session restoration
    this.initializeSupabaseSession();
  }

  /**
   * Initialize Supabase session from stored credentials
   */
  private async initializeSupabaseSession(): Promise<void> {
    try {
      await this.supabaseClient.initialize();
    } catch (error) {
      // Log but don't throw - session might not exist yet
      const logger = getLogger('AuthManager');
      logger.debug('No existing session to restore');
    }
  }

  /**
@@ -49,6 +73,7 @@ export class AuthManager {
   */
  static resetInstance(): void {
    AuthManager.instance = null;
    CredentialStore.resetInstance();
  }

  /**
@@ -75,39 +100,48 @@ export class AuthManager {
  }

  /**
   * Refresh authentication token
   * Refresh authentication token using Supabase session
   */
  async refreshToken(): Promise<AuthCredentials> {
    const authData = this.credentialStore.getCredentials({
      allowExpired: true
    });

    if (!authData || !authData.refreshToken) {
      throw new AuthenticationError(
        'No refresh token available',
        'NO_REFRESH_TOKEN'
      );
    }

    try {
      // Use Supabase client to refresh the token
      const response = await this.supabaseClient.refreshSession(
        authData.refreshToken
      );
      // Use Supabase's built-in session refresh
      const session = await this.supabaseClient.refreshSession();

      // Update authentication data
      if (!session) {
        throw new AuthenticationError(
          'Failed to refresh session',
          'REFRESH_FAILED'
        );
      }

      // Get existing credentials to preserve context
      const existingCredentials = this.credentialStore.getCredentials({
        allowExpired: true
      });

      // Update authentication data from session
      const newAuthData: AuthCredentials = {
        ...authData,
        token: response.token,
        refreshToken: response.refreshToken,
        expiresAt: response.expiresAt,
        savedAt: new Date().toISOString()
        token: session.access_token,
        refreshToken: session.refresh_token,
        userId: session.user.id,
        email: session.user.email,
        expiresAt: session.expires_at
          ? new Date(session.expires_at * 1000).toISOString()
          : undefined,
        savedAt: new Date().toISOString(),
        selectedContext: existingCredentials?.selectedContext
      };

      this.credentialStore.saveCredentials(newAuthData);
      return newAuthData;
    } catch (error) {
      throw error;
      if (error instanceof AuthenticationError) {
        throw error;
      }
      throw new AuthenticationError(
        `Token refresh failed: ${(error as Error).message}`,
        'REFRESH_FAILED'
      );
    }
  }

@@ -133,4 +167,114 @@ export class AuthManager {
  isAuthenticated(): boolean {
    return this.credentialStore.hasValidCredentials();
  }

  /**
   * Get the current user context (org/brief selection)
   */
  getContext(): UserContext | null {
    const credentials = this.getCredentials();
    return credentials?.selectedContext || null;
  }

  /**
   * Update the user context (org/brief selection)
   */
  async updateContext(context: Partial<UserContext>): Promise<void> {
    const credentials = this.getCredentials();
    if (!credentials) {
      throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
    }

    // Merge with existing context
    const existingContext = credentials.selectedContext || {};
    const newContext: UserContext = {
      ...existingContext,
      ...context,
      updatedAt: new Date().toISOString()
    };

    // Save updated credentials with new context
    const updatedCredentials: AuthCredentials = {
      ...credentials,
      selectedContext: newContext
    };

    this.credentialStore.saveCredentials(updatedCredentials);
  }

  /**
   * Clear the user context
   */
  async clearContext(): Promise<void> {
    const credentials = this.getCredentials();
    if (!credentials) {
      throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
    }

    // Remove context from credentials
    const { selectedContext, ...credentialsWithoutContext } = credentials;
    this.credentialStore.saveCredentials(credentialsWithoutContext);
  }

  /**
   * Get the organization service instance
   * Uses the Supabase client with the current session or token
   */
  private async getOrganizationService(): Promise<OrganizationService> {
    if (!this.organizationService) {
      // First check if we have credentials with a token
      const credentials = this.getCredentials();
      if (!credentials || !credentials.token) {
        throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
      }

      // Initialize session if needed (this will load from our storage adapter)
      await this.supabaseClient.initialize();

      // Use the SupabaseAuthClient which now has the session
      const supabaseClient = this.supabaseClient.getClient();
      this.organizationService = new OrganizationService(supabaseClient as any);
    }
    return this.organizationService;
  }

  /**
   * Get all organizations for the authenticated user
   */
  async getOrganizations(): Promise<Organization[]> {
    const service = await this.getOrganizationService();
    return service.getOrganizations();
  }

  /**
   * Get all briefs for a specific organization
   */
  async getBriefs(orgId: string): Promise<Brief[]> {
    const service = await this.getOrganizationService();
    return service.getBriefs(orgId);
  }

  /**
   * Get a specific organization by ID
   */
  async getOrganization(orgId: string): Promise<Organization | null> {
    const service = await this.getOrganizationService();
    return service.getOrganization(orgId);
  }

  /**
   * Get a specific brief by ID
   */
  async getBrief(briefId: string): Promise<Brief | null> {
    const service = await this.getOrganizationService();
    return service.getBrief(briefId);
  }

  /**
   * Get all tasks for a specific brief
   */
  async getTasks(briefId: string): Promise<RemoteTask[]> {
    const service = await this.getOrganizationService();
    return service.getTasks(briefId);
  }
}
@@ -19,15 +19,39 @@ import { getLogger } from '../logger/index.js';
 * human-readable persisted format in the auth.json file.
 */
export class CredentialStore {
  private static instance: CredentialStore | null = null;
  private logger = getLogger('CredentialStore');
  private config: AuthConfig;
  // Clock skew tolerance for expiry checks (30 seconds)
  private readonly CLOCK_SKEW_MS = 30_000;

  constructor(config?: Partial<AuthConfig>) {
  private constructor(config?: Partial<AuthConfig>) {
    this.config = getAuthConfig(config);
  }

  /**
   * Get the singleton instance of CredentialStore
   */
  static getInstance(config?: Partial<AuthConfig>): CredentialStore {
    if (!CredentialStore.instance) {
      CredentialStore.instance = new CredentialStore(config);
    } else if (config) {
      // Warn if config is provided after initialization
      const logger = getLogger('CredentialStore');
      logger.warn(
        'getInstance called with config after initialization; config is ignored.'
      );
    }
    return CredentialStore.instance;
  }

  /**
   * Reset the singleton instance (useful for testing)
   */
  static resetInstance(): void {
    CredentialStore.instance = null;
  }

  /**
   * Get stored authentication credentials
   * @returns AuthCredentials with expiresAt as number (milliseconds) for runtime use
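The singleton change means every consumer now shares one CredentialStore. A short sketch of the intended usage pattern, for instance inside a test (nothing here beyond what the diff itself introduces):

  import { AuthManager, CredentialStore } from '@tm/core/auth';

  // All callers get the same instance; a config passed after the first call
  // is ignored with a warning instead of silently creating a second store.
  const store = CredentialStore.getInstance();
  const sameStore = CredentialStore.getInstance();
  console.assert(store === sameStore);

  // Tests reset both singletons so state does not leak between cases;
  // AuthManager.resetInstance() also calls CredentialStore.resetInstance().
  AuthManager.resetInstance();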
@@ -5,12 +5,19 @@
export { AuthManager } from './auth-manager.js';
export { CredentialStore } from './credential-store.js';
export { OAuthService } from './oauth-service.js';
export { SupabaseSessionStorage } from './supabase-session-storage.js';
export type {
  Organization,
  Brief,
  RemoteTask
} from '../services/organization.service.js';

export type {
  AuthCredentials,
  OAuthFlowOptions,
  AuthConfig,
  CliData
  CliData,
  UserContext
} from './types.js';

export { AuthenticationError } from './types.js';
@@ -181,8 +181,8 @@ export class OAuthService {
      timestamp: Date.now()
    };

    // Build authorization URL for web app sign-in page
    const authUrl = new URL(`${this.baseUrl}/auth/sign-in`);
    // Build authorization URL for CLI-specific sign-in page
    const authUrl = new URL(`${this.baseUrl}/auth/cli/sign-in`);

    // Encode CLI data as base64
    const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
@@ -272,7 +272,49 @@ export class OAuthService {
      return;
    }

    // Handle direct token response from server
    // Handle authorization code for PKCE flow
    const code = url.searchParams.get('code');
    if (code && type === 'pkce_callback') {
      try {
        this.logger.info('Received authorization code for PKCE flow');

        // Exchange code for session using PKCE
        const session = await this.supabaseClient.exchangeCodeForSession(code);

        // Save authentication data
        const authData: AuthCredentials = {
          token: session.access_token,
          refreshToken: session.refresh_token,
          userId: session.user.id,
          email: session.user.email,
          expiresAt: session.expires_at
            ? new Date(session.expires_at * 1000).toISOString()
            : undefined,
          tokenType: 'standard',
          savedAt: new Date().toISOString()
        };

        this.credentialStore.saveCredentials(authData);

        if (server.listening) {
          server.close();
        }
        // Clear timeout since authentication succeeded
        if (timeoutId) {
          clearTimeout(timeoutId);
        }
        resolve(authData);
        return;
      } catch (error) {
        if (server.listening) {
          server.close();
        }
        reject(error);
        return;
      }
    }

    // Handle direct token response from server (legacy flow)
    if (
      accessToken &&
      (type === 'oauth_success' || type === 'session_transfer')
@@ -280,8 +322,23 @@ export class OAuthService {
      try {
        this.logger.info(`Received tokens via ${type}`);

        // Get user info using the access token if possible
        const user = await this.supabaseClient.getUser(accessToken);
        // Create a session with the tokens and set it in Supabase client
        const session = {
          access_token: accessToken,
          refresh_token: refreshToken || '',
          expires_at: expiresIn
            ? Math.floor(Date.now() / 1000) + parseInt(expiresIn)
            : undefined,
          expires_in: expiresIn ? parseInt(expiresIn) : undefined,
          token_type: 'bearer',
          user: null as any // Will be populated by setSession
        };

        // Set the session in Supabase client
        await this.supabaseClient.setSession(session as any);

        // Get user info from the session
        const user = await this.supabaseClient.getUser();

        // Calculate expiration time
        const expiresAt = expiresIn
155 packages/tm-core/src/auth/supabase-session-storage.ts Normal file
@@ -0,0 +1,155 @@
/**
 * Custom storage adapter for Supabase Auth sessions in CLI environment
 * Implements the SupportedStorage interface required by Supabase Auth
 *
 * This adapter bridges Supabase's session management with our existing
 * auth.json credential storage, maintaining backward compatibility
 */

import { SupportedStorage } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store.js';
import { AuthCredentials } from './types.js';
import { getLogger } from '../logger/index.js';

const STORAGE_KEY = 'sb-taskmaster-auth-token';

export class SupabaseSessionStorage implements SupportedStorage {
  private store: CredentialStore;
  private logger = getLogger('SupabaseSessionStorage');

  constructor(store: CredentialStore) {
    this.store = store;
  }

  /**
   * Build a Supabase session object from our credentials
   */
  private buildSessionFromCredentials(credentials: AuthCredentials): any {
    // Create a session object that Supabase expects
    const session = {
      access_token: credentials.token,
      refresh_token: credentials.refreshToken || '',
      expires_at: credentials.expiresAt
        ? Math.floor(new Date(credentials.expiresAt).getTime() / 1000)
        : Math.floor(Date.now() / 1000) + 3600, // Default to 1 hour
      token_type: 'bearer',
      user: {
        id: credentials.userId,
        email: credentials.email || '',
        aud: 'authenticated',
        role: 'authenticated',
        email_confirmed_at: new Date().toISOString(),
        app_metadata: {},
        user_metadata: {},
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString()
      }
    };
    return session;
  }

  /**
   * Parse a Supabase session back to our credentials
   */
  private parseSessionToCredentials(
    sessionData: any
  ): Partial<AuthCredentials> {
    try {
      const session = JSON.parse(sessionData);
      return {
        token: session.access_token,
        refreshToken: session.refresh_token,
        userId: session.user?.id || 'unknown',
        email: session.user?.email,
        expiresAt: session.expires_at
          ? new Date(session.expires_at * 1000).toISOString()
          : undefined
      };
    } catch (error) {
      this.logger.error('Error parsing session:', error);
      return {};
    }
  }

  /**
   * Get item from storage - Supabase will request the session with a specific key
   */
  getItem(key: string): string | null {
    // Supabase uses a specific key pattern for sessions
    if (key === STORAGE_KEY || key.includes('auth-token')) {
      try {
        const credentials = this.store.getCredentials({ allowExpired: true });
        if (credentials && credentials.token) {
          // Build and return a session object from our stored credentials
          const session = this.buildSessionFromCredentials(credentials);
          return JSON.stringify(session);
        }
      } catch (error) {
        this.logger.error('Error getting session:', error);
      }
    }
    return null;
  }

  /**
   * Set item in storage - Supabase will store the session with a specific key
   */
  setItem(key: string, value: string): void {
    // Only handle Supabase session keys
    if (key === STORAGE_KEY || key.includes('auth-token')) {
      try {
        // Parse the session and update our credentials
        const sessionUpdates = this.parseSessionToCredentials(value);
        const existingCredentials = this.store.getCredentials({
          allowExpired: true
        });

        if (sessionUpdates.token) {
          const updatedCredentials: AuthCredentials = {
            ...existingCredentials,
            ...sessionUpdates,
            savedAt: new Date().toISOString(),
            selectedContext: existingCredentials?.selectedContext
          } as AuthCredentials;

          this.store.saveCredentials(updatedCredentials);
        }
      } catch (error) {
        this.logger.error('Error setting session:', error);
      }
    }
  }

  /**
   * Remove item from storage - Called when signing out
   */
  removeItem(key: string): void {
    if (key === STORAGE_KEY || key.includes('auth-token')) {
      // Don't actually remove credentials, just clear the tokens
      // This preserves other data like selectedContext
      try {
        const credentials = this.store.getCredentials({ allowExpired: true });
        if (credentials) {
          // Keep context but clear auth tokens
          const clearedCredentials: AuthCredentials = {
            ...credentials,
            token: '',
            refreshToken: undefined,
            expiresAt: undefined
          } as AuthCredentials;
          this.store.saveCredentials(clearedCredentials);
        }
      } catch (error) {
        this.logger.error('Error removing session:', error);
      }
    }
  }

  /**
   * Clear all session data
   */
  clear(): void {
    // Clear auth tokens but preserve context
    this.removeItem(STORAGE_KEY);
  }
}
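As a quick illustration of how the adapter behaves, here is a hedged round-trip sketch; the session values are made up, and getCredentials/saveCredentials ultimately read and write auth.json through the CredentialStore:

  import { CredentialStore, SupabaseSessionStorage } from '@tm/core/auth';

  const storage = new SupabaseSessionStorage(CredentialStore.getInstance());

  // Supabase Auth persists its session through setItem; the adapter folds the
  // tokens back into the existing auth.json credentials.
  storage.setItem(
    'sb-taskmaster-auth-token',
    JSON.stringify({
      access_token: 'fake-access-token',
      refresh_token: 'fake-refresh-token',
      expires_at: Math.floor(Date.now() / 1000) + 3600,
      user: { id: 'user-123', email: 'dev@example.com' }
    })
  );

  // On the next client start, getItem rebuilds a session object from auth.json.
  const restored = storage.getItem('sb-taskmaster-auth-token');
  console.log(restored !== null); // true once credentials exist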
@@ -10,6 +10,15 @@ export interface AuthCredentials {
  expiresAt?: string | number;
  tokenType?: 'standard';
  savedAt: string;
  selectedContext?: UserContext;
}

export interface UserContext {
  orgId?: string;
  orgName?: string;
  briefId?: string;
  briefName?: string;
  updatedAt: string;
}

export interface OAuthFlowOptions {
@@ -67,7 +76,11 @@ export type AuthErrorCode =
  | 'STORAGE_ERROR'
  | 'NOT_SUPPORTED'
  | 'REFRESH_FAILED'
  | 'INVALID_RESPONSE';
  | 'INVALID_RESPONSE'
  | 'PKCE_INIT_FAILED'
  | 'PKCE_FAILED'
  | 'CODE_EXCHANGE_FAILED'
  | 'SESSION_SET_FAILED';

/**
 * Authentication error class
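The new UserContext type is what AuthManager.getContext and updateContext persist alongside the credentials. A small sketch of how a CLI command might record the selected org and brief (the IDs and names are placeholders):

  import { AuthManager } from '@tm/core/auth';

  const auth = AuthManager.getInstance();

  // Persist the selection; updatedAt is filled in by updateContext itself.
  await auth.updateContext({
    orgId: 'org_123',
    orgName: 'Acme Inc',
    briefId: 'brief_456',
    briefName: 'Payments revamp'
  });

  // Later reads come back from auth.json through the credential store.
  const context = auth.getContext();
  console.log(context?.briefId); // 'brief_456'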
@@ -1,19 +1,32 @@
/**
 * Supabase client for authentication
 * Supabase authentication client for CLI auth flows
 */

import { createClient, SupabaseClient, User } from '@supabase/supabase-js';
import {
  createClient,
  SupabaseClient as SupabaseJSClient,
  User,
  Session
} from '@supabase/supabase-js';
import { AuthenticationError } from '../auth/types.js';
import { getLogger } from '../logger/index.js';
import { SupabaseSessionStorage } from '../auth/supabase-session-storage.js';
import { CredentialStore } from '../auth/credential-store.js';

export class SupabaseAuthClient {
  private client: SupabaseClient | null = null;
  private client: SupabaseJSClient | null = null;
  private sessionStorage: SupabaseSessionStorage;
  private logger = getLogger('SupabaseAuthClient');

  constructor() {
    const credentialStore = CredentialStore.getInstance();
    this.sessionStorage = new SupabaseSessionStorage(credentialStore);
  }

  /**
   * Initialize Supabase client
   * Get Supabase client with proper session management
   */
  private getClient(): SupabaseClient {
  getClient(): SupabaseJSClient {
    if (!this.client) {
      // Get Supabase configuration from environment - using TM_PUBLIC prefix
      const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
@@ -26,10 +39,12 @@ export class SupabaseAuthClient {
        );
      }

      // Create client with custom storage adapter (similar to React Native AsyncStorage)
      this.client = createClient(supabaseUrl, supabaseAnonKey, {
        auth: {
          storage: this.sessionStorage,
          autoRefreshToken: true,
          persistSession: false, // We handle persistence ourselves
          persistSession: true,
          detectSessionInUrl: false
        }
      });
@@ -39,40 +54,159 @@ export class SupabaseAuthClient {
  }

  /**
   * Note: Code exchange is now handled server-side
   * The server returns tokens directly to avoid PKCE issues
   * This method is kept for potential future use
   * Initialize the client and restore session if available
   */
  async exchangeCodeForSession(_code: string): Promise<{
    token: string;
    refreshToken?: string;
    userId: string;
    email?: string;
    expiresAt?: string;
  }> {
    throw new AuthenticationError(
      'Code exchange is handled server-side. CLI receives tokens directly.',
      'NOT_SUPPORTED'
    );
  async initialize(): Promise<Session | null> {
    const client = this.getClient();

    try {
      // Get the current session from storage
      const {
        data: { session },
        error
      } = await client.auth.getSession();

      if (error) {
        this.logger.warn('Failed to restore session:', error);
        return null;
      }

      if (session) {
        this.logger.info('Session restored successfully');
      }

      return session;
    } catch (error) {
      this.logger.error('Error initializing session:', error);
      return null;
    }
  }

  /**
   * Refresh an access token
   * Sign in with PKCE flow (for CLI auth)
   */
  async refreshSession(refreshToken: string): Promise<{
    token: string;
    refreshToken?: string;
    expiresAt?: string;
  }> {
    try {
      const client = this.getClient();
  async signInWithPKCE(): Promise<{ url: string; codeVerifier: string }> {
    const client = this.getClient();

    try {
      // Generate PKCE challenge
      const { data, error } = await client.auth.signInWithOAuth({
        provider: 'github',
        options: {
          redirectTo:
            process.env.TM_AUTH_CALLBACK_URL ||
            'http://localhost:3421/auth/callback',
          scopes: 'email'
        }
      });

      if (error) {
        throw new AuthenticationError(
          `Failed to initiate PKCE flow: ${error.message}`,
          'PKCE_INIT_FAILED'
        );
      }

      if (!data?.url) {
        throw new AuthenticationError(
          'No authorization URL returned',
          'INVALID_RESPONSE'
        );
      }

      // Extract code_verifier from the URL or generate it
      // Note: Supabase handles PKCE internally, we just need to handle the callback
      return {
        url: data.url,
        codeVerifier: '' // Supabase manages this internally
      };
    } catch (error) {
      if (error instanceof AuthenticationError) {
        throw error;
      }

      throw new AuthenticationError(
        `Failed to start PKCE flow: ${(error as Error).message}`,
        'PKCE_FAILED'
      );
    }
  }

  /**
   * Exchange authorization code for session (PKCE flow)
   */
  async exchangeCodeForSession(code: string): Promise<Session> {
    const client = this.getClient();

    try {
      const { data, error } = await client.auth.exchangeCodeForSession(code);

      if (error) {
        throw new AuthenticationError(
          `Failed to exchange code: ${error.message}`,
          'CODE_EXCHANGE_FAILED'
        );
      }

      if (!data?.session) {
        throw new AuthenticationError(
          'No session returned from code exchange',
          'INVALID_RESPONSE'
        );
      }

      this.logger.info('Successfully exchanged code for session');
      return data.session;
    } catch (error) {
      if (error instanceof AuthenticationError) {
        throw error;
      }

      throw new AuthenticationError(
        `Code exchange failed: ${(error as Error).message}`,
        'CODE_EXCHANGE_FAILED'
      );
    }
  }

  /**
   * Get the current session
   */
  async getSession(): Promise<Session | null> {
    const client = this.getClient();

    try {
      const {
        data: { session },
        error
      } = await client.auth.getSession();

      if (error) {
        this.logger.warn('Failed to get session:', error);
        return null;
      }

      return session;
    } catch (error) {
      this.logger.error('Error getting session:', error);
      return null;
    }
  }

  /**
   * Refresh the current session
   */
  async refreshSession(): Promise<Session | null> {
    const client = this.getClient();

    try {
      this.logger.info('Refreshing session...');

      // Set the session with refresh token
      const { data, error } = await client.auth.refreshSession({
        refresh_token: refreshToken
      });
      // Supabase will automatically use the stored refresh token
      const {
        data: { session },
        error
      } = await client.auth.refreshSession();

      if (error) {
        this.logger.error('Failed to refresh session:', error);
@@ -82,22 +216,11 @@ export class SupabaseAuthClient {
        );
      }

      if (!data.session) {
        throw new AuthenticationError(
          'No session data returned',
          'INVALID_RESPONSE'
        );
      if (session) {
        this.logger.info('Successfully refreshed session');
      }

      this.logger.info('Successfully refreshed session');

      return {
        token: data.session.access_token,
        refreshToken: data.session.refresh_token,
        expiresAt: data.session.expires_at
          ? new Date(data.session.expires_at * 1000).toISOString()
          : undefined
      };
      return session;
    } catch (error) {
      if (error instanceof AuthenticationError) {
        throw error;
@@ -111,21 +234,23 @@ export class SupabaseAuthClient {
  }

  /**
   * Get user details from token
   * Get current user from session
   */
  async getUser(token: string): Promise<User | null> {
    try {
      const client = this.getClient();
  async getUser(): Promise<User | null> {
    const client = this.getClient();

      // Get user with the token
      const { data, error } = await client.auth.getUser(token);
    try {
      const {
        data: { user },
        error
      } = await client.auth.getUser();

      if (error) {
        this.logger.warn('Failed to get user:', error);
        return null;
      }

      return data.user;
      return user;
    } catch (error) {
      this.logger.error('Error getting user:', error);
      return null;
@@ -133,22 +258,55 @@ export class SupabaseAuthClient {
  }

  /**
   * Sign out (revoke tokens)
   * Note: This requires the user to be authenticated with the current session.
   * For remote token revocation, a server-side admin API with service_role key would be needed.
   * Sign out and clear session
   */
  async signOut(): Promise<void> {
    try {
      const client = this.getClient();
    const client = this.getClient();

    // Sign out the current session with global scope to revoke all refresh tokens
    try {
      // Sign out with global scope to revoke all refresh tokens
      const { error } = await client.auth.signOut({ scope: 'global' });

      if (error) {
        this.logger.warn('Failed to sign out:', error);
      }

      // Clear cached session data
      this.sessionStorage.clear();
    } catch (error) {
      this.logger.error('Error during sign out:', error);
    }
  }

  /**
   * Set session from external auth (e.g., from server callback)
   */
  async setSession(session: Session): Promise<void> {
    const client = this.getClient();

    try {
      const { error } = await client.auth.setSession({
        access_token: session.access_token,
        refresh_token: session.refresh_token
      });

      if (error) {
        throw new AuthenticationError(
          `Failed to set session: ${error.message}`,
          'SESSION_SET_FAILED'
        );
      }

      this.logger.info('Session set successfully');
    } catch (error) {
      if (error instanceof AuthenticationError) {
        throw error;
      }

      throw new AuthenticationError(
        `Failed to set session: ${(error as Error).message}`,
        'SESSION_SET_FAILED'
      );
    }
  }
}
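Taken together, the refactored client gives the CLI a fairly conventional Supabase auth lifecycle. A hedged sketch of how the pieces are meant to compose; the import path is the internal one referenced by auth-manager.ts, and the authorization code value is a placeholder delivered by the local callback server:

  import { SupabaseAuthClient } from '../clients/supabase-client.js';

  const client = new SupabaseAuthClient();

  // 1. Restore any session persisted in auth.json via the storage adapter.
  await client.initialize();

  // 2. If the OAuth callback delivered a PKCE code, exchange it for a session.
  //    (The oauth-service performs this step when it receives pkce_callback.)
  const session = await client.exchangeCodeForSession('auth-code-from-callback');

  // 3. From then on, refreshes and user lookups go through the stored session.
  await client.refreshSession();
  const user = await client.getUser();
  console.log(session.user.id === user?.id);

  // 4. Sign-out revokes refresh tokens globally and clears the cached tokens.
  await client.signOut();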
@@ -177,7 +177,7 @@ describe('ConfigManager', () => {

  it('should return storage configuration', () => {
    const storage = manager.getStorageConfig();
    expect(storage).toEqual({ type: 'auto', apiConfigured: false });
    expect(storage).toEqual({ type: 'file' });
  });

  it('should return API storage configuration when configured', async () => {
@@ -206,65 +206,7 @@ describe('ConfigManager', () => {
    expect(storage).toEqual({
      type: 'api',
      apiEndpoint: 'https://api.example.com',
      apiAccessToken: 'token123',
      apiConfigured: true
    });
  });

  it('should return auto storage configuration with apiConfigured flag', async () => {
    // Create a new instance with auto storage config and partial API settings
    vi.mocked(ConfigMerger).mockImplementationOnce(
      () =>
        ({
          addSource: vi.fn(),
          clearSources: vi.fn(),
          merge: vi.fn().mockReturnValue({
            storage: {
              type: 'auto',
              apiEndpoint: 'https://api.example.com'
              // No apiAccessToken - partial config
            }
          }),
          getSources: vi.fn().mockReturnValue([])
        }) as any
    );

    const autoManager = await ConfigManager.create(testProjectRoot);

    const storage = autoManager.getStorageConfig();
    expect(storage).toEqual({
      type: 'auto',
      apiEndpoint: 'https://api.example.com',
      apiAccessToken: undefined,
      apiConfigured: true // true because apiEndpoint is provided
    });
  });

  it('should return auto storage with apiConfigured false when no API settings', async () => {
    // Create a new instance with auto storage but no API settings
    vi.mocked(ConfigMerger).mockImplementationOnce(
      () =>
        ({
          addSource: vi.fn(),
          clearSources: vi.fn(),
          merge: vi.fn().mockReturnValue({
            storage: {
              type: 'auto'
              // No API settings at all
            }
          }),
          getSources: vi.fn().mockReturnValue([])
        }) as any
    );

    const autoManager = await ConfigManager.create(testProjectRoot);

    const storage = autoManager.getStorageConfig();
    expect(storage).toEqual({
      type: 'auto',
      apiEndpoint: undefined,
      apiAccessToken: undefined,
      apiConfigured: false // false because no API settings
      apiAccessToken: 'token123'
    });
  });
@@ -85,11 +85,6 @@ describe('EnvironmentConfigProvider', () => {
    provider = new EnvironmentConfigProvider(); // Reset provider
    config = provider.loadConfig();
    expect(config.storage?.type).toBe('api');

    process.env.TASKMASTER_STORAGE_TYPE = 'auto';
    provider = new EnvironmentConfigProvider(); // Reset provider
    config = provider.loadConfig();
    expect(config.storage?.type).toBe('auto');
  });

  it('should handle nested configuration paths', () => {
@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
    {
      env: 'TASKMASTER_STORAGE_TYPE',
      path: ['storage', 'type'],
      validate: (v: string) => ['file', 'api', 'auto'].includes(v)
      validate: (v: string) => ['file', 'api'].includes(v)
    },
    { env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
    { env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },
@@ -53,6 +53,7 @@ export type OutputFormat = (typeof OUTPUT_FORMATS)[number];
 */
export const STATUS_ICONS: Record<TaskStatus, string> = {
  done: '✓',
  completed: '✓',
  'in-progress': '►',
  blocked: '⭕',
  pending: '○',
@@ -71,5 +72,6 @@ export const STATUS_COLORS: Record<TaskStatus, string> = {
  deferred: 'gray',
  cancelled: 'red',
  blocked: 'magenta',
  review: 'cyan'
  review: 'cyan',
  completed: 'green'
} as const;
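The icon and color maps are meant to be combined when the CLI renders a task status. A small illustrative sketch; the import specifiers and the formatStatus helper are made up for the example, and the dynamic chalk lookup is one possible way to apply a color name:

  import chalk from 'chalk';
  // Placeholder specifiers: the real modules are the constants and types files touched above.
  import { STATUS_ICONS, STATUS_COLORS } from './constants.js';
  import type { TaskStatus } from './types.js';

  function formatStatus(status: TaskStatus): string {
    const icon = STATUS_ICONS[status];
    const color = STATUS_COLORS[status];
    // chalk exposes color methods by name, e.g. chalk.green('...')
    return (chalk as any)[color](`${icon} ${status}`);
  }

  console.log(formatStatus('completed')); // renders a green "✓ completed"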
@@ -55,7 +55,3 @@ export {

// Re-export logger
export { getLogger, createLogger, setGlobalLogger } from './logger/index.js';

// Re-export workflow
export { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
export type * from './workflow/index.js';
@@ -3,7 +3,11 @@
 * This file defines the contract for configuration management
 */

import type { TaskComplexity, TaskPriority } from '../types/index.js';
import type {
  TaskComplexity,
  TaskPriority,
  StorageType
} from '../types/index.js';

/**
 * Model configuration for different AI roles
@@ -73,14 +77,6 @@ export interface TagSettings {
  tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case';
}

/**
 * Storage type options
 * - 'file': Local file system storage
 * - 'api': Remote API storage (Hamster integration)
 * - 'auto': Automatically detect based on auth status
 */
export type StorageType = 'file' | 'api' | 'auto';

/**
 * Runtime storage configuration used for storage backend selection
 * This is what getStorageConfig() returns and what StorageFactory expects
170 packages/tm-core/src/mappers/TaskMapper.ts Normal file
@@ -0,0 +1,170 @@
import { Task, Subtask } from '../types/index.js';
import { Database, Tables } from '../types/database.types.js';

type TaskRow = Tables<'tasks'>;
type DependencyRow = Tables<'task_dependencies'>;

export class TaskMapper {
  /**
   * Maps database tasks to internal Task format
   */
  static mapDatabaseTasksToTasks(
    dbTasks: TaskRow[],
    dbDependencies: DependencyRow[]
  ): Task[] {
    if (!dbTasks || dbTasks.length === 0) {
      return [];
    }

    // Group dependencies by task_id
    const dependenciesByTaskId = this.groupDependenciesByTaskId(dbDependencies);

    // Separate parent tasks and subtasks
    const parentTasks = dbTasks.filter((t) => !t.parent_task_id);
    const subtasksByParentId = this.groupSubtasksByParentId(dbTasks);

    // Map parent tasks with their subtasks
    return parentTasks.map((taskRow) =>
      this.mapDatabaseTaskToTask(
        taskRow,
        subtasksByParentId.get(taskRow.id) || [],
        dependenciesByTaskId
      )
    );
  }

  /**
   * Maps a single database task to internal Task format
   */
  static mapDatabaseTaskToTask(
    dbTask: TaskRow,
    dbSubtasks: TaskRow[],
    dependenciesByTaskId: Map<string, string[]>
  ): Task {
    // Map subtasks
    const subtasks: Subtask[] = dbSubtasks.map((subtask, index) => ({
      id: index + 1, // Use numeric ID for subtasks
      parentId: dbTask.id,
      title: subtask.title,
      description: subtask.description || '',
      status: this.mapStatus(subtask.status),
      priority: this.mapPriority(subtask.priority),
      dependencies: dependenciesByTaskId.get(subtask.id) || [],
      details: (subtask.metadata as any)?.details || '',
      testStrategy: (subtask.metadata as any)?.testStrategy || '',
      createdAt: subtask.created_at,
      updatedAt: subtask.updated_at,
      assignee: subtask.assignee_id || undefined,
      complexity: subtask.complexity
        ? this.mapComplexityToInternal(subtask.complexity)
        : undefined
    }));

    return {
      id: dbTask.display_id || dbTask.id, // Use display_id if available
      title: dbTask.title,
      description: dbTask.description || '',
      status: this.mapStatus(dbTask.status),
      priority: this.mapPriority(dbTask.priority),
      dependencies: dependenciesByTaskId.get(dbTask.id) || [],
      details: (dbTask.metadata as any)?.details || '',
      testStrategy: (dbTask.metadata as any)?.testStrategy || '',
      subtasks,
      createdAt: dbTask.created_at,
      updatedAt: dbTask.updated_at,
      assignee: dbTask.assignee_id || undefined,
      complexity: dbTask.complexity
        ? this.mapComplexityToInternal(dbTask.complexity)
        : undefined,
      effort: dbTask.estimated_hours || undefined,
      actualEffort: dbTask.actual_hours || undefined
    };
  }

  /**
   * Groups dependencies by task ID
   */
  private static groupDependenciesByTaskId(
    dependencies: DependencyRow[]
  ): Map<string, string[]> {
    const dependenciesByTaskId = new Map<string, string[]>();

    if (dependencies) {
      for (const dep of dependencies) {
        const deps = dependenciesByTaskId.get(dep.task_id) || [];
        deps.push(dep.depends_on_task_id);
        dependenciesByTaskId.set(dep.task_id, deps);
      }
    }

    return dependenciesByTaskId;
  }

  /**
   * Groups subtasks by their parent ID
   */
  private static groupSubtasksByParentId(
    tasks: TaskRow[]
  ): Map<string, TaskRow[]> {
    const subtasksByParentId = new Map<string, TaskRow[]>();

    for (const task of tasks) {
      if (task.parent_task_id) {
        const subtasks = subtasksByParentId.get(task.parent_task_id) || [];
        subtasks.push(task);
        subtasksByParentId.set(task.parent_task_id, subtasks);
      }
    }

    // Sort subtasks by subtask_position for each parent
    for (const subtasks of subtasksByParentId.values()) {
      subtasks.sort((a, b) => a.subtask_position - b.subtask_position);
    }

    return subtasksByParentId;
  }

  /**
   * Maps database status to internal status
   */
  private static mapStatus(
    status: Database['public']['Enums']['task_status']
  ): Task['status'] {
    switch (status) {
      case 'todo':
        return 'pending';
      case 'in_progress':
        return 'in-progress';
      case 'done':
        return 'done';
      default:
        return 'pending';
    }
  }

  /**
   * Maps database priority to internal priority
   */
  private static mapPriority(
    priority: Database['public']['Enums']['task_priority']
  ): Task['priority'] {
    switch (priority) {
      case 'urgent':
        return 'critical';
      default:
        return priority as Task['priority'];
    }
  }

  /**
   * Maps numeric complexity to descriptive complexity
   */
  private static mapComplexityToInternal(
    complexity: number
  ): Task['complexity'] {
    if (complexity <= 2) return 'simple';
    if (complexity <= 5) return 'moderate';
    if (complexity <= 8) return 'complex';
    return 'very-complex';
  }
}
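To make the mapping concrete, here is a hedged sketch of feeding the mapper two sample rows. The rows are deliberately partial and cast, since the generated TaskRow type carries many more columns, and the import path is a placeholder relative to the tm-core source:

  // Placeholder import path; inside tm-core the mapper lives under src/mappers/.
  import { TaskMapper } from './mappers/TaskMapper.js';

  const parent = {
    id: 'uuid-parent',
    display_id: 'TM-1',
    parent_task_id: null,
    title: 'Set up auth',
    status: 'in_progress',
    priority: 'urgent',
    complexity: 6,
    metadata: { details: 'See PKCE notes' },
    created_at: '2024-01-01T00:00:00Z',
    updated_at: '2024-01-02T00:00:00Z'
  } as any; // partial sample row

  const child = {
    id: 'uuid-child',
    parent_task_id: 'uuid-parent',
    subtask_position: 1,
    title: 'Store refresh token',
    status: 'todo',
    priority: 'medium',
    created_at: '2024-01-01T00:00:00Z',
    updated_at: '2024-01-01T00:00:00Z'
  } as any;

  const deps = [{ task_id: 'uuid-child', depends_on_task_id: 'uuid-parent' }] as any[];

  const [task] = TaskMapper.mapDatabaseTasksToTasks([parent, child], deps);
  console.log(task.id); // 'TM-1' (display_id preferred over the UUID)
  console.log(task.status, task.priority); // 'in-progress', 'critical' (urgent is remapped)
  console.log(task.subtasks[0].dependencies); // ['uuid-parent']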
110 packages/tm-core/src/repositories/supabase-task-repository.ts Normal file
@@ -0,0 +1,110 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { Task } from '../types/index.js';
import { Database } from '../types/database.types.js';
import { TaskMapper } from '../mappers/TaskMapper.js';
import { AuthManager } from '../auth/auth-manager.js';

export class SupabaseTaskRepository {
  constructor(private supabase: SupabaseClient<Database>) {}

  async getTasks(_projectId?: string): Promise<Task[]> {
    // Get the current context to determine briefId
    const authManager = AuthManager.getInstance();
    const context = authManager.getContext();

    if (!context || !context.briefId) {
      throw new Error(
        'No brief selected. Please select a brief first using: tm context brief'
      );
    }

    // Get all tasks for the brief using the exact query structure
    const { data: tasks, error } = await this.supabase
      .from('tasks')
      .select(`
        *,
        document:document_id (
          id,
          document_name,
          title,
          description
        )
      `)
      .eq('brief_id', context.briefId)
      .order('position', { ascending: true })
      .order('subtask_position', { ascending: true })
      .order('created_at', { ascending: true });

    if (error) {
      throw new Error(`Failed to fetch tasks: ${error.message}`);
    }

    if (!tasks || tasks.length === 0) {
      return [];
    }

    // Get all dependencies for these tasks
    const taskIds = tasks.map((t: any) => t.id);
    const { data: depsData, error: depsError } = await this.supabase
      .from('task_dependencies')
      .select('*')
      .in('task_id', taskIds);

    if (depsError) {
      throw new Error(
        `Failed to fetch task dependencies: ${depsError.message}`
      );
    }

    // Use mapper to convert to internal format
    return TaskMapper.mapDatabaseTasksToTasks(tasks, depsData || []);
  }

  async getTask(accountId: string, taskId: string): Promise<Task | null> {
    const { data, error } = await this.supabase
      .from('tasks')
      .select('*')
      .eq('account_id', accountId)
      .eq('id', taskId)
      .single();

    if (error) {
      if (error.code === 'PGRST116') {
        return null; // Not found
      }
      throw new Error(`Failed to fetch task: ${error.message}`);
    }

    // Get dependencies for this task
    const { data: depsData } = await this.supabase
      .from('task_dependencies')
      .select('*')
      .eq('task_id', taskId);

    // Get subtasks if this is a parent task
    const { data: subtasksData } = await this.supabase
      .from('tasks')
      .select('*')
      .eq('parent_task_id', taskId)
      .order('subtask_position', { ascending: true });

    // Create dependency map
    const dependenciesByTaskId = new Map<string, string[]>();
    if (depsData) {
      dependenciesByTaskId.set(
        taskId,
        depsData.map(
          (d: Database['public']['Tables']['task_dependencies']['Row']) =>
            d.depends_on_task_id
        )
      );
    }

    // Use mapper to convert single task
    return TaskMapper.mapDatabaseTaskToTask(
      data,
      subtasksData || [],
      dependenciesByTaskId
    );
  }
}
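A brief usage sketch of the repository. The client construction is illustrative: in the codebase the authenticated client is expected to come from SupabaseAuthClient.getClient(), the import paths are placeholders relative to tm-core, and the anon-key variable name is an assumption that follows the TM_PUBLIC prefix shown above:

  import { createClient } from '@supabase/supabase-js';
  // Placeholder relative paths within tm-core.
  import { SupabaseTaskRepository } from './repositories/supabase-task-repository.js';
  import type { Database } from './types/database.types.js';

  // Assumes the user has already run the auth flow and selected a brief,
  // since getTasks() resolves the brief from AuthManager.getContext().
  const supabase = createClient<Database>(
    process.env.TM_PUBLIC_SUPABASE_URL!,
    process.env.TM_PUBLIC_SUPABASE_ANON_KEY! // assumed variable name
  );

  const repo = new SupabaseTaskRepository(supabase);
  const tasks = await repo.getTasks();
  console.log(`Fetched ${tasks.length} tasks for the selected brief`);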
@@ -0,0 +1,36 @@
|
||||
import { Task, TaskTag } from '../types/index.js';
|
||||
|
||||
export interface TaskRepository {
|
||||
// Task operations
|
||||
getTasks(projectId: string): Promise<Task[]>;
|
||||
getTask(projectId: string, taskId: string): Promise<Task | null>;
|
||||
createTask(projectId: string, task: Omit<Task, 'id'>): Promise<Task>;
|
||||
updateTask(
|
||||
projectId: string,
|
||||
taskId: string,
|
||||
updates: Partial<Task>
|
||||
): Promise<Task>;
|
||||
deleteTask(projectId: string, taskId: string): Promise<void>;
|
||||
|
||||
// Tag operations
|
||||
getTags(projectId: string): Promise<TaskTag[]>;
|
||||
getTag(projectId: string, tagName: string): Promise<TaskTag | null>;
|
||||
createTag(projectId: string, tag: TaskTag): Promise<TaskTag>;
|
||||
updateTag(
|
||||
projectId: string,
|
||||
tagName: string,
|
||||
updates: Partial<TaskTag>
|
||||
): Promise<TaskTag>;
|
||||
deleteTag(projectId: string, tagName: string): Promise<void>;
|
||||
|
||||
// Bulk operations
|
||||
bulkCreateTasks(
|
||||
projectId: string,
|
||||
tasks: Omit<Task, 'id'>[]
|
||||
): Promise<Task[]>;
|
||||
bulkUpdateTasks(
|
||||
projectId: string,
|
||||
updates: Array<{ id: string; updates: Partial<Task> }>
|
||||
): Promise<Task[]>;
|
||||
bulkDeleteTasks(projectId: string, taskIds: string[]): Promise<void>;
|
||||
}
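A consumer sketch (not part of the diff): code can depend on the TaskRepository contract alone, so the Supabase-backed repository and any future implementation stay swappable. The sibling import path and the 'done' status literal are assumptions for illustration.

import type { TaskRepository } from './task-repository.interface.js'; // assumed sibling module

async function purgeDoneTasks(
	repo: TaskRepository,
	projectId: string
): Promise<number> {
	const tasks = await repo.getTasks(projectId);
	// 'done' is assumed to be a valid Task status value
	const doneIds = tasks.filter((t) => t.status === 'done').map((t) => t.id);
	if (doneIds.length > 0) {
		// One bulk round trip instead of N individual deleteTask calls
		await repo.bulkDeleteTasks(projectId, doneIds);
	}
	return doneIds.length;
}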
|
||||
@@ -4,3 +4,5 @@
|
||||
*/
|
||||
|
||||
export { TaskService } from './task-service.js';
|
||||
export { OrganizationService } from './organization.service.js';
|
||||
export type { Organization, Brief } from './organization.service.js';
|
||||
|
||||
packages/tm-core/src/services/organization.service.ts (new file, 363 lines)
@@ -0,0 +1,363 @@
|
||||
/**
|
||||
* @fileoverview Organization and Brief management service
|
||||
* Handles fetching and managing organizations and briefs from the API
|
||||
*/
|
||||
|
||||
import { SupabaseClient } from '@supabase/supabase-js';
|
||||
import { Database } from '../types/database.types.js';
|
||||
import { TaskMasterError, ERROR_CODES } from '../errors/task-master-error.js';
|
||||
import { getLogger } from '../logger/index.js';
|
||||
|
||||
/**
|
||||
* Organization data structure
|
||||
*/
|
||||
export interface Organization {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Brief data structure
|
||||
*/
|
||||
export interface Brief {
|
||||
id: string;
|
||||
accountId: string;
|
||||
documentId: string;
|
||||
status: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Task data structure from the remote database
|
||||
*/
|
||||
export interface RemoteTask {
|
||||
id: string;
|
||||
briefId: string;
|
||||
documentId: string;
|
||||
position: number | null;
|
||||
subtaskPosition: number | null;
|
||||
status: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
// Document details from join
|
||||
document?: {
|
||||
id: string;
|
||||
document_name: string;
|
||||
title: string;
|
||||
description: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Service for managing organizations and briefs
|
||||
*/
|
||||
export class OrganizationService {
|
||||
private logger = getLogger('OrganizationService');
|
||||
|
||||
constructor(private supabaseClient: SupabaseClient<Database>) {}
|
||||
|
||||
/**
|
||||
* Get all organizations for the authenticated user
|
||||
*/
|
||||
async getOrganizations(): Promise<Organization[]> {
|
||||
try {
|
||||
// The user is already authenticated via the Authorization header
|
||||
// Query the user_accounts view/table (filtered by RLS for current user)
|
||||
const { data, error } = await this.supabaseClient
|
||||
.from('user_accounts')
|
||||
.select(`
|
||||
id,
|
||||
name,
|
||||
slug
|
||||
`);
|
||||
|
||||
if (error) {
|
||||
throw new TaskMasterError(
|
||||
`Failed to fetch organizations: ${error.message}`,
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getOrganizations' },
|
||||
error
|
||||
);
|
||||
}
|
||||
|
||||
if (!data || data.length === 0) {
|
||||
this.logger.debug('No organizations found for user');
|
||||
return [];
|
||||
}
|
||||
|
||||
// Map to our Organization interface
|
||||
return data.map((org) => ({
|
||||
id: org.id ?? '',
|
||||
name: org.name ?? '',
|
||||
slug: org.slug ?? org.id ?? '' // Use ID as fallback if slug is null
|
||||
}));
|
||||
} catch (error) {
|
||||
if (error instanceof TaskMasterError) {
|
||||
throw error;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
'Failed to fetch organizations',
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getOrganizations' },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific organization by ID
|
||||
*/
|
||||
async getOrganization(orgId: string): Promise<Organization | null> {
|
||||
try {
|
||||
const { data, error } = await this.supabaseClient
|
||||
.from('accounts')
|
||||
.select(`
|
||||
id,
|
||||
name,
|
||||
slug
|
||||
`)
|
||||
.eq('id', orgId)
|
||||
.single();
|
||||
|
||||
if (error) {
|
||||
if (error.code === 'PGRST116') {
|
||||
// No rows found
|
||||
return null;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
`Failed to fetch organization: ${error.message}`,
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getOrganization', orgId },
|
||||
error
|
||||
);
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const accountData =
|
||||
data as Database['public']['Tables']['accounts']['Row'];
|
||||
return {
|
||||
id: accountData.id,
|
||||
name: accountData.name,
|
||||
slug: accountData.slug || accountData.id
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof TaskMasterError) {
|
||||
throw error;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
'Failed to fetch organization',
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getOrganization', orgId },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all briefs for a specific organization
|
||||
*/
|
||||
async getBriefs(orgId: string): Promise<Brief[]> {
|
||||
try {
|
||||
const { data, error } = await this.supabaseClient
|
||||
.from('brief')
|
||||
.select(`
|
||||
id,
|
||||
account_id,
|
||||
document_id,
|
||||
status,
|
||||
created_at,
|
||||
updated_at
|
||||
`)
|
||||
.eq('account_id', orgId);
|
||||
|
||||
if (error) {
|
||||
throw new TaskMasterError(
|
||||
`Failed to fetch briefs: ${error.message}`,
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getBriefs', orgId },
|
||||
error
|
||||
);
|
||||
}
|
||||
|
||||
if (!data || data.length === 0) {
|
||||
this.logger.debug(`No briefs found for organization ${orgId}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
// Map to our Brief interface
|
||||
return data.map((brief: any) => ({
|
||||
id: brief.id,
|
||||
accountId: brief.account_id,
|
||||
documentId: brief.document_id,
|
||||
status: brief.status,
|
||||
createdAt: brief.created_at,
|
||||
updatedAt: brief.updated_at
|
||||
}));
|
||||
} catch (error) {
|
||||
if (error instanceof TaskMasterError) {
|
||||
throw error;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
'Failed to fetch briefs',
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getBriefs', orgId },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific brief by ID
|
||||
*/
|
||||
async getBrief(briefId: string): Promise<Brief | null> {
|
||||
try {
|
||||
const { data, error } = await this.supabaseClient
|
||||
.from('brief')
|
||||
.select(`
|
||||
id,
|
||||
account_id,
|
||||
document_id,
|
||||
status,
|
||||
created_at,
|
||||
updated_at
|
||||
`)
|
||||
.eq('id', briefId)
|
||||
.single();
|
||||
|
||||
if (error) {
|
||||
if (error.code === 'PGRST116') {
|
||||
// No rows found
|
||||
return null;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
`Failed to fetch brief: ${error.message}`,
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getBrief', briefId },
|
||||
error
|
||||
);
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const briefData = data as any;
|
||||
return {
|
||||
id: briefData.id,
|
||||
accountId: briefData.account_id,
|
||||
documentId: briefData.document_id,
|
||||
status: briefData.status,
|
||||
createdAt: briefData.created_at,
|
||||
updatedAt: briefData.updated_at
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof TaskMasterError) {
|
||||
throw error;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
'Failed to fetch brief',
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getBrief', briefId },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a user has access to an organization
|
||||
*/
|
||||
async validateOrgAccess(orgId: string): Promise<boolean> {
|
||||
try {
|
||||
const org = await this.getOrganization(orgId);
|
||||
return org !== null;
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to validate org access: ${error}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a user has access to a brief
|
||||
*/
|
||||
async validateBriefAccess(briefId: string): Promise<boolean> {
|
||||
try {
|
||||
const brief = await this.getBrief(briefId);
|
||||
return brief !== null;
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to validate brief access: ${error}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all tasks for a specific brief
|
||||
*/
|
||||
async getTasks(briefId: string): Promise<RemoteTask[]> {
|
||||
try {
|
||||
const { data, error } = await this.supabaseClient
|
||||
.from('tasks')
|
||||
.select(`
|
||||
*,
|
||||
document:document_id (
|
||||
id,
|
||||
document_name,
|
||||
title,
|
||||
description
|
||||
)
|
||||
`)
|
||||
.eq('brief_id', briefId)
|
||||
.order('position', { ascending: true })
|
||||
.order('subtask_position', { ascending: true })
|
||||
.order('created_at', { ascending: true });
|
||||
|
||||
if (error) {
|
||||
throw new TaskMasterError(
|
||||
`Failed to fetch tasks: ${error.message}`,
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getTasks', briefId },
|
||||
error
|
||||
);
|
||||
}
|
||||
|
||||
if (!data || data.length === 0) {
|
||||
this.logger.debug(`No tasks found for brief ${briefId}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
// Map to our RemoteTask interface
|
||||
return data.map((task: any) => ({
|
||||
id: task.id,
|
||||
briefId: task.brief_id,
|
||||
documentId: task.document_id,
|
||||
position: task.position,
|
||||
subtaskPosition: task.subtask_position,
|
||||
status: task.status,
|
||||
createdAt: task.created_at,
|
||||
updatedAt: task.updated_at,
|
||||
document: task.document
|
||||
? {
|
||||
id: task.document.id,
|
||||
document_name: task.document.document_name,
|
||||
title: task.document.title,
|
||||
description: task.document.description
|
||||
}
|
||||
: undefined
|
||||
}));
|
||||
} catch (error) {
|
||||
if (error instanceof TaskMasterError) {
|
||||
throw error;
|
||||
}
|
||||
throw new TaskMasterError(
|
||||
'Failed to fetch tasks',
|
||||
ERROR_CODES.API_ERROR,
|
||||
{ operation: 'getTasks', briefId },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
}
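A usage sketch (not part of the diff) showing how a caller can walk organizations, briefs, and tasks through OrganizationService given an already-authenticated client; the function name and fallback title are illustrative.

import type { SupabaseClient } from '@supabase/supabase-js';
import type { Database } from '../types/database.types.js';
import { OrganizationService } from './organization.service.js';

async function firstBriefTaskTitles(
	supabase: SupabaseClient<Database>
): Promise<string[]> {
	const orgs = new OrganizationService(supabase);
	const [org] = await orgs.getOrganizations();
	if (!org) return [];
	const [brief] = await orgs.getBriefs(org.id);
	if (!brief) return [];
	const tasks = await orgs.getTasks(brief.id);
	// Task titles come from the joined document row when present
	return tasks.map((t) => t.document?.title ?? '(untitled)');
}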
|
||||
@@ -3,7 +3,12 @@
|
||||
* Core service for task operations - handles business logic between storage and API
|
||||
*/
|
||||
|
||||
import type { Task, TaskFilter, TaskStatus } from '../types/index.js';
|
||||
import type {
|
||||
Task,
|
||||
TaskFilter,
|
||||
TaskStatus,
|
||||
StorageType
|
||||
} from '../types/index.js';
|
||||
import type { IStorage } from '../interfaces/storage.interface.js';
|
||||
import { ConfigManager } from '../config/config-manager.js';
|
||||
import { StorageFactory } from '../storage/storage-factory.js';
|
||||
@@ -22,8 +27,8 @@ export interface TaskListResult {
|
||||
filtered: number;
|
||||
/** The tag these tasks belong to (only present if explicitly provided) */
|
||||
tag?: string;
|
||||
/** Storage type being used - includes 'auto' for automatic detection */
|
||||
storageType: 'file' | 'api' | 'auto';
|
||||
/** Storage type being used */
|
||||
storageType: StorageType;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -113,7 +118,7 @@ export class TaskService {
|
||||
total: rawTasks.length,
|
||||
filtered: filteredEntities.length,
|
||||
tag: options.tag, // Only include tag if explicitly provided
|
||||
storageType: this.configManager.getStorageConfig().type
|
||||
storageType: this.getStorageType()
|
||||
};
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
@@ -166,7 +171,7 @@ export class TaskService {
|
||||
byStatus: Record<TaskStatus, number>;
|
||||
withSubtasks: number;
|
||||
blocked: number;
|
||||
storageType: 'file' | 'api' | 'auto';
|
||||
storageType: StorageType;
|
||||
}> {
|
||||
const result = await this.getTaskList({
|
||||
tag,
|
||||
@@ -334,8 +339,12 @@ export class TaskService {
|
||||
/**
|
||||
* Get current storage type
|
||||
*/
|
||||
getStorageType(): 'file' | 'api' | 'auto' {
|
||||
return this.configManager.getStorageConfig().type;
|
||||
getStorageType(): StorageType {
|
||||
// Prefer the runtime storage type if available to avoid exposing 'auto'
|
||||
const s = this.storage as { getType?: () => 'file' | 'api' } | null;
|
||||
const runtimeType = s?.getType?.();
|
||||
return (runtimeType ??
|
||||
this.configManager.getStorageConfig().type) as StorageType;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,27 +1,29 @@
|
||||
/**
|
||||
* @fileoverview API-based storage implementation for Hamster integration
|
||||
* This provides storage via REST API instead of local file system
|
||||
* @fileoverview API-based storage implementation using repository pattern
|
||||
* This provides storage via repository abstraction for flexibility
|
||||
*/
|
||||
|
||||
import type {
|
||||
IStorage,
|
||||
StorageStats
|
||||
} from '../interfaces/storage.interface.js';
|
||||
import type { Task, TaskMetadata } from '../types/index.js';
|
||||
import type { Task, TaskMetadata, TaskTag } from '../types/index.js';
|
||||
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
||||
import { TaskRepository } from '../repositories/task-repository.interface.js';
|
||||
import { SupabaseTaskRepository } from '../repositories/supabase-task-repository.js';
|
||||
import { SupabaseClient } from '@supabase/supabase-js';
|
||||
import { AuthManager } from '../auth/auth-manager.js';
|
||||
|
||||
/**
|
||||
* API storage configuration
|
||||
*/
|
||||
export interface ApiStorageConfig {
|
||||
/** API endpoint base URL */
|
||||
endpoint: string;
|
||||
/** Access token for authentication */
|
||||
accessToken: string;
|
||||
/** Optional project ID */
|
||||
projectId?: string;
|
||||
/** Request timeout in milliseconds */
|
||||
timeout?: number;
|
||||
/** Supabase client instance */
|
||||
supabaseClient?: SupabaseClient;
|
||||
/** Custom repository implementation */
|
||||
repository?: TaskRepository;
|
||||
/** Project ID for scoping */
|
||||
projectId: string;
|
||||
/** Enable request retries */
|
||||
enableRetry?: boolean;
|
||||
/** Maximum retry attempts */
|
||||
@@ -29,64 +31,58 @@ export interface ApiStorageConfig {
|
||||
}
|
||||
|
||||
/**
|
||||
* API response wrapper
|
||||
*/
|
||||
interface ApiResponse<T> {
|
||||
success: boolean;
|
||||
data?: T;
|
||||
error?: string;
|
||||
message?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* ApiStorage implementation for Hamster integration
|
||||
* Fetches and stores tasks via REST API
|
||||
* ApiStorage implementation using repository pattern
|
||||
* Provides flexibility to swap between different backend implementations
|
||||
*/
|
||||
export class ApiStorage implements IStorage {
|
||||
private readonly config: Required<ApiStorageConfig>;
|
||||
private readonly repository: TaskRepository;
|
||||
private readonly projectId: string;
|
||||
private readonly enableRetry: boolean;
|
||||
private readonly maxRetries: number;
|
||||
private initialized = false;
|
||||
private tagsCache: Map<string, TaskTag> = new Map();
|
||||
|
||||
constructor(config: ApiStorageConfig) {
|
||||
this.validateConfig(config);
|
||||
|
||||
this.config = {
|
||||
endpoint: config.endpoint.replace(/\/$/, ''), // Remove trailing slash
|
||||
accessToken: config.accessToken,
|
||||
projectId: config.projectId || 'default',
|
||||
timeout: config.timeout || 30000,
|
||||
enableRetry: config.enableRetry ?? true,
|
||||
maxRetries: config.maxRetries || 3
|
||||
};
|
||||
// Use provided repository or create Supabase repository
|
||||
if (config.repository) {
|
||||
this.repository = config.repository;
|
||||
} else if (config.supabaseClient) {
|
||||
// TODO: SupabaseTaskRepository doesn't implement all TaskRepository methods yet
|
||||
// Cast for now until full implementation is complete
|
||||
this.repository = new SupabaseTaskRepository(
|
||||
config.supabaseClient
|
||||
) as unknown as TaskRepository;
|
||||
} else {
|
||||
throw new TaskMasterError(
|
||||
'Either repository or supabaseClient must be provided',
|
||||
ERROR_CODES.MISSING_CONFIGURATION
|
||||
);
|
||||
}
|
||||
|
||||
this.projectId = config.projectId;
|
||||
this.enableRetry = config.enableRetry ?? true;
|
||||
this.maxRetries = config.maxRetries ?? 3;
|
||||
}
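A construction sketch (not part of the diff). It assumes the post-change config shape, i.e. projectId plus either a repository or a supabaseClient, and that a brief has already been selected via AuthManager; the project identifier and env var names are illustrative.

import { createClient } from '@supabase/supabase-js';
import { ApiStorage } from './api-storage.js';

async function createBriefStorage(): Promise<ApiStorage> {
	const storage = new ApiStorage({
		projectId: 'project-123', // assumed project identifier
		supabaseClient: createClient(
			process.env.SUPABASE_URL!, // assumed env var
			process.env.SUPABASE_ANON_KEY! // assumed env var
		),
		enableRetry: true,
		maxRetries: 3
	});
	await storage.initialize();
	return storage;
}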
|
||||
|
||||
/**
|
||||
* Validate API storage configuration
|
||||
*/
|
||||
private validateConfig(config: ApiStorageConfig): void {
|
||||
if (!config.endpoint) {
|
||||
if (!config.projectId) {
|
||||
throw new TaskMasterError(
|
||||
'API endpoint is required for API storage',
|
||||
'Project ID is required for API storage',
|
||||
ERROR_CODES.MISSING_CONFIGURATION
|
||||
);
|
||||
}
|
||||
|
||||
if (!config.accessToken) {
|
||||
if (!config.repository && !config.supabaseClient) {
|
||||
throw new TaskMasterError(
|
||||
'Access token is required for API storage',
|
||||
'Either repository or supabaseClient must be provided',
|
||||
ERROR_CODES.MISSING_CONFIGURATION
|
||||
);
|
||||
}
|
||||
|
||||
// Validate endpoint URL format
|
||||
try {
|
||||
new URL(config.endpoint);
|
||||
} catch {
|
||||
throw new TaskMasterError(
|
||||
'Invalid API endpoint URL',
|
||||
ERROR_CODES.INVALID_INPUT,
|
||||
{ endpoint: config.endpoint }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -96,8 +92,8 @@ export class ApiStorage implements IStorage {
|
||||
if (this.initialized) return;
|
||||
|
||||
try {
|
||||
// Verify API connectivity
|
||||
await this.verifyConnection();
|
||||
// Load initial tags
|
||||
await this.loadTagsIntoCache();
|
||||
this.initialized = true;
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
@@ -110,39 +106,71 @@ export class ApiStorage implements IStorage {
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify API connection
|
||||
* Load tags into cache
|
||||
* In our API-based system, "tags" represent briefs
|
||||
*/
|
||||
private async verifyConnection(): Promise<void> {
|
||||
const response = await this.makeRequest<{ status: string }>('/health');
|
||||
private async loadTagsIntoCache(): Promise<void> {
|
||||
try {
|
||||
const authManager = AuthManager.getInstance();
|
||||
const context = authManager.getContext();
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(`API health check failed: ${response.error}`);
|
||||
// If we have a selected brief, create a virtual "tag" for it
|
||||
if (context?.briefId) {
|
||||
// Create a virtual tag representing the current brief
|
||||
const briefTag: TaskTag = {
|
||||
name: context.briefId,
|
||||
tasks: [], // Will be populated when tasks are loaded
|
||||
metadata: {
|
||||
briefId: context.briefId,
|
||||
briefName: context.briefName,
|
||||
organizationId: context.orgId
|
||||
}
|
||||
};
|
||||
|
||||
this.tagsCache.clear();
|
||||
this.tagsCache.set(context.briefId, briefTag);
|
||||
}
|
||||
} catch (error) {
|
||||
// If no brief is selected, that's okay - user needs to select one first
|
||||
console.debug('No brief selected, starting with empty cache');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load tasks from API
|
||||
* In our system, the tag parameter represents a brief ID
|
||||
*/
|
||||
async loadTasks(tag?: string): Promise<Task[]> {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/tasks`;
|
||||
const authManager = AuthManager.getInstance();
|
||||
const context = authManager.getContext();
|
||||
|
||||
const response = await this.makeRequest<{ tasks: Task[] }>(endpoint);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to load tasks');
|
||||
// If no brief is selected in context, throw an error
|
||||
if (!context?.briefId) {
|
||||
throw new Error(
|
||||
'No brief selected. Please select a brief first using: tm context brief <brief-id>'
|
||||
);
|
||||
}
|
||||
|
||||
return response.data?.tasks || [];
|
||||
// Load tasks from the current brief context
|
||||
const tasks = await this.retryOperation(() =>
|
||||
this.repository.getTasks(this.projectId)
|
||||
);
|
||||
|
||||
// Update the tag cache with the loaded task IDs
|
||||
const briefTag = this.tagsCache.get(context.briefId);
|
||||
if (briefTag) {
|
||||
briefTag.tasks = tasks.map((task) => task.id);
|
||||
}
|
||||
|
||||
return tasks;
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to load tasks from API',
|
||||
ERROR_CODES.STORAGE_ERROR,
|
||||
{ operation: 'loadTasks', tag },
|
||||
{ operation: 'loadTasks', tag, context: 'brief-based loading' },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
@@ -155,15 +183,29 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/tasks`;
|
||||
if (tag) {
|
||||
// Update tag with task IDs
|
||||
const tagData = this.tagsCache.get(tag) || {
|
||||
name: tag,
|
||||
tasks: [],
|
||||
metadata: {}
|
||||
};
|
||||
tagData.tasks = tasks.map((t) => t.id);
|
||||
|
||||
const response = await this.makeRequest(endpoint, 'PUT', { tasks });
|
||||
// Save or update tag
|
||||
if (this.tagsCache.has(tag)) {
|
||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
||||
} else {
|
||||
await this.repository.createTag(this.projectId, tagData);
|
||||
}
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to save tasks');
|
||||
this.tagsCache.set(tag, tagData);
|
||||
}
|
||||
|
||||
// Save tasks using bulk operation
|
||||
await this.retryOperation(() =>
|
||||
this.repository.bulkCreateTasks(this.projectId, tasks)
|
||||
);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to save tasks to API',
|
||||
@@ -181,20 +223,17 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/tasks/${taskId}`;
|
||||
|
||||
const response = await this.makeRequest<{ task: Task }>(endpoint);
|
||||
|
||||
if (!response.success) {
|
||||
if (response.error?.includes('not found')) {
|
||||
if (tag) {
|
||||
// Check if task is in tag
|
||||
const tagData = this.tagsCache.get(tag);
|
||||
if (!tagData || !tagData.tasks.includes(taskId)) {
|
||||
return null;
|
||||
}
|
||||
throw new Error(response.error || 'Failed to load task');
|
||||
}
|
||||
|
||||
return response.data?.task || null;
|
||||
return await this.retryOperation(() =>
|
||||
this.repository.getTask(this.projectId, taskId)
|
||||
);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to load task from API',
|
||||
@@ -212,14 +251,26 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/tasks/${task.id}?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/tasks/${task.id}`;
|
||||
// Check if task exists
|
||||
const existing = await this.repository.getTask(this.projectId, task.id);
|
||||
|
||||
const response = await this.makeRequest(endpoint, 'PUT', { task });
|
||||
if (existing) {
|
||||
await this.retryOperation(() =>
|
||||
this.repository.updateTask(this.projectId, task.id, task)
|
||||
);
|
||||
} else {
|
||||
await this.retryOperation(() =>
|
||||
this.repository.createTask(this.projectId, task)
|
||||
);
|
||||
}
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to save task');
|
||||
// Update tag if specified
|
||||
if (tag) {
|
||||
const tagData = this.tagsCache.get(tag);
|
||||
if (tagData && !tagData.tasks.includes(task.id)) {
|
||||
tagData.tasks.push(task.id);
|
||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
@@ -238,14 +289,17 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/tasks/${taskId}`;
|
||||
await this.retryOperation(() =>
|
||||
this.repository.deleteTask(this.projectId, taskId)
|
||||
);
|
||||
|
||||
const response = await this.makeRequest(endpoint, 'DELETE');
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to delete task');
|
||||
// Remove from tag if specified
|
||||
if (tag) {
|
||||
const tagData = this.tagsCache.get(tag);
|
||||
if (tagData) {
|
||||
tagData.tasks = tagData.tasks.filter((id) => id !== taskId);
|
||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
@@ -258,21 +312,24 @@ export class ApiStorage implements IStorage {
|
||||
}
|
||||
|
||||
/**
|
||||
* List available tags
|
||||
* List available tags (briefs in our system)
|
||||
*/
|
||||
async listTags(): Promise<string[]> {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest<{ tags: string[] }>(
|
||||
`/projects/${this.config.projectId}/tags`
|
||||
);
|
||||
const authManager = AuthManager.getInstance();
|
||||
const context = authManager.getContext();
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to list tags');
|
||||
// In our API-based system, we only have one "tag" at a time - the current brief
|
||||
if (context?.briefId) {
|
||||
// Ensure the current brief is in our cache
|
||||
await this.loadTagsIntoCache();
|
||||
return [context.briefId];
|
||||
}
|
||||
|
||||
return response.data?.tags || [];
|
||||
// No brief selected, return empty array
|
||||
return [];
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to list tags from API',
|
||||
@@ -290,19 +347,15 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/metadata`;
|
||||
|
||||
const response = await this.makeRequest<{ metadata: TaskMetadata }>(
|
||||
endpoint
|
||||
);
|
||||
|
||||
if (!response.success) {
|
||||
return null;
|
||||
if (tag) {
|
||||
const tagData = this.tagsCache.get(tag);
|
||||
return (tagData?.metadata as TaskMetadata) || null;
|
||||
}
|
||||
|
||||
return response.data?.metadata || null;
|
||||
// Return global metadata if no tag specified
|
||||
// This could be stored in a special system tag
|
||||
const systemTag = await this.repository.getTag(this.projectId, '_system');
|
||||
return (systemTag?.metadata as TaskMetadata) || null;
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to load metadata from API',
|
||||
@@ -320,14 +373,38 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/metadata`;
|
||||
if (tag) {
|
||||
const tagData = this.tagsCache.get(tag) || {
|
||||
name: tag,
|
||||
tasks: [],
|
||||
metadata: {}
|
||||
};
|
||||
tagData.metadata = metadata as any;
|
||||
|
||||
const response = await this.makeRequest(endpoint, 'PUT', { metadata });
|
||||
if (this.tagsCache.has(tag)) {
|
||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
||||
} else {
|
||||
await this.repository.createTag(this.projectId, tagData);
|
||||
}
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to save metadata');
|
||||
this.tagsCache.set(tag, tagData);
|
||||
} else {
|
||||
// Save to system tag
|
||||
const systemTag: TaskTag = {
|
||||
name: '_system',
|
||||
tasks: [],
|
||||
metadata: metadata as any
|
||||
};
|
||||
|
||||
const existing = await this.repository.getTag(
|
||||
this.projectId,
|
||||
'_system'
|
||||
);
|
||||
if (existing) {
|
||||
await this.repository.updateTag(this.projectId, '_system', systemTag);
|
||||
} else {
|
||||
await this.repository.createTag(this.projectId, systemTag);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
@@ -358,14 +435,30 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
// First load existing tasks
|
||||
const existingTasks = await this.loadTasks(tag);
|
||||
// Use bulk create - repository should handle duplicates
|
||||
await this.retryOperation(() =>
|
||||
this.repository.bulkCreateTasks(this.projectId, tasks)
|
||||
);
|
||||
|
||||
// Append new tasks
|
||||
const allTasks = [...existingTasks, ...tasks];
|
||||
// Update tag if specified
|
||||
if (tag) {
|
||||
const tagData = this.tagsCache.get(tag) || {
|
||||
name: tag,
|
||||
tasks: [],
|
||||
metadata: {}
|
||||
};
|
||||
|
||||
// Save all tasks
|
||||
await this.saveTasks(allTasks, tag);
|
||||
const newTaskIds = tasks.map((t) => t.id);
|
||||
tagData.tasks = [...new Set([...tagData.tasks, ...newTaskIds])];
|
||||
|
||||
if (this.tagsCache.has(tag)) {
|
||||
await this.repository.updateTag(this.projectId, tag, tagData);
|
||||
} else {
|
||||
await this.repository.createTag(this.projectId, tagData);
|
||||
}
|
||||
|
||||
this.tagsCache.set(tag, tagData);
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to append tasks to API',
|
||||
@@ -387,18 +480,9 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
// Load the task
|
||||
const task = await this.loadTask(taskId, tag);
|
||||
|
||||
if (!task) {
|
||||
throw new Error(`Task ${taskId} not found`);
|
||||
}
|
||||
|
||||
// Merge updates
|
||||
const updatedTask = { ...task, ...updates, id: taskId };
|
||||
|
||||
// Save updated task
|
||||
await this.saveTask(updatedTask, tag);
|
||||
await this.retryOperation(() =>
|
||||
this.repository.updateTask(this.projectId, taskId, updates)
|
||||
);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to update task via API',
|
||||
@@ -423,14 +507,11 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest(
|
||||
`/projects/${this.config.projectId}/tags/${encodeURIComponent(tag)}`,
|
||||
'DELETE'
|
||||
await this.retryOperation(() =>
|
||||
this.repository.deleteTag(this.projectId, tag)
|
||||
);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to delete tag');
|
||||
}
|
||||
this.tagsCache.delete(tag);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to delete tag via API',
|
||||
@@ -448,15 +529,21 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest(
|
||||
`/projects/${this.config.projectId}/tags/${encodeURIComponent(oldTag)}/rename`,
|
||||
'POST',
|
||||
{ newTag }
|
||||
);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to rename tag');
|
||||
const tagData = this.tagsCache.get(oldTag);
|
||||
if (!tagData) {
|
||||
throw new Error(`Tag ${oldTag} not found`);
|
||||
}
|
||||
|
||||
// Create new tag with same data
|
||||
const newTagData = { ...tagData, name: newTag };
|
||||
await this.repository.createTag(this.projectId, newTagData);
|
||||
|
||||
// Delete old tag
|
||||
await this.repository.deleteTag(this.projectId, oldTag);
|
||||
|
||||
// Update cache
|
||||
this.tagsCache.delete(oldTag);
|
||||
this.tagsCache.set(newTag, newTagData);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to rename tag via API',
|
||||
@@ -474,15 +561,17 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest(
|
||||
`/projects/${this.config.projectId}/tags/${encodeURIComponent(sourceTag)}/copy`,
|
||||
'POST',
|
||||
{ targetTag }
|
||||
);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to copy tag');
|
||||
const sourceData = this.tagsCache.get(sourceTag);
|
||||
if (!sourceData) {
|
||||
throw new Error(`Source tag ${sourceTag} not found`);
|
||||
}
|
||||
|
||||
// Create new tag with copied data
|
||||
const targetData = { ...sourceData, name: targetTag };
|
||||
await this.repository.createTag(this.projectId, targetData);
|
||||
|
||||
// Update cache
|
||||
this.tagsCache.set(targetTag, targetData);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to copy tag via API',
|
||||
@@ -500,24 +589,22 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest<{
|
||||
stats: StorageStats;
|
||||
}>(`/projects/${this.config.projectId}/stats`);
|
||||
const tasks = await this.repository.getTasks(this.projectId);
|
||||
const tags = await this.repository.getTags(this.projectId);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to get stats');
|
||||
}
|
||||
const tagStats = tags.map((tag) => ({
|
||||
tag: tag.name,
|
||||
taskCount: tag.tasks.length,
|
||||
lastModified: new Date().toISOString() // TODO: Get actual last modified from tag data
|
||||
}));
|
||||
|
||||
// Return stats or default values
|
||||
return (
|
||||
response.data?.stats || {
|
||||
totalTasks: 0,
|
||||
totalTags: 0,
|
||||
storageSize: 0,
|
||||
lastModified: new Date().toISOString(),
|
||||
tagStats: []
|
||||
}
|
||||
);
|
||||
return {
|
||||
totalTasks: tasks.length,
|
||||
totalTags: tags.length,
|
||||
storageSize: 0, // Not applicable for API storage
|
||||
lastModified: new Date().toISOString(),
|
||||
tagStats
|
||||
};
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to get stats from API',
|
||||
@@ -535,16 +622,15 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest<{ backupId: string }>(
|
||||
`/projects/${this.config.projectId}/backup`,
|
||||
'POST'
|
||||
);
|
||||
// Export all data
|
||||
await this.repository.getTasks(this.projectId);
|
||||
await this.repository.getTags(this.projectId);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to create backup');
|
||||
}
|
||||
|
||||
return response.data?.backupId || 'unknown';
|
||||
// TODO: In a real implementation, this would:
|
||||
// 1. Create a backup data structure containing the tasks and tags
|
||||
// 2. Save the backup to a storage service
|
||||
// For now, return a backup identifier
|
||||
return `backup-${this.projectId}-${Date.now()}`;
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to create backup via API',
|
||||
@@ -558,27 +644,16 @@ export class ApiStorage implements IStorage {
|
||||
/**
|
||||
* Restore from backup
|
||||
*/
|
||||
async restore(backupPath: string): Promise<void> {
|
||||
async restore(backupId: string): Promise<void> {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest(
|
||||
`/projects/${this.config.projectId}/restore`,
|
||||
'POST',
|
||||
{ backupId: backupPath }
|
||||
);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to restore backup');
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to restore backup via API',
|
||||
ERROR_CODES.STORAGE_ERROR,
|
||||
{ operation: 'restore', backupPath },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
// This would restore from a backup service
|
||||
// Implementation depends on backup strategy
|
||||
throw new TaskMasterError(
|
||||
'Restore not implemented for API storage',
|
||||
ERROR_CODES.NOT_IMPLEMENTED,
|
||||
{ operation: 'restore', backupId }
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -588,14 +663,23 @@ export class ApiStorage implements IStorage {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const response = await this.makeRequest(
|
||||
`/projects/${this.config.projectId}/clear`,
|
||||
'POST'
|
||||
);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to clear data');
|
||||
// Delete all tasks
|
||||
const tasks = await this.repository.getTasks(this.projectId);
|
||||
if (tasks.length > 0) {
|
||||
await this.repository.bulkDeleteTasks(
|
||||
this.projectId,
|
||||
tasks.map((t) => t.id)
|
||||
);
|
||||
}
|
||||
|
||||
// Delete all tags
|
||||
const tags = await this.repository.getTags(this.projectId);
|
||||
for (const tag of tags) {
|
||||
await this.repository.deleteTag(this.projectId, tag.name);
|
||||
}
|
||||
|
||||
// Clear cache
|
||||
this.tagsCache.clear();
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to clear data via API',
|
||||
@@ -611,6 +695,7 @@ export class ApiStorage implements IStorage {
|
||||
*/
|
||||
async close(): Promise<void> {
|
||||
this.initialized = false;
|
||||
this.tagsCache.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -623,102 +708,21 @@ export class ApiStorage implements IStorage {
|
||||
}
|
||||
|
||||
/**
|
||||
* Make HTTP request to API
|
||||
* Retry an operation with exponential backoff
|
||||
*/
|
||||
private async makeRequest<T>(
|
||||
path: string,
|
||||
method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',
|
||||
body?: unknown
|
||||
): Promise<ApiResponse<T>> {
|
||||
const url = `${this.config.endpoint}${path}`;
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
|
||||
|
||||
private async retryOperation<T>(
|
||||
operation: () => Promise<T>,
|
||||
attempt: number = 1
|
||||
): Promise<T> {
|
||||
try {
|
||||
const options: RequestInit = {
|
||||
method,
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.config.accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json'
|
||||
},
|
||||
signal: controller.signal
|
||||
};
|
||||
|
||||
if (body && (method === 'POST' || method === 'PUT')) {
|
||||
options.body = JSON.stringify(body);
|
||||
return await operation();
|
||||
} catch (error) {
|
||||
if (this.enableRetry && attempt < this.maxRetries) {
|
||||
const delay = Math.pow(2, attempt) * 1000;
|
||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
||||
return this.retryOperation(operation, attempt + 1);
|
||||
}
|
||||
|
||||
let lastError: Error | null = null;
|
||||
let attempt = 0;
|
||||
|
||||
while (attempt < this.config.maxRetries) {
|
||||
attempt++;
|
||||
|
||||
try {
|
||||
const response = await fetch(url, options);
|
||||
const data = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
return { success: true, data: data as T };
|
||||
}
|
||||
|
||||
// Handle specific error codes
|
||||
if (response.status === 401) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Authentication failed - check access token'
|
||||
};
|
||||
}
|
||||
|
||||
if (response.status === 404) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Resource not found'
|
||||
};
|
||||
}
|
||||
|
||||
if (response.status === 429) {
|
||||
// Rate limited - retry with backoff
|
||||
if (this.config.enableRetry && attempt < this.config.maxRetries) {
|
||||
await this.delay(Math.pow(2, attempt) * 1000);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const errorData = data as any;
|
||||
return {
|
||||
success: false,
|
||||
error:
|
||||
errorData.error ||
|
||||
errorData.message ||
|
||||
`HTTP ${response.status}: ${response.statusText}`
|
||||
};
|
||||
} catch (error) {
|
||||
lastError = error as Error;
|
||||
|
||||
// Retry on network errors
|
||||
if (this.config.enableRetry && attempt < this.config.maxRetries) {
|
||||
await this.delay(Math.pow(2, attempt) * 1000);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// All retries exhausted
|
||||
return {
|
||||
success: false,
|
||||
error: lastError?.message || 'Request failed after retries'
|
||||
};
|
||||
} finally {
|
||||
clearTimeout(timeoutId);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delay helper for retries
|
||||
*/
|
||||
private delay(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
}
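A standalone illustration (not part of the diff) of the backoff schedule used by retryOperation above: delay = Math.pow(2, attempt) * 1000. With the default maxRetries of 3, a failing operation is retried after the 1st and 2nd failures only, waiting 2s and then 4s before the error is rethrown.

const backoffMs = (attempt: number): number => Math.pow(2, attempt) * 1000;
console.log([1, 2, 3].map(backoffMs)); // [2000, 4000, 8000]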
|
||||
|
||||
@@ -13,6 +13,7 @@ import { ApiStorage } from './api-storage.js';
|
||||
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
||||
import { AuthManager } from '../auth/auth-manager.js';
|
||||
import { getLogger } from '../logger/index.js';
|
||||
import { SupabaseAuthClient } from '../clients/supabase-client.js';
|
||||
|
||||
/**
|
||||
* Factory for creating storage implementations based on configuration
|
||||
@@ -148,29 +149,13 @@ export class StorageFactory {
|
||||
* Create API storage implementation
|
||||
*/
|
||||
private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
|
||||
const { apiEndpoint, apiAccessToken } = config.storage || {};
|
||||
|
||||
if (!apiEndpoint) {
|
||||
throw new TaskMasterError(
|
||||
'API endpoint is required for API storage',
|
||||
ERROR_CODES.MISSING_CONFIGURATION,
|
||||
{ storageType: 'api' }
|
||||
);
|
||||
}
|
||||
|
||||
if (!apiAccessToken) {
|
||||
throw new TaskMasterError(
|
||||
'API access token is required for API storage',
|
||||
ERROR_CODES.MISSING_CONFIGURATION,
|
||||
{ storageType: 'api' }
|
||||
);
|
||||
}
|
||||
// Use our SupabaseAuthClient instead of creating a raw Supabase client
|
||||
const supabaseAuthClient = new SupabaseAuthClient();
|
||||
const supabaseClient = supabaseAuthClient.getClient();
|
||||
|
||||
return new ApiStorage({
|
||||
endpoint: apiEndpoint,
|
||||
accessToken: apiAccessToken,
|
||||
projectId: config.projectPath,
|
||||
timeout: config.retry?.requestTimeout,
|
||||
supabaseClient,
|
||||
projectId: config.projectPath || '',
|
||||
enableRetry: config.retry?.retryOnNetworkError,
|
||||
maxRetries: config.retry?.retryAttempts
|
||||
});
|
||||
|
||||
@@ -10,8 +10,12 @@ import {
|
||||
} from './services/task-service.js';
|
||||
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
|
||||
import type { IConfiguration } from './interfaces/configuration.interface.js';
|
||||
import type { Task, TaskStatus, TaskFilter } from './types/index.js';
|
||||
import { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
|
||||
import type {
|
||||
Task,
|
||||
TaskStatus,
|
||||
TaskFilter,
|
||||
StorageType
|
||||
} from './types/index.js';
|
||||
|
||||
/**
|
||||
* Options for creating TaskMasterCore instance
|
||||
@@ -19,7 +23,6 @@ import { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js
|
||||
export interface TaskMasterCoreOptions {
|
||||
projectPath: string;
|
||||
configuration?: Partial<IConfiguration>;
|
||||
workflow?: Partial<WorkflowServiceConfig>;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -35,7 +38,6 @@ export type { GetTaskListOptions } from './services/task-service.js';
|
||||
export class TaskMasterCore {
|
||||
private configManager: ConfigManager;
|
||||
private taskService: TaskService;
|
||||
private workflowService: WorkflowService;
|
||||
|
||||
/**
|
||||
* Create and initialize a new TaskMasterCore instance
|
||||
@@ -58,7 +60,6 @@ export class TaskMasterCore {
|
||||
// Services will be initialized in the initialize() method
|
||||
this.configManager = null as any;
|
||||
this.taskService = null as any;
|
||||
this.workflowService = null as any;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -85,28 +86,6 @@ export class TaskMasterCore {
|
||||
// Create task service
|
||||
this.taskService = new TaskService(this.configManager);
|
||||
await this.taskService.initialize();
|
||||
|
||||
// Create workflow service
|
||||
const workflowConfig: WorkflowServiceConfig = {
|
||||
projectRoot: options.projectPath,
|
||||
...options.workflow
|
||||
};
|
||||
|
||||
// Pass task retrieval function to workflow service
|
||||
this.workflowService = new WorkflowService(
|
||||
workflowConfig,
|
||||
async (taskId: string) => {
|
||||
const task = await this.getTask(taskId);
|
||||
if (!task) {
|
||||
throw new TaskMasterError(
|
||||
`Task ${taskId} not found`,
|
||||
ERROR_CODES.TASK_NOT_FOUND
|
||||
);
|
||||
}
|
||||
return task;
|
||||
}
|
||||
);
|
||||
await this.workflowService.initialize();
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to initialize TaskMasterCore',
|
||||
@@ -178,7 +157,7 @@ export class TaskMasterCore {
|
||||
/**
|
||||
* Get current storage type
|
||||
*/
|
||||
getStorageType(): 'file' | 'api' | 'auto' {
|
||||
getStorageType(): StorageType {
|
||||
return this.taskService.getStorageType();
|
||||
}
|
||||
|
||||
@@ -196,21 +175,11 @@ export class TaskMasterCore {
|
||||
await this.configManager.setActiveTag(tag);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workflow service for workflow operations
|
||||
*/
|
||||
get workflow(): WorkflowService {
|
||||
return this.workflowService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Close and cleanup resources
|
||||
*/
|
||||
async close(): Promise<void> {
|
||||
// TaskService handles storage cleanup internally
|
||||
if (this.workflowService) {
|
||||
await this.workflowService.dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
packages/tm-core/src/types/database.types.ts (new file, 491 lines)
@@ -0,0 +1,491 @@
|
||||
export type Json =
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null
|
||||
| { [key: string]: Json | undefined }
|
||||
| Json[];
|
||||
|
||||
export type Database = {
|
||||
public: {
|
||||
Tables: {
|
||||
accounts: {
|
||||
Row: {
|
||||
created_at: string | null;
|
||||
created_by: string | null;
|
||||
email: string | null;
|
||||
id: string;
|
||||
is_personal_account: boolean;
|
||||
name: string;
|
||||
picture_url: string | null;
|
||||
primary_owner_user_id: string;
|
||||
public_data: Json;
|
||||
slug: string | null;
|
||||
updated_at: string | null;
|
||||
updated_by: string | null;
|
||||
};
|
||||
Insert: {
|
||||
created_at?: string | null;
|
||||
created_by?: string | null;
|
||||
email?: string | null;
|
||||
id?: string;
|
||||
is_personal_account?: boolean;
|
||||
name: string;
|
||||
picture_url?: string | null;
|
||||
primary_owner_user_id?: string;
|
||||
public_data?: Json;
|
||||
slug?: string | null;
|
||||
updated_at?: string | null;
|
||||
updated_by?: string | null;
|
||||
};
|
||||
Update: {
|
||||
created_at?: string | null;
|
||||
created_by?: string | null;
|
||||
email?: string | null;
|
||||
id?: string;
|
||||
is_personal_account?: boolean;
|
||||
name?: string;
|
||||
picture_url?: string | null;
|
||||
primary_owner_user_id?: string;
|
||||
public_data?: Json;
|
||||
slug?: string | null;
|
||||
updated_at?: string | null;
|
||||
updated_by?: string | null;
|
||||
};
|
||||
Relationships: [];
|
||||
};
|
||||
brief: {
|
||||
Row: {
|
||||
account_id: string;
|
||||
created_at: string;
|
||||
created_by: string;
|
||||
document_id: string;
|
||||
id: string;
|
||||
plan_generation_completed_at: string | null;
|
||||
plan_generation_error: string | null;
|
||||
plan_generation_started_at: string | null;
|
||||
plan_generation_status: Database['public']['Enums']['plan_generation_status'];
|
||||
status: Database['public']['Enums']['brief_status'];
|
||||
updated_at: string;
|
||||
};
|
||||
Insert: {
|
||||
account_id: string;
|
||||
created_at?: string;
|
||||
created_by: string;
|
||||
document_id: string;
|
||||
id?: string;
|
||||
plan_generation_completed_at?: string | null;
|
||||
plan_generation_error?: string | null;
|
||||
plan_generation_started_at?: string | null;
|
||||
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
|
||||
status?: Database['public']['Enums']['brief_status'];
|
||||
updated_at?: string;
|
||||
};
|
||||
Update: {
|
||||
account_id?: string;
|
||||
created_at?: string;
|
||||
created_by?: string;
|
||||
document_id?: string;
|
||||
id?: string;
|
||||
plan_generation_completed_at?: string | null;
|
||||
plan_generation_error?: string | null;
|
||||
plan_generation_started_at?: string | null;
|
||||
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
|
||||
status?: Database['public']['Enums']['brief_status'];
|
||||
updated_at?: string;
|
||||
};
|
||||
Relationships: [
|
||||
{
|
||||
foreignKeyName: 'brief_account_id_fkey';
|
||||
columns: ['account_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'accounts';
|
||||
referencedColumns: ['id'];
|
||||
},
|
||||
{
|
||||
foreignKeyName: 'brief_document_id_fkey';
|
||||
columns: ['document_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'document';
|
||||
referencedColumns: ['id'];
|
||||
}
|
||||
];
|
||||
};
|
||||
document: {
|
||||
Row: {
|
||||
account_id: string;
|
||||
created_at: string;
|
||||
created_by: string;
|
||||
description: string | null;
|
||||
document_name: string;
|
||||
document_type: Database['public']['Enums']['document_type'];
|
||||
file_path: string | null;
|
||||
file_size: number | null;
|
||||
id: string;
|
||||
metadata: Json | null;
|
||||
mime_type: string | null;
|
||||
processed_at: string | null;
|
||||
processing_error: string | null;
|
||||
processing_status:
|
||||
| Database['public']['Enums']['document_processing_status']
|
||||
| null;
|
||||
source_id: string | null;
|
||||
source_type: string | null;
|
||||
title: string;
|
||||
updated_at: string;
|
||||
};
|
||||
Insert: {
|
||||
account_id: string;
|
||||
created_at?: string;
|
||||
created_by: string;
|
||||
description?: string | null;
|
||||
document_name: string;
|
||||
document_type?: Database['public']['Enums']['document_type'];
|
||||
file_path?: string | null;
|
||||
file_size?: number | null;
|
||||
id?: string;
|
||||
metadata?: Json | null;
|
||||
mime_type?: string | null;
|
||||
processed_at?: string | null;
|
||||
processing_error?: string | null;
|
||||
processing_status?:
|
||||
| Database['public']['Enums']['document_processing_status']
|
||||
| null;
|
||||
source_id?: string | null;
|
||||
source_type?: string | null;
|
||||
title: string;
|
||||
updated_at?: string;
|
||||
};
|
||||
Update: {
|
||||
account_id?: string;
|
||||
created_at?: string;
|
||||
created_by?: string;
|
||||
description?: string | null;
|
||||
document_name?: string;
|
||||
document_type?: Database['public']['Enums']['document_type'];
|
||||
file_path?: string | null;
|
||||
file_size?: number | null;
|
||||
id?: string;
|
||||
metadata?: Json | null;
|
||||
mime_type?: string | null;
|
||||
processed_at?: string | null;
|
||||
processing_error?: string | null;
|
||||
processing_status?:
|
||||
| Database['public']['Enums']['document_processing_status']
|
||||
| null;
|
||||
source_id?: string | null;
|
||||
source_type?: string | null;
|
||||
title?: string;
|
||||
updated_at?: string;
|
||||
};
|
||||
Relationships: [
|
||||
{
|
||||
foreignKeyName: 'document_account_id_fkey';
|
||||
columns: ['account_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'accounts';
|
||||
referencedColumns: ['id'];
|
||||
}
|
||||
];
|
||||
};
|
||||
tasks: {
|
||||
Row: {
|
||||
account_id: string;
|
||||
actual_hours: number;
|
||||
assignee_id: string | null;
|
||||
brief_id: string | null;
|
||||
completed_subtasks: number;
|
||||
complexity: number | null;
|
||||
created_at: string;
|
||||
created_by: string;
|
||||
description: string | null;
|
||||
display_id: string | null;
|
||||
document_id: string | null;
|
||||
due_date: string | null;
|
||||
estimated_hours: number | null;
|
||||
id: string;
|
||||
metadata: Json;
|
||||
parent_task_id: string | null;
|
||||
position: number;
|
||||
priority: Database['public']['Enums']['task_priority'];
|
||||
status: Database['public']['Enums']['task_status'];
|
||||
subtask_position: number;
|
||||
title: string;
|
||||
total_subtasks: number;
|
||||
updated_at: string;
|
||||
updated_by: string;
|
||||
};
|
||||
Insert: {
|
||||
account_id: string;
|
||||
actual_hours?: number;
|
||||
assignee_id?: string | null;
|
||||
brief_id?: string | null;
|
||||
completed_subtasks?: number;
|
||||
complexity?: number | null;
|
||||
created_at?: string;
|
||||
created_by: string;
|
||||
description?: string | null;
|
||||
display_id?: string | null;
|
||||
document_id?: string | null;
|
||||
due_date?: string | null;
|
||||
estimated_hours?: number | null;
|
||||
id?: string;
|
||||
metadata?: Json;
|
||||
parent_task_id?: string | null;
|
||||
position?: number;
|
||||
priority?: Database['public']['Enums']['task_priority'];
|
||||
status?: Database['public']['Enums']['task_status'];
|
||||
subtask_position?: number;
|
||||
title: string;
|
||||
total_subtasks?: number;
|
||||
updated_at?: string;
|
||||
updated_by: string;
|
||||
};
|
||||
Update: {
|
||||
account_id?: string;
|
||||
actual_hours?: number;
|
||||
assignee_id?: string | null;
|
||||
brief_id?: string | null;
|
||||
completed_subtasks?: number;
|
||||
complexity?: number | null;
|
||||
created_at?: string;
|
||||
created_by?: string;
|
||||
description?: string | null;
|
||||
display_id?: string | null;
|
||||
document_id?: string | null;
|
||||
due_date?: string | null;
|
||||
estimated_hours?: number | null;
|
||||
id?: string;
|
||||
metadata?: Json;
|
||||
parent_task_id?: string | null;
|
||||
position?: number;
|
||||
priority?: Database['public']['Enums']['task_priority'];
|
||||
status?: Database['public']['Enums']['task_status'];
|
||||
subtask_position?: number;
|
||||
title?: string;
|
||||
total_subtasks?: number;
|
||||
updated_at?: string;
|
||||
updated_by?: string;
|
||||
};
|
||||
Relationships: [
|
||||
{
|
||||
foreignKeyName: 'tasks_account_id_fkey';
|
||||
columns: ['account_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'accounts';
|
||||
referencedColumns: ['id'];
|
||||
},
|
||||
{
|
||||
foreignKeyName: 'tasks_brief_id_fkey';
|
||||
columns: ['brief_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'brief';
|
||||
referencedColumns: ['id'];
|
||||
},
|
||||
{
|
||||
foreignKeyName: 'tasks_document_id_fkey';
|
||||
columns: ['document_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'document';
|
||||
referencedColumns: ['id'];
|
||||
},
|
||||
{
|
||||
foreignKeyName: 'tasks_parent_task_id_fkey';
|
||||
columns: ['parent_task_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'tasks';
|
||||
referencedColumns: ['id'];
|
||||
}
|
||||
];
|
||||
};
|
||||
task_dependencies: {
|
||||
Row: {
|
||||
account_id: string;
|
||||
created_at: string;
|
||||
depends_on_task_id: string;
|
||||
id: string;
|
||||
task_id: string;
|
||||
};
|
||||
Insert: {
|
||||
account_id: string;
|
||||
created_at?: string;
|
||||
depends_on_task_id: string;
|
||||
id?: string;
|
||||
task_id: string;
|
||||
};
|
||||
Update: {
|
||||
account_id?: string;
|
||||
created_at?: string;
|
||||
depends_on_task_id?: string;
|
||||
id?: string;
|
||||
task_id?: string;
|
||||
};
|
||||
Relationships: [
|
||||
{
|
||||
foreignKeyName: 'task_dependencies_account_id_fkey';
|
||||
columns: ['account_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'accounts';
|
||||
referencedColumns: ['id'];
|
||||
},
|
||||
{
|
||||
foreignKeyName: 'task_dependencies_depends_on_task_id_fkey';
|
||||
columns: ['depends_on_task_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'tasks';
|
||||
referencedColumns: ['id'];
|
||||
},
|
||||
{
|
||||
foreignKeyName: 'task_dependencies_task_id_fkey';
|
||||
columns: ['task_id'];
|
||||
isOneToOne: false;
|
||||
referencedRelation: 'tasks';
|
||||
referencedColumns: ['id'];
|
||||
}
|
||||
];
|
||||
};
|
||||
user_accounts: {
|
||||
Row: {
|
||||
id: string | null;
|
||||
name: string | null;
|
||||
picture_url: string | null;
|
||||
role: string | null;
|
||||
slug: string | null;
|
||||
};
|
||||
Insert: {
|
||||
id?: string | null;
|
||||
name?: string | null;
|
||||
picture_url?: string | null;
|
||||
role?: string | null;
|
||||
slug?: string | null;
|
||||
};
|
||||
Update: {
|
||||
id?: string | null;
|
||||
name?: string | null;
|
||||
picture_url?: string | null;
|
||||
role?: string | null;
|
||||
slug?: string | null;
|
||||
};
|
||||
Relationships: [];
|
||||
};
|
||||
};
|
||||
Views: {
|
||||
[_ in never]: never;
|
||||
};
|
||||
Functions: {
|
||||
[_ in never]: never;
|
||||
};
|
||||
Enums: {
|
||||
brief_status:
|
||||
| 'draft'
|
||||
| 'refining'
|
||||
| 'aligned'
|
||||
| 'delivering'
|
||||
| 'delivered'
|
||||
| 'done'
|
||||
| 'archived';
|
||||
document_processing_status: 'pending' | 'processing' | 'ready' | 'failed';
|
||||
document_type:
|
||||
| 'brief'
|
||||
| 'blueprint'
|
||||
| 'file'
|
||||
| 'note'
|
||||
| 'transcript'
|
||||
| 'generated_plan'
|
||||
| 'generated_task'
|
||||
| 'generated_summary'
|
||||
| 'method'
|
||||
| 'task';
|
||||
plan_generation_status:
|
||||
| 'not_started'
|
||||
| 'generating'
|
||||
| 'completed'
|
||||
| 'failed';
|
||||
task_priority: 'low' | 'medium' | 'high' | 'urgent';
|
||||
task_status: 'todo' | 'in_progress' | 'done';
|
||||
};
|
||||
CompositeTypes: {
|
||||
[_ in never]: never;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
export type Tables<
	PublicTableNameOrOptions extends
		| keyof (Database['public']['Tables'] & Database['public']['Views'])
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
				Database[PublicTableNameOrOptions['schema']]['Views'])
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
			Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
			Row: infer R;
		}
		? R
		: never
	: PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
				Database['public']['Views'])
		? (Database['public']['Tables'] &
				Database['public']['Views'])[PublicTableNameOrOptions] extends {
				Row: infer R;
			}
			? R
			: never
		: never;

export type TablesInsert<
	PublicTableNameOrOptions extends
		| keyof Database['public']['Tables']
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
			Insert: infer I;
		}
		? I
		: never
	: PublicTableNameOrOptions extends keyof Database['public']['Tables']
		? Database['public']['Tables'][PublicTableNameOrOptions] extends {
				Insert: infer I;
			}
			? I
			: never
		: never;

export type TablesUpdate<
	PublicTableNameOrOptions extends
		| keyof Database['public']['Tables']
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
			Update: infer U;
		}
		? U
		: never
	: PublicTableNameOrOptions extends keyof Database['public']['Tables']
		? Database['public']['Tables'][PublicTableNameOrOptions] extends {
				Update: infer U;
			}
			? U
			: never
		: never;

export type Enums<
	PublicEnumNameOrOptions extends
		| keyof Database['public']['Enums']
		| { schema: keyof Database },
	EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicEnumNameOrOptions['schema']]['Enums']
		: never = never
> = PublicEnumNameOrOptions extends { schema: keyof Database }
	? Database[PublicEnumNameOrOptions['schema']]['Enums'][EnumName]
	: PublicEnumNameOrOptions extends keyof Database['public']['Enums']
		? Database['public']['Enums'][PublicEnumNameOrOptions]
		: never;
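The `Tables`, `TablesInsert`, `TablesUpdate`, and `Enums` helpers above are the intended entry points into the generated schema. A minimal usage sketch, assuming the generated file is importable as `./database.types` (the filename is not shown in this diff):

```typescript
// Sketch only: derive concrete row/insert/enum types from the generated Database schema.
import type { Tables, TablesInsert, TablesUpdate, Enums } from './database.types';

type TaskRow = Tables<'tasks'>; // shape of a row returned by selects
type TaskInsert = TablesInsert<'tasks'>; // shape accepted when inserting
type TaskUpdate = TablesUpdate<'tasks'>; // partial shape accepted when updating
type TaskPriority = Enums<'task_priority'>; // 'low' | 'medium' | 'high' | 'urgent'

// The fields referenced here (status, title) come from the tasks table definition above.
function formatTask(task: TaskRow): string {
	return `[${task.status}] ${task.title}`;
}
```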
@@ -2,6 +2,14 @@
 * Core type definitions for Task Master
 */

/**
 * Storage type options
 * - 'file': Local file system storage
 * - 'api': Remote API storage (Hamster integration)
 * - 'auto': Automatically detect based on auth status
 */
export type StorageType = 'file' | 'api' | 'auto';

// ============================================================================
// Type Literals
// ============================================================================
@@ -16,7 +24,8 @@ export type TaskStatus =
	| 'deferred'
	| 'cancelled'
	| 'blocked'
	| 'review';
	| 'review'
	| 'completed';

/**
 * Task priority levels
@@ -96,6 +105,15 @@ export interface TaskCollection {
	metadata: TaskMetadata;
}

/**
 * Task tag for organizing tasks
 */
export interface TaskTag {
	name: string;
	tasks: string[]; // Task IDs belonging to this tag
	metadata: Record<string, any>;
}

// ============================================================================
// Utility Types
// ============================================================================
@@ -1,17 +0,0 @@
/**
 * @fileoverview Workflow Module
 * Public exports for workflow functionality
 */

export { WorkflowService, type WorkflowServiceConfig } from './workflow-service.js';

// Re-export workflow engine types for convenience
export type {
	WorkflowExecutionContext,
	WorkflowStatus,
	WorkflowEvent,
	WorkflowEventType,
	WorkflowProcess,
	ProcessStatus,
	WorktreeInfo
} from '@tm/workflow-engine';
@@ -1,218 +0,0 @@
/**
 * @fileoverview Workflow Service
 * Integrates workflow engine into Task Master Core
 */

import {
	TaskExecutionManager,
	type TaskExecutionManagerConfig,
	type WorkflowExecutionContext
} from '@tm/workflow-engine';
import type { Task } from '../types/index.js';
import { TaskMasterError } from '../errors/index.js';

export interface WorkflowServiceConfig {
	/** Project root directory */
	projectRoot: string;
	/** Maximum number of concurrent workflows */
	maxConcurrent?: number;
	/** Default timeout for workflow execution (minutes) */
	defaultTimeout?: number;
	/** Base directory for worktrees */
	worktreeBase?: string;
	/** Claude Code executable path */
	claudeExecutable?: string;
	/** Enable debug logging */
	debug?: boolean;
}

/**
 * WorkflowService provides Task Master workflow capabilities through core
 */
export class WorkflowService {
	private workflowEngine: TaskExecutionManager;

	constructor(
		config: WorkflowServiceConfig,
		private getTask: (taskId: string) => Promise<Task>
	) {
		const engineConfig: TaskExecutionManagerConfig = {
			projectRoot: config.projectRoot,
			maxConcurrent: config.maxConcurrent || 5,
			defaultTimeout: config.defaultTimeout || 60,
			worktreeBase:
				config.worktreeBase ||
				require('path').join(config.projectRoot, '..', 'task-worktrees'),
			claudeExecutable: config.claudeExecutable || 'claude',
			debug: config.debug || false
		};

		this.workflowEngine = new TaskExecutionManager(engineConfig);
	}

	/**
	 * Initialize the workflow service
	 */
	async initialize(): Promise<void> {
		await this.workflowEngine.initialize();
	}

	/**
	 * Start a workflow for a task
	 */
	async start(
		taskId: string,
		options?: {
			branchName?: string;
			timeout?: number;
			env?: Record<string, string>;
		}
	): Promise<string> {
		try {
			// Get task from core
			const task = await this.getTask(taskId);

			// Start workflow using engine
			return await this.workflowEngine.startTaskExecution(task, options);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to start workflow for task ${taskId}`,
				'WORKFLOW_START_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Stop a workflow
	 */
	async stop(workflowId: string, force = false): Promise<void> {
		try {
			await this.workflowEngine.stopTaskExecution(workflowId, force);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to stop workflow ${workflowId}`,
				'WORKFLOW_STOP_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Pause a workflow
	 */
	async pause(workflowId: string): Promise<void> {
		try {
			await this.workflowEngine.pauseTaskExecution(workflowId);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to pause workflow ${workflowId}`,
				'WORKFLOW_PAUSE_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Resume a paused workflow
	 */
	async resume(workflowId: string): Promise<void> {
		try {
			await this.workflowEngine.resumeTaskExecution(workflowId);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to resume workflow ${workflowId}`,
				'WORKFLOW_RESUME_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Get workflow status
	 */
	getStatus(workflowId: string): WorkflowExecutionContext | undefined {
		return this.workflowEngine.getWorkflowStatus(workflowId);
	}

	/**
	 * Get workflow by task ID
	 */
	getByTaskId(taskId: string): WorkflowExecutionContext | undefined {
		return this.workflowEngine.getWorkflowByTaskId(taskId);
	}

	/**
	 * List all workflows
	 */
	list(): WorkflowExecutionContext[] {
		return this.workflowEngine.listWorkflows();
	}

	/**
	 * List active workflows
	 */
	listActive(): WorkflowExecutionContext[] {
		return this.workflowEngine.listActiveWorkflows();
	}

	/**
	 * Send input to a running workflow
	 */
	async sendInput(workflowId: string, input: string): Promise<void> {
		try {
			await this.workflowEngine.sendInputToWorkflow(workflowId, input);
		} catch (error) {
			throw new TaskMasterError(
				`Failed to send input to workflow ${workflowId}`,
				'WORKFLOW_INPUT_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Clean up all workflows
	 */
	async cleanup(force = false): Promise<void> {
		try {
			await this.workflowEngine.cleanup(force);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to cleanup workflows',
				'WORKFLOW_CLEANUP_FAILED',
				error instanceof Error ? error : undefined
			);
		}
	}

	/**
	 * Subscribe to workflow events
	 */
	on(event: string, listener: (...args: any[]) => void): void {
		this.workflowEngine.on(event, listener);
	}

	/**
	 * Unsubscribe from workflow events
	 */
	off(event: string, listener: (...args: any[]) => void): void {
		this.workflowEngine.off(event, listener);
	}

	/**
	 * Get workflow engine instance (for advanced usage)
	 */
	getEngine(): TaskExecutionManager {
		return this.workflowEngine;
	}

	/**
	 * Dispose of the workflow service
	 */
	async dispose(): Promise<void> {
		await this.cleanup(true);
		this.workflowEngine.removeAllListeners();
	}
}
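The removed `WorkflowService` above is a thin facade over `TaskExecutionManager`. A minimal sketch of how a caller could have wired it up; the import subpath and the `loadTaskById` lookup are illustrative assumptions, not taken from this diff:

```typescript
import { WorkflowService } from '@tm/core'; // exact entry point assumed
import type { Task } from '@tm/core';

// Hypothetical task lookup standing in for the host application's own resolver.
async function loadTaskById(taskId: string): Promise<Task> {
	return { id: taskId, title: `Task ${taskId}`, status: 'pending', priority: 'medium' } as Task;
}

const service = new WorkflowService(
	{ projectRoot: process.cwd(), maxConcurrent: 3, debug: true },
	loadTaskById
);

await service.initialize();
const workflowId = await service.start('1.2', { timeout: 30 });
console.log(service.getStatus(workflowId)?.status);
await service.stop(workflowId);
```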
@@ -1,12 +1,13 @@
{
	"compilerOptions": {
		"target": "ES2022",
		"module": "ESNext",
		"module": "NodeNext",
		"lib": ["ES2022"],
		"declaration": true,
		"declarationMap": true,
		"sourceMap": true,
		"outDir": "./dist",
		"baseUrl": ".",
		"rootDir": "./src",
		"strict": true,
		"noImplicitAny": true,
@@ -23,25 +24,12 @@
		"esModuleInterop": true,
		"skipLibCheck": true,
		"forceConsistentCasingInFileNames": true,
		"moduleResolution": "bundler",
		"moduleResolution": "NodeNext",
		"moduleDetection": "force",
		"types": ["node"],
		"resolveJsonModule": true,
		"isolatedModules": true,
		"paths": {
			"@/*": ["./src/*"],
			"@/auth": ["./src/auth"],
			"@/config": ["./src/config"],
			"@/errors": ["./src/errors"],
			"@/interfaces": ["./src/interfaces"],
			"@/logger": ["./src/logger"],
			"@/parser": ["./src/parser"],
			"@/providers": ["./src/providers"],
			"@/services": ["./src/services"],
			"@/storage": ["./src/storage"],
			"@/types": ["./src/types"],
			"@/utils": ["./src/utils"]
		}
		"allowImportingTsExtensions": false
	},
	"include": ["src/**/*"],
	"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]
@@ -1,53 +0,0 @@
import { defineConfig } from 'tsup';
import { dotenvLoad } from 'dotenv-mono';
dotenvLoad();

// Get all TM_PUBLIC_* env variables for build-time injection
const getBuildTimeEnvs = () => {
	const envs: Record<string, string> = {};
	for (const [key, value] of Object.entries(process.env)) {
		if (key.startsWith('TM_PUBLIC_')) {
			// Return the actual value, not JSON.stringify'd
			envs[key] = value || '';
		}
	}
	return envs;
};

export default defineConfig({
	entry: {
		index: 'src/index.ts',
		'auth/index': 'src/auth/index.ts',
		'config/index': 'src/config/index.ts',
		'errors/index': 'src/errors/index.ts',
		'interfaces/index': 'src/interfaces/index.ts',
		'logger/index': 'src/logger/index.ts',
		'parser/index': 'src/parser/index.ts',
		'providers/index': 'src/providers/index.ts',
		'services/index': 'src/services/index.ts',
		'storage/index': 'src/storage/index.ts',
		'types/index': 'src/types/index.ts',
		'utils/index': 'src/utils/index.ts',
		'workflow/index': 'src/workflow/index.ts'
	},
	format: ['cjs', 'esm'],
	dts: true,
	sourcemap: true,
	clean: true,
	splitting: false,
	treeshake: true,
	minify: false,
	target: 'es2022',
	tsconfig: './tsconfig.json',
	outDir: 'dist',
	// Replace process.env.TM_PUBLIC_* with actual values at build time
	env: getBuildTimeEnvs(),
	// Auto-external all dependencies from package.json
	external: [
		// External all node_modules - everything not starting with . or /
		/^[^./]/
	],
	esbuildOptions(options) {
		options.conditions = ['module'];
	}
});
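A note on `getBuildTimeEnvs` above: tsup's `env` option statically replaces matching `process.env.*` reads, so a `TM_PUBLIC_`-prefixed variable present at build time gets inlined into the bundle. A small illustration; the variable name and value here are hypothetical:

```typescript
// Before build: .env contains TM_PUBLIC_API_BASE=https://example.invalid
// Source code reads the variable normally...
const apiBase = process.env.TM_PUBLIC_API_BASE;

// ...and after tsup runs with env: getBuildTimeEnvs(), the emitted bundle
// contains the literal string instead of a process.env lookup:
// const apiBase = "https://example.invalid";
console.log(apiBase);
```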
@@ -1,371 +0,0 @@
# @tm/workflow-engine

Enhanced Task Master workflow execution engine with git worktree isolation and Claude Code process management.

## Overview

The Workflow Engine extends Task Master with advanced execution capabilities:

- **Git Worktree Isolation**: Each task runs in its own isolated worktree
- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
- **Real-time Monitoring**: Track workflow progress and process output
- **State Management**: Persistent workflow state across sessions
- **Parallel Execution**: Run multiple tasks concurrently with resource limits

## Architecture

```
TaskExecutionManager
├── WorktreeManager      # Git worktree lifecycle
├── ProcessSandbox       # Claude Code process management
└── WorkflowStateManager # Persistent state tracking
```

## Quick Start

```typescript
import { TaskExecutionManager } from '@tm/workflow-engine';

const manager = new TaskExecutionManager({
	projectRoot: '/path/to/project',
	worktreeBase: '/path/to/worktrees',
	claudeExecutable: 'claude',
	maxConcurrent: 3,
	defaultTimeout: 60,
	debug: true
});

await manager.initialize();

// Start task execution
const workflowId = await manager.startTaskExecution({
	id: '1.2',
	title: 'Implement authentication',
	description: 'Add JWT-based auth system',
	status: 'pending',
	priority: 'high'
});

// Monitor workflow
const workflow = manager.getWorkflowStatus(workflowId);
console.log(`Status: ${workflow.status}`);

// Stop when complete
await manager.stopTaskExecution(workflowId);
```

## CLI Integration

```bash
# Start workflow
tm workflow start 1.2

# List active workflows
tm workflow list

# Check status
tm workflow status workflow-1.2-1234567890-abc123

# Stop workflow
tm workflow stop workflow-1.2-1234567890-abc123
```

## VS Code Extension

The workflow engine integrates with the Task Master VS Code extension to provide:

- **Workflow Tree View**: Visual workflow management
- **Process Monitoring**: Real-time output streaming
- **Worktree Navigation**: Quick access to isolated workspaces
- **Status Indicators**: Visual workflow state tracking

## Core Components

### TaskExecutionManager

Orchestrates the complete workflow lifecycle:

```typescript
// Event-driven workflow management
manager.on('workflow.started', (event) => {
	console.log(`Started: ${event.workflowId}`);
});

manager.on('process.output', (event) => {
	console.log(`[${event.data.stream}]: ${event.data.data}`);
});
```

### WorktreeManager

Manages git worktree operations:

```typescript
import { WorktreeManager } from '@tm/workflow-engine';

const manager = new WorktreeManager({
	worktreeBase: './worktrees',
	projectRoot: process.cwd(),
	autoCleanup: true
});

// Create isolated workspace
const worktree = await manager.createWorktree('task-1.2');
console.log(`Created: ${worktree.path}`);

// List all worktrees
const worktrees = await manager.listWorktrees();

// Cleanup
await manager.removeWorktree('task-1.2');
```

### ProcessSandbox

Spawns and manages Claude Code processes:

```typescript
import { ProcessSandbox } from '@tm/workflow-engine';

const sandbox = new ProcessSandbox({
	claudeExecutable: 'claude',
	defaultTimeout: 30,
	debug: true
});

// Start isolated process
const process = await sandbox.startProcess(
	'workflow-123',
	'task-1.2',
	'Implement user authentication with JWT tokens',
	{ cwd: '/path/to/worktree' }
);

// Send input
await sandbox.sendInput('workflow-123', 'npm test');

// Monitor output
sandbox.on('process.output', (event) => {
	console.log(event.data.data);
});
```

### WorkflowStateManager

Persistent workflow state management:

```typescript
import { WorkflowStateManager } from '@tm/workflow-engine';

const stateManager = new WorkflowStateManager({
	projectRoot: process.cwd()
});

await stateManager.loadState();

// Register workflow
const workflowId = await stateManager.registerWorkflow({
	taskId: '1.2',
	taskTitle: 'Authentication',
	// ... other context
});

// Update status
await stateManager.updateWorkflowStatus(workflowId, 'running');

// Query workflows
const running = stateManager.listWorkflowsByStatus('running');
```

## Configuration

### Environment Variables

- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
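A quick way to wire these up in a shell before launching the CLI; the values below are placeholders, not defaults from this package:

```bash
# Placeholder values; adjust paths and limits to your environment.
export TASKMASTER_WORKFLOW_DEBUG=true
export TASKMASTER_CLAUDE_PATH=/usr/local/bin/claude
export TASKMASTER_WORKTREE_BASE="$HOME/task-worktrees"
export TASKMASTER_MAX_CONCURRENT=3

tm workflow start 1.2
```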
### Config Object

```typescript
interface TaskExecutionManagerConfig {
	projectRoot: string; // Project root directory
	worktreeBase: string; // Worktree base path
	claudeExecutable: string; // Claude executable
	maxConcurrent: number; // Concurrent limit
	defaultTimeout: number; // Timeout (minutes)
	debug: boolean; // Debug logging
}
```

## Workflow States

| State | Description |
|-------|-------------|
| `pending` | Created but not started |
| `initializing` | Setting up worktree/process |
| `running` | Active execution |
| `paused` | Temporarily stopped |
| `completed` | Successfully finished |
| `failed` | Error occurred |
| `cancelled` | User cancelled |
| `timeout` | Exceeded time limit |

## Events

The workflow engine emits events for real-time monitoring:

```typescript
// Workflow lifecycle
manager.on('workflow.started', (event) => {});
manager.on('workflow.completed', (event) => {});
manager.on('workflow.failed', (event) => {});

// Process events
manager.on('process.started', (event) => {});
manager.on('process.output', (event) => {});
manager.on('process.stopped', (event) => {});

// Worktree events
manager.on('worktree.created', (event) => {});
manager.on('worktree.deleted', (event) => {});
```

## Error Handling

The workflow engine provides specialized error types:

```typescript
import {
	WorkflowError,
	WorktreeError,
	ProcessError,
	MaxConcurrentWorkflowsError
} from '@tm/workflow-engine';

try {
	await manager.startTaskExecution(task);
} catch (error) {
	if (error instanceof MaxConcurrentWorkflowsError) {
		console.log('Too many concurrent workflows');
	} else if (error instanceof WorktreeError) {
		console.log('Worktree operation failed');
	}
}
```

## Development

```bash
# Install dependencies
npm install

# Build package
npm run build

# Run tests
npm test

# Development mode
npm run dev
```

## Integration Examples

### With Task Master Core

```typescript
import { createTaskMasterCore } from '@tm/core';
import { TaskExecutionManager } from '@tm/workflow-engine';

const core = await createTaskMasterCore({ projectPath: '.' });
const workflows = new TaskExecutionManager({ /*...*/ });

// Get task from core
const tasks = await core.getTaskList({});
const task = tasks.tasks.find(t => t.id === '1.2');

// Execute with workflow engine
if (task) {
	const workflowId = await workflows.startTaskExecution(task);
}
```

### With VS Code Extension

```typescript
import { WorkflowProvider } from './workflow-provider';

// Register tree view
const provider = new WorkflowProvider(context);
vscode.window.createTreeView('taskmaster.workflows', {
	treeDataProvider: provider
});

// Register commands
vscode.commands.registerCommand('taskmaster.workflow.start',
	async (taskId) => {
		await provider.startWorkflow(taskId);
	}
);
```

## Troubleshooting

### Common Issues

1. **Worktree Creation Fails**
   ```bash
   # Check git version (requires 2.5+)
   git --version

   # Verify project is git repository
   git status
   ```

2. **Claude Code Not Found**
   ```bash
   # Check Claude installation
   which claude

   # Set custom path
   export TASKMASTER_CLAUDE_PATH=/path/to/claude
   ```

3. **Permission Errors**
   ```bash
   # Check worktree directory permissions
   chmod -R 755 ./worktrees
   ```

### Debug Mode

Enable debug logging for troubleshooting:

```typescript
const manager = new TaskExecutionManager({
	// ... other config
	debug: true
});
```

Or via environment:

```bash
export TASKMASTER_WORKFLOW_DEBUG=true
tm workflow start 1.2
```

## Roadmap

- [ ] Process resource monitoring (CPU, memory)
- [ ] Workflow templates and presets
- [ ] Integration with CI/CD pipelines
- [ ] Workflow scheduling and queueing
- [ ] Multi-machine workflow distribution
- [ ] Advanced debugging and profiling tools

## License

MIT WITH Commons-Clause
@@ -1,56 +0,0 @@
{
	"name": "@tm/workflow-engine",
	"version": "0.1.0",
	"description": "Task Master workflow execution engine with git worktree and process management",
	"type": "module",
	"main": "dist/index.js",
	"types": "dist/index.d.ts",
	"exports": {
		".": {
			"import": "./dist/index.js",
			"types": "./dist/index.d.ts"
		},
		"./task-execution": {
			"import": "./dist/task-execution/index.js",
			"types": "./dist/task-execution/index.d.ts"
		},
		"./worktree": {
			"import": "./dist/worktree/index.js",
			"types": "./dist/worktree/index.d.ts"
		},
		"./process": {
			"import": "./dist/process/index.js",
			"types": "./dist/process/index.d.ts"
		},
		"./state": {
			"import": "./dist/state/index.js",
			"types": "./dist/state/index.d.ts"
		}
	},
	"scripts": {
		"build": "tsup",
		"dev": "tsup --watch",
		"test": "vitest",
		"test:watch": "vitest --watch",
		"type-check": "tsc --noEmit"
	},
	"dependencies": {
		"@tm/core": "*"
	},
	"devDependencies": {
		"@types/node": "^22.0.0",
		"tsup": "^8.0.0",
		"typescript": "^5.5.0",
		"vitest": "^2.0.0"
	},
	"files": ["dist"],
	"keywords": [
		"task-master",
		"workflow",
		"git-worktree",
		"process-management",
		"claude-code"
	],
	"author": "Task Master AI Team",
	"license": "MIT"
}
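Given the `exports` map above, consumers could deep-import individual modules as well as the package root. A small sketch, assuming each subdirectory's index re-exports its manager the same way the root `index.ts` does:

```typescript
// Root entry re-exports everything.
import { TaskExecutionManager } from '@tm/workflow-engine';

// Subpath entries correspond to the "./worktree", "./process", and "./state" exports above.
import { WorktreeManager } from '@tm/workflow-engine/worktree';
import { ProcessSandbox } from '@tm/workflow-engine/process';
import { WorkflowStateManager } from '@tm/workflow-engine/state';
```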
@@ -1,6 +0,0 @@
/**
 * @fileoverview Workflow Engine Errors
 * Public error exports
 */

export * from './workflow.errors.js';
@@ -1,59 +0,0 @@
/**
 * @fileoverview Workflow Engine Errors
 * Custom error classes for workflow operations
 */

export class WorkflowError extends Error {
	constructor(
		message: string,
		public code: string,
		public workflowId?: string,
		public taskId?: string,
		public cause?: Error
	) {
		super(message);
		this.name = 'WorkflowError';
	}
}

export class WorktreeError extends WorkflowError {
	constructor(message: string, public path?: string, cause?: Error) {
		super(message, 'WORKTREE_ERROR', undefined, undefined, cause);
		this.name = 'WorktreeError';
	}
}

export class ProcessError extends WorkflowError {
	constructor(message: string, public pid?: number, cause?: Error) {
		super(message, 'PROCESS_ERROR', undefined, undefined, cause);
		this.name = 'ProcessError';
	}
}

export class WorkflowTimeoutError extends WorkflowError {
	constructor(workflowId: string, timeoutMinutes: number) {
		super(
			`Workflow ${workflowId} timed out after ${timeoutMinutes} minutes`,
			'WORKFLOW_TIMEOUT',
			workflowId
		);
		this.name = 'WorkflowTimeoutError';
	}
}

export class WorkflowNotFoundError extends WorkflowError {
	constructor(workflowId: string) {
		super(`Workflow ${workflowId} not found`, 'WORKFLOW_NOT_FOUND', workflowId);
		this.name = 'WorkflowNotFoundError';
	}
}

export class MaxConcurrentWorkflowsError extends WorkflowError {
	constructor(maxConcurrent: number) {
		super(
			`Maximum concurrent workflows (${maxConcurrent}) reached`,
			'MAX_CONCURRENT_WORKFLOWS'
		);
		this.name = 'MaxConcurrentWorkflowsError';
	}
}
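A short sketch of how these classes compose in practice, wrapping a lower-level failure while keeping the original error reachable via `cause`; the failing git call here is purely illustrative:

```typescript
import { WorktreeError } from '@tm/workflow-engine';

// Illustrative low-level failure standing in for a real `git worktree add` call.
async function gitWorktreeAdd(path: string): Promise<void> {
	throw new Error(`fatal: '${path}' already exists`);
}

async function createIsolatedWorkspace(path: string): Promise<void> {
	try {
		await gitWorktreeAdd(path);
	} catch (err) {
		// Callers can check `instanceof WorktreeError` and still inspect `.cause`.
		throw new WorktreeError(`Failed to create worktree at ${path}`, path, err as Error);
	}
}
```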
@@ -1,19 +0,0 @@
/**
 * @fileoverview Workflow Engine
 * Main entry point for the Task Master workflow execution engine
 */

// Core task execution
export * from './task-execution/index.js';

// Component managers
export * from './worktree/index.js';
export * from './process/index.js';
export * from './state/index.js';

// Types and errors
export * from './types/index.js';
export * from './errors/index.js';

// Convenience exports
export { TaskExecutionManager as WorkflowEngine } from './task-execution/index.js';
@@ -1,6 +0,0 @@
/**
 * @fileoverview Process Management
 * Public exports for process operations
 */

export * from './process-sandbox.js';
@@ -1,378 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Process Sandbox
|
||||
* Manages Claude Code process execution in isolated environments
|
||||
*/
|
||||
|
||||
import { spawn, ChildProcess } from 'node:child_process';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import type {
|
||||
WorkflowProcess,
|
||||
WorkflowEvent,
|
||||
WorkflowEventType
|
||||
} from '../types/workflow.types.js';
|
||||
import { ProcessError } from '../errors/workflow.errors.js';
|
||||
|
||||
export interface ProcessSandboxConfig {
|
||||
/** Claude Code executable path */
|
||||
claudeExecutable: string;
|
||||
/** Default timeout for processes (minutes) */
|
||||
defaultTimeout: number;
|
||||
/** Environment variables to pass to processes */
|
||||
environment?: Record<string, string>;
|
||||
/** Enable debug output */
|
||||
debug: boolean;
|
||||
}
|
||||
|
||||
export interface ProcessOptions {
|
||||
/** Working directory for the process */
|
||||
cwd: string;
|
||||
/** Environment variables (merged with config) */
|
||||
env?: Record<string, string>;
|
||||
/** Timeout in minutes (overrides default) */
|
||||
timeout?: number;
|
||||
/** Additional Claude Code arguments */
|
||||
args?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* ProcessSandbox manages Claude Code process lifecycle
|
||||
* Single responsibility: Process spawning, monitoring, and cleanup
|
||||
*/
|
||||
export class ProcessSandbox extends EventEmitter {
|
||||
private config: ProcessSandboxConfig;
|
||||
private activeProcesses = new Map<string, WorkflowProcess>();
|
||||
private childProcesses = new Map<string, ChildProcess>();
|
||||
private timeouts = new Map<string, NodeJS.Timeout>();
|
||||
|
||||
constructor(config: ProcessSandboxConfig) {
|
||||
super();
|
||||
this.config = config;
|
||||
this.setupCleanupHandlers();
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a Claude Code process for task execution
|
||||
*/
|
||||
async startProcess(
|
||||
workflowId: string,
|
||||
taskId: string,
|
||||
taskPrompt: string,
|
||||
options: ProcessOptions
|
||||
): Promise<WorkflowProcess> {
|
||||
if (this.activeProcesses.has(workflowId)) {
|
||||
throw new ProcessError(
|
||||
`Process already running for workflow ${workflowId}`
|
||||
);
|
||||
}
|
||||
|
||||
// Prepare command and arguments
|
||||
const args = [
|
||||
'-p', // Print mode for non-interactive execution
|
||||
taskPrompt,
|
||||
...(options.args || [])
|
||||
];
|
||||
|
||||
// Prepare environment
|
||||
const env = {
|
||||
...process.env,
|
||||
...this.config.environment,
|
||||
...options.env,
|
||||
// Ensure task context is available
|
||||
TASKMASTER_WORKFLOW_ID: workflowId,
|
||||
TASKMASTER_TASK_ID: taskId
|
||||
};
|
||||
|
||||
try {
|
||||
// Spawn Claude Code process
|
||||
const childProcess = spawn(this.config.claudeExecutable, args, {
|
||||
cwd: options.cwd,
|
||||
env,
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
const workflowProcess: WorkflowProcess = {
|
||||
pid: childProcess.pid!,
|
||||
command: this.config.claudeExecutable,
|
||||
args,
|
||||
cwd: options.cwd,
|
||||
env,
|
||||
startedAt: new Date(),
|
||||
status: 'starting'
|
||||
};
|
||||
|
||||
// Store process references
|
||||
this.activeProcesses.set(workflowId, workflowProcess);
|
||||
this.childProcesses.set(workflowId, childProcess);
|
||||
|
||||
// Setup process event handlers
|
||||
this.setupProcessHandlers(workflowId, taskId, childProcess);
|
||||
|
||||
// Setup timeout if specified
|
||||
const timeoutMinutes = options.timeout || this.config.defaultTimeout;
|
||||
if (timeoutMinutes > 0) {
|
||||
this.setupProcessTimeout(workflowId, timeoutMinutes);
|
||||
}
|
||||
|
||||
// Emit process started event
|
||||
this.emitEvent('process.started', workflowId, taskId, {
|
||||
pid: workflowProcess.pid,
|
||||
command: workflowProcess.command
|
||||
});
|
||||
|
||||
workflowProcess.status = 'running';
|
||||
return workflowProcess;
|
||||
} catch (error) {
|
||||
throw new ProcessError(
|
||||
`Failed to start process for workflow ${workflowId}`,
|
||||
undefined,
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a running process
|
||||
*/
|
||||
async stopProcess(workflowId: string, force = false): Promise<void> {
|
||||
const process = this.activeProcesses.get(workflowId);
|
||||
const childProcess = this.childProcesses.get(workflowId);
|
||||
|
||||
if (!process || !childProcess) {
|
||||
throw new ProcessError(
|
||||
`No running process found for workflow ${workflowId}`
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
// Clear timeout
|
||||
const timeout = this.timeouts.get(workflowId);
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
this.timeouts.delete(workflowId);
|
||||
}
|
||||
|
||||
// Kill the process
|
||||
if (force) {
|
||||
childProcess.kill('SIGKILL');
|
||||
} else {
|
||||
childProcess.kill('SIGTERM');
|
||||
|
||||
// Give it 5 seconds to gracefully exit, then force kill
|
||||
setTimeout(() => {
|
||||
if (!childProcess.killed) {
|
||||
childProcess.kill('SIGKILL');
|
||||
}
|
||||
}, 5000);
|
||||
}
|
||||
|
||||
process.status = 'stopped';
|
||||
|
||||
// Emit process stopped event
|
||||
this.emitEvent('process.stopped', workflowId, process.pid.toString(), {
|
||||
pid: process.pid,
|
||||
forced: force
|
||||
});
|
||||
} catch (error) {
|
||||
throw new ProcessError(
|
||||
`Failed to stop process for workflow ${workflowId}`,
|
||||
process.pid,
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send input to a running process
|
||||
*/
|
||||
async sendInput(workflowId: string, input: string): Promise<void> {
|
||||
const childProcess = this.childProcesses.get(workflowId);
|
||||
if (!childProcess) {
|
||||
throw new ProcessError(
|
||||
`No running process found for workflow ${workflowId}`
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
childProcess.stdin?.write(input);
|
||||
childProcess.stdin?.write('\n');
|
||||
} catch (error) {
|
||||
throw new ProcessError(
|
||||
`Failed to send input to process for workflow ${workflowId}`,
|
||||
childProcess.pid,
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get process information
|
||||
*/
|
||||
getProcess(workflowId: string): WorkflowProcess | undefined {
|
||||
return this.activeProcesses.get(workflowId);
|
||||
}
|
||||
|
||||
/**
|
||||
* List all active processes
|
||||
*/
|
||||
listProcesses(): WorkflowProcess[] {
|
||||
return Array.from(this.activeProcesses.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a process is running
|
||||
*/
|
||||
isProcessRunning(workflowId: string): boolean {
|
||||
const process = this.activeProcesses.get(workflowId);
|
||||
return process?.status === 'running' || process?.status === 'starting';
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up all processes
|
||||
*/
|
||||
async cleanupAll(force = false): Promise<void> {
|
||||
const workflowIds = Array.from(this.activeProcesses.keys());
|
||||
|
||||
await Promise.all(
|
||||
workflowIds.map(async (workflowId) => {
|
||||
try {
|
||||
await this.stopProcess(workflowId, force);
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Failed to cleanup process for workflow ${workflowId}:`,
|
||||
error
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup process event handlers
|
||||
*/
|
||||
private setupProcessHandlers(
|
||||
workflowId: string,
|
||||
taskId: string,
|
||||
childProcess: ChildProcess
|
||||
): void {
|
||||
const process = this.activeProcesses.get(workflowId);
|
||||
if (!process) return;
|
||||
|
||||
// Handle stdout
|
||||
childProcess.stdout?.on('data', (data) => {
|
||||
const output = data.toString();
|
||||
if (this.config.debug) {
|
||||
console.log(`[${workflowId}] STDOUT:`, output);
|
||||
}
|
||||
|
||||
this.emitEvent('process.output', workflowId, taskId, {
|
||||
stream: 'stdout',
|
||||
data: output
|
||||
});
|
||||
});
|
||||
|
||||
// Handle stderr
|
||||
childProcess.stderr?.on('data', (data) => {
|
||||
const output = data.toString();
|
||||
if (this.config.debug) {
|
||||
console.error(`[${workflowId}] STDERR:`, output);
|
||||
}
|
||||
|
||||
this.emitEvent('process.output', workflowId, taskId, {
|
||||
stream: 'stderr',
|
||||
data: output
|
||||
});
|
||||
});
|
||||
|
||||
// Handle process exit
|
||||
childProcess.on('exit', (code, signal) => {
|
||||
process.status = code === 0 ? 'stopped' : 'crashed';
|
||||
|
||||
this.emitEvent('process.stopped', workflowId, taskId, {
|
||||
pid: process.pid,
|
||||
exitCode: code,
|
||||
signal
|
||||
});
|
||||
|
||||
// Cleanup
|
||||
this.activeProcesses.delete(workflowId);
|
||||
this.childProcesses.delete(workflowId);
|
||||
|
||||
const timeout = this.timeouts.get(workflowId);
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
this.timeouts.delete(workflowId);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle process errors
|
||||
childProcess.on('error', (error) => {
|
||||
process.status = 'crashed';
|
||||
|
||||
this.emitEvent('process.error', workflowId, taskId, undefined, error);
|
||||
|
||||
// Cleanup
|
||||
this.activeProcesses.delete(workflowId);
|
||||
this.childProcesses.delete(workflowId);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup process timeout
|
||||
*/
|
||||
private setupProcessTimeout(
|
||||
workflowId: string,
|
||||
timeoutMinutes: number
|
||||
): void {
|
||||
const timeout = setTimeout(
|
||||
async () => {
|
||||
console.warn(`Process timeout reached for workflow ${workflowId}`);
|
||||
|
||||
try {
|
||||
await this.stopProcess(workflowId, true);
|
||||
} catch (error) {
|
||||
console.error('Failed to stop timed out process:', error);
|
||||
}
|
||||
},
|
||||
timeoutMinutes * 60 * 1000
|
||||
);
|
||||
|
||||
this.timeouts.set(workflowId, timeout);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit workflow event
|
||||
*/
|
||||
private emitEvent(
|
||||
type: WorkflowEventType,
|
||||
workflowId: string,
|
||||
taskId: string,
|
||||
data?: any,
|
||||
error?: Error
|
||||
): void {
|
||||
const event: WorkflowEvent = {
|
||||
type,
|
||||
workflowId,
|
||||
taskId,
|
||||
timestamp: new Date(),
|
||||
data,
|
||||
error
|
||||
};
|
||||
|
||||
this.emit('event', event);
|
||||
this.emit(type, event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup cleanup handlers for graceful shutdown
|
||||
*/
|
||||
private setupCleanupHandlers(): void {
|
||||
const cleanup = () => {
|
||||
console.log('Cleaning up processes...');
|
||||
this.cleanupAll(true).catch(console.error);
|
||||
};
|
||||
|
||||
process.on('SIGINT', cleanup);
|
||||
process.on('SIGTERM', cleanup);
|
||||
process.on('exit', cleanup);
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
/**
 * @fileoverview State Management
 * Public exports for workflow state operations
 */

export * from './workflow-state-manager.js';
@@ -1,320 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow State Manager
|
||||
* Extends tm-core RuntimeStateManager with workflow tracking capabilities
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import type {
|
||||
WorkflowExecutionContext,
|
||||
WorkflowStatus,
|
||||
WorkflowEvent
|
||||
} from '../types/workflow.types.js';
|
||||
import { WorkflowError } from '../errors/workflow.errors.js';
|
||||
|
||||
export interface WorkflowStateConfig {
|
||||
/** Project root directory */
|
||||
projectRoot: string;
|
||||
/** Custom state directory (defaults to .taskmaster) */
|
||||
stateDir?: string;
|
||||
}
|
||||
|
||||
export interface WorkflowRegistryEntry {
|
||||
/** Workflow ID */
|
||||
workflowId: string;
|
||||
/** Task ID being executed */
|
||||
taskId: string;
|
||||
/** Workflow status */
|
||||
status: WorkflowStatus;
|
||||
/** Worktree path */
|
||||
worktreePath: string;
|
||||
/** Process ID if running */
|
||||
processId?: number;
|
||||
/** Start timestamp */
|
||||
startedAt: string;
|
||||
/** Last activity timestamp */
|
||||
lastActivity: string;
|
||||
/** Branch name */
|
||||
branchName: string;
|
||||
/** Additional metadata */
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowStateManager manages workflow execution state
|
||||
* Extends the concept of RuntimeStateManager to track active workflows globally
|
||||
*/
|
||||
export class WorkflowStateManager {
|
||||
private config: WorkflowStateConfig;
|
||||
private stateFilePath: string;
|
||||
private activeWorkflows = new Map<string, WorkflowExecutionContext>();
|
||||
|
||||
constructor(config: WorkflowStateConfig) {
|
||||
this.config = config;
|
||||
const stateDir = config.stateDir || '.taskmaster';
|
||||
this.stateFilePath = path.join(config.projectRoot, stateDir, 'workflows.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Load workflow state from disk
|
||||
*/
|
||||
async loadState(): Promise<void> {
|
||||
try {
|
||||
const stateData = await fs.readFile(this.stateFilePath, 'utf-8');
|
||||
const registry = JSON.parse(stateData) as Record<string, WorkflowRegistryEntry>;
|
||||
|
||||
// Convert registry entries to WorkflowExecutionContext
|
||||
for (const [workflowId, entry] of Object.entries(registry)) {
|
||||
const context: WorkflowExecutionContext = {
|
||||
taskId: entry.taskId,
|
||||
taskTitle: `Task ${entry.taskId}`, // Will be updated when task details are loaded
|
||||
taskDescription: '',
|
||||
projectRoot: this.config.projectRoot,
|
||||
worktreePath: entry.worktreePath,
|
||||
branchName: entry.branchName,
|
||||
processId: entry.processId,
|
||||
startedAt: new Date(entry.startedAt),
|
||||
status: entry.status,
|
||||
lastActivity: new Date(entry.lastActivity),
|
||||
metadata: entry.metadata
|
||||
};
|
||||
|
||||
this.activeWorkflows.set(workflowId, context);
|
||||
}
|
||||
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
// Workflows file doesn't exist, start with empty state
|
||||
console.debug('No workflows.json found, starting with empty state');
|
||||
return;
|
||||
}
|
||||
|
||||
console.warn('Failed to load workflow state:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save workflow state to disk
|
||||
*/
|
||||
async saveState(): Promise<void> {
|
||||
const stateDir = path.dirname(this.stateFilePath);
|
||||
|
||||
try {
|
||||
await fs.mkdir(stateDir, { recursive: true });
|
||||
|
||||
// Convert contexts to registry entries
|
||||
const registry: Record<string, WorkflowRegistryEntry> = {};
|
||||
|
||||
for (const [workflowId, context] of this.activeWorkflows.entries()) {
|
||||
registry[workflowId] = {
|
||||
workflowId,
|
||||
taskId: context.taskId,
|
||||
status: context.status,
|
||||
worktreePath: context.worktreePath,
|
||||
processId: context.processId,
|
||||
startedAt: context.startedAt.toISOString(),
|
||||
lastActivity: context.lastActivity.toISOString(),
|
||||
branchName: context.branchName,
|
||||
metadata: context.metadata
|
||||
};
|
||||
}
|
||||
|
||||
await fs.writeFile(
|
||||
this.stateFilePath,
|
||||
JSON.stringify(registry, null, 2),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
} catch (error) {
|
||||
throw new WorkflowError(
|
||||
'Failed to save workflow state',
|
||||
'WORKFLOW_STATE_SAVE_ERROR',
|
||||
undefined,
|
||||
undefined,
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a new workflow
|
||||
*/
|
||||
async registerWorkflow(context: WorkflowExecutionContext): Promise<string> {
|
||||
const workflowId = this.generateWorkflowId(context.taskId);
|
||||
|
||||
this.activeWorkflows.set(workflowId, {
|
||||
...context,
|
||||
lastActivity: new Date()
|
||||
});
|
||||
|
||||
await this.saveState();
|
||||
return workflowId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update workflow context
|
||||
*/
|
||||
async updateWorkflow(
|
||||
workflowId: string,
|
||||
updates: Partial<WorkflowExecutionContext>
|
||||
): Promise<void> {
|
||||
const existing = this.activeWorkflows.get(workflowId);
|
||||
if (!existing) {
|
||||
throw new WorkflowError(
|
||||
`Workflow ${workflowId} not found`,
|
||||
'WORKFLOW_NOT_FOUND',
|
||||
workflowId
|
||||
);
|
||||
}
|
||||
|
||||
const updated = {
|
||||
...existing,
|
||||
...updates,
|
||||
lastActivity: new Date()
|
||||
};
|
||||
|
||||
this.activeWorkflows.set(workflowId, updated);
|
||||
await this.saveState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Update workflow status
|
||||
*/
|
||||
async updateWorkflowStatus(workflowId: string, status: WorkflowStatus): Promise<void> {
|
||||
await this.updateWorkflow(workflowId, { status });
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregister a workflow (remove from state)
|
||||
*/
|
||||
async unregisterWorkflow(workflowId: string): Promise<void> {
|
||||
if (!this.activeWorkflows.has(workflowId)) {
|
||||
throw new WorkflowError(
|
||||
`Workflow ${workflowId} not found`,
|
||||
'WORKFLOW_NOT_FOUND',
|
||||
workflowId
|
||||
);
|
||||
}
|
||||
|
||||
this.activeWorkflows.delete(workflowId);
|
||||
await this.saveState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workflow context by ID
|
||||
*/
|
||||
getWorkflow(workflowId: string): WorkflowExecutionContext | undefined {
|
||||
return this.activeWorkflows.get(workflowId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workflow by task ID
|
||||
*/
|
||||
getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
|
||||
for (const context of this.activeWorkflows.values()) {
|
||||
if (context.taskId === taskId) {
|
||||
return context;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* List all active workflows
|
||||
*/
|
||||
listWorkflows(): WorkflowExecutionContext[] {
|
||||
return Array.from(this.activeWorkflows.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* List workflows by status
|
||||
*/
|
||||
listWorkflowsByStatus(status: WorkflowStatus): WorkflowExecutionContext[] {
|
||||
return this.listWorkflows().filter(w => w.status === status);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get running workflows count
|
||||
*/
|
||||
getRunningCount(): number {
|
||||
return this.listWorkflowsByStatus('running').length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a task has an active workflow
|
||||
*/
|
||||
hasActiveWorkflow(taskId: string): boolean {
|
||||
return this.getWorkflowByTaskId(taskId) !== undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up completed/failed workflows older than specified time
|
||||
*/
|
||||
async cleanupOldWorkflows(olderThanHours = 24): Promise<number> {
|
||||
const cutoffTime = new Date(Date.now() - (olderThanHours * 60 * 60 * 1000));
|
||||
let cleaned = 0;
|
||||
|
||||
for (const [workflowId, context] of this.activeWorkflows.entries()) {
|
||||
const isOld = context.lastActivity < cutoffTime;
|
||||
const isFinished = ['completed', 'failed', 'cancelled', 'timeout'].includes(context.status);
|
||||
|
||||
if (isOld && isFinished) {
|
||||
this.activeWorkflows.delete(workflowId);
|
||||
cleaned++;
|
||||
}
|
||||
}
|
||||
|
||||
if (cleaned > 0) {
|
||||
await this.saveState();
|
||||
}
|
||||
|
||||
return cleaned;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all workflow state
|
||||
*/
|
||||
async clearState(): Promise<void> {
|
||||
try {
|
||||
await fs.unlink(this.stateFilePath);
|
||||
} catch (error: any) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
this.activeWorkflows.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Record workflow event (for audit trail)
|
||||
*/
|
||||
async recordEvent(event: WorkflowEvent): Promise<void> {
|
||||
// Update workflow last activity
|
||||
const workflow = this.activeWorkflows.get(event.workflowId);
|
||||
if (workflow) {
|
||||
workflow.lastActivity = event.timestamp;
|
||||
await this.saveState();
|
||||
}
|
||||
|
||||
// Optional: Could extend to maintain event log file
|
||||
if (process.env.TASKMASTER_DEBUG) {
|
||||
console.log('Workflow Event:', {
|
||||
type: event.type,
|
||||
workflowId: event.workflowId,
|
||||
taskId: event.taskId,
|
||||
timestamp: event.timestamp.toISOString(),
|
||||
data: event.data
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate unique workflow ID
|
||||
*/
|
||||
private generateWorkflowId(taskId: string): string {
|
||||
const timestamp = Date.now();
|
||||
const random = Math.random().toString(36).substring(2, 8);
|
||||
return `workflow-${taskId}-${timestamp}-${random}`;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
/**
 * @fileoverview Task Execution Management
 * Public exports for task execution operations
 */

export * from './task-execution-manager.js';
@@ -1,433 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Task Execution Manager
|
||||
* Orchestrates the complete task execution workflow using worktrees and processes
|
||||
*/
|
||||
|
||||
import { EventEmitter } from 'node:events';
|
||||
import path from 'node:path';
|
||||
import type { Task } from '@tm/core';
|
||||
import {
|
||||
WorktreeManager,
|
||||
type WorktreeManagerConfig
|
||||
} from '../worktree/worktree-manager.js';
|
||||
import {
|
||||
ProcessSandbox,
|
||||
type ProcessSandboxConfig
|
||||
} from '../process/process-sandbox.js';
|
||||
import {
|
||||
WorkflowStateManager,
|
||||
type WorkflowStateConfig
|
||||
} from '../state/workflow-state-manager.js';
|
||||
import type {
|
||||
WorkflowConfig,
|
||||
WorkflowExecutionContext,
|
||||
WorkflowStatus,
|
||||
WorkflowEvent
|
||||
} from '../types/workflow.types.js';
|
||||
import {
|
||||
WorkflowError,
|
||||
WorkflowNotFoundError,
|
||||
MaxConcurrentWorkflowsError,
|
||||
WorkflowTimeoutError
|
||||
} from '../errors/workflow.errors.js';
|
||||
|
||||
export interface TaskExecutionManagerConfig extends WorkflowConfig {
|
||||
/** Project root directory */
|
||||
projectRoot: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* TaskExecutionManager orchestrates the complete task execution workflow
|
||||
* Coordinates worktree creation, process spawning, and state management
|
||||
*/
|
||||
export class TaskExecutionManager extends EventEmitter {
|
||||
private config: TaskExecutionManagerConfig;
|
||||
private worktreeManager: WorktreeManager;
|
||||
private processSandbox: ProcessSandbox;
|
||||
private stateManager: WorkflowStateManager;
|
||||
private initialized = false;
|
||||
|
||||
constructor(config: TaskExecutionManagerConfig) {
|
||||
super();
|
||||
this.config = config;
|
||||
|
||||
// Initialize component managers
|
||||
const worktreeConfig: WorktreeManagerConfig = {
|
||||
worktreeBase: config.worktreeBase,
|
||||
projectRoot: config.projectRoot,
|
||||
autoCleanup: true
|
||||
};
|
||||
|
||||
const processConfig: ProcessSandboxConfig = {
|
||||
      claudeExecutable: config.claudeExecutable,
      defaultTimeout: config.defaultTimeout,
      debug: config.debug
    };

    const stateConfig: WorkflowStateConfig = {
      projectRoot: config.projectRoot
    };

    this.worktreeManager = new WorktreeManager(worktreeConfig);
    this.processSandbox = new ProcessSandbox(processConfig);
    this.stateManager = new WorkflowStateManager(stateConfig);

    // Forward events from components
    this.processSandbox.on('event', (event: WorkflowEvent) => {
      this.stateManager.recordEvent(event);
      this.emit('event', event);
    });
  }

  /**
   * Initialize the task execution manager
   */
  async initialize(): Promise<void> {
    if (this.initialized) return;

    await this.stateManager.loadState();

    // Clean up any stale workflows
    await this.cleanupStaleWorkflows();

    this.initialized = true;
  }

  /**
   * Start task execution workflow
   */
  async startTaskExecution(
    task: Task,
    options?: {
      branchName?: string;
      timeout?: number;
      env?: Record<string, string>;
    }
  ): Promise<string> {
    if (!this.initialized) {
      await this.initialize();
    }

    // Check concurrent workflow limit
    const runningCount = this.stateManager.getRunningCount();
    if (runningCount >= this.config.maxConcurrent) {
      throw new MaxConcurrentWorkflowsError(this.config.maxConcurrent);
    }

    // Check if task already has an active workflow
    if (this.stateManager.hasActiveWorkflow(task.id)) {
      throw new WorkflowError(
        `Task ${task.id} already has an active workflow`,
        'TASK_ALREADY_EXECUTING',
        undefined,
        task.id
      );
    }

    try {
      // Create worktree
      const worktreeInfo = await this.worktreeManager.createWorktree(
        task.id,
        options?.branchName
      );

      // Prepare task context
      const context: WorkflowExecutionContext = {
        taskId: task.id,
        taskTitle: task.title,
        taskDescription: task.description,
        taskDetails: task.details,
        projectRoot: this.config.projectRoot,
        worktreePath: worktreeInfo.path,
        branchName: worktreeInfo.branch,
        startedAt: new Date(),
        status: 'initializing',
        lastActivity: new Date(),
        metadata: {
          priority: task.priority,
          dependencies: task.dependencies
        }
      };

      // Register workflow
      const workflowId = await this.stateManager.registerWorkflow(context);

      try {
        // Prepare task prompt for Claude Code
        const taskPrompt = this.generateTaskPrompt(task);

        // Start Claude Code process
        const process = await this.processSandbox.startProcess(
          workflowId,
          task.id,
          taskPrompt,
          {
            cwd: worktreeInfo.path,
            timeout: options?.timeout,
            env: options?.env
          }
        );

        // Update workflow with process information
        await this.stateManager.updateWorkflow(workflowId, {
          processId: process.pid,
          status: 'running'
        });

        // Emit workflow started event
        this.emitEvent('workflow.started', workflowId, task.id, {
          worktreePath: worktreeInfo.path,
          processId: process.pid
        });

        return workflowId;
      } catch (error) {
        // Clean up worktree if process failed to start
        await this.worktreeManager.removeWorktree(task.id, true);
        await this.stateManager.unregisterWorkflow(workflowId);
        throw error;
      }
    } catch (error) {
      throw new WorkflowError(
        `Failed to start task execution for ${task.id}`,
        'TASK_EXECUTION_START_ERROR',
        undefined,
        task.id,
        error as Error
      );
    }
  }

  /**
   * Stop task execution workflow
   */
  async stopTaskExecution(workflowId: string, force = false): Promise<void> {
    const workflow = this.stateManager.getWorkflow(workflowId);
    if (!workflow) {
      throw new WorkflowNotFoundError(workflowId);
    }

    try {
      // Stop the process if running
      if (this.processSandbox.isProcessRunning(workflowId)) {
        await this.processSandbox.stopProcess(workflowId, force);
      }

      // Update workflow status
      const status: WorkflowStatus = force ? 'cancelled' : 'completed';
      await this.stateManager.updateWorkflowStatus(workflowId, status);

      // Clean up worktree
      await this.worktreeManager.removeWorktree(workflow.taskId, force);

      // Emit workflow stopped event
      this.emitEvent('workflow.completed', workflowId, workflow.taskId, {
        status,
        forced: force
      });

      // Unregister workflow
      await this.stateManager.unregisterWorkflow(workflowId);
    } catch (error) {
      throw new WorkflowError(
        `Failed to stop workflow ${workflowId}`,
        'WORKFLOW_STOP_ERROR',
        workflowId,
        workflow.taskId,
        error as Error
      );
    }
  }

  /**
   * Pause task execution
   */
  async pauseTaskExecution(workflowId: string): Promise<void> {
    const workflow = this.stateManager.getWorkflow(workflowId);
    if (!workflow) {
      throw new WorkflowNotFoundError(workflowId);
    }

    if (workflow.status !== 'running') {
      throw new WorkflowError(
        `Cannot pause workflow ${workflowId} - not currently running`,
        'WORKFLOW_NOT_RUNNING',
        workflowId,
        workflow.taskId
      );
    }

    // For now, we'll just mark as paused - in the future could implement
    // process suspension or other pause mechanisms
    await this.stateManager.updateWorkflowStatus(workflowId, 'paused');

    this.emitEvent('workflow.paused', workflowId, workflow.taskId);
  }

  /**
   * Resume paused task execution
   */
  async resumeTaskExecution(workflowId: string): Promise<void> {
    const workflow = this.stateManager.getWorkflow(workflowId);
    if (!workflow) {
      throw new WorkflowNotFoundError(workflowId);
    }

    if (workflow.status !== 'paused') {
      throw new WorkflowError(
        `Cannot resume workflow ${workflowId} - not currently paused`,
        'WORKFLOW_NOT_PAUSED',
        workflowId,
        workflow.taskId
      );
    }

    await this.stateManager.updateWorkflowStatus(workflowId, 'running');

    this.emitEvent('workflow.resumed', workflowId, workflow.taskId);
  }

  /**
   * Get workflow status
   */
  getWorkflowStatus(workflowId: string): WorkflowExecutionContext | undefined {
    return this.stateManager.getWorkflow(workflowId);
  }

  /**
   * Get workflow by task ID
   */
  getWorkflowByTaskId(taskId: string): WorkflowExecutionContext | undefined {
    return this.stateManager.getWorkflowByTaskId(taskId);
  }

  /**
   * List all workflows
   */
  listWorkflows(): WorkflowExecutionContext[] {
    return this.stateManager.listWorkflows();
  }

  /**
   * List active workflows
   */
  listActiveWorkflows(): WorkflowExecutionContext[] {
    return this.stateManager.listWorkflowsByStatus('running');
  }

  /**
   * Send input to a running workflow
   */
  async sendInputToWorkflow(workflowId: string, input: string): Promise<void> {
    const workflow = this.stateManager.getWorkflow(workflowId);
    if (!workflow) {
      throw new WorkflowNotFoundError(workflowId);
    }

    if (!this.processSandbox.isProcessRunning(workflowId)) {
      throw new WorkflowError(
        `Cannot send input to workflow ${workflowId} - process not running`,
        'PROCESS_NOT_RUNNING',
        workflowId,
        workflow.taskId
      );
    }

    await this.processSandbox.sendInput(workflowId, input);
  }

  /**
   * Clean up all workflows
   */
  async cleanup(force = false): Promise<void> {
    // Stop all processes
    await this.processSandbox.cleanupAll(force);

    // Clean up all worktrees
    await this.worktreeManager.cleanupAll(force);

    // Clear workflow state
    await this.stateManager.clearState();
  }

  /**
   * Generate task prompt for Claude Code
   */
  private generateTaskPrompt(task: Task): string {
    const prompt = [
      `Work on Task ${task.id}: ${task.title}`,
      '',
      `Description: ${task.description}`
    ];

    if (task.details) {
      prompt.push('', `Details: ${task.details}`);
    }

    if (task.testStrategy) {
      prompt.push('', `Test Strategy: ${task.testStrategy}`);
    }

    if (task.dependencies?.length) {
      prompt.push('', `Dependencies: ${task.dependencies.join(', ')}`);
    }

    prompt.push(
      '',
      'Please implement this task following the project conventions and best practices.',
      'When complete, update the task status appropriately using the available Task Master commands.'
    );

    return prompt.join('\n');
  }

  /**
   * Clean up stale workflows from previous sessions
   */
  private async cleanupStaleWorkflows(): Promise<void> {
    const workflows = this.stateManager.listWorkflows();

    for (const workflow of workflows) {
      const isStale =
        workflow.status === 'running' &&
        !this.processSandbox.isProcessRunning(`workflow-${workflow.taskId}`);

      if (isStale) {
        console.log(`Cleaning up stale workflow for task ${workflow.taskId}`);

        try {
          await this.stateManager.updateWorkflowStatus(
            `workflow-${workflow.taskId}`,
            'failed'
          );

          // Try to clean up worktree
          await this.worktreeManager.removeWorktree(workflow.taskId, true);
        } catch (error) {
          console.error(`Failed to cleanup stale workflow:`, error);
        }
      }
    }
  }

  /**
   * Emit workflow event
   */
  private emitEvent(
    type: string,
    workflowId: string,
    taskId: string,
    data?: any
  ): void {
    const event: WorkflowEvent = {
      type: type as any,
      workflowId,
      taskId,
      timestamp: new Date(),
      data
    };

    this.emit('event', event);
    this.emit(type, event);
  }
}
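For orientation while reviewing the removal above, here is a minimal sketch of how this execution manager was meant to be driven. The class name `TaskExecutionManager`, the import paths, the constructor signature, and the shape of `Task` are assumptions (this hunk starts mid-constructor and does not show them); only the method names, the event forwarding, and the `WorkflowConfig` fields come from the diff.

```typescript
// Sketch only. Assumed: the class name, the import paths, the constructor
// signature (new TaskExecutionManager(config)), that the manager extends
// EventEmitter, and the minimal Task shape used below.
import { TaskExecutionManager } from './task-execution-manager.js';
import type { Task } from '../types/task.js';
import type { WorkflowConfig, WorkflowEvent } from './types/workflow.types.js';

const config: WorkflowConfig = {
  maxConcurrent: 2,
  defaultTimeout: 30, // minutes, per the WorkflowConfig doc comment
  worktreeBase: '.taskmaster/worktrees', // assumed location
  claudeExecutable: 'claude',
  debug: false
};

const manager = new TaskExecutionManager(config);
await manager.initialize();

// Events from the process sandbox are re-emitted on the manager.
manager.on('event', (event: WorkflowEvent) => {
  console.log(`[${event.type}] task=${event.taskId} workflow=${event.workflowId}`);
});

// Minimal stand-in task; the real Task type likely carries more fields.
const task = {
  id: '42',
  title: 'Add retry logic to the API client',
  description: 'Wrap outbound requests with exponential backoff.',
  dependencies: []
} as unknown as Task;

const workflowId = await manager.startTaskExecution(task, { timeout: 20 });
// ...later, stop the run (pass true to force-kill the process):
await manager.stopTaskExecution(workflowId);
```

A single `'event'` listener is enough in this sketch because `emitEvent` forwards every event under the generic `'event'` channel and re-emits it under its specific type as well.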
@@ -1,6 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow Engine Types
|
||||
* Public type exports
|
||||
*/
|
||||
|
||||
export * from './workflow.types.js';
|
||||
@@ -1,119 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Workflow Engine Types
|
||||
* Core types for workflow execution system
|
||||
*/
|
||||
|
||||
export interface WorkflowConfig {
|
||||
/** Maximum number of concurrent workflows */
|
||||
maxConcurrent: number;
|
||||
/** Default timeout for workflow execution (minutes) */
|
||||
defaultTimeout: number;
|
||||
/** Base directory for worktrees */
|
||||
worktreeBase: string;
|
||||
/** Claude Code executable path */
|
||||
claudeExecutable: string;
|
||||
/** Enable debug logging */
|
||||
debug: boolean;
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionContext {
|
||||
/** Task ID being executed */
|
||||
taskId: string;
|
||||
/** Task title for display */
|
||||
taskTitle: string;
|
||||
/** Full task description */
|
||||
taskDescription: string;
|
||||
/** Task implementation details */
|
||||
taskDetails?: string;
|
||||
/** Project root path */
|
||||
projectRoot: string;
|
||||
/** Worktree path */
|
||||
worktreePath: string;
|
||||
/** Branch name for this workflow */
|
||||
branchName: string;
|
||||
/** Process ID of running Claude Code */
|
||||
processId?: number;
|
||||
/** Workflow start time */
|
||||
startedAt: Date;
|
||||
/** Workflow status */
|
||||
status: WorkflowStatus;
|
||||
/** Last activity timestamp */
|
||||
lastActivity: Date;
|
||||
/** Execution metadata */
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
export type WorkflowStatus =
|
||||
| 'pending' // Created but not started
|
||||
| 'initializing' // Setting up worktree/process
|
||||
| 'running' // Active execution
|
||||
| 'paused' // Temporarily stopped
|
||||
| 'completed' // Successfully finished
|
||||
| 'failed' // Error occurred
|
||||
| 'cancelled' // User cancelled
|
||||
| 'timeout'; // Exceeded time limit
|
||||
|
||||
export interface WorkflowEvent {
|
||||
type: WorkflowEventType;
|
||||
workflowId: string;
|
||||
taskId: string;
|
||||
timestamp: Date;
|
||||
data?: any;
|
||||
error?: Error;
|
||||
}
|
||||
|
||||
export type WorkflowEventType =
|
||||
| 'workflow.created'
|
||||
| 'workflow.started'
|
||||
| 'workflow.paused'
|
||||
| 'workflow.resumed'
|
||||
| 'workflow.completed'
|
||||
| 'workflow.failed'
|
||||
| 'workflow.cancelled'
|
||||
| 'worktree.created'
|
||||
| 'worktree.deleted'
|
||||
| 'process.started'
|
||||
| 'process.stopped'
|
||||
| 'process.output'
|
||||
| 'process.error';
|
||||
|
||||
export interface WorkflowProcess {
|
||||
/** Process ID */
|
||||
pid: number;
|
||||
/** Command that was executed */
|
||||
command: string;
|
||||
/** Command arguments */
|
||||
args: string[];
|
||||
/** Working directory */
|
||||
cwd: string;
|
||||
/** Environment variables */
|
||||
env?: Record<string, string>;
|
||||
/** Process start time */
|
||||
startedAt: Date;
|
||||
/** Process status */
|
||||
status: ProcessStatus;
|
||||
}
|
||||
|
||||
export type ProcessStatus =
|
||||
| 'starting'
|
||||
| 'running'
|
||||
| 'stopped'
|
||||
| 'crashed'
|
||||
| 'killed';
|
||||
|
||||
export interface WorktreeInfo {
|
||||
/** Worktree path */
|
||||
path: string;
|
||||
/** Branch name */
|
||||
branch: string;
|
||||
/** Creation timestamp */
|
||||
createdAt: Date;
|
||||
/** Associated task ID */
|
||||
taskId: string;
|
||||
/** Git commit hash */
|
||||
commit?: string;
|
||||
/** Worktree lock status */
|
||||
locked: boolean;
|
||||
/** Lock reason if applicable */
|
||||
lockReason?: string;
|
||||
}
|
||||
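To show how these unions were meant to compose, here is a small, hypothetical helper (not part of this diff; the function name and the idea of "terminal" states are assumptions) that treats the last four `WorkflowStatus` values as end states:

```typescript
import type {
  WorkflowExecutionContext,
  WorkflowStatus
} from './workflow.types.js';

// Hypothetical helper, not part of the removed file: the four end states are
// the ones after which a workflow's worktree and state entry could be reclaimed.
const TERMINAL_STATUSES: ReadonlySet<WorkflowStatus> = new Set([
  'completed',
  'failed',
  'cancelled',
  'timeout'
]);

export function isTerminal(workflow: WorkflowExecutionContext): boolean {
  return TERMINAL_STATUSES.has(workflow.status);
}
```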
@@ -1,6 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Worktree Management
|
||||
* Public exports for worktree operations
|
||||
*/
|
||||
|
||||
export * from './worktree-manager.js';
|
||||
Some files were not shown because too many files have changed in this diff.