Compare commits

..

37 Commits

Author SHA1 Message Date
Ralph Khreish
e6de285cea feat: add auto-update to every command when your task-master instance is out of date (#1217) 2025-09-18 18:35:32 +02:00
github-actions[bot]
cf3339fa48 chore: rc version bump 2025-09-18 15:18:17 +00:00
Ralph Khreish
255b9f0334 chore: test pre-release functionality with new system 2025-09-18 17:16:26 +02:00
github-actions[bot]
cb2c266b2d chore: rc version bump 2025-09-18 12:56:01 +00:00
Ralph Khreish
170d6f2f65 feat: implement api update-task (#1214) 2025-09-18 01:48:01 +02:00
Ralph Khreish
137ef36278 chore: fix pre-release CI (#1213)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-09-18 00:34:13 +02:00
Ralph Khreish
1a3a528bf7 Merge pull request #1212 from eyaltoledano/ralph/main/rebase 2025-09-17 22:05:13 +02:00
Ralph Khreish
c164adc6ff chore: move to tsdown (#1211)
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-09-17 22:00:37 +02:00
Ralph Khreish
9d61e0447d fix: UI list and show (#1210) 2025-09-17 22:00:37 +02:00
Ralph Khreish
ee11b735b3 chore: fix env variables (#1204)
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-09-17 22:00:37 +02:00
losolosol
6d978228d9 feat: added vscode start task button (#1201)
Co-authored-by: Carlos Montoya <carlos@Carloss-MacBook-Pro.local>
Co-authored-by: Carlos Montoya <los@losmontoya.com>
2025-09-17 22:00:36 +02:00
Ralph Khreish
ea9341e7af chore: fix CI p2 2025-09-17 22:00:36 +02:00
Ralph Khreish
4296e383ea chore: fix CI 2025-09-17 22:00:36 +02:00
Ralph Khreish
97b2781709 feat: add tm show (#1199) 2025-09-17 22:00:36 +02:00
Ralph Khreish
96553e4a5f chore: fix CI with new typescript setup (#1194)
Co-authored-by: Ralph Khreish <Crunchyman-ralph@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
2025-09-17 22:00:36 +02:00
Ralph Khreish
7582219365 chore: fix format 2025-09-17 22:00:35 +02:00
Ralph Khreish
84baedc3d2 feat: implement tm list remote (#1185) 2025-09-17 22:00:35 +02:00
Ralph Khreish
78da39edff chore: address oauth PR concerns (#1184) 2025-09-17 22:00:35 +02:00
Ralph Khreish
4d1416b175 feat: add oauth with remote server (#1178) 2025-09-17 22:00:35 +02:00
Ralph Khreish
dc811eb45e feat: create tm-core and apps/cli (#1093)
- add typescript
- add npm workspaces
2025-09-17 22:00:34 +02:00
Ralph Khreish
3c41a113fe chore: move to tsdown (#1211)
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-09-17 21:55:53 +02:00
Ralph Khreish
0e8c42c7cb fix: UI list and show (#1210) 2025-09-17 15:05:33 +02:00
Ralph Khreish
799d1d2cce chore: fix env variables (#1204)
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
2025-09-13 01:34:50 +02:00
losolosol
83af314879 feat: added vscode start task button (#1201)
Co-authored-by: Carlos Montoya <carlos@Carloss-MacBook-Pro.local>
Co-authored-by: Carlos Montoya <los@losmontoya.com>
2025-09-12 05:35:57 +02:00
Ralph Khreish
dd03374496 chore: fix CI p2 2025-09-11 18:44:40 -07:00
Ralph Khreish
4ab0affba7 chore: fix CI 2025-09-11 18:35:07 -07:00
Ralph Khreish
77e1ddc237 feat: add tm show (#1199) 2025-09-12 03:34:32 +02:00
Ralph Khreish
3eeb19590a chore: fix CI with new typescript setup (#1194)
Co-authored-by: Ralph Khreish <Crunchyman-ralph@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
2025-09-09 23:35:47 +02:00
Ralph Khreish
587745046f chore: fix format 2025-09-09 03:32:48 +02:00
Ralph Khreish
c61c73f827 feat: implement tm list remote (#1185) 2025-09-09 03:32:48 +02:00
Ralph Khreish
15900d9fd5 chore: address oauth PR concerns (#1184) 2025-09-09 03:32:48 +02:00
Ralph Khreish
7cf4004038 feat: add oauth with remote server (#1178) 2025-09-09 03:32:48 +02:00
Ralph Khreish
0f3ab00f26 feat: create tm-core and apps/cli (#1093)
- add typescript
- add npm workspaces
2025-09-09 03:32:48 +02:00
github-actions[bot]
e81040def5 Version Packages (#1189)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Ralph Khreish <35776126+Crunchyman-ralph@users.noreply.github.com>
2025-09-09 03:17:22 +02:00
Ralph Khreish
597f6b03b4 Merge pull request #1164 (Release 0.26.0) 2025-09-08 22:17:30 +02:00
Ralph Khreish
a7ad4c8e92 chore: improve Claude documentation workflows (#1155) 2025-09-08 22:11:46 +02:00
Ralph Khreish
0d54747894 chore: fix CI 2025-09-08 12:46:07 -07:00
102 changed files with 18173 additions and 7049 deletions

View File

@@ -1,5 +0,0 @@
---
"task-master-ai": patch
---
docs(move): clarify cross-tag move docs; deprecate "force"; add explicit --with-dependencies/--ignore-dependencies examples

View File

@@ -8,11 +8,12 @@
],
"commit": false,
"fixed": [],
"linked": [],
"access": "public",
"baseBranch": "main",
"updateInternalDependencies": "patch",
"ignore": [
"docs"
"docs",
"@tm/cli",
"@tm/core",
"@tm/build-config"
]
}

View File

@@ -1,5 +0,0 @@
---
"task-master-ai": minor
---
Restore Taskmaster claude-code commands and move clear commands under /remove to avoid collision with the claude-code /clear command.

View File

@@ -1,9 +0,0 @@
---
"task-master-ai": minor
---
Enhanced Gemini CLI provider with codebase-aware task generation
Added automatic codebase analysis for Gemini CLI provider in parse-prd, and analyze-complexity, add-task, udpate-task, update, update-subtask commands
When using Gemini CLI as the AI provider, Task Master now instructs the AI to analyze the project structure, existing implementations, and patterns before generating tasks or subtasks
Tasks and subtasks generated by Claude Code are now informed by actual codebase analysis, resulting in more accurate and contextual outputs

View File

@@ -0,0 +1,5 @@
---
"task-master-ai": minor
---
Testing one more pre-release iteration

View File

@@ -0,0 +1,5 @@
---
"task-master-ai": minor
---
Test out the RC

View File

@@ -0,0 +1,5 @@
---
"@tm/cli": minor
---
testing this stuff out to see how the release candidate works with monorepo

View File

@@ -2,15 +2,17 @@
"mode": "pre",
"tag": "rc",
"initialVersions": {
"task-master-ai": "0.25.1",
"docs": "0.0.1",
"extension": "0.24.1"
"task-master-ai": "0.26.0",
"@tm/cli": "0.26.0",
"docs": "0.0.2",
"extension": "0.24.2",
"@tm/build-config": "1.0.0",
"@tm/core": "0.26.0"
},
"changesets": [
"clarify-force-move-docs",
"curvy-moons-dig",
"sour-coins-lay",
"strong-eagles-vanish",
"wet-candies-accept"
"easy-deer-heal",
"moody-oranges-slide",
"odd-otters-tan",
"wild-ears-look"
]
}

View File

@@ -0,0 +1,5 @@
---
"task-master-ai": major
---
@tm/cli: add auto-update functionality to every command

View File

@@ -1,11 +0,0 @@
---
"task-master-ai": minor
---
Add configurable codebase analysis feature flag with multiple configuration sources
Users can now control whether codebase analysis features (Claude Code and Gemini CLI integration) are enabled through environment variables, MCP configuration, or project config files.
Priority order: .env > MCP session env > .taskmaster/config.json.
Set `TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false` in `.env` to disable codebase analysis prompts and tool integration.

View File

@@ -1,12 +0,0 @@
---
"task-master-ai": minor
---
feat(move): improve cross-tag move UX and safety
- CLI: print "Next Steps" tips after cross-tag moves that used --ignore-dependencies (validate/fix guidance)
- CLI: show dedicated help block on ID collisions (destination tag already has the ID)
- Core: add structured suggestions to TASK_ALREADY_EXISTS errors
- MCP: map ID collision errors to TASK_ALREADY_EXISTS and include suggestions
- Tests: cover MCP options, error suggestions, CLI tips printing, and integration error payload suggestions
---

View File

@@ -1,14 +0,0 @@
---
"task-master-ai": minor
---
Enhanced Claude Code and Google CLI integration with automatic codebase analysis for task operations
When using Claude Code as the AI provider, task management commands now automatically analyze your codebase before generating or updating tasks. This provides more accurate, context-aware implementation details that align with your project's existing architecture and patterns.
Commands contextualised:
- add-task
- update-subtask
- update-task
- update

View File

@@ -0,0 +1,5 @@
---
"extension": minor
---
Added a Start Build button to the VSCODE Task Properties Right Panel

View File

@@ -9,70 +9,124 @@ on:
branches:
- main
- next
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
permissions:
contents: read
env:
DO_NOT_TRACK: 1
NODE_ENV: development
jobs:
setup:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-node@v4
with:
node-version: 20
cache: 'npm'
- name: Install Dependencies
id: install
run: npm ci
timeout-minutes: 2
- name: Cache node_modules
uses: actions/cache@v4
with:
path: node_modules
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
# Fast checks that can run in parallel
format-check:
needs: setup
name: Format Check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- name: Restore node_modules
uses: actions/cache@v4
with:
path: node_modules
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
- name: Install dependencies
run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Format Check
run: npm run format-check
env:
FORCE_COLOR: 1
test:
needs: setup
typecheck:
name: Typecheck
timeout-minutes: 10
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- name: Restore node_modules
uses: actions/cache@v4
- name: Install dependencies
run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Typecheck
run: npm run turbo:typecheck
env:
FORCE_COLOR: 1
# Build job to ensure everything compiles
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
path: node_modules
key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- name: Install dependencies
run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Build
run: npm run turbo:build
env:
NODE_ENV: production
FORCE_COLOR: 1
TM_PUBLIC_BASE_DOMAIN: ${{ secrets.TM_PUBLIC_BASE_DOMAIN }}
TM_PUBLIC_SUPABASE_URL: ${{ secrets.TM_PUBLIC_SUPABASE_URL }}
TM_PUBLIC_SUPABASE_ANON_KEY: ${{ secrets.TM_PUBLIC_SUPABASE_ANON_KEY }}
- name: Upload build artifacts
uses: actions/upload-artifact@v4
with:
name: build-artifacts
path: dist/
retention-days: 1
test:
name: Test
timeout-minutes: 15
runs-on: ubuntu-latest
needs: [format-check, typecheck, build]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version: 20
cache: "npm"
- name: Install dependencies
run: npm install --frozen-lockfile --prefer-offline
timeout-minutes: 5
- name: Download build artifacts
uses: actions/download-artifact@v4
with:
name: build-artifacts
path: dist/
- name: Run Tests
run: |
@@ -81,7 +135,6 @@ jobs:
NODE_ENV: test
CI: true
FORCE_COLOR: 1
timeout-minutes: 10
- name: Upload Test Results
if: always()

View File

@@ -0,0 +1,57 @@
name: Trigger Claude Documentation Update
on:
push:
branches:
- next
paths-ignore:
- "apps/docs/**"
- "*.md"
- ".github/workflows/**"
jobs:
trigger-docs-update:
# Only run if changes were merged (not direct pushes from bots)
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 2 # Need previous commit for comparison
- name: Get changed files
id: changed-files
run: |
echo "Changed files in this push:"
git diff --name-only HEAD^ HEAD | tee changed_files.txt
# Store changed files for Claude to analyze (escaped for JSON)
CHANGED_FILES=$(git diff --name-only HEAD^ HEAD | jq -Rs .)
echo "changed_files=$CHANGED_FILES" >> $GITHUB_OUTPUT
# Get the commit message (escaped for JSON)
COMMIT_MSG=$(git log -1 --pretty=%B | jq -Rs .)
echo "commit_message=$COMMIT_MSG" >> $GITHUB_OUTPUT
# Get diff for documentation context (escaped for JSON)
COMMIT_DIFF=$(git diff HEAD^ HEAD --stat | jq -Rs .)
echo "commit_diff=$COMMIT_DIFF" >> $GITHUB_OUTPUT
# Get commit SHA
echo "commit_sha=${{ github.sha }}" >> $GITHUB_OUTPUT
- name: Trigger Claude workflow
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Trigger the Claude docs updater workflow with the change information
gh workflow run claude-docs-updater.yml \
--ref next \
-f commit_sha="${{ steps.changed-files.outputs.commit_sha }}" \
-f commit_message=${{ steps.changed-files.outputs.commit_message }} \
-f changed_files=${{ steps.changed-files.outputs.changed_files }} \
-f commit_diff=${{ steps.changed-files.outputs.commit_diff }}

View File

@@ -1,18 +1,27 @@
name: Claude Documentation Updater
on:
push:
branches:
- next
paths-ignore:
- "apps/docs/**"
- "*.md"
- ".github/workflows/**"
workflow_dispatch:
inputs:
commit_sha:
description: 'The commit SHA that triggered this update'
required: true
type: string
commit_message:
description: 'The commit message'
required: true
type: string
changed_files:
description: 'List of changed files'
required: true
type: string
commit_diff:
description: 'Diff summary of changes'
required: true
type: string
jobs:
update-docs:
# Only run if changes were merged (not direct pushes from bots)
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
runs-on: ubuntu-latest
permissions:
contents: write
@@ -22,28 +31,8 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 2 # Need previous commit for comparison
- name: Get changed files
id: changed-files
run: |
echo "Changed files in this push:"
git diff --name-only HEAD^ HEAD | tee changed_files.txt
# Store changed files for Claude to analyze
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
git diff --name-only HEAD^ HEAD >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# Get the commit message and changes summary
echo "commit_message<<EOF" >> $GITHUB_OUTPUT
git log -1 --pretty=%B >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# Get diff for documentation context
echo "commit_diff<<EOF" >> $GITHUB_OUTPUT
git diff HEAD^ HEAD --stat >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
ref: next
fetch-depth: 0 # Need full history to checkout specific commit
- name: Create docs update branch
id: create-branch
@@ -71,12 +60,12 @@ jobs:
You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.
Recent changes:
- Commit: ${{ steps.changed-files.outputs.commit_message }}
- Commit: ${{ inputs.commit_message }}
- Changed files:
${{ steps.changed-files.outputs.changed_files }}
${{ inputs.changed_files }}
- Changes summary:
${{ steps.changed-files.outputs.commit_diff }}
${{ inputs.commit_diff }}
Your task:
1. Analyze the changes to understand what functionality was added, modified, or removed
@@ -113,7 +102,7 @@ jobs:
This PR was automatically generated to update documentation based on recent changes.
Original commit: ${{ steps.changed-files.outputs.commit_message }}
Original commit: ${{ inputs.commit_message }}
Co-authored-by: Claude <claude-assistant@anthropic.com>"
fi
@@ -133,12 +122,12 @@ jobs:
This PR automatically updates documentation based on recent changes merged to the \`next\` branch.
### Original Changes
**Commit:** ${{ github.sha }}
**Message:** ${{ steps.changed-files.outputs.commit_message }}
**Commit:** ${{ inputs.commit_sha }}
**Message:** ${{ inputs.commit_message }}
### Changed Files in Original Commit
\`\`\`
${{ steps.changed-files.outputs.changed_files }}
${{ inputs.changed_files }}
\`\`\`
### Documentation Updates

View File

@@ -65,6 +65,20 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Run format
run: npm run format
env:
FORCE_COLOR: 1
- name: Build packages
run: npm run turbo:build
env:
NODE_ENV: production
FORCE_COLOR: 1
TM_PUBLIC_BASE_DOMAIN: ${{ secrets.TM_PUBLIC_BASE_DOMAIN }}
TM_PUBLIC_SUPABASE_URL: ${{ secrets.TM_PUBLIC_SUPABASE_URL }}
TM_PUBLIC_SUPABASE_ANON_KEY: ${{ secrets.TM_PUBLIC_SUPABASE_ANON_KEY }}
- name: Create Release Candidate Pull Request or Publish Release Candidate to npm
uses: changesets/action@v1
with:

View File

@@ -22,7 +22,7 @@ jobs:
- uses: actions/setup-node@v4
with:
node-version: 20
cache: 'npm'
cache: "npm"
- name: Cache node_modules
uses: actions/cache@v4
@@ -41,6 +41,15 @@ jobs:
- name: Check pre-release mode
run: node ./.github/scripts/check-pre-release-mode.mjs "main"
- name: Build packages
run: npm run turbo:build
env:
NODE_ENV: production
FORCE_COLOR: 1
TM_PUBLIC_BASE_DOMAIN: ${{ secrets.TM_PUBLIC_BASE_DOMAIN }}
TM_PUBLIC_SUPABASE_URL: ${{ secrets.TM_PUBLIC_SUPABASE_URL }}
TM_PUBLIC_SUPABASE_ANON_KEY: ${{ secrets.TM_PUBLIC_SUPABASE_ANON_KEY }}
- name: Create Release Pull Request or Publish to npm
uses: changesets/action@v1
with:

5
.gitignore vendored
View File

@@ -93,4 +93,7 @@ dev-debug.log
apps/extension/.vscode-test/
# apps/extension
apps/extension/vsix-build/
apps/extension/vsix-build/
# turbo
.turbo

2
.nvmrc
View File

@@ -1 +1 @@
22
22

View File

@@ -1,5 +1,65 @@
# task-master-ai
## 0.27.0-rc.1
### Minor Changes
- [`255b9f0`](https://github.com/eyaltoledano/claude-task-master/commit/255b9f0334555b0063280abde701445cd62fa11b) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Testing one more pre-release iteration
## 0.27.0-rc.0
### Minor Changes
- [#1213](https://github.com/eyaltoledano/claude-task-master/pull/1213) [`137ef36`](https://github.com/eyaltoledano/claude-task-master/commit/137ef362789a9cdfdb1925e35e0438c1fa6c69ee) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Test out the RC
### Patch Changes
- Updated dependencies [[`137ef36`](https://github.com/eyaltoledano/claude-task-master/commit/137ef362789a9cdfdb1925e35e0438c1fa6c69ee)]:
- @tm/cli@0.27.0-rc.0
## 0.26.0
### Minor Changes
- [#1133](https://github.com/eyaltoledano/claude-task-master/pull/1133) [`df26c65`](https://github.com/eyaltoledano/claude-task-master/commit/df26c65632000874a73504963b08f18c46283144) Thanks [@neonwatty](https://github.com/neonwatty)! - Restore Taskmaster claude-code commands and move clear commands under /remove to avoid collision with the claude-code /clear command.
- [#1163](https://github.com/eyaltoledano/claude-task-master/pull/1163) [`37af0f1`](https://github.com/eyaltoledano/claude-task-master/commit/37af0f191227a68d119b7f89a377bf932ee3ac66) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Enhanced Gemini CLI provider with codebase-aware task generation
Added automatic codebase analysis for Gemini CLI provider in parse-prd, and analyze-complexity, add-task, udpate-task, update, update-subtask commands
When using Gemini CLI as the AI provider, Task Master now instructs the AI to analyze the project structure, existing implementations, and patterns before generating tasks or subtasks
Tasks and subtasks generated by Claude Code are now informed by actual codebase analysis, resulting in more accurate and contextual outputs
- [#1165](https://github.com/eyaltoledano/claude-task-master/pull/1165) [`c4f92f6`](https://github.com/eyaltoledano/claude-task-master/commit/c4f92f6a0aee3435c56eb8d27d9aa9204284833e) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Add configurable codebase analysis feature flag with multiple configuration sources
Users can now control whether codebase analysis features (Claude Code and Gemini CLI integration) are enabled through environment variables, MCP configuration, or project config files.
Priority order: .env > MCP session env > .taskmaster/config.json.
Set `TASKMASTER_ENABLE_CODEBASE_ANALYSIS=false` in `.env` to disable codebase analysis prompts and tool integration.
- [#1135](https://github.com/eyaltoledano/claude-task-master/pull/1135) [`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f) Thanks [@mm-parthy](https://github.com/mm-parthy)! - feat(move): improve cross-tag move UX and safety
- CLI: print "Next Steps" tips after cross-tag moves that used --ignore-dependencies (validate/fix guidance)
- CLI: show dedicated help block on ID collisions (destination tag already has the ID)
- Core: add structured suggestions to TASK_ALREADY_EXISTS errors
- MCP: map ID collision errors to TASK_ALREADY_EXISTS and include suggestions
- Tests: cover MCP options, error suggestions, CLI tips printing, and integration error payload suggestions
***
- [#1162](https://github.com/eyaltoledano/claude-task-master/pull/1162) [`4dad2fd`](https://github.com/eyaltoledano/claude-task-master/commit/4dad2fd613ceac56a65ae9d3c1c03092b8860ac9) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - Enhanced Claude Code and Google CLI integration with automatic codebase analysis for task operations
When using Claude Code as the AI provider, task management commands now automatically analyze your codebase before generating or updating tasks. This provides more accurate, context-aware implementation details that align with your project's existing architecture and patterns.
Commands contextualised:
- add-task
- update-subtask
- update-task
- update
### Patch Changes
- [#1135](https://github.com/eyaltoledano/claude-task-master/pull/1135) [`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f) Thanks [@mm-parthy](https://github.com/mm-parthy)! - docs(move): clarify cross-tag move docs; deprecate "force"; add explicit --with-dependencies/--ignore-dependencies examples
## 0.26.0-rc.1
### Minor Changes

13
apps/cli/CHANGELOG.md Normal file
View File

@@ -0,0 +1,13 @@
# @tm/cli
## 0.27.0-rc.0
### Minor Changes
- [#1213](https://github.com/eyaltoledano/claude-task-master/pull/1213) [`137ef36`](https://github.com/eyaltoledano/claude-task-master/commit/137ef362789a9cdfdb1925e35e0438c1fa6c69ee) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - testing this stuff out to see how the release candidate works with monorepo
## 1.1.0-rc.0
### Minor Changes
- [#1213](https://github.com/eyaltoledano/claude-task-master/pull/1213) [`cd90b4d`](https://github.com/eyaltoledano/claude-task-master/commit/cd90b4d65fc2f04bdad9fb73aba320b58a124240) Thanks [@Crunchyman-ralph](https://github.com/Crunchyman-ralph)! - testing this stuff out to see how the release candidate works with monorepo

View File

@@ -1,42 +1,40 @@
{
"name": "@tm/cli",
"version": "1.0.0",
"version": "0.27.0-rc.0",
"description": "Task Master CLI - Command line interface for task management",
"type": "module",
"private": true,
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"types": "./src/index.ts",
"exports": {
".": {
"types": "./src/index.ts",
"import": "./dist/index.js",
"require": "./dist/index.js"
}
".": "./src/index.ts"
},
"files": ["dist", "README.md"],
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"typecheck": "tsc --noEmit",
"lint": "biome check src",
"format": "biome format --write src",
"test": "vitest run",
"test:watch": "vitest"
"test:watch": "vitest",
"test:coverage": "vitest run --coverage",
"test:unit": "vitest run -t unit",
"test:integration": "vitest run -t integration",
"test:e2e": "vitest run --dir tests/e2e",
"test:ci": "vitest run --coverage --reporter=dot"
},
"dependencies": {
"@tm/core": "*",
"boxen": "^7.1.1",
"chalk": "^5.3.0",
"chalk": "5.6.2",
"cli-table3": "^0.6.5",
"commander": "^12.1.0",
"inquirer": "^9.2.10",
"open": "^10.2.0",
"ora": "^8.1.0"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/inquirer": "^9.0.3",
"@types/node": "^22.10.5",
"tsup": "^8.3.0",
"tsx": "^4.20.4",
"typescript": "^5.7.3",
"vitest": "^2.1.8"
@@ -46,5 +44,10 @@
},
"keywords": ["task-master", "cli", "task-management", "productivity"],
"author": "",
"license": "MIT"
"license": "MIT",
"typesVersions": {
"*": {
"*": ["src/*"]
}
}
}

View File

@@ -0,0 +1,570 @@
/**
* @fileoverview Context command for managing org/brief selection
* Provides a clean interface for workspace context management
*/
import { Command } from 'commander';
import chalk from 'chalk';
import inquirer from 'inquirer';
import ora from 'ora';
import {
AuthManager,
AuthenticationError,
type UserContext
} from '@tm/core/auth';
import * as ui from '../utils/ui.js';
/**
* Result type from context command
*/
export interface ContextResult {
success: boolean;
action: 'show' | 'select-org' | 'select-brief' | 'clear' | 'set';
context?: UserContext;
message?: string;
}
/**
* ContextCommand extending Commander's Command class
* Manages user's workspace context (org/brief selection)
*/
export class ContextCommand extends Command {
private authManager: AuthManager;
private lastResult?: ContextResult;
constructor(name?: string) {
super(name || 'context');
// Initialize auth manager
this.authManager = AuthManager.getInstance();
// Configure the command
this.description(
'Manage workspace context (organization and brief selection)'
);
// Add subcommands
this.addOrgCommand();
this.addBriefCommand();
this.addClearCommand();
this.addSetCommand();
// Default action shows current context
this.action(async () => {
await this.executeShow();
});
}
/**
* Add org selection subcommand
*/
private addOrgCommand(): void {
this.command('org')
.description('Select an organization')
.action(async () => {
await this.executeSelectOrg();
});
}
/**
* Add brief selection subcommand
*/
private addBriefCommand(): void {
this.command('brief')
.description('Select a brief within the current organization')
.action(async () => {
await this.executeSelectBrief();
});
}
/**
* Add clear subcommand
*/
private addClearCommand(): void {
this.command('clear')
.description('Clear all context selections')
.action(async () => {
await this.executeClear();
});
}
/**
* Add set subcommand for direct context setting
*/
private addSetCommand(): void {
this.command('set')
.description('Set context directly')
.option('--org <id>', 'Organization ID')
.option('--org-name <name>', 'Organization name')
.option('--brief <id>', 'Brief ID')
.option('--brief-name <name>', 'Brief name')
.action(async (options) => {
await this.executeSet(options);
});
}
/**
* Execute show current context
*/
private async executeShow(): Promise<void> {
try {
const result = this.displayContext();
this.setLastResult(result);
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Display current context
*/
private displayContext(): ContextResult {
// Check authentication first
if (!this.authManager.isAuthenticated()) {
console.log(chalk.yellow('✗ Not authenticated'));
console.log(chalk.gray('\n Run "tm auth login" to authenticate first'));
return {
success: false,
action: 'show',
message: 'Not authenticated'
};
}
const context = this.authManager.getContext();
console.log(chalk.cyan('\n🌍 Workspace Context\n'));
if (context && (context.orgId || context.briefId)) {
if (context.orgName || context.orgId) {
console.log(chalk.green('✓ Organization'));
if (context.orgName) {
console.log(chalk.white(` ${context.orgName}`));
}
if (context.orgId) {
console.log(chalk.gray(` ID: ${context.orgId}`));
}
}
if (context.briefName || context.briefId) {
console.log(chalk.green('\n✓ Brief'));
if (context.briefName) {
console.log(chalk.white(` ${context.briefName}`));
}
if (context.briefId) {
console.log(chalk.gray(` ID: ${context.briefId}`));
}
}
if (context.updatedAt) {
console.log(
chalk.gray(
`\n Last updated: ${new Date(context.updatedAt).toLocaleString()}`
)
);
}
return {
success: true,
action: 'show',
context,
message: 'Context loaded'
};
} else {
console.log(chalk.yellow('✗ No context selected'));
console.log(
chalk.gray('\n Run "tm context org" to select an organization')
);
console.log(chalk.gray(' Run "tm context brief" to select a brief'));
return {
success: true,
action: 'show',
message: 'No context selected'
};
}
}
/**
* Execute org selection
*/
private async executeSelectOrg(): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
const result = await this.selectOrganization();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Select an organization interactively
*/
private async selectOrganization(): Promise<ContextResult> {
const spinner = ora('Fetching organizations...').start();
try {
// Fetch organizations from API
const organizations = await this.authManager.getOrganizations();
spinner.stop();
if (organizations.length === 0) {
ui.displayWarning('No organizations available');
return {
success: false,
action: 'select-org',
message: 'No organizations available'
};
}
// Prompt for selection
const { selectedOrg } = await inquirer.prompt([
{
type: 'list',
name: 'selectedOrg',
message: 'Select an organization:',
choices: organizations.map((org) => ({
name: org.name,
value: org
}))
}
]);
// Update context
await this.authManager.updateContext({
orgId: selectedOrg.id,
orgName: selectedOrg.name,
// Clear brief when changing org
briefId: undefined,
briefName: undefined
});
ui.displaySuccess(`Selected organization: ${selectedOrg.name}`);
return {
success: true,
action: 'select-org',
context: this.authManager.getContext() || undefined,
message: `Selected organization: ${selectedOrg.name}`
};
} catch (error) {
spinner.fail('Failed to fetch organizations');
throw error;
}
}
/**
* Execute brief selection
*/
private async executeSelectBrief(): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
// Check if org is selected
const context = this.authManager.getContext();
if (!context?.orgId) {
ui.displayError(
'No organization selected. Run "tm context org" first.'
);
process.exit(1);
}
const result = await this.selectBrief(context.orgId);
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Select a brief within the current organization
*/
private async selectBrief(orgId: string): Promise<ContextResult> {
const spinner = ora('Fetching briefs...').start();
try {
// Fetch briefs from API
const briefs = await this.authManager.getBriefs(orgId);
spinner.stop();
if (briefs.length === 0) {
ui.displayWarning('No briefs available in this organization');
return {
success: false,
action: 'select-brief',
message: 'No briefs available'
};
}
// Prompt for selection
const { selectedBrief } = await inquirer.prompt([
{
type: 'list',
name: 'selectedBrief',
message: 'Select a brief:',
choices: [
{ name: '(No brief - organization level)', value: null },
...briefs.map((brief) => ({
name: `Brief ${brief.id} (${new Date(brief.createdAt).toLocaleDateString()})`,
value: brief
}))
]
}
]);
if (selectedBrief) {
// Update context with brief
const briefName = `Brief ${selectedBrief.id.slice(0, 8)}`;
await this.authManager.updateContext({
briefId: selectedBrief.id,
briefName: briefName
});
ui.displaySuccess(`Selected brief: ${briefName}`);
return {
success: true,
action: 'select-brief',
context: this.authManager.getContext() || undefined,
message: `Selected brief: ${selectedBrief.name}`
};
} else {
// Clear brief selection
await this.authManager.updateContext({
briefId: undefined,
briefName: undefined
});
ui.displaySuccess('Cleared brief selection (organization level)');
return {
success: true,
action: 'select-brief',
context: this.authManager.getContext() || undefined,
message: 'Cleared brief selection'
};
}
} catch (error) {
spinner.fail('Failed to fetch briefs');
throw error;
}
}
/**
* Execute clear context
*/
private async executeClear(): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
const result = await this.clearContext();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Clear all context selections
*/
private async clearContext(): Promise<ContextResult> {
try {
await this.authManager.clearContext();
ui.displaySuccess('Context cleared');
return {
success: true,
action: 'clear',
message: 'Context cleared'
};
} catch (error) {
ui.displayError(`Failed to clear context: ${(error as Error).message}`);
return {
success: false,
action: 'clear',
message: `Failed to clear context: ${(error as Error).message}`
};
}
}
/**
* Execute set context with options
*/
private async executeSet(options: any): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
const result = await this.setContext(options);
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Set context directly from options
*/
private async setContext(options: any): Promise<ContextResult> {
try {
const context: Partial<UserContext> = {};
if (options.org) {
context.orgId = options.org;
}
if (options.orgName) {
context.orgName = options.orgName;
}
if (options.brief) {
context.briefId = options.brief;
}
if (options.briefName) {
context.briefName = options.briefName;
}
if (Object.keys(context).length === 0) {
ui.displayWarning('No context options provided');
return {
success: false,
action: 'set',
message: 'No context options provided'
};
}
await this.authManager.updateContext(context);
ui.displaySuccess('Context updated');
// Display what was set
if (context.orgName || context.orgId) {
console.log(
chalk.gray(` Organization: ${context.orgName || context.orgId}`)
);
}
if (context.briefName || context.briefId) {
console.log(
chalk.gray(` Brief: ${context.briefName || context.briefId}`)
);
}
return {
success: true,
action: 'set',
context: this.authManager.getContext() || undefined,
message: 'Context updated'
};
} catch (error) {
ui.displayError(`Failed to set context: ${(error as Error).message}`);
return {
success: false,
action: 'set',
message: `Failed to set context: ${(error as Error).message}`
};
}
}
/**
* Handle errors
*/
private handleError(error: any): void {
if (error instanceof AuthenticationError) {
console.error(chalk.red(`\n✗ ${error.message}`));
if (error.code === 'NOT_AUTHENTICATED') {
ui.displayWarning('Please authenticate first: tm auth login');
}
} else {
const msg = error?.message ?? String(error);
console.error(chalk.red(`Error: ${msg}`));
if (error.stack && process.env.DEBUG) {
console.error(chalk.gray(error.stack));
}
}
}
/**
* Set the last result for programmatic access
*/
private setLastResult(result: ContextResult): void {
this.lastResult = result;
}
/**
* Get the last result (for programmatic usage)
*/
getLastResult(): ContextResult | undefined {
return this.lastResult;
}
/**
* Get current context (for programmatic usage)
*/
getContext(): UserContext | null {
return this.authManager.getContext();
}
/**
* Clean up resources
*/
async cleanup(): Promise<void> {
// No resources to clean up for context command
}
/**
* Static method to register this command on an existing program
*/
static registerOn(program: Command): Command {
const contextCommand = new ContextCommand();
program.addCommand(contextCommand);
return contextCommand;
}
/**
* Alternative registration that returns the command for chaining
*/
static register(program: Command, name?: string): ContextCommand {
const contextCommand = new ContextCommand(name);
program.addCommand(contextCommand);
return contextCommand;
}
}

View File

@@ -15,7 +15,20 @@ import {
STATUS_ICONS,
type OutputFormat
} from '@tm/core';
import type { StorageType } from '@tm/core/types';
import * as ui from '../utils/ui.js';
import {
displayHeader,
displayDashboards,
calculateTaskStatistics,
calculateSubtaskStatistics,
calculateDependencyStatistics,
getPriorityBreakdown,
displayRecommendedNextTask,
getTaskDescription,
displaySuggestedNextSteps,
type NextTaskInfo
} from '../ui/index.js';
/**
* Options interface for the list command
@@ -37,7 +50,7 @@ export interface ListTasksResult {
total: number;
filtered: number;
tag?: string;
storageType: 'file' | 'api';
storageType: Exclude<StorageType, 'auto'>;
}
/**
@@ -244,19 +257,16 @@ export class ListTasksCommand extends Command {
* Display in text format with tables
*/
private displayText(data: ListTasksResult, withSubtasks?: boolean): void {
const { tasks, total, filtered, tag, storageType } = data;
const { tasks, tag } = data;
// Header
ui.displayBanner(`Task List${tag ? ` (${tag})` : ''}`);
// Get file path for display
const filePath = this.tmCore ? `.taskmaster/tasks/tasks.json` : undefined;
// Statistics
console.log(chalk.blue.bold('\n📊 Statistics:\n'));
console.log(` Total tasks: ${chalk.cyan(total)}`);
console.log(` Filtered: ${chalk.cyan(filtered)}`);
if (tag) {
console.log(` Tag: ${chalk.cyan(tag)}`);
}
console.log(` Storage: ${chalk.cyan(storageType)}`);
// Display header without banner (banner already shown by main CLI)
displayHeader({
tag: tag || 'master',
filePath: filePath
});
// No tasks message
if (tasks.length === 0) {
@@ -264,21 +274,50 @@ export class ListTasksCommand extends Command {
return;
}
// Task table
console.log(chalk.blue.bold(`\n📋 Tasks (${tasks.length}):\n`));
// Calculate statistics
const taskStats = calculateTaskStatistics(tasks);
const subtaskStats = calculateSubtaskStatistics(tasks);
const depStats = calculateDependencyStatistics(tasks);
const priorityBreakdown = getPriorityBreakdown(tasks);
// Find next task following the same logic as findNextTask
const nextTask = this.findNextTask(tasks);
// Display dashboard boxes
displayDashboards(
taskStats,
subtaskStats,
priorityBreakdown,
depStats,
nextTask
);
// Task table - no title, just show the table directly
console.log(
ui.createTaskTable(tasks, {
showSubtasks: withSubtasks,
showDependencies: true
showDependencies: true,
showComplexity: true // Enable complexity column
})
);
// Progress bar
const completedCount = tasks.filter(
(t: Task) => t.status === 'done'
).length;
console.log(chalk.blue.bold('\n📊 Overall Progress:\n'));
console.log(` ${ui.createProgressBar(completedCount, tasks.length)}`);
// Display recommended next task section immediately after table
if (nextTask) {
// Find the full task object to get description
const fullTask = tasks.find((t) => String(t.id) === String(nextTask.id));
const description = fullTask ? getTaskDescription(fullTask) : undefined;
displayRecommendedNextTask({
...nextTask,
status: 'pending', // Next task is typically pending
description
});
} else {
displayRecommendedNextTask(undefined);
}
// Display suggested next steps at the end
displaySuggestedNextSteps();
}
/**
@@ -288,6 +327,128 @@ export class ListTasksCommand extends Command {
this.lastResult = result;
}
/**
* Find the next task to work on
* Implements the same logic as scripts/modules/task-manager/find-next-task.js
*/
private findNextTask(tasks: Task[]): NextTaskInfo | undefined {
const priorityValues: Record<string, number> = {
critical: 4,
high: 3,
medium: 2,
low: 1
};
// Build set of completed task IDs (including subtasks)
const completedIds = new Set<string>();
tasks.forEach((t) => {
if (t.status === 'done' || t.status === 'completed') {
completedIds.add(String(t.id));
}
if (t.subtasks) {
t.subtasks.forEach((st) => {
if (st.status === 'done' || st.status === 'completed') {
completedIds.add(`${t.id}.${st.id}`);
}
});
}
});
// First, look for eligible subtasks in in-progress parent tasks
const candidateSubtasks: NextTaskInfo[] = [];
tasks
.filter(
(t) => t.status === 'in-progress' && t.subtasks && t.subtasks.length > 0
)
.forEach((parent) => {
parent.subtasks!.forEach((st) => {
const stStatus = (st.status || 'pending').toLowerCase();
if (stStatus !== 'pending' && stStatus !== 'in-progress') return;
// Check if dependencies are satisfied
const fullDeps =
st.dependencies?.map((d) => {
// Handle both numeric and string IDs
if (typeof d === 'string' && d.includes('.')) {
return d;
}
return `${parent.id}.${d}`;
}) ?? [];
const depsSatisfied =
fullDeps.length === 0 ||
fullDeps.every((depId) => completedIds.has(String(depId)));
if (depsSatisfied) {
candidateSubtasks.push({
id: `${parent.id}.${st.id}`,
title: st.title || `Subtask ${st.id}`,
priority: st.priority || parent.priority || 'medium',
dependencies: fullDeps.map((d) => String(d))
});
}
});
});
if (candidateSubtasks.length > 0) {
// Sort by priority, then by dependencies count, then by ID
candidateSubtasks.sort((a, b) => {
const pa = priorityValues[a.priority || 'medium'] ?? 2;
const pb = priorityValues[b.priority || 'medium'] ?? 2;
if (pb !== pa) return pb - pa;
const depCountA = a.dependencies?.length || 0;
const depCountB = b.dependencies?.length || 0;
if (depCountA !== depCountB) return depCountA - depCountB;
return String(a.id).localeCompare(String(b.id));
});
return candidateSubtasks[0];
}
// Fall back to finding eligible top-level tasks
const eligibleTasks = tasks.filter((task) => {
// Skip non-eligible statuses
const status = (task.status || 'pending').toLowerCase();
if (status !== 'pending' && status !== 'in-progress') return false;
// Check dependencies
const deps = task.dependencies || [];
const depsSatisfied =
deps.length === 0 ||
deps.every((depId) => completedIds.has(String(depId)));
return depsSatisfied;
});
if (eligibleTasks.length === 0) return undefined;
// Sort eligible tasks
eligibleTasks.sort((a, b) => {
// Priority (higher first)
const pa = priorityValues[a.priority || 'medium'] ?? 2;
const pb = priorityValues[b.priority || 'medium'] ?? 2;
if (pb !== pa) return pb - pa;
// Dependencies count (fewer first)
const depCountA = a.dependencies?.length || 0;
const depCountB = b.dependencies?.length || 0;
if (depCountA !== depCountB) return depCountA - depCountB;
// ID (lower first)
return Number(a.id) - Number(b.id);
});
const nextTask = eligibleTasks[0];
return {
id: nextTask.id,
title: nextTask.title,
priority: nextTask.priority,
dependencies: nextTask.dependencies?.map((d) => String(d))
};
}
/**
* Get the last result (for programmatic usage)
*/

View File

@@ -0,0 +1,318 @@
/**
* @fileoverview SetStatusCommand using Commander's native class pattern
* Extends Commander.Command for better integration with the framework
*/
import { Command } from 'commander';
import chalk from 'chalk';
import boxen from 'boxen';
import {
createTaskMasterCore,
type TaskMasterCore,
type TaskStatus
} from '@tm/core';
import type { StorageType } from '@tm/core/types';
/**
* Valid task status values for validation
*/
const VALID_TASK_STATUSES: TaskStatus[] = [
'pending',
'in-progress',
'done',
'deferred',
'cancelled',
'blocked',
'review'
];
/**
* Options interface for the set-status command
*/
export interface SetStatusCommandOptions {
id?: string;
status?: TaskStatus;
format?: 'text' | 'json';
silent?: boolean;
project?: string;
}
/**
* Result type from set-status command
*/
export interface SetStatusResult {
success: boolean;
updatedTasks: Array<{
taskId: string;
oldStatus: TaskStatus;
newStatus: TaskStatus;
}>;
storageType: Exclude<StorageType, 'auto'>;
}
/**
* SetStatusCommand extending Commander's Command class
* This is a thin presentation layer over @tm/core
*/
export class SetStatusCommand extends Command {
private tmCore?: TaskMasterCore;
private lastResult?: SetStatusResult;
constructor(name?: string) {
super(name || 'set-status');
// Configure the command
this.description('Update the status of one or more tasks')
.requiredOption(
'-i, --id <id>',
'Task ID(s) to update (comma-separated for multiple, supports subtasks like 5.2)'
)
.requiredOption(
'-s, --status <status>',
`New status (${VALID_TASK_STATUSES.join(', ')})`
)
.option('-f, --format <format>', 'Output format (text, json)', 'text')
.option('--silent', 'Suppress output (useful for programmatic usage)')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.action(async (options: SetStatusCommandOptions) => {
await this.executeCommand(options);
});
}
/**
* Execute the set-status command
*/
private async executeCommand(
options: SetStatusCommandOptions
): Promise<void> {
try {
// Validate required options
if (!options.id) {
console.error(chalk.red('Error: Task ID is required. Use -i or --id'));
process.exit(1);
}
if (!options.status) {
console.error(
chalk.red('Error: Status is required. Use -s or --status')
);
process.exit(1);
}
// Validate status
if (!VALID_TASK_STATUSES.includes(options.status)) {
console.error(
chalk.red(
`Error: Invalid status "${options.status}". Valid options: ${VALID_TASK_STATUSES.join(', ')}`
)
);
process.exit(1);
}
// Initialize TaskMaster core
this.tmCore = await createTaskMasterCore({
projectPath: options.project || process.cwd()
});
// Parse task IDs (handle comma-separated values)
const taskIds = options.id.split(',').map((id) => id.trim());
// Update each task
const updatedTasks: Array<{
taskId: string;
oldStatus: TaskStatus;
newStatus: TaskStatus;
}> = [];
for (const taskId of taskIds) {
try {
const result = await this.tmCore.updateTaskStatus(
taskId,
options.status
);
updatedTasks.push({
taskId: result.taskId,
oldStatus: result.oldStatus,
newStatus: result.newStatus
});
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
if (!options.silent) {
console.error(
chalk.red(`Failed to update task ${taskId}: ${errorMessage}`)
);
}
if (options.format === 'json') {
console.log(
JSON.stringify({
success: false,
error: errorMessage,
taskId,
timestamp: new Date().toISOString()
})
);
}
process.exit(1);
}
}
// Store result for potential reuse
this.lastResult = {
success: true,
updatedTasks,
storageType: this.tmCore.getStorageType() as Exclude<
StorageType,
'auto'
>
};
// Display results
this.displayResults(this.lastResult, options);
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : 'Unknown error occurred';
if (!options.silent) {
console.error(chalk.red(`Error: ${errorMessage}`));
}
if (options.format === 'json') {
console.log(JSON.stringify({ success: false, error: errorMessage }));
}
process.exit(1);
} finally {
// Clean up resources
if (this.tmCore) {
await this.tmCore.close();
}
}
}
/**
* Display results based on format
*/
private displayResults(
result: SetStatusResult,
options: SetStatusCommandOptions
): void {
const format = options.format || 'text';
switch (format) {
case 'json':
console.log(JSON.stringify(result, null, 2));
break;
case 'text':
default:
if (!options.silent) {
this.displayTextResults(result);
}
break;
}
}
/**
* Display results in text format
*/
private displayTextResults(result: SetStatusResult): void {
if (result.updatedTasks.length === 1) {
// Single task update
const update = result.updatedTasks[0];
console.log(
boxen(
chalk.white.bold(`✅ Successfully updated task ${update.taskId}`) +
'\n\n' +
`${chalk.blue('From:')} ${this.getStatusDisplay(update.oldStatus)}\n` +
`${chalk.blue('To:')} ${this.getStatusDisplay(update.newStatus)}`,
{
padding: 1,
borderColor: 'green',
borderStyle: 'round',
margin: { top: 1 }
}
)
);
} else {
// Multiple task updates
console.log(
boxen(
chalk.white.bold(
`✅ Successfully updated ${result.updatedTasks.length} tasks`
) +
'\n\n' +
result.updatedTasks
.map(
(update) =>
`${chalk.cyan(update.taskId)}: ${this.getStatusDisplay(update.oldStatus)}${this.getStatusDisplay(update.newStatus)}`
)
.join('\n'),
{
padding: 1,
borderColor: 'green',
borderStyle: 'round',
margin: { top: 1 }
}
)
);
}
// Show storage info
console.log(chalk.gray(`\nUsing ${result.storageType} storage`));
}
/**
* Get colored status display
*/
private getStatusDisplay(status: TaskStatus): string {
const statusColors: Record<TaskStatus, (text: string) => string> = {
pending: chalk.yellow,
'in-progress': chalk.blue,
done: chalk.green,
deferred: chalk.gray,
cancelled: chalk.red,
blocked: chalk.red,
review: chalk.magenta,
completed: chalk.green
};
const colorFn = statusColors[status] || chalk.white;
return colorFn(status);
}
/**
* Get the last command result (useful for testing or chaining)
*/
getLastResult(): SetStatusResult | undefined {
return this.lastResult;
}
/**
* Static method to register this command on an existing program
* This is for gradual migration - allows commands.js to use this
*/
static registerOn(program: Command): Command {
const setStatusCommand = new SetStatusCommand();
program.addCommand(setStatusCommand);
return setStatusCommand;
}
/**
* Alternative registration that returns the command for chaining
* Can also configure the command name if needed
*/
static register(program: Command, name?: string): SetStatusCommand {
const setStatusCommand = new SetStatusCommand(name);
program.addCommand(setStatusCommand);
return setStatusCommand;
}
}
/**
* Factory function to create and configure the set-status command
*/
export function createSetStatusCommand(): SetStatusCommand {
return new SetStatusCommand();
}

View File

@@ -0,0 +1,383 @@
/**
* @fileoverview ShowCommand using Commander's native class pattern
* Extends Commander.Command for better integration with the framework
*/
import { Command } from 'commander';
import chalk from 'chalk';
import boxen from 'boxen';
import { createTaskMasterCore, type Task, type TaskMasterCore } from '@tm/core';
import type { StorageType } from '@tm/core/types';
import * as ui from '../utils/ui.js';
import {
displayTaskHeader,
displayTaskProperties,
displayImplementationDetails,
displayTestStrategy,
displaySubtasks,
displaySuggestedActions
} from '../ui/components/task-detail.component.js';
/**
* Options interface for the show command
*/
export interface ShowCommandOptions {
id?: string;
status?: string;
format?: 'text' | 'json';
silent?: boolean;
project?: string;
}
/**
* Result type from show command
*/
export interface ShowTaskResult {
task: Task | null;
found: boolean;
storageType: Exclude<StorageType, 'auto'>;
}
/**
* Result type for multiple tasks
*/
export interface ShowMultipleTasksResult {
tasks: Task[];
notFound: string[];
storageType: Exclude<StorageType, 'auto'>;
}
/**
* ShowCommand extending Commander's Command class
* This is a thin presentation layer over @tm/core
*/
export class ShowCommand extends Command {
private tmCore?: TaskMasterCore;
private lastResult?: ShowTaskResult | ShowMultipleTasksResult;
constructor(name?: string) {
super(name || 'show');
// Configure the command
this.description('Display detailed information about one or more tasks')
.argument('[id]', 'Task ID(s) to show (comma-separated for multiple)')
.option(
'-i, --id <id>',
'Task ID(s) to show (comma-separated for multiple)'
)
.option('-s, --status <status>', 'Filter subtasks by status')
.option('-f, --format <format>', 'Output format (text, json)', 'text')
.option('--silent', 'Suppress output (useful for programmatic usage)')
.option('-p, --project <path>', 'Project root directory', process.cwd())
.action(
async (taskId: string | undefined, options: ShowCommandOptions) => {
await this.executeCommand(taskId, options);
}
);
}
/**
* Execute the show command
*/
private async executeCommand(
taskId: string | undefined,
options: ShowCommandOptions
): Promise<void> {
try {
// Validate options
if (!this.validateOptions(options)) {
process.exit(1);
}
// Initialize tm-core
await this.initializeCore(options.project || process.cwd());
// Get the task ID from argument or option
const idArg = taskId || options.id;
if (!idArg) {
console.error(chalk.red('Error: Please provide a task ID'));
process.exit(1);
}
// Check if multiple IDs are provided (comma-separated)
const taskIds = idArg
.split(',')
.map((id) => id.trim())
.filter((id) => id.length > 0);
// Get tasks from core
const result =
taskIds.length > 1
? await this.getMultipleTasks(taskIds, options)
: await this.getSingleTask(taskIds[0], options);
// Store result for programmatic access
this.setLastResult(result);
// Display results
if (!options.silent) {
this.displayResults(result, options);
}
} catch (error: any) {
const msg = error?.getSanitizedDetails?.() ?? {
message: error?.message ?? String(error)
};
console.error(chalk.red(`Error: ${msg.message || 'Unexpected error'}`));
if (error.stack && process.env.DEBUG) {
console.error(chalk.gray(error.stack));
}
process.exit(1);
}
}
/**
* Validate command options
*/
private validateOptions(options: ShowCommandOptions): boolean {
// Validate format
if (options.format && !['text', 'json'].includes(options.format)) {
console.error(chalk.red(`Invalid format: ${options.format}`));
console.error(chalk.gray(`Valid formats: text, json`));
return false;
}
return true;
}
/**
* Initialize TaskMasterCore
*/
private async initializeCore(projectRoot: string): Promise<void> {
if (!this.tmCore) {
this.tmCore = await createTaskMasterCore({ projectPath: projectRoot });
}
}
/**
* Get a single task from tm-core
*/
private async getSingleTask(
taskId: string,
_options: ShowCommandOptions
): Promise<ShowTaskResult> {
if (!this.tmCore) {
throw new Error('TaskMasterCore not initialized');
}
// Get the task
const task = await this.tmCore.getTask(taskId);
// Get storage type
const storageType = this.tmCore.getStorageType();
return {
task,
found: task !== null,
storageType: storageType as Exclude<StorageType, 'auto'>
};
}
/**
* Get multiple tasks from tm-core
*/
private async getMultipleTasks(
taskIds: string[],
_options: ShowCommandOptions
): Promise<ShowMultipleTasksResult> {
if (!this.tmCore) {
throw new Error('TaskMasterCore not initialized');
}
const tasks: Task[] = [];
const notFound: string[] = [];
// Get each task individually
for (const taskId of taskIds) {
const task = await this.tmCore.getTask(taskId);
if (task) {
tasks.push(task);
} else {
notFound.push(taskId);
}
}
// Get storage type
const storageType = this.tmCore.getStorageType();
return {
tasks,
notFound,
storageType: storageType as Exclude<StorageType, 'auto'>
};
}
/**
* Display results based on format
*/
private displayResults(
result: ShowTaskResult | ShowMultipleTasksResult,
options: ShowCommandOptions
): void {
const format = options.format || 'text';
switch (format) {
case 'json':
this.displayJson(result);
break;
case 'text':
default:
if ('task' in result) {
// Single task result
this.displaySingleTask(result, options);
} else {
// Multiple tasks result
this.displayMultipleTasks(result, options);
}
break;
}
}
/**
* Display in JSON format
*/
private displayJson(result: ShowTaskResult | ShowMultipleTasksResult): void {
console.log(JSON.stringify(result, null, 2));
}
/**
* Display a single task in text format
*/
private displaySingleTask(
result: ShowTaskResult,
options: ShowCommandOptions
): void {
if (!result.found || !result.task) {
console.log(
boxen(chalk.yellow(`Task not found!`), {
padding: { top: 0, bottom: 0, left: 1, right: 1 },
borderColor: 'yellow',
borderStyle: 'round',
margin: { top: 1 }
})
);
return;
}
const task = result.task;
// Display header with tag
displayTaskHeader(task.id, task.title);
// Display task properties in table format
displayTaskProperties(task);
// Display implementation details if available
if (task.details) {
console.log(); // Empty line for spacing
displayImplementationDetails(task.details);
}
// Display test strategy if available
if ('testStrategy' in task && task.testStrategy) {
console.log(); // Empty line for spacing
displayTestStrategy(task.testStrategy as string);
}
// Display subtasks if available
if (task.subtasks && task.subtasks.length > 0) {
// Filter subtasks by status if provided
const filteredSubtasks = options.status
? task.subtasks.filter((sub) => sub.status === options.status)
: task.subtasks;
if (filteredSubtasks.length === 0 && options.status) {
console.log(
chalk.gray(` No subtasks with status '${options.status}'`)
);
} else {
displaySubtasks(filteredSubtasks, task.id);
}
}
// Display suggested actions
displaySuggestedActions(task.id);
}
/**
* Display multiple tasks in text format
*/
private displayMultipleTasks(
result: ShowMultipleTasksResult,
_options: ShowCommandOptions
): void {
// Header
ui.displayBanner(`Tasks (${result.tasks.length} found)`);
if (result.notFound.length > 0) {
console.log(chalk.yellow(`\n⚠ Not found: ${result.notFound.join(', ')}`));
}
if (result.tasks.length === 0) {
ui.displayWarning('No tasks found matching the criteria.');
return;
}
// Task table
console.log(chalk.blue.bold(`\n📋 Tasks:\n`));
console.log(
ui.createTaskTable(result.tasks, {
showSubtasks: true,
showDependencies: true
})
);
console.log(`\n${chalk.gray('Storage: ' + result.storageType)}`);
}
/**
* Set the last result for programmatic access
*/
private setLastResult(
result: ShowTaskResult | ShowMultipleTasksResult
): void {
this.lastResult = result;
}
/**
* Get the last result (for programmatic usage)
*/
getLastResult(): ShowTaskResult | ShowMultipleTasksResult | undefined {
return this.lastResult;
}
/**
* Clean up resources
*/
async cleanup(): Promise<void> {
if (this.tmCore) {
await this.tmCore.close();
this.tmCore = undefined;
}
}
/**
* Static method to register this command on an existing program
* This is for gradual migration - allows commands.js to use this
*/
static registerOn(program: Command): Command {
const showCommand = new ShowCommand();
program.addCommand(showCommand);
return showCommand;
}
/**
* Alternative registration that returns the command for chaining
* Can also configure the command name if needed
*/
static register(program: Command, name?: string): ShowCommand {
const showCommand = new ShowCommand(name);
program.addCommand(showCommand);
return showCommand;
}
}

View File

@@ -5,11 +5,21 @@
// Commands
export { ListTasksCommand } from './commands/list.command.js';
export { ShowCommand } from './commands/show.command.js';
export { AuthCommand } from './commands/auth.command.js';
export { ContextCommand } from './commands/context.command.js';
export { SetStatusCommand } from './commands/set-status.command.js';
// UI utilities (for other commands to use)
export * as ui from './utils/ui.js';
// Auto-update utilities
export {
checkForUpdate,
performAutoUpdate,
displayUpgradeNotification
} from './utils/auto-update.js';
// Re-export commonly used types from tm-core
export type {
Task,

View File

@@ -0,0 +1,567 @@
/**
* @fileoverview Dashboard components for Task Master CLI
* Displays project statistics and dependency information
*/
import chalk from 'chalk';
import boxen from 'boxen';
import type { Task, TaskPriority } from '@tm/core/types';
/**
* Statistics for task collection
*/
export interface TaskStatistics {
total: number;
done: number;
inProgress: number;
pending: number;
blocked: number;
deferred: number;
cancelled: number;
review?: number;
completionPercentage: number;
}
/**
* Statistics for dependencies
*/
export interface DependencyStatistics {
tasksWithNoDeps: number;
tasksReadyToWork: number;
tasksBlockedByDeps: number;
mostDependedOnTaskId?: number;
mostDependedOnCount?: number;
avgDependenciesPerTask: number;
}
/**
* Next task information
*/
export interface NextTaskInfo {
id: string | number;
title: string;
priority?: TaskPriority;
dependencies?: (string | number)[];
complexity?: number | string;
}
/**
* Status breakdown for progress bars
*/
export interface StatusBreakdown {
'in-progress'?: number;
pending?: number;
blocked?: number;
deferred?: number;
cancelled?: number;
review?: number;
}
/**
* Create a progress bar with color-coded status segments
*/
function createProgressBar(
completionPercentage: number,
width: number = 30,
statusBreakdown?: StatusBreakdown
): string {
// If no breakdown provided, use simple green bar
if (!statusBreakdown) {
const filled = Math.round((completionPercentage / 100) * width);
const empty = width - filled;
return chalk.green('█').repeat(filled) + chalk.gray('░').repeat(empty);
}
// Build the bar with different colored sections
// Order matches the status display: Done, Cancelled, Deferred, In Progress, Review, Pending, Blocked
let bar = '';
let charsUsed = 0;
// 1. Green filled blocks for completed tasks (done)
const completedChars = Math.round((completionPercentage / 100) * width);
if (completedChars > 0) {
bar += chalk.green('█').repeat(completedChars);
charsUsed += completedChars;
}
// 2. Gray filled blocks for cancelled (won't be done)
if (statusBreakdown.cancelled && charsUsed < width) {
const cancelledChars = Math.round(
(statusBreakdown.cancelled / 100) * width
);
const actualChars = Math.min(cancelledChars, width - charsUsed);
if (actualChars > 0) {
bar += chalk.gray('█').repeat(actualChars);
charsUsed += actualChars;
}
}
// 3. Gray filled blocks for deferred (won't be done now)
if (statusBreakdown.deferred && charsUsed < width) {
const deferredChars = Math.round((statusBreakdown.deferred / 100) * width);
const actualChars = Math.min(deferredChars, width - charsUsed);
if (actualChars > 0) {
bar += chalk.gray('█').repeat(actualChars);
charsUsed += actualChars;
}
}
// 4. Blue filled blocks for in-progress (actively working)
if (statusBreakdown['in-progress'] && charsUsed < width) {
const inProgressChars = Math.round(
(statusBreakdown['in-progress'] / 100) * width
);
const actualChars = Math.min(inProgressChars, width - charsUsed);
if (actualChars > 0) {
bar += chalk.blue('█').repeat(actualChars);
charsUsed += actualChars;
}
}
// 5. Magenta empty blocks for review (almost done)
if (statusBreakdown.review && charsUsed < width) {
const reviewChars = Math.round((statusBreakdown.review / 100) * width);
const actualChars = Math.min(reviewChars, width - charsUsed);
if (actualChars > 0) {
bar += chalk.magenta('░').repeat(actualChars);
charsUsed += actualChars;
}
}
// 6. Yellow empty blocks for pending (ready to start)
if (statusBreakdown.pending && charsUsed < width) {
const pendingChars = Math.round((statusBreakdown.pending / 100) * width);
const actualChars = Math.min(pendingChars, width - charsUsed);
if (actualChars > 0) {
bar += chalk.yellow('░').repeat(actualChars);
charsUsed += actualChars;
}
}
// 7. Red empty blocks for blocked (can't start yet)
if (statusBreakdown.blocked && charsUsed < width) {
const blockedChars = Math.round((statusBreakdown.blocked / 100) * width);
const actualChars = Math.min(blockedChars, width - charsUsed);
if (actualChars > 0) {
bar += chalk.red('░').repeat(actualChars);
charsUsed += actualChars;
}
}
// Fill any remaining space with gray empty yellow blocks
if (charsUsed < width) {
bar += chalk.yellow('░').repeat(width - charsUsed);
}
return bar;
}
/**
* Calculate task statistics from a list of tasks
*/
export function calculateTaskStatistics(tasks: Task[]): TaskStatistics {
const stats: TaskStatistics = {
total: tasks.length,
done: 0,
inProgress: 0,
pending: 0,
blocked: 0,
deferred: 0,
cancelled: 0,
review: 0,
completionPercentage: 0
};
tasks.forEach((task) => {
switch (task.status) {
case 'done':
stats.done++;
break;
case 'in-progress':
stats.inProgress++;
break;
case 'pending':
stats.pending++;
break;
case 'blocked':
stats.blocked++;
break;
case 'deferred':
stats.deferred++;
break;
case 'cancelled':
stats.cancelled++;
break;
case 'review':
stats.review = (stats.review || 0) + 1;
break;
}
});
stats.completionPercentage =
stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0;
return stats;
}
/**
* Calculate subtask statistics from tasks
*/
export function calculateSubtaskStatistics(tasks: Task[]): TaskStatistics {
const stats: TaskStatistics = {
total: 0,
done: 0,
inProgress: 0,
pending: 0,
blocked: 0,
deferred: 0,
cancelled: 0,
review: 0,
completionPercentage: 0
};
tasks.forEach((task) => {
if (task.subtasks && task.subtasks.length > 0) {
task.subtasks.forEach((subtask) => {
stats.total++;
switch (subtask.status) {
case 'done':
stats.done++;
break;
case 'in-progress':
stats.inProgress++;
break;
case 'pending':
stats.pending++;
break;
case 'blocked':
stats.blocked++;
break;
case 'deferred':
stats.deferred++;
break;
case 'cancelled':
stats.cancelled++;
break;
case 'review':
stats.review = (stats.review || 0) + 1;
break;
}
});
}
});
stats.completionPercentage =
stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0;
return stats;
}
/**
* Calculate dependency statistics
*/
export function calculateDependencyStatistics(
tasks: Task[]
): DependencyStatistics {
const completedTaskIds = new Set(
tasks.filter((t) => t.status === 'done').map((t) => t.id)
);
const tasksWithNoDeps = tasks.filter(
(t) =>
t.status !== 'done' && (!t.dependencies || t.dependencies.length === 0)
).length;
const tasksWithAllDepsSatisfied = tasks.filter(
(t) =>
t.status !== 'done' &&
t.dependencies &&
t.dependencies.length > 0 &&
t.dependencies.every((depId) => completedTaskIds.has(depId))
).length;
const tasksBlockedByDeps = tasks.filter(
(t) =>
t.status !== 'done' &&
t.dependencies &&
t.dependencies.length > 0 &&
!t.dependencies.every((depId) => completedTaskIds.has(depId))
).length;
// Calculate most depended-on task
const dependencyCount: Record<string, number> = {};
tasks.forEach((task) => {
if (task.dependencies && task.dependencies.length > 0) {
task.dependencies.forEach((depId) => {
const key = String(depId);
dependencyCount[key] = (dependencyCount[key] || 0) + 1;
});
}
});
let mostDependedOnTaskId: number | undefined;
let mostDependedOnCount = 0;
for (const [taskId, count] of Object.entries(dependencyCount)) {
if (count > mostDependedOnCount) {
mostDependedOnCount = count;
mostDependedOnTaskId = parseInt(taskId);
}
}
// Calculate average dependencies
const totalDependencies = tasks.reduce(
(sum, task) => sum + (task.dependencies ? task.dependencies.length : 0),
0
);
const avgDependenciesPerTask =
tasks.length > 0 ? totalDependencies / tasks.length : 0;
return {
tasksWithNoDeps,
tasksReadyToWork: tasksWithNoDeps + tasksWithAllDepsSatisfied,
tasksBlockedByDeps,
mostDependedOnTaskId,
mostDependedOnCount,
avgDependenciesPerTask
};
}
/**
* Get priority counts
*/
export function getPriorityBreakdown(
tasks: Task[]
): Record<TaskPriority, number> {
const breakdown: Record<TaskPriority, number> = {
critical: 0,
high: 0,
medium: 0,
low: 0
};
tasks.forEach((task) => {
const priority = task.priority || 'medium';
breakdown[priority]++;
});
return breakdown;
}
/**
* Calculate status breakdown as percentages
*/
function calculateStatusBreakdown(stats: TaskStatistics): StatusBreakdown {
if (stats.total === 0) return {};
return {
'in-progress': (stats.inProgress / stats.total) * 100,
pending: (stats.pending / stats.total) * 100,
blocked: (stats.blocked / stats.total) * 100,
deferred: (stats.deferred / stats.total) * 100,
cancelled: (stats.cancelled / stats.total) * 100,
review: ((stats.review || 0) / stats.total) * 100
};
}
/**
* Format status counts in the correct order with colors
* @param stats - The statistics object containing counts
* @param isSubtask - Whether this is for subtasks (affects "Done" vs "Completed" label)
*/
function formatStatusLine(
stats: TaskStatistics,
isSubtask: boolean = false
): string {
const parts: string[] = [];
// Order: Done, Cancelled, Deferred, In Progress, Review, Pending, Blocked
if (isSubtask) {
parts.push(`Completed: ${chalk.green(`${stats.done}/${stats.total}`)}`);
} else {
parts.push(`Done: ${chalk.green(stats.done)}`);
}
parts.push(`Cancelled: ${chalk.gray(stats.cancelled)}`);
parts.push(`Deferred: ${chalk.gray(stats.deferred)}`);
// Add line break for second row
const firstLine = parts.join(' ');
parts.length = 0;
parts.push(`In Progress: ${chalk.blue(stats.inProgress)}`);
parts.push(`Review: ${chalk.magenta(stats.review || 0)}`);
parts.push(`Pending: ${chalk.yellow(stats.pending)}`);
parts.push(`Blocked: ${chalk.red(stats.blocked)}`);
const secondLine = parts.join(' ');
return firstLine + '\n' + secondLine;
}
/**
* Display the project dashboard box
*/
export function displayProjectDashboard(
taskStats: TaskStatistics,
subtaskStats: TaskStatistics,
priorityBreakdown: Record<TaskPriority, number>
): string {
// Calculate status breakdowns using the helper function
const taskStatusBreakdown = calculateStatusBreakdown(taskStats);
const subtaskStatusBreakdown = calculateStatusBreakdown(subtaskStats);
// Create progress bars with the breakdowns
const taskProgressBar = createProgressBar(
taskStats.completionPercentage,
30,
taskStatusBreakdown
);
const subtaskProgressBar = createProgressBar(
subtaskStats.completionPercentage,
30,
subtaskStatusBreakdown
);
const taskPercentage = `${taskStats.completionPercentage}% ${taskStats.done}/${taskStats.total}`;
const subtaskPercentage = `${subtaskStats.completionPercentage}% ${subtaskStats.done}/${subtaskStats.total}`;
const content =
chalk.white.bold('Project Dashboard') +
'\n' +
`Tasks Progress: ${taskProgressBar} ${chalk.yellow(taskPercentage)}\n` +
formatStatusLine(taskStats, false) +
'\n\n' +
`Subtasks Progress: ${subtaskProgressBar} ${chalk.cyan(subtaskPercentage)}\n` +
formatStatusLine(subtaskStats, true) +
'\n\n' +
chalk.cyan.bold('Priority Breakdown:') +
'\n' +
`${chalk.red('•')} ${chalk.white('High priority:')} ${priorityBreakdown.high}\n` +
`${chalk.yellow('•')} ${chalk.white('Medium priority:')} ${priorityBreakdown.medium}\n` +
`${chalk.green('•')} ${chalk.white('Low priority:')} ${priorityBreakdown.low}`;
return content;
}
/**
* Display the dependency dashboard box
*/
export function displayDependencyDashboard(
depStats: DependencyStatistics,
nextTask?: NextTaskInfo
): string {
const content =
chalk.white.bold('Dependency Status & Next Task') +
'\n' +
chalk.cyan.bold('Dependency Metrics:') +
'\n' +
`${chalk.green('•')} ${chalk.white('Tasks with no dependencies:')} ${depStats.tasksWithNoDeps}\n` +
`${chalk.green('•')} ${chalk.white('Tasks ready to work on:')} ${depStats.tasksReadyToWork}\n` +
`${chalk.yellow('•')} ${chalk.white('Tasks blocked by dependencies:')} ${depStats.tasksBlockedByDeps}\n` +
`${chalk.magenta('•')} ${chalk.white('Most depended-on task:')} ${
depStats.mostDependedOnTaskId
? chalk.cyan(
`#${depStats.mostDependedOnTaskId} (${depStats.mostDependedOnCount} dependents)`
)
: chalk.gray('None')
}\n` +
`${chalk.blue('•')} ${chalk.white('Avg dependencies per task:')} ${depStats.avgDependenciesPerTask.toFixed(1)}\n\n` +
chalk.cyan.bold('Next Task to Work On:') +
'\n' +
`ID: ${nextTask ? chalk.cyan(String(nextTask.id)) : chalk.gray('N/A')} - ${
nextTask
? chalk.white.bold(nextTask.title)
: chalk.yellow('No task available')
}\n` +
`Priority: ${nextTask?.priority || chalk.gray('N/A')} Dependencies: ${
nextTask?.dependencies?.length
? chalk.cyan(nextTask.dependencies.join(', '))
: chalk.gray('None')
}\n` +
`Complexity: ${nextTask?.complexity || chalk.gray('N/A')}`;
return content;
}
/**
* Display dashboard boxes side by side or stacked
*/
export function displayDashboards(
taskStats: TaskStatistics,
subtaskStats: TaskStatistics,
priorityBreakdown: Record<TaskPriority, number>,
depStats: DependencyStatistics,
nextTask?: NextTaskInfo
): void {
const projectDashboardContent = displayProjectDashboard(
taskStats,
subtaskStats,
priorityBreakdown
);
const dependencyDashboardContent = displayDependencyDashboard(
depStats,
nextTask
);
// Get terminal width
const terminalWidth = process.stdout.columns || 80;
const minDashboardWidth = 50;
const minDependencyWidth = 50;
const totalMinWidth = minDashboardWidth + minDependencyWidth + 4;
// If terminal is wide enough, show side by side
if (terminalWidth >= totalMinWidth) {
const halfWidth = Math.floor(terminalWidth / 2);
const boxContentWidth = halfWidth - 4;
const dashboardBox = boxen(projectDashboardContent, {
padding: 1,
borderColor: 'blue',
borderStyle: 'round',
width: boxContentWidth,
dimBorder: false
});
const dependencyBox = boxen(dependencyDashboardContent, {
padding: 1,
borderColor: 'magenta',
borderStyle: 'round',
width: boxContentWidth,
dimBorder: false
});
// Create side-by-side layout
const dashboardLines = dashboardBox.split('\n');
const dependencyLines = dependencyBox.split('\n');
const maxHeight = Math.max(dashboardLines.length, dependencyLines.length);
const combinedLines = [];
for (let i = 0; i < maxHeight; i++) {
const dashLine = i < dashboardLines.length ? dashboardLines[i] : '';
const depLine = i < dependencyLines.length ? dependencyLines[i] : '';
const paddedDashLine = dashLine.padEnd(halfWidth, ' ');
combinedLines.push(paddedDashLine + depLine);
}
console.log(combinedLines.join('\n'));
} else {
// Show stacked vertically
const dashboardBox = boxen(projectDashboardContent, {
padding: 1,
borderColor: 'blue',
borderStyle: 'round',
margin: { top: 0, bottom: 1 }
});
const dependencyBox = boxen(dependencyDashboardContent, {
padding: 1,
borderColor: 'magenta',
borderStyle: 'round',
margin: { top: 0, bottom: 1 }
});
console.log(dashboardBox);
console.log(dependencyBox);
}
}

View File

@@ -0,0 +1,45 @@
/**
* @fileoverview Task Master header component
* Displays the banner, version, project info, and file path
*/
import chalk from 'chalk';
/**
* Header configuration options
*/
export interface HeaderOptions {
title?: string;
tag?: string;
filePath?: string;
}
/**
* Display the Task Master header with project info
*/
export function displayHeader(options: HeaderOptions = {}): void {
const { filePath, tag } = options;
// Display tag and file path info
if (tag) {
let tagInfo = '';
if (tag && tag !== 'master') {
tagInfo = `🏷 tag: ${chalk.cyan(tag)}`;
} else {
tagInfo = `🏷 tag: ${chalk.cyan('master')}`;
}
console.log(tagInfo);
if (filePath) {
// Convert to absolute path if it's relative
const absolutePath = filePath.startsWith('/')
? filePath
: `${process.cwd()}/${filePath}`;
console.log(`Listing tasks from: ${chalk.dim(absolutePath)}`);
}
console.log(); // Empty line for spacing
}
}

View File

@@ -0,0 +1,9 @@
/**
* @fileoverview UI components exports
*/
export * from './header.component.js';
export * from './dashboard.component.js';
export * from './next-task.component.js';
export * from './suggested-steps.component.js';
export * from './task-detail.component.js';

View File

@@ -0,0 +1,134 @@
/**
* @fileoverview Next task recommendation component
* Displays detailed information about the recommended next task
*/
import chalk from 'chalk';
import boxen from 'boxen';
import type { Task } from '@tm/core/types';
/**
* Next task display options
*/
export interface NextTaskDisplayOptions {
id: string | number;
title: string;
priority?: string;
status?: string;
dependencies?: (string | number)[];
description?: string;
}
/**
* Display the recommended next task section
*/
export function displayRecommendedNextTask(
task: NextTaskDisplayOptions | undefined
): void {
if (!task) {
// If no task available, show a message
console.log(
boxen(
chalk.yellow(
'No tasks available to work on. All tasks are either completed, blocked by dependencies, or in progress.'
),
{
padding: 1,
borderStyle: 'round',
borderColor: 'yellow',
title: '⚠ NO TASKS AVAILABLE ⚠',
titleAlignment: 'center'
}
)
);
return;
}
// Build the content for the next task box
const content = [];
// Task header with ID and title
content.push(
`🔥 ${chalk.hex('#FF8800').bold('Next Task to Work On:')} ${chalk.yellow(`#${task.id}`)}${chalk.hex('#FF8800').bold(` - ${task.title}`)}`
);
content.push('');
// Priority and Status line
const statusLine = [];
if (task.priority) {
const priorityColor =
task.priority === 'high'
? chalk.red
: task.priority === 'medium'
? chalk.yellow
: chalk.gray;
statusLine.push(`Priority: ${priorityColor.bold(task.priority)}`);
}
if (task.status) {
const statusDisplay =
task.status === 'pending'
? chalk.yellow('○ pending')
: task.status === 'in-progress'
? chalk.blue('▶ in-progress')
: chalk.gray(task.status);
statusLine.push(`Status: ${statusDisplay}`);
}
content.push(statusLine.join(' '));
// Dependencies
const depsDisplay =
!task.dependencies || task.dependencies.length === 0
? chalk.gray('None')
: chalk.cyan(task.dependencies.join(', '));
content.push(`Dependencies: ${depsDisplay}`);
// Description if available
if (task.description) {
content.push('');
content.push(`Description: ${chalk.white(task.description)}`);
}
// Action commands
content.push('');
content.push(
`${chalk.cyan('Start working:')} ${chalk.yellow(`task-master set-status --id=${task.id} --status=in-progress`)}`
);
content.push(
`${chalk.cyan('View details:')} ${chalk.yellow(`task-master show ${task.id}`)}`
);
// Display in a styled box with orange border
console.log(
boxen(content.join('\n'), {
padding: 1,
margin: { top: 1, bottom: 1 },
borderStyle: 'round',
borderColor: '#FFA500', // Orange color
title: chalk.hex('#FFA500')('⚡ RECOMMENDED NEXT TASK ⚡'),
titleAlignment: 'center',
width: process.stdout.columns * 0.97,
fullscreen: false
})
);
}
/**
* Get task description from the full task object
*/
export function getTaskDescription(task: Task): string | undefined {
// Try to get description from the task
// This could be from task.description or the first line of task.details
if ('description' in task && task.description) {
return task.description as string;
}
if ('details' in task && task.details) {
// Take first sentence or line from details
const details = task.details as string;
const firstLine = details.split('\n')[0];
const firstSentence = firstLine.split('.')[0];
return firstSentence;
}
return undefined;
}

View File

@@ -0,0 +1,31 @@
/**
* @fileoverview Suggested next steps component
* Displays helpful command suggestions at the end of the list
*/
import chalk from 'chalk';
import boxen from 'boxen';
/**
* Display suggested next steps section
*/
export function displaySuggestedNextSteps(): void {
const steps = [
`${chalk.cyan('1.')} Run ${chalk.yellow('task-master next')} to see what to work on next`,
`${chalk.cyan('2.')} Run ${chalk.yellow('task-master expand --id=<id>')} to break down a task into subtasks`,
`${chalk.cyan('3.')} Run ${chalk.yellow('task-master set-status --id=<id> --status=done')} to mark a task as complete`
];
console.log(
boxen(
chalk.white.bold('Suggested Next Steps:') + '\n\n' + steps.join('\n'),
{
padding: 1,
margin: { top: 0, bottom: 1 },
borderStyle: 'round',
borderColor: 'gray',
width: process.stdout.columns * 0.97
}
)
);
}

View File

@@ -0,0 +1,264 @@
/**
* @fileoverview Task detail component for show command
* Displays detailed task information in a structured format
*/
import chalk from 'chalk';
import boxen from 'boxen';
import Table from 'cli-table3';
import { marked, MarkedExtension } from 'marked';
import { markedTerminal } from 'marked-terminal';
import type { Task } from '@tm/core/types';
import { getStatusWithColor, getPriorityWithColor } from '../../utils/ui.js';
// Configure marked to use terminal renderer with subtle colors
marked.use(
markedTerminal({
// More subtle colors that match the overall design
code: (code: string) => {
// Custom code block handler to preserve formatting
return code
.split('\n')
.map((line) => ' ' + chalk.cyan(line))
.join('\n');
},
blockquote: chalk.gray.italic,
html: chalk.gray,
heading: chalk.white.bold, // White bold for headings
hr: chalk.gray,
listitem: chalk.white, // White for list items
paragraph: chalk.white, // White for paragraphs (default text color)
strong: chalk.white.bold, // White bold for strong text
em: chalk.white.italic, // White italic for emphasis
codespan: chalk.cyan, // Cyan for inline code (no background)
del: chalk.dim.strikethrough,
link: chalk.blue,
href: chalk.blue.underline,
// Add more explicit code block handling
showSectionPrefix: false,
unescape: true,
emoji: false,
// Try to preserve whitespace in code blocks
tab: 4,
width: 120
}) as MarkedExtension
);
// Also set marked options to preserve whitespace
marked.setOptions({
breaks: true,
gfm: true
});
/**
* Display the task header with tag
*/
export function displayTaskHeader(
taskId: string | number,
title: string
): void {
// Display task header box
console.log(
boxen(chalk.white.bold(`Task: #${taskId} - ${title}`), {
padding: { top: 0, bottom: 0, left: 1, right: 1 },
borderColor: 'blue',
borderStyle: 'round'
})
);
}
/**
* Display task properties in a table format
*/
export function displayTaskProperties(task: Task): void {
const terminalWidth = process.stdout.columns * 0.95 || 100;
// Create table for task properties - simple 2-column layout
const table = new Table({
head: [],
style: {
head: [],
border: ['grey']
},
colWidths: [
Math.floor(terminalWidth * 0.2),
Math.floor(terminalWidth * 0.8)
],
wordWrap: true
});
const deps =
task.dependencies && task.dependencies.length > 0
? task.dependencies.map((d) => String(d)).join(', ')
: 'None';
// Build the left column (labels) and right column (values)
const labels = [
chalk.cyan('ID:'),
chalk.cyan('Title:'),
chalk.cyan('Status:'),
chalk.cyan('Priority:'),
chalk.cyan('Dependencies:'),
chalk.cyan('Complexity:'),
chalk.cyan('Description:')
].join('\n');
const values = [
String(task.id),
task.title,
getStatusWithColor(task.status),
getPriorityWithColor(task.priority),
deps,
'N/A',
task.description || ''
].join('\n');
table.push([labels, values]);
console.log(table.toString());
}
/**
* Display implementation details in a box
*/
export function displayImplementationDetails(details: string): void {
// Handle all escaped characters properly
const cleanDetails = details
.replace(/\\n/g, '\n') // Convert \n to actual newlines
.replace(/\\t/g, '\t') // Convert \t to actual tabs
.replace(/\\"/g, '"') // Convert \" to actual quotes
.replace(/\\\\/g, '\\'); // Convert \\ to single backslash
const terminalWidth = process.stdout.columns * 0.95 || 100;
// Parse markdown to terminal-friendly format
const markdownResult = marked(cleanDetails);
const formattedDetails =
typeof markdownResult === 'string' ? markdownResult.trim() : cleanDetails; // Fallback to original if Promise
console.log(
boxen(
chalk.white.bold('Implementation Details:') + '\n\n' + formattedDetails,
{
padding: 1,
borderStyle: 'round',
borderColor: 'cyan', // Changed to cyan to match the original
width: terminalWidth // Fixed width to match the original
}
)
);
}
/**
* Display test strategy in a box
*/
export function displayTestStrategy(testStrategy: string): void {
// Handle all escaped characters properly (same as implementation details)
const cleanStrategy = testStrategy
.replace(/\\n/g, '\n') // Convert \n to actual newlines
.replace(/\\t/g, '\t') // Convert \t to actual tabs
.replace(/\\"/g, '"') // Convert \" to actual quotes
.replace(/\\\\/g, '\\'); // Convert \\ to single backslash
const terminalWidth = process.stdout.columns * 0.95 || 100;
// Parse markdown to terminal-friendly format (same as implementation details)
const markdownResult = marked(cleanStrategy);
const formattedStrategy =
typeof markdownResult === 'string' ? markdownResult.trim() : cleanStrategy; // Fallback to original if Promise
console.log(
boxen(chalk.white.bold('Test Strategy:') + '\n\n' + formattedStrategy, {
padding: 1,
borderStyle: 'round',
borderColor: 'cyan', // Changed to cyan to match implementation details
width: terminalWidth
})
);
}
/**
* Display subtasks in a table format
*/
export function displaySubtasks(
subtasks: Array<{
id: string | number;
title: string;
status: any;
description?: string;
dependencies?: string[];
}>,
parentId: string | number
): void {
const terminalWidth = process.stdout.columns * 0.95 || 100;
// Display subtasks header
console.log(
boxen(chalk.magenta.bold('Subtasks'), {
padding: { top: 0, bottom: 0, left: 1, right: 1 },
borderColor: 'magenta',
borderStyle: 'round',
margin: { top: 1, bottom: 0 }
})
);
// Create subtasks table
const table = new Table({
head: [
chalk.magenta.bold('ID'),
chalk.magenta.bold('Status'),
chalk.magenta.bold('Title'),
chalk.magenta.bold('Deps')
],
style: {
head: [],
border: ['grey']
},
colWidths: [
Math.floor(terminalWidth * 0.1),
Math.floor(terminalWidth * 0.15),
Math.floor(terminalWidth * 0.6),
Math.floor(terminalWidth * 0.15)
],
wordWrap: true
});
subtasks.forEach((subtask) => {
const subtaskId = `${parentId}.${subtask.id}`;
// Format dependencies
const deps =
subtask.dependencies && subtask.dependencies.length > 0
? subtask.dependencies.join(', ')
: 'None';
table.push([
subtaskId,
getStatusWithColor(subtask.status),
subtask.title,
deps
]);
});
console.log(table.toString());
}
/**
* Display suggested actions
*/
export function displaySuggestedActions(taskId: string | number): void {
console.log(
boxen(
chalk.white.bold('Suggested Actions:') +
'\n\n' +
`${chalk.cyan('1.')} Run ${chalk.yellow(`task-master set-status --id=${taskId} --status=in-progress`)} to start working\n` +
`${chalk.cyan('2.')} Run ${chalk.yellow(`task-master expand --id=${taskId}`)} to break down into subtasks\n` +
`${chalk.cyan('3.')} Run ${chalk.yellow(`task-master update-task --id=${taskId} --prompt="..."`)} to update details`,
{
padding: 1,
margin: { top: 1 },
borderStyle: 'round',
borderColor: 'green',
width: process.stdout.columns * 0.95 || 100
}
)
);
}

9
apps/cli/src/ui/index.ts Normal file
View File

@@ -0,0 +1,9 @@
/**
* @fileoverview Main UI exports
*/
// Export all components
export * from './components/index.js';
// Re-export existing UI utilities
export * from '../utils/ui.js';

View File

@@ -0,0 +1,238 @@
/**
* @fileoverview Auto-update utilities for task-master-ai CLI
*/
import { spawn } from 'child_process';
import https from 'https';
import chalk from 'chalk';
import ora from 'ora';
import boxen from 'boxen';
import packageJson from '../../../../package.json' with { type: 'json' };
export interface UpdateInfo {
currentVersion: string;
latestVersion: string;
needsUpdate: boolean;
}
/**
* Get current version from package.json
*/
function getCurrentVersion(): string {
try {
return packageJson.version;
} catch (error) {
console.warn('Could not read package.json for version info');
return '0.0.0';
}
}
/**
* Compare semantic versions with proper pre-release handling
* @param v1 - First version
* @param v2 - Second version
* @returns -1 if v1 < v2, 0 if v1 = v2, 1 if v1 > v2
*/
function compareVersions(v1: string, v2: string): number {
const toParts = (v: string) => {
const [core, pre = ''] = v.split('-', 2);
const nums = core.split('.').map((n) => Number.parseInt(n, 10) || 0);
return { nums, pre };
};
const a = toParts(v1);
const b = toParts(v2);
const len = Math.max(a.nums.length, b.nums.length);
// Compare numeric parts
for (let i = 0; i < len; i++) {
const d = (a.nums[i] || 0) - (b.nums[i] || 0);
if (d !== 0) return d < 0 ? -1 : 1;
}
// Handle pre-release comparison
if (a.pre && !b.pre) return -1; // prerelease < release
if (!a.pre && b.pre) return 1; // release > prerelease
if (a.pre === b.pre) return 0; // same or both empty
return a.pre < b.pre ? -1 : 1; // basic prerelease tie-break
}
/**
* Check for newer version of task-master-ai
*/
export async function checkForUpdate(
currentVersionOverride?: string
): Promise<UpdateInfo> {
const currentVersion = currentVersionOverride || getCurrentVersion();
return new Promise((resolve) => {
const options = {
hostname: 'registry.npmjs.org',
path: '/task-master-ai',
method: 'GET',
headers: {
Accept: 'application/vnd.npm.install-v1+json',
'User-Agent': `task-master-ai/${currentVersion}`
}
};
const req = https.request(options, (res) => {
let data = '';
res.on('data', (chunk) => {
data += chunk;
});
res.on('end', () => {
try {
if (res.statusCode !== 200)
throw new Error(`npm registry status ${res.statusCode}`);
const npmData = JSON.parse(data);
const latestVersion = npmData['dist-tags']?.latest || currentVersion;
const needsUpdate =
compareVersions(currentVersion, latestVersion) < 0;
resolve({
currentVersion,
latestVersion,
needsUpdate
});
} catch (error) {
resolve({
currentVersion,
latestVersion: currentVersion,
needsUpdate: false
});
}
});
});
req.on('error', () => {
resolve({
currentVersion,
latestVersion: currentVersion,
needsUpdate: false
});
});
req.setTimeout(3000, () => {
req.destroy();
resolve({
currentVersion,
latestVersion: currentVersion,
needsUpdate: false
});
});
req.end();
});
}
/**
* Display upgrade notification message
*/
export function displayUpgradeNotification(
currentVersion: string,
latestVersion: string
) {
const message = boxen(
`${chalk.blue.bold('Update Available!')} ${chalk.dim(currentVersion)}${chalk.green(latestVersion)}\n\n` +
`Auto-updating to the latest version with new features and bug fixes...`,
{
padding: 1,
margin: { top: 1, bottom: 1 },
borderColor: 'yellow',
borderStyle: 'round'
}
);
console.log(message);
}
/**
* Automatically update task-master-ai to the latest version
*/
export async function performAutoUpdate(
latestVersion: string
): Promise<boolean> {
if (process.env.TASKMASTER_SKIP_AUTO_UPDATE === '1' || process.env.CI) {
console.log(
chalk.dim('Skipping auto-update (TASKMASTER_SKIP_AUTO_UPDATE/CI).')
);
return false;
}
const spinner = ora({
text: chalk.blue(
`Updating task-master-ai to version ${chalk.green(latestVersion)}`
),
spinner: 'dots',
color: 'blue'
}).start();
return new Promise((resolve) => {
const updateProcess = spawn(
'npm',
[
'install',
'-g',
`task-master-ai@${latestVersion}`,
'--no-fund',
'--no-audit',
'--loglevel=warn'
],
{
stdio: ['ignore', 'pipe', 'pipe']
}
);
let errorOutput = '';
updateProcess.stdout.on('data', () => {
// Update spinner text with progress
spinner.text = chalk.blue(
`Installing task-master-ai@${latestVersion}...`
);
});
updateProcess.stderr.on('data', (data) => {
errorOutput += data.toString();
});
updateProcess.on('close', (code) => {
if (code === 0) {
spinner.succeed(
chalk.green(
`Successfully updated to version ${chalk.bold(latestVersion)}`
)
);
console.log(
chalk.dim('Please restart your command to use the new version.')
);
resolve(true);
} else {
spinner.fail(chalk.red('Auto-update failed'));
console.log(
chalk.cyan(
`Please run manually: npm install -g task-master-ai@${latestVersion}`
)
);
if (errorOutput) {
console.log(chalk.dim(`Error: ${errorOutput.trim()}`));
}
resolve(false);
}
});
updateProcess.on('error', (error) => {
spinner.fail(chalk.red('Auto-update failed'));
console.log(chalk.red('Error:'), error.message);
console.log(
chalk.cyan(
`Please run manually: npm install -g task-master-ai@${latestVersion}`
)
);
resolve(false);
});
});
}

View File

@@ -6,7 +6,7 @@
import chalk from 'chalk';
import boxen from 'boxen';
import Table from 'cli-table3';
import type { Task, TaskStatus, TaskPriority } from '@tm/core';
import type { Task, TaskStatus, TaskPriority } from '@tm/core/types';
/**
* Get colored status display with ASCII icons (matches scripts/modules/ui.js style)
@@ -18,19 +18,44 @@ export function getStatusWithColor(
const statusConfig = {
done: {
color: chalk.green,
icon: String.fromCharCode(8730),
tableIcon: String.fromCharCode(8730)
}, // √
pending: { color: chalk.yellow, icon: 'o', tableIcon: 'o' },
icon: '✓',
tableIcon: '✓'
},
pending: {
color: chalk.yellow,
icon: '○',
tableIcon: '○'
},
'in-progress': {
color: chalk.hex('#FFA500'),
icon: String.fromCharCode(9654),
tableIcon: '>'
}, // ▶
deferred: { color: chalk.gray, icon: 'x', tableIcon: 'x' },
blocked: { color: chalk.red, icon: '!', tableIcon: '!' },
review: { color: chalk.magenta, icon: '?', tableIcon: '?' },
cancelled: { color: chalk.gray, icon: 'X', tableIcon: 'X' }
icon: '▶',
tableIcon: ''
},
deferred: {
color: chalk.gray,
icon: 'x',
tableIcon: 'x'
},
review: {
color: chalk.magenta,
icon: '?',
tableIcon: '?'
},
cancelled: {
color: chalk.gray,
icon: 'x',
tableIcon: 'x'
},
blocked: {
color: chalk.red,
icon: '!',
tableIcon: '!'
},
completed: {
color: chalk.green,
icon: '✓',
tableIcon: '✓'
}
};
const config = statusConfig[status] || {
@@ -39,18 +64,7 @@ export function getStatusWithColor(
tableIcon: 'X'
};
// Use simple ASCII characters for stable display
const simpleIcons = {
done: String.fromCharCode(8730), // √
pending: 'o',
'in-progress': '>',
deferred: 'x',
blocked: '!',
review: '?',
cancelled: 'X'
};
const icon = forTable ? simpleIcons[status] || 'X' : config.icon;
const icon = forTable ? config.tableIcon : config.icon;
return config.color(`${icon} ${status}`);
}
@@ -245,10 +259,24 @@ export function createTaskTable(
} = options || {};
// Calculate dynamic column widths based on terminal width
const terminalWidth = process.stdout.columns || 100;
const terminalWidth = process.stdout.columns * 0.9 || 100;
// Adjust column widths to better match the original layout
const baseColWidths = showComplexity
? [8, Math.floor(terminalWidth * 0.35), 18, 12, 15, 12] // ID, Title, Status, Priority, Dependencies, Complexity
: [8, Math.floor(terminalWidth * 0.4), 18, 12, 20]; // ID, Title, Status, Priority, Dependencies
? [
Math.floor(terminalWidth * 0.06),
Math.floor(terminalWidth * 0.4),
Math.floor(terminalWidth * 0.15),
Math.floor(terminalWidth * 0.12),
Math.floor(terminalWidth * 0.2),
Math.floor(terminalWidth * 0.12)
] // ID, Title, Status, Priority, Dependencies, Complexity
: [
Math.floor(terminalWidth * 0.08),
Math.floor(terminalWidth * 0.4),
Math.floor(terminalWidth * 0.18),
Math.floor(terminalWidth * 0.12),
Math.floor(terminalWidth * 0.2)
]; // ID, Title, Status, Priority, Dependencies
const headers = [
chalk.blue.bold('ID'),
@@ -284,11 +312,19 @@ export function createTaskTable(
];
if (showDependencies) {
row.push(formatDependenciesWithStatus(task.dependencies, tasks));
// For table display, show simple format without status icons
if (!task.dependencies || task.dependencies.length === 0) {
row.push(chalk.gray('None'));
} else {
row.push(
chalk.cyan(task.dependencies.map((d) => String(d)).join(', '))
);
}
}
if (showComplexity && 'complexity' in task) {
row.push(getComplexityWithColor(task.complexity as number | string));
if (showComplexity) {
// Show N/A if no complexity score
row.push(chalk.gray('N/A'));
}
table.push(row);

View File

@@ -1,27 +1,36 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"module": "NodeNext",
"lib": ["ES2022"],
"moduleResolution": "bundler",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"outDir": "./dist",
"baseUrl": ".",
"rootDir": "./src",
"resolveJsonModule": true,
"allowJs": false,
"strict": true,
"noImplicitAny": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"strictBindCallApply": true,
"strictPropertyInitialization": true,
"noImplicitThis": true,
"alwaysStrict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"types": ["node"]
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"moduleResolution": "NodeNext",
"moduleDetection": "force",
"types": ["node"],
"resolveJsonModule": true,
"isolatedModules": true,
"allowImportingTsExtensions": false
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "tests"]
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]
}

View File

@@ -1,15 +0,0 @@
import { defineConfig } from 'tsup';
export default defineConfig({
entry: ['src/index.ts'],
format: ['esm'],
target: 'node18',
splitting: false,
sourcemap: true,
clean: true,
dts: true,
shims: true,
esbuildOptions(options) {
options.platform = 'node';
}
});

View File

@@ -1,3 +1,5 @@
# docs
## 0.0.2
## 0.0.1

View File

@@ -83,6 +83,8 @@ Taskmaster uses two primary methods for configuration:
- `VERTEX_PROJECT_ID`: Your Google Cloud project ID for Vertex AI. Required when using the 'vertex' provider.
- `VERTEX_LOCATION`: Google Cloud region for Vertex AI (e.g., 'us-central1'). Default is 'us-central1'.
- `GOOGLE_APPLICATION_CREDENTIALS`: Path to service account credentials JSON file for Google Cloud auth (alternative to API key for Vertex AI).
- **Optional Auto-Update Control:**
- `TASKMASTER_SKIP_AUTO_UPDATE`: Set to '1' to disable automatic updates. Also automatically disabled in CI environments (when `CI` environment variable is set).
**Important:** Settings like model ID selections (`main`, `research`, `fallback`), `maxTokens`, `temperature`, `logLevel`, `defaultSubtasks`, `defaultPriority`, and `projectName` are **managed in `.taskmaster/config.json`** (or `.taskmasterconfig` for unmigrated projects), not environment variables.

View File

@@ -1,14 +1,13 @@
{
"name": "docs",
"version": "0.0.1",
"version": "0.0.2",
"private": true,
"description": "Task Master documentation powered by Mintlify",
"scripts": {
"dev": "mintlify dev",
"build": "mintlify build",
"preview": "mintlify preview"
},
"devDependencies": {
"mintlify": "^4.0.0"
"mintlify": "^4.2.111"
}
}

View File

@@ -1,5 +1,23 @@
# Change Log
## 0.25.0-rc.0
### Minor Changes
- [#1201](https://github.com/eyaltoledano/claude-task-master/pull/1201) [`83af314`](https://github.com/eyaltoledano/claude-task-master/commit/83af314879fc0e563581161c60d2bd089899313e) Thanks [@losolosol](https://github.com/losolosol)! - Added a Start Build button to the VSCODE Task Properties Right Panel
### Patch Changes
- Updated dependencies [[`137ef36`](https://github.com/eyaltoledano/claude-task-master/commit/137ef362789a9cdfdb1925e35e0438c1fa6c69ee)]:
- task-master-ai@0.27.0-rc.0
## 0.24.2
### Patch Changes
- Updated dependencies [[`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f), [`df26c65`](https://github.com/eyaltoledano/claude-task-master/commit/df26c65632000874a73504963b08f18c46283144), [`37af0f1`](https://github.com/eyaltoledano/claude-task-master/commit/37af0f191227a68d119b7f89a377bf932ee3ac66), [`c4f92f6`](https://github.com/eyaltoledano/claude-task-master/commit/c4f92f6a0aee3435c56eb8d27d9aa9204284833e), [`8783708`](https://github.com/eyaltoledano/claude-task-master/commit/8783708e5e3389890a78fcf685d3da0580e73b3f), [`4dad2fd`](https://github.com/eyaltoledano/claude-task-master/commit/4dad2fd613ceac56a65ae9d3c1c03092b8860ac9)]:
- task-master-ai@0.26.0
## 0.24.2-rc.1
### Patch Changes

View File

@@ -103,8 +103,8 @@ async function main() {
// This prevents the multiple React instances issue
// Ensure React is resolved from the workspace root to avoid duplicates
alias: {
react: path.resolve(__dirname, 'node_modules/react'),
'react-dom': path.resolve(__dirname, 'node_modules/react-dom')
react: path.resolve(__dirname, '../../node_modules/react'),
'react-dom': path.resolve(__dirname, '../../node_modules/react-dom')
},
define: {
'process.env.NODE_ENV': production ? '"production"' : '"development"',
@@ -135,8 +135,8 @@ async function main() {
jsxImportSource: 'react',
external: ['*.css'],
alias: {
react: path.resolve(__dirname, 'node_modules/react'),
'react-dom': path.resolve(__dirname, 'node_modules/react-dom')
react: path.resolve(__dirname, '../../node_modules/react'),
'react-dom': path.resolve(__dirname, '../../node_modules/react-dom')
},
define: {
'process.env.NODE_ENV': production ? '"production"' : '"development"',

View File

@@ -3,7 +3,7 @@
"private": true,
"displayName": "TaskMaster",
"description": "A visual Kanban board interface for TaskMaster projects in VS Code",
"version": "0.24.2-rc.1",
"version": "0.25.0-rc.0",
"publisher": "Hamster",
"icon": "assets/icon.png",
"engines": {
@@ -229,6 +229,7 @@
"build": "npm run build:js && npm run build:css",
"build:js": "node ./esbuild.js --production",
"build:css": "npx @tailwindcss/cli -i ./src/webview/index.css -o ./dist/index.css --minify",
"dev": "npm run watch",
"package": "npm exec node ./package.mjs",
"package:direct": "node ./package.mjs",
"debug:env": "node ./debug-env.mjs",
@@ -239,7 +240,7 @@
"check-types": "tsc --noEmit"
},
"dependencies": {
"task-master-ai": "0.26.0-rc.1"
"task-master-ai": "*"
},
"devDependencies": {
"@dnd-kit/core": "^6.3.1",

View File

@@ -1,9 +1,10 @@
import type React from 'react';
import { useState, useEffect } from 'react';
import { Button } from '@/components/ui/button';
import { Loader2 } from 'lucide-react';
import { Loader2, Play } from 'lucide-react';
import { PriorityBadge } from './PriorityBadge';
import type { TaskMasterTask } from '../../webview/types';
import { useVSCodeContext } from '../../webview/contexts/VSCodeContext';
interface TaskMetadataSidebarProps {
currentTask: TaskMasterTask;
@@ -28,10 +29,12 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
isRegenerating = false,
isAppending = false
}) => {
const { vscode } = useVSCodeContext();
const [isLoadingComplexity, setIsLoadingComplexity] = useState(false);
const [mcpComplexityScore, setMcpComplexityScore] = useState<
number | undefined
>(undefined);
const [isStartingTask, setIsStartingTask] = useState(false);
// Get complexity score from task
const currentComplexityScore = complexity?.score;
@@ -97,6 +100,29 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
}
};
// Handle starting a task
const handleStartTask = () => {
if (!currentTask || isStartingTask) {
return;
}
setIsStartingTask(true);
// Send message to extension to open terminal
if (vscode) {
vscode.postMessage({
type: 'openTerminal',
taskId: currentTask.id,
taskTitle: currentTask.title
});
}
// Reset loading state after a short delay
setTimeout(() => {
setIsStartingTask(false);
}, 500);
};
// Effect to handle complexity on task change
useEffect(() => {
if (currentTask?.id) {
@@ -284,6 +310,30 @@ export const TaskMetadataSidebar: React.FC<TaskMetadataSidebarProps> = ({
{currentTask.dependencies && currentTask.dependencies.length > 0 && (
<div className="border-b border-textSeparator-foreground" />
)}
{/* Start Task Button */}
<div className="mt-4">
<Button
onClick={handleStartTask}
variant="default"
size="sm"
className="w-full text-xs"
disabled={
isRegenerating ||
isAppending ||
isStartingTask ||
currentTask?.status === 'done' ||
currentTask?.status === 'in-progress'
}
>
{isStartingTask ? (
<Loader2 className="w-4 h-4 mr-2 animate-spin" />
) : (
<Play className="w-4 h-4 mr-2" />
)}
{isStartingTask ? 'Starting...' : 'Start Task'}
</Button>
</div>
</div>
</div>
</div>

View File

@@ -361,6 +361,30 @@ export class WebviewManager {
}
return;
case 'openTerminal':
// Open VS Code terminal for task execution
this.logger.log(
`Opening terminal for task ${data.taskId}: ${data.taskTitle}`
);
try {
const terminal = vscode.window.createTerminal({
name: `Task ${data.taskId}: ${data.taskTitle}`,
cwd: vscode.workspace.workspaceFolders?.[0]?.uri.fsPath
});
terminal.show();
this.logger.log('Terminal created and shown successfully');
response = { success: true };
} catch (error) {
this.logger.error('Failed to create terminal:', error);
response = {
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
};
}
break;
default:
throw new Error(`Unknown message type: ${type}`);
}

View File

@@ -20,357 +20,8 @@
* Main entry point for globally installed package
*/
import { fileURLToPath } from 'url';
import { dirname, resolve } from 'path';
import { createRequire } from 'module';
import { spawn } from 'child_process';
import { Command } from 'commander';
import { displayHelp, displayBanner } from '../scripts/modules/ui.js';
import { registerCommands } from '../scripts/modules/commands.js';
import { detectCamelCaseFlags } from '../scripts/modules/utils.js';
import chalk from 'chalk';
// Direct imports instead of spawning child processes
import { runCLI } from '../scripts/modules/commands.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const require = createRequire(import.meta.url);
// Get package information
const packageJson = require('../package.json');
const version = packageJson.version;
// Get paths to script files
const devScriptPath = resolve(__dirname, '../scripts/dev.js');
const initScriptPath = resolve(__dirname, '../scripts/init.js');
// Helper function to run dev.js with arguments
function runDevScript(args) {
// Debug: Show the transformed arguments when DEBUG=1 is set
if (process.env.DEBUG === '1') {
console.error('\nDEBUG - CLI Wrapper Analysis:');
console.error('- Original command: ' + process.argv.join(' '));
console.error('- Transformed args: ' + args.join(' '));
console.error(
'- dev.js will receive: node ' +
devScriptPath +
' ' +
args.join(' ') +
'\n'
);
}
// For testing: If TEST_MODE is set, just print args and exit
if (process.env.TEST_MODE === '1') {
console.log('Would execute:');
console.log(`node ${devScriptPath} ${args.join(' ')}`);
process.exit(0);
return;
}
const child = spawn('node', [devScriptPath, ...args], {
stdio: 'inherit',
cwd: process.cwd()
});
child.on('close', (code) => {
process.exit(code);
});
}
// Helper function to detect camelCase and convert to kebab-case
const toKebabCase = (str) => str.replace(/([A-Z])/g, '-$1').toLowerCase();
/**
* Create a wrapper action that passes the command to dev.js
* @param {string} commandName - The name of the command
* @returns {Function} Wrapper action function
*/
function createDevScriptAction(commandName) {
return (options, cmd) => {
// Check for camelCase flags and error out with helpful message
const camelCaseFlags = detectCamelCaseFlags(process.argv);
// If camelCase flags were found, show error and exit
if (camelCaseFlags.length > 0) {
console.error('\nError: Please use kebab-case for CLI flags:');
camelCaseFlags.forEach((flag) => {
console.error(` Instead of: --${flag.original}`);
console.error(` Use: --${flag.kebabCase}`);
});
console.error(
'\nExample: task-master parse-prd --num-tasks=5 instead of --numTasks=5\n'
);
process.exit(1);
}
// Since we've ensured no camelCase flags, we can now just:
// 1. Start with the command name
const args = [commandName];
// 3. Get positional arguments and explicit flags from the command line
const commandArgs = [];
const positionals = new Set(); // Track positional args we've seen
// Find the command in raw process.argv to extract args
const commandIndex = process.argv.indexOf(commandName);
if (commandIndex !== -1) {
// Process all args after the command name
for (let i = commandIndex + 1; i < process.argv.length; i++) {
const arg = process.argv[i];
if (arg.startsWith('--')) {
// It's a flag - pass through as is
commandArgs.push(arg);
// Skip the next arg if this is a flag with a value (not --flag=value format)
if (
!arg.includes('=') &&
i + 1 < process.argv.length &&
!process.argv[i + 1].startsWith('--')
) {
commandArgs.push(process.argv[++i]);
}
} else if (!positionals.has(arg)) {
// It's a positional argument we haven't seen
commandArgs.push(arg);
positionals.add(arg);
}
}
}
// Add all command line args we collected
args.push(...commandArgs);
// 4. Add default options from Commander if not specified on command line
// Track which options we've seen on the command line
const userOptions = new Set();
for (const arg of commandArgs) {
if (arg.startsWith('--')) {
// Extract option name (without -- and value)
const name = arg.split('=')[0].slice(2);
userOptions.add(name);
// Add the kebab-case version too, to prevent duplicates
const kebabName = name.replace(/([A-Z])/g, '-$1').toLowerCase();
userOptions.add(kebabName);
// Add the camelCase version as well
const camelName = kebabName.replace(/-([a-z])/g, (_, letter) =>
letter.toUpperCase()
);
userOptions.add(camelName);
}
}
// Add Commander-provided defaults for options not specified by user
Object.entries(options).forEach(([key, value]) => {
// Debug output to see what keys we're getting
if (process.env.DEBUG === '1') {
console.error(`DEBUG - Processing option: ${key} = ${value}`);
}
// Special case for numTasks > num-tasks (a known problem case)
if (key === 'numTasks') {
if (process.env.DEBUG === '1') {
console.error('DEBUG - Converting numTasks to num-tasks');
}
if (!userOptions.has('num-tasks') && !userOptions.has('numTasks')) {
args.push(`--num-tasks=${value}`);
}
return;
}
// Skip built-in Commander properties and options the user provided
if (
['parent', 'commands', 'options', 'rawArgs'].includes(key) ||
userOptions.has(key)
) {
return;
}
// Also check the kebab-case version of this key
const kebabKey = key.replace(/([A-Z])/g, '-$1').toLowerCase();
if (userOptions.has(kebabKey)) {
return;
}
// Add default values, using kebab-case for the parameter name
if (value !== undefined) {
if (typeof value === 'boolean') {
if (value === true) {
args.push(`--${kebabKey}`);
} else if (value === false && key === 'generate') {
args.push('--skip-generate');
}
} else {
// Always use kebab-case for option names
args.push(`--${kebabKey}=${value}`);
}
}
});
// Special handling for parent parameter (uses -p)
if (options.parent && !args.includes('-p') && !userOptions.has('parent')) {
args.push('-p', options.parent);
}
// Debug output for troubleshooting
if (process.env.DEBUG === '1') {
console.error('DEBUG - Command args:', commandArgs);
console.error('DEBUG - User options:', Array.from(userOptions));
console.error('DEBUG - Commander options:', options);
console.error('DEBUG - Final args:', args);
}
// Run the script with our processed args
runDevScript(args);
};
}
// // Special case for the 'init' command which uses a different script
// function registerInitCommand(program) {
// program
// .command('init')
// .description('Initialize a new project')
// .option('-y, --yes', 'Skip prompts and use default values')
// .option('-n, --name <name>', 'Project name')
// .option('-d, --description <description>', 'Project description')
// .option('-v, --version <version>', 'Project version')
// .option('-a, --author <author>', 'Author name')
// .option('--skip-install', 'Skip installing dependencies')
// .option('--dry-run', 'Show what would be done without making changes')
// .action((options) => {
// // Pass through any options to the init script
// const args = [
// '--yes',
// 'name',
// 'description',
// 'version',
// 'author',
// 'skip-install',
// 'dry-run'
// ]
// .filter((opt) => options[opt])
// .map((opt) => {
// if (opt === 'yes' || opt === 'skip-install' || opt === 'dry-run') {
// return `--${opt}`;
// }
// return `--${opt}=${options[opt]}`;
// });
// const child = spawn('node', [initScriptPath, ...args], {
// stdio: 'inherit',
// cwd: process.cwd()
// });
// child.on('close', (code) => {
// process.exit(code);
// });
// });
// }
// Set up the command-line interface
const program = new Command();
program
.name('task-master')
.description('Claude Task Master CLI')
.version(version)
.addHelpText('afterAll', () => {
// Use the same help display function as dev.js for consistency
displayHelp();
return ''; // Return empty string to prevent commander's default help
});
// Add custom help option to directly call our help display
program.helpOption('-h, --help', 'Display help information');
program.on('--help', () => {
displayHelp();
});
// // Add special case commands
// registerInitCommand(program);
program
.command('dev')
.description('Run the dev.js script')
.action(() => {
const args = process.argv.slice(process.argv.indexOf('dev') + 1);
runDevScript(args);
});
// Use a temporary Command instance to get all command definitions
const tempProgram = new Command();
registerCommands(tempProgram);
// For each command in the temp instance, add a modified version to our actual program
tempProgram.commands.forEach((cmd) => {
if (['dev'].includes(cmd.name())) {
// Skip commands we've already defined specially
return;
}
// Create a new command with the same name and description
const newCmd = program.command(cmd.name()).description(cmd.description());
// Copy all options
cmd.options.forEach((opt) => {
newCmd.option(opt.flags, opt.description, opt.defaultValue);
});
// Set the action to proxy to dev.js
newCmd.action(createDevScriptAction(cmd.name()));
});
// Parse the command line arguments
program.parse(process.argv);
// Add global error handling for unknown commands and options
process.on('uncaughtException', (err) => {
// Check if this is a commander.js unknown option error
if (err.code === 'commander.unknownOption') {
const option = err.message.match(/'([^']+)'/)?.[1];
const commandArg = process.argv.find(
(arg) =>
!arg.startsWith('-') &&
arg !== 'task-master' &&
!arg.includes('/') &&
arg !== 'node'
);
const command = commandArg || 'unknown';
console.error(chalk.red(`Error: Unknown option '${option}'`));
console.error(
chalk.yellow(
`Run 'task-master ${command} --help' to see available options for this command`
)
);
process.exit(1);
}
// Check if this is a commander.js unknown command error
if (err.code === 'commander.unknownCommand') {
const command = err.message.match(/'([^']+)'/)?.[1];
console.error(chalk.red(`Error: Unknown command '${command}'`));
console.error(
chalk.yellow(`Run 'task-master --help' to see available commands`)
);
process.exit(1);
}
// Handle other uncaught exceptions
console.error(chalk.red(`Error: ${err.message}`));
if (process.env.DEBUG === '1') {
console.error(err);
}
process.exit(1);
});
// Show help if no command was provided (just 'task-master' with no args)
if (process.argv.length <= 2) {
displayBanner();
displayHelp();
process.exit(0);
}
// Add exports at the end of the file
export { detectCamelCaseFlags };
// Simply run the CLI directly
runCLI();

View File

@@ -18,7 +18,17 @@ export default {
testMatch: ['**/__tests__/**/*.js', '**/?(*.)+(spec|test).js'],
// Transform files
transform: {},
preset: 'ts-jest/presets/default-esm',
extensionsToTreatAsEsm: ['.ts'],
moduleFileExtensions: ['js', 'ts', 'json', 'node'],
transform: {
'^.+\\.ts$': [
'ts-jest',
{
useESM: true
}
]
},
// Disable transformations for node_modules
transformIgnorePatterns: ['/node_modules/'],
@@ -27,6 +37,7 @@ export default {
moduleNameMapper: {
'^@/(.*)$': '<rootDir>/$1'
},
resolver: '<rootDir>/jest.resolver.cjs',
// Setup module aliases
moduleDirectories: ['node_modules', '<rootDir>'],

19
jest.resolver.cjs Normal file
View File

@@ -0,0 +1,19 @@
const { defaultResolver } = require('jest-resolve');
module.exports = function customResolver(request, options) {
const resolve = options.defaultResolver || defaultResolver;
try {
return resolve(request, options);
} catch (error) {
if (request.startsWith('.') && request.endsWith('.js')) {
try {
return resolve(request.replace(/\.js$/, '.ts'), options);
} catch (tsError) {
tsError.cause = tsError.cause ?? error;
throw tsError;
}
}
throw error;
}
};

16472
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "task-master-ai",
"version": "0.26.0-rc.1",
"version": "0.27.0-rc.1",
"description": "A task management system for ambitious AI-driven development that doesn't overwhelm and confuse Cursor.",
"main": "index.js",
"type": "module",
@@ -11,23 +11,25 @@
},
"workspaces": ["apps/*", "packages/*", "."],
"scripts": {
"build": "npm run build:packages && tsup",
"dev": "npm run build:packages && npm link && (npm run dev:packages & tsup --watch --onSuccess 'echo Build complete && npm link')",
"dev:packages": "(cd packages/tm-core && npm run dev) & (cd apps/cli && npm run dev) & wait",
"dev:core": "cd packages/tm-core && npm run dev",
"dev:cli": "cd apps/cli && npm run dev",
"build:packages": "npm run build:core && npm run build:cli",
"build:core": "cd packages/tm-core && npm run build",
"build:cli": "cd apps/cli && npm run build",
"build": "npm run build:build-config && cross-env NODE_ENV=production tsdown",
"dev": "tsdown --watch",
"turbo:dev": "turbo dev",
"turbo:build": "turbo build",
"turbo:typecheck": "turbo typecheck",
"build:build-config": "npm run build -w @tm/build-config",
"test": "node --experimental-vm-modules node_modules/.bin/jest",
"test:unit": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=unit",
"test:integration": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=integration",
"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
"test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
"test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
"test:ci": "node --experimental-vm-modules node_modules/.bin/jest --coverage --ci",
"test:e2e": "./tests/e2e/run_e2e.sh",
"test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
"postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
"changeset": "changeset",
"release": "changeset publish",
"publish-packages": "turbo run build lint test && changeset version && changeset publish",
"inspector": "npx @modelcontextprotocol/inspector node dist/mcp-server.js",
"mcp-server": "node dist/mcp-server.js",
"format-check": "biome format .",
@@ -67,7 +69,7 @@
"ajv": "^8.17.1",
"ajv-formats": "^3.0.1",
"boxen": "^8.0.1",
"chalk": "^5.4.1",
"chalk": "5.6.2",
"cli-highlight": "^2.1.11",
"cli-progress": "^3.12.0",
"cli-table3": "^0.6.5",
@@ -86,6 +88,8 @@
"jsonrepair": "^3.13.0",
"jsonwebtoken": "^9.0.2",
"lru-cache": "^10.2.0",
"marked": "^15.0.12",
"marked-terminal": "^7.3.0",
"ollama-ai-provider": "^1.2.0",
"openai": "^4.89.0",
"ora": "^8.2.0",
@@ -101,6 +105,7 @@
"engines": {
"node": ">=18.0.0"
},
"packageManager": "npm@10.9.2",
"repository": {
"type": "git",
"url": "git+https://github.com/eyaltoledano/claude-task-master.git"
@@ -115,22 +120,25 @@
"whatwg-url": "^11.0.0"
},
"devDependencies": {
"@tm/cli": "*",
"@biomejs/biome": "^1.9.4",
"@changesets/changelog-github": "^0.5.1",
"@changesets/cli": "^2.28.1",
"dotenv-mono": "^1.5.1",
"@types/jest": "^29.5.14",
"@types/marked-terminal": "^6.1.1",
"concurrently": "^9.2.1",
"cross-env": "^10.0.0",
"dotenv-mono": "^1.5.1",
"execa": "^8.0.1",
"ink": "^5.0.1",
"jest": "^29.7.0",
"jest-environment-node": "^29.7.0",
"mock-fs": "^5.5.0",
"prettier": "^3.5.3",
"supertest": "^7.1.0",
"tsup": "^8.5.0",
"ts-jest": "^29.4.2",
"tsdown": "^0.15.2",
"tsx": "^4.16.2",
"turbo": "^2.5.6",
"typescript": "^5.9.2"
}
}

View File

@@ -0,0 +1,30 @@
{
"name": "@tm/build-config",
"version": "1.0.0",
"description": "Shared build configuration for Task Master monorepo",
"type": "module",
"private": true,
"main": "./dist/tsdown.base.js",
"types": "./src/tsdown.base.ts",
"exports": {
".": {
"types": "./src/tsdown.base.ts",
"import": "./dist/tsdown.base.js"
}
},
"files": ["dist", "src"],
"keywords": ["build-config", "tsup", "monorepo"],
"author": "",
"license": "MIT",
"scripts": {
"build": "tsc",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"dotenv-mono": "^1.5.1",
"typescript": "^5.7.3"
},
"dependencies": {
"tsup": "^8.5.0"
}
}

View File

@@ -0,0 +1,46 @@
/**
* Base tsdown configuration for Task Master monorepo
* Provides shared configuration that can be extended by individual packages
*/
import type { UserConfig } from 'tsdown';
const isProduction = process.env.NODE_ENV === 'production';
const isDevelopment = !isProduction;
/**
* Environment helpers
*/
export const env = {
isProduction,
isDevelopment,
NODE_ENV: process.env.NODE_ENV || 'development'
};
/**
* Base tsdown configuration for all packages
* Since everything gets bundled into root dist/ anyway, use consistent settings
*/
export const baseConfig: Partial<UserConfig> = {
sourcemap: isDevelopment,
format: 'esm',
platform: 'node',
dts: isDevelopment,
minify: isProduction,
treeshake: isProduction,
// Keep all npm dependencies external (available via node_modules)
external: [/^[^@./]/, /^@(?!tm\/)/]
};
/**
* Utility function to merge configurations
* Simplified for tsdown usage
*/
export function mergeConfig(
base: Partial<UserConfig>,
overrides: Partial<UserConfig>
): Partial<UserConfig> {
return {
...base,
...overrides
};
}

View File

@@ -0,0 +1,21 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022"],
"module": "ESNext",
"moduleResolution": "bundler",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"baseUrl": ".",
"outDir": "dist",
"allowJs": true,
"strict": true,
"resolveJsonModule": true,
"isolatedModules": true,
"declaration": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -1,71 +1,25 @@
{
"name": "@tm/core",
"version": "1.0.0",
"version": "0.26.0",
"private": true,
"description": "Core library for Task Master - TypeScript task management system",
"type": "module",
"types": "./dist/index.d.ts",
"types": "./src/index.ts",
"main": "./dist/index.js",
"exports": {
".": {
"types": "./src/index.ts",
"import": "./dist/index.js",
"require": "./dist/index.js"
},
"./auth": {
"types": "./src/auth/index.ts",
"import": "./dist/auth/index.js",
"require": "./dist/auth/index.js"
},
"./storage": {
"types": "./src/storage/index.ts",
"import": "./dist/storage/index.js",
"require": "./dist/storage/index.js"
},
"./config": {
"types": "./src/config/index.ts",
"import": "./dist/config/index.js",
"require": "./dist/config/index.js"
},
"./providers": {
"types": "./src/providers/index.ts",
"import": "./dist/providers/index.js",
"require": "./dist/providers/index.js"
},
"./services": {
"types": "./src/services/index.ts",
"import": "./dist/services/index.js",
"require": "./dist/services/index.js"
},
"./errors": {
"types": "./src/errors/index.ts",
"import": "./dist/errors/index.js",
"require": "./dist/errors/index.js"
},
"./logger": {
"types": "./src/logger/index.ts",
"import": "./dist/logger/index.js",
"require": "./dist/logger/index.js"
},
"./types": {
"types": "./src/types/index.ts",
"import": "./dist/types/index.js",
"require": "./dist/types/index.js"
},
"./interfaces": {
"types": "./src/interfaces/index.ts",
"import": "./dist/interfaces/index.js",
"require": "./dist/interfaces/index.js"
},
"./utils": {
"types": "./src/utils/index.ts",
"import": "./dist/utils/index.js",
"require": "./dist/utils/index.js"
},
"./package.json": "./package.json"
".": "./src/index.ts",
"./auth": "./src/auth/index.ts",
"./storage": "./src/storage/index.ts",
"./config": "./src/config/index.ts",
"./providers": "./src/providers/index.ts",
"./services": "./src/services/index.ts",
"./errors": "./src/errors/index.ts",
"./logger": "./src/logger/index.ts",
"./types": "./src/types/index.ts",
"./interfaces": "./src/interfaces/index.ts",
"./utils": "./src/utils/index.ts"
},
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"test": "vitest run",
"test:watch": "vitest",
"test:coverage": "vitest run --coverage",
@@ -77,24 +31,24 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@supabase/supabase-js": "^2.57.0",
"chalk": "^5.3.0",
"@supabase/supabase-js": "^2.57.4",
"zod": "^3.22.4"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@tm/build-config": "*",
"@types/node": "^20.11.30",
"@vitest/coverage-v8": "^2.0.5",
"dotenv-mono": "^1.5.1",
"dotenv-mono": "^1.3.14",
"ts-node": "^10.9.2",
"tsup": "^8.0.2",
"tsup": "^8.5.0",
"typescript": "^5.4.3",
"vitest": "^2.0.5"
},
"engines": {
"node": ">=18.0.0"
},
"files": ["dist", "README.md", "CHANGELOG.md"],
"files": ["src", "README.md", "CHANGELOG.md"],
"keywords": ["task-management", "typescript", "ai", "prd", "parser"],
"author": "Task Master AI",
"license": "MIT"

View File

@@ -0,0 +1,150 @@
/**
* Tests for AuthManager singleton behavior
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
// Mock the logger to verify warnings (must be hoisted before SUT import)
const mockLogger = {
warn: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
error: vi.fn()
};
vi.mock('../logger/index.js', () => ({
getLogger: () => mockLogger
}));
// Spy on CredentialStore constructor to verify config propagation
const CredentialStoreSpy = vi.fn();
vi.mock('./credential-store.js', () => {
return {
CredentialStore: class {
constructor(config: any) {
CredentialStoreSpy(config);
this.getCredentials = vi.fn(() => null);
}
getCredentials() {
return null;
}
saveCredentials() {}
clearCredentials() {}
hasValidCredentials() {
return false;
}
}
};
});
// Mock OAuthService to avoid side effects
vi.mock('./oauth-service.js', () => {
return {
OAuthService: class {
constructor() {}
authenticate() {
return Promise.resolve({});
}
getAuthorizationUrl() {
return null;
}
}
};
});
// Mock SupabaseAuthClient to avoid side effects
vi.mock('../clients/supabase-client.js', () => {
return {
SupabaseAuthClient: class {
constructor() {}
refreshSession() {
return Promise.resolve({});
}
signOut() {
return Promise.resolve();
}
}
};
});
// Import SUT after mocks
import { AuthManager } from './auth-manager.js';
describe('AuthManager Singleton', () => {
beforeEach(() => {
// Reset singleton before each test
AuthManager.resetInstance();
vi.clearAllMocks();
CredentialStoreSpy.mockClear();
});
it('should return the same instance on multiple calls', () => {
const instance1 = AuthManager.getInstance();
const instance2 = AuthManager.getInstance();
expect(instance1).toBe(instance2);
});
it('should use config on first call', () => {
const config = {
baseUrl: 'https://test.auth.com',
configDir: '/test/config',
configFile: '/test/config/auth.json'
};
const instance = AuthManager.getInstance(config);
expect(instance).toBeDefined();
// Assert that CredentialStore was constructed with the provided config
expect(CredentialStoreSpy).toHaveBeenCalledTimes(1);
expect(CredentialStoreSpy).toHaveBeenCalledWith(config);
// Verify the config is passed to internal components through observable behavior
// getCredentials would look in the configured file path
const credentials = instance.getCredentials();
expect(credentials).toBeNull(); // File doesn't exist, but config was propagated correctly
});
it('should warn when config is provided after initialization', () => {
// Clear previous calls
mockLogger.warn.mockClear();
// First call with config
AuthManager.getInstance({ baseUrl: 'https://first.auth.com' });
// Second call with different config
AuthManager.getInstance({ baseUrl: 'https://second.auth.com' });
// Verify warning was logged
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.stringMatching(/config.*after initialization.*ignored/i)
);
});
it('should not warn when no config is provided after initialization', () => {
// Clear previous calls
mockLogger.warn.mockClear();
// First call with config
AuthManager.getInstance({ configDir: '/test/config' });
// Second call without config
AuthManager.getInstance();
// Verify no warning was logged
expect(mockLogger.warn).not.toHaveBeenCalled();
});
it('should allow resetting the instance', () => {
const instance1 = AuthManager.getInstance();
// Reset the instance
AuthManager.resetInstance();
// Get new instance
const instance2 = AuthManager.getInstance();
// They should be different instances
expect(instance1).not.toBe(instance2);
});
});

View File

@@ -6,25 +6,50 @@ import {
AuthCredentials,
OAuthFlowOptions,
AuthenticationError,
AuthConfig
} from './types';
import { CredentialStore } from './credential-store';
import { OAuthService } from './oauth-service';
import { SupabaseAuthClient } from '../clients/supabase-client';
AuthConfig,
UserContext
} from './types.js';
import { CredentialStore } from './credential-store.js';
import { OAuthService } from './oauth-service.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
import {
OrganizationService,
type Organization,
type Brief,
type RemoteTask
} from '../services/organization.service.js';
import { getLogger } from '../logger/index.js';
/**
* Authentication manager class
*/
export class AuthManager {
private static instance: AuthManager;
private static instance: AuthManager | null = null;
private credentialStore: CredentialStore;
private oauthService: OAuthService;
private supabaseClient: SupabaseAuthClient;
private organizationService?: OrganizationService;
private constructor(config?: Partial<AuthConfig>) {
this.credentialStore = new CredentialStore(config);
this.credentialStore = CredentialStore.getInstance(config);
this.supabaseClient = new SupabaseAuthClient();
this.oauthService = new OAuthService(this.credentialStore, config);
// Initialize Supabase client with session restoration
this.initializeSupabaseSession();
}
/**
* Initialize Supabase session from stored credentials
*/
private async initializeSupabaseSession(): Promise<void> {
try {
await this.supabaseClient.initialize();
} catch (error) {
// Log but don't throw - session might not exist yet
const logger = getLogger('AuthManager');
logger.debug('No existing session to restore');
}
}
/**
@@ -33,10 +58,24 @@ export class AuthManager {
static getInstance(config?: Partial<AuthConfig>): AuthManager {
if (!AuthManager.instance) {
AuthManager.instance = new AuthManager(config);
} else if (config) {
// Warn if config is provided after initialization
const logger = getLogger('AuthManager');
logger.warn(
'getInstance called with config after initialization; config is ignored.'
);
}
return AuthManager.instance;
}
/**
* Reset the singleton instance (useful for testing)
*/
static resetInstance(): void {
AuthManager.instance = null;
CredentialStore.resetInstance();
}
/**
* Get stored authentication credentials
*/
@@ -61,62 +100,48 @@ export class AuthManager {
}
/**
* Authenticate with API key
* Note: This would require a custom implementation or Supabase RLS policies
*/
async authenticateWithApiKey(apiKey: string): Promise<AuthCredentials> {
const token = apiKey.trim();
if (!token || token.length < 10) {
throw new AuthenticationError('Invalid API key', 'INVALID_API_KEY');
}
const authData: AuthCredentials = {
token,
tokenType: 'api_key',
userId: 'api-user',
email: undefined,
expiresAt: undefined, // API keys don't expire
savedAt: new Date().toISOString()
};
this.credentialStore.saveCredentials(authData);
return authData;
}
/**
* Refresh authentication token
* Refresh authentication token using Supabase session
*/
async refreshToken(): Promise<AuthCredentials> {
const authData = this.credentialStore.getCredentials({
allowExpired: true
});
if (!authData || !authData.refreshToken) {
throw new AuthenticationError(
'No refresh token available',
'NO_REFRESH_TOKEN'
);
}
try {
// Use Supabase client to refresh the token
const response = await this.supabaseClient.refreshSession(
authData.refreshToken
);
// Use Supabase's built-in session refresh
const session = await this.supabaseClient.refreshSession();
// Update authentication data
if (!session) {
throw new AuthenticationError(
'Failed to refresh session',
'REFRESH_FAILED'
);
}
// Get existing credentials to preserve context
const existingCredentials = this.credentialStore.getCredentials({
allowExpired: true
});
// Update authentication data from session
const newAuthData: AuthCredentials = {
...authData,
token: response.token,
refreshToken: response.refreshToken,
expiresAt: response.expiresAt,
savedAt: new Date().toISOString()
token: session.access_token,
refreshToken: session.refresh_token,
userId: session.user.id,
email: session.user.email,
expiresAt: session.expires_at
? new Date(session.expires_at * 1000).toISOString()
: undefined,
savedAt: new Date().toISOString(),
selectedContext: existingCredentials?.selectedContext
};
this.credentialStore.saveCredentials(newAuthData);
return newAuthData;
} catch (error) {
throw error;
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Token refresh failed: ${(error as Error).message}`,
'REFRESH_FAILED'
);
}
}
@@ -129,7 +154,7 @@ export class AuthManager {
await this.supabaseClient.signOut();
} catch (error) {
// Log but don't throw - we still want to clear local credentials
console.warn('Failed to sign out from Supabase:', error);
getLogger('AuthManager').warn('Failed to sign out from Supabase:', error);
}
// Always clear local credentials (removes auth.json file)
@@ -144,20 +169,112 @@ export class AuthManager {
}
/**
* Get authorization headers
* Get the current user context (org/brief selection)
*/
getAuthHeaders(): Record<string, string> {
const authData = this.getCredentials();
getContext(): UserContext | null {
const credentials = this.getCredentials();
return credentials?.selectedContext || null;
}
if (!authData) {
throw new AuthenticationError(
'Not authenticated. Please authenticate first.',
'NOT_AUTHENTICATED'
);
/**
* Update the user context (org/brief selection)
*/
async updateContext(context: Partial<UserContext>): Promise<void> {
const credentials = this.getCredentials();
if (!credentials) {
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
}
return {
Authorization: `Bearer ${authData.token}`
// Merge with existing context
const existingContext = credentials.selectedContext || {};
const newContext: UserContext = {
...existingContext,
...context,
updatedAt: new Date().toISOString()
};
// Save updated credentials with new context
const updatedCredentials: AuthCredentials = {
...credentials,
selectedContext: newContext
};
this.credentialStore.saveCredentials(updatedCredentials);
}
/**
* Clear the user context
*/
async clearContext(): Promise<void> {
const credentials = this.getCredentials();
if (!credentials) {
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
}
// Remove context from credentials
const { selectedContext, ...credentialsWithoutContext } = credentials;
this.credentialStore.saveCredentials(credentialsWithoutContext);
}
/**
* Get the organization service instance
* Uses the Supabase client with the current session or token
*/
private async getOrganizationService(): Promise<OrganizationService> {
if (!this.organizationService) {
// First check if we have credentials with a token
const credentials = this.getCredentials();
if (!credentials || !credentials.token) {
throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
}
// Initialize session if needed (this will load from our storage adapter)
await this.supabaseClient.initialize();
// Use the SupabaseAuthClient which now has the session
const supabaseClient = this.supabaseClient.getClient();
this.organizationService = new OrganizationService(supabaseClient as any);
}
return this.organizationService;
}
/**
* Get all organizations for the authenticated user
*/
async getOrganizations(): Promise<Organization[]> {
const service = await this.getOrganizationService();
return service.getOrganizations();
}
/**
* Get all briefs for a specific organization
*/
async getBriefs(orgId: string): Promise<Brief[]> {
const service = await this.getOrganizationService();
return service.getBriefs(orgId);
}
/**
* Get a specific organization by ID
*/
async getOrganization(orgId: string): Promise<Organization | null> {
const service = await this.getOrganizationService();
return service.getOrganization(orgId);
}
/**
* Get a specific brief by ID
*/
async getBrief(briefId: string): Promise<Brief | null> {
const service = await this.getOrganizationService();
return service.getBrief(briefId);
}
/**
* Get all tasks for a specific brief
*/
async getTasks(briefId: string): Promise<RemoteTask[]> {
const service = await this.getOrganizationService();
return service.getTasks(briefId);
}
}

View File

@@ -4,7 +4,7 @@
import os from 'os';
import path from 'path';
import { AuthConfig } from './types';
import { AuthConfig } from './types.js';
// Single base domain for all URLs
// Build-time: process.env.TM_PUBLIC_BASE_DOMAIN gets replaced by tsup's env option

View File

@@ -0,0 +1,575 @@
/**
* Tests for CredentialStore with numeric and string timestamp handling
*/
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
import { CredentialStore } from './credential-store.js';
import { AuthenticationError } from './types.js';
import type { AuthCredentials } from './types.js';
import fs from 'fs';
import path from 'path';
import os from 'os';
// Mock fs module
vi.mock('fs');
// Mock logger
const mockLogger = {
warn: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
error: vi.fn()
};
vi.mock('../logger/index.js', () => ({
getLogger: () => mockLogger
}));
describe('CredentialStore', () => {
let store: CredentialStore;
const testDir = '/test/config';
const configFile = '/test/config/auth.json';
beforeEach(() => {
vi.clearAllMocks();
store = new CredentialStore({
configDir: testDir,
configFile: configFile,
baseUrl: 'https://api.test.com'
});
});
afterEach(() => {
vi.restoreAllMocks();
});
describe('getCredentials with timestamp migration', () => {
it('should handle string ISO timestamp correctly', () => {
const futureDate = new Date(Date.now() + 3600000); // 1 hour from now
const mockCredentials: AuthCredentials = {
token: 'test-token',
userId: 'user-123',
email: 'test@example.com',
expiresAt: futureDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).not.toBeNull();
expect(result?.token).toBe('test-token');
// The timestamp should be normalized to numeric milliseconds
expect(typeof result?.expiresAt).toBe('number');
expect(result?.expiresAt).toBe(futureDate.getTime());
});
it('should handle numeric timestamp correctly', () => {
const futureTimestamp = Date.now() + 7200000; // 2 hours from now
const mockCredentials = {
token: 'test-token',
userId: 'user-456',
email: 'test2@example.com',
expiresAt: futureTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).not.toBeNull();
expect(result?.token).toBe('test-token');
// Numeric timestamp should remain as-is
expect(typeof result?.expiresAt).toBe('number');
expect(result?.expiresAt).toBe(futureTimestamp);
});
it('should reject invalid string timestamp', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-789',
expiresAt: 'invalid-date-string',
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should reject NaN timestamp', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-nan',
expiresAt: NaN,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should reject Infinity timestamp', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-inf',
expiresAt: Infinity,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should handle missing expiresAt field', () => {
const mockCredentials = {
token: 'test-token',
userId: 'user-no-expiry',
tokenType: 'standard',
savedAt: new Date().toISOString()
// No expiresAt field
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should check token expiration correctly', () => {
const expiredTimestamp = Date.now() - 3600000; // 1 hour ago
const mockCredentials = {
token: 'expired-token',
userId: 'user-expired',
expiresAt: expiredTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('Authentication token has expired'),
expect.any(Object)
);
});
it('should allow expired tokens when requested', () => {
const expiredTimestamp = Date.now() - 3600000; // 1 hour ago
const mockCredentials = {
token: 'expired-token',
userId: 'user-expired',
expiresAt: expiredTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify(mockCredentials)
);
const result = store.getCredentials({ allowExpired: true });
expect(result).not.toBeNull();
expect(result?.token).toBe('expired-token');
});
});
describe('saveCredentials with timestamp normalization', () => {
beforeEach(() => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.mkdirSync).mockImplementation(() => undefined);
vi.mocked(fs.writeFileSync).mockImplementation(() => undefined);
vi.mocked(fs.renameSync).mockImplementation(() => undefined);
});
it('should normalize string timestamp to ISO string when saving', () => {
const futureDate = new Date(Date.now() + 3600000);
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-123',
expiresAt: futureDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
store.saveCredentials(credentials);
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('.tmp'),
expect.stringContaining('"expiresAt":'),
expect.any(Object)
);
// Check that the written data contains a valid ISO string
const writtenData = vi.mocked(fs.writeFileSync).mock
.calls[0][1] as string;
const parsed = JSON.parse(writtenData);
expect(typeof parsed.expiresAt).toBe('string');
expect(new Date(parsed.expiresAt).toISOString()).toBe(parsed.expiresAt);
});
it('should convert numeric timestamp to ISO string when saving', () => {
const futureTimestamp = Date.now() + 7200000;
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-456',
expiresAt: futureTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
store.saveCredentials(credentials);
const writtenData = vi.mocked(fs.writeFileSync).mock
.calls[0][1] as string;
const parsed = JSON.parse(writtenData);
expect(typeof parsed.expiresAt).toBe('string');
expect(new Date(parsed.expiresAt).getTime()).toBe(futureTimestamp);
});
it('should reject invalid string timestamp when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-789',
expiresAt: 'invalid-date' as any,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
let err: unknown;
try {
store.saveCredentials(credentials);
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Invalid expiresAt format');
});
it('should reject NaN timestamp when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-nan',
expiresAt: NaN as any,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
let err: unknown;
try {
store.saveCredentials(credentials);
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Invalid expiresAt format');
});
it('should reject Infinity timestamp when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-inf',
expiresAt: Infinity as any,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
let err: unknown;
try {
store.saveCredentials(credentials);
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Invalid expiresAt format');
});
it('should handle missing expiresAt when saving', () => {
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-no-expiry',
tokenType: 'standard',
savedAt: new Date().toISOString()
// No expiresAt
};
store.saveCredentials(credentials);
const writtenData = vi.mocked(fs.writeFileSync).mock
.calls[0][1] as string;
const parsed = JSON.parse(writtenData);
expect(parsed.expiresAt).toBeUndefined();
});
it('should not mutate the original credentials object', () => {
const originalTimestamp = Date.now() + 3600000;
const credentials: AuthCredentials = {
token: 'test-token',
userId: 'user-123',
expiresAt: originalTimestamp,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
const originalCredentialsCopy = { ...credentials };
store.saveCredentials(credentials);
// Original object should not be modified
expect(credentials).toEqual(originalCredentialsCopy);
expect(credentials.expiresAt).toBe(originalTimestamp);
});
});
describe('corrupt file handling', () => {
it('should quarantine corrupt file on JSON parse error', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('invalid json {');
vi.mocked(fs.renameSync).mockImplementation(() => undefined);
const result = store.getCredentials();
expect(result).toBeNull();
expect(fs.renameSync).toHaveBeenCalledWith(
configFile,
expect.stringContaining('.corrupt-')
);
expect(mockLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('Quarantined corrupt auth file')
);
});
it('should handle quarantine failure gracefully', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('invalid json {');
vi.mocked(fs.renameSync).mockImplementation(() => {
throw new Error('Permission denied');
});
const result = store.getCredentials();
expect(result).toBeNull();
expect(mockLogger.debug).toHaveBeenCalledWith(
expect.stringContaining('Could not quarantine corrupt file')
);
});
});
describe('clearCredentials', () => {
it('should delete the auth file when it exists', () => {
// Mock file exists
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.unlinkSync).mockImplementation(() => undefined);
store.clearCredentials();
expect(fs.existsSync).toHaveBeenCalledWith('/test/config/auth.json');
expect(fs.unlinkSync).toHaveBeenCalledWith('/test/config/auth.json');
});
it('should not throw when auth file does not exist', () => {
// Mock file does not exist
vi.mocked(fs.existsSync).mockReturnValue(false);
// Should not throw
expect(() => store.clearCredentials()).not.toThrow();
// Should not try to unlink non-existent file
expect(fs.unlinkSync).not.toHaveBeenCalled();
});
it('should throw AuthenticationError when unlink fails', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.unlinkSync).mockImplementation(() => {
throw new Error('Permission denied');
});
let err: unknown;
try {
store.clearCredentials();
} catch (e) {
err = e;
}
expect(err).toBeInstanceOf(AuthenticationError);
expect((err as Error).message).toContain('Failed to clear credentials');
expect((err as Error).message).toContain('Permission denied');
});
});
describe('hasValidCredentials', () => {
it('should return true when valid unexpired credentials exist', () => {
const futureDate = new Date(Date.now() + 3600000); // 1 hour from now
const credentials = {
token: 'valid-token',
userId: 'user-123',
expiresAt: futureDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(credentials));
expect(store.hasValidCredentials()).toBe(true);
});
it('should return false when credentials are expired', () => {
const pastDate = new Date(Date.now() - 3600000); // 1 hour ago
const credentials = {
token: 'expired-token',
userId: 'user-123',
expiresAt: pastDate.toISOString(),
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(credentials));
expect(store.hasValidCredentials()).toBe(false);
});
it('should return false when no credentials exist', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
expect(store.hasValidCredentials()).toBe(false);
});
it('should return false when file contains invalid JSON', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('invalid json {');
vi.mocked(fs.renameSync).mockImplementation(() => undefined);
expect(store.hasValidCredentials()).toBe(false);
});
it('should return false for credentials without expiry', () => {
const credentials = {
token: 'no-expiry-token',
userId: 'user-123',
tokenType: 'standard',
savedAt: new Date().toISOString()
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(credentials));
// Credentials without expiry are considered invalid
expect(store.hasValidCredentials()).toBe(false);
// Should log warning about missing expiration
expect(mockLogger.warn).toHaveBeenCalledWith(
'No valid expiration time provided for token'
);
});
it('should use allowExpired=false by default', () => {
// Spy on getCredentials to verify it's called with correct params
const getCredentialsSpy = vi.spyOn(store, 'getCredentials');
vi.mocked(fs.existsSync).mockReturnValue(false);
store.hasValidCredentials();
expect(getCredentialsSpy).toHaveBeenCalledWith({ allowExpired: false });
});
});
describe('cleanupCorruptFiles', () => {
it('should remove old corrupt files', () => {
const now = Date.now();
const oldFile = 'auth.json.corrupt-' + (now - 8 * 24 * 60 * 60 * 1000); // 8 days old
const newFile = 'auth.json.corrupt-' + (now - 1000); // 1 second old
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readdirSync).mockReturnValue([
{ name: oldFile, isFile: () => true },
{ name: newFile, isFile: () => true },
{ name: 'auth.json', isFile: () => true }
] as any);
vi.mocked(fs.statSync).mockImplementation((filePath) => {
if (filePath.includes(oldFile)) {
return { mtimeMs: now - 8 * 24 * 60 * 60 * 1000 } as any;
}
return { mtimeMs: now - 1000 } as any;
});
vi.mocked(fs.unlinkSync).mockImplementation(() => undefined);
store.cleanupCorruptFiles();
expect(fs.unlinkSync).toHaveBeenCalledWith(
expect.stringContaining(oldFile)
);
expect(fs.unlinkSync).not.toHaveBeenCalledWith(
expect.stringContaining(newFile)
);
});
it('should handle cleanup errors gracefully', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readdirSync).mockImplementation(() => {
throw new Error('Permission denied');
});
// Should not throw
expect(() => store.cleanupCorruptFiles()).not.toThrow();
expect(mockLogger.debug).toHaveBeenCalledWith(
expect.stringContaining('Error during corrupt file cleanup')
);
});
});
});

View File

@@ -3,20 +3,58 @@
*/
import fs from 'fs';
import { AuthCredentials, AuthenticationError, AuthConfig } from './types';
import { getAuthConfig } from './config';
import { getLogger } from '../logger';
import path from 'path';
import { AuthCredentials, AuthenticationError, AuthConfig } from './types.js';
import { getAuthConfig } from './config.js';
import { getLogger } from '../logger/index.js';
/**
* CredentialStore manages the persistence and retrieval of authentication credentials.
*
* Runtime vs Persisted Shape:
* - When retrieved (getCredentials): expiresAt is normalized to number (milliseconds since epoch)
* - When persisted (saveCredentials): expiresAt is stored as ISO string for readability
*
* This normalization ensures consistent runtime behavior while maintaining
* human-readable persisted format in the auth.json file.
*/
export class CredentialStore {
private static instance: CredentialStore | null = null;
private logger = getLogger('CredentialStore');
private config: AuthConfig;
// Clock skew tolerance for expiry checks (30 seconds)
private readonly CLOCK_SKEW_MS = 30_000;
constructor(config?: Partial<AuthConfig>) {
private constructor(config?: Partial<AuthConfig>) {
this.config = getAuthConfig(config);
}
/**
* Get the singleton instance of CredentialStore
*/
static getInstance(config?: Partial<AuthConfig>): CredentialStore {
if (!CredentialStore.instance) {
CredentialStore.instance = new CredentialStore(config);
} else if (config) {
// Warn if config is provided after initialization
const logger = getLogger('CredentialStore');
logger.warn(
'getInstance called with config after initialization; config is ignored.'
);
}
return CredentialStore.instance;
}
/**
* Reset the singleton instance (useful for testing)
*/
static resetInstance(): void {
CredentialStore.instance = null;
}
/**
* Get stored authentication credentials
* @returns AuthCredentials with expiresAt as number (milliseconds) for runtime use
*/
getCredentials(options?: { allowExpired?: boolean }): AuthCredentials | null {
try {
@@ -28,27 +66,71 @@ export class CredentialStore {
fs.readFileSync(this.config.configFile, 'utf-8')
) as AuthCredentials;
// Check if token is expired
if (
authData.expiresAt &&
new Date(authData.expiresAt) < new Date() &&
!options?.allowExpired
) {
this.logger.warn('Authentication token has expired');
// Normalize/migrate timestamps to numeric (handles both number and ISO string)
let expiresAtMs: number | undefined;
if (typeof authData.expiresAt === 'number') {
expiresAtMs = Number.isFinite(authData.expiresAt)
? authData.expiresAt
: undefined;
} else if (typeof authData.expiresAt === 'string') {
const parsed = Date.parse(authData.expiresAt);
expiresAtMs = Number.isNaN(parsed) ? undefined : parsed;
} else {
expiresAtMs = undefined;
}
// Validate expiration time for tokens
if (expiresAtMs === undefined) {
this.logger.warn('No valid expiration time provided for token');
return null;
}
// Update the authData with normalized timestamp
authData.expiresAt = expiresAtMs;
// Check if the token has expired (with clock skew tolerance)
const now = Date.now();
const allowExpired = options?.allowExpired ?? false;
if (now >= expiresAtMs - this.CLOCK_SKEW_MS && !allowExpired) {
this.logger.warn(
'Authentication token has expired or is about to expire',
{
expiresAt: authData.expiresAt,
currentTime: new Date(now).toISOString(),
skewWindow: `${this.CLOCK_SKEW_MS / 1000}s`
}
);
return null;
}
// Return valid token
return authData;
} catch (error) {
this.logger.error(
`Failed to read auth credentials: ${(error as Error).message}`
);
// Quarantine corrupt file to prevent repeated errors
try {
if (fs.existsSync(this.config.configFile)) {
const corruptFile = `${this.config.configFile}.corrupt-${Date.now()}-${process.pid}-${Math.random().toString(36).slice(2, 8)}`;
fs.renameSync(this.config.configFile, corruptFile);
this.logger.warn(`Quarantined corrupt auth file to: ${corruptFile}`);
}
} catch (quarantineError) {
// If we can't quarantine, log but don't throw
this.logger.debug(
`Could not quarantine corrupt file: ${(quarantineError as Error).message}`
);
}
return null;
}
}
/**
* Save authentication credentials
* @param authData - Credentials with expiresAt as number or string (will be persisted as ISO string)
*/
saveCredentials(authData: AuthCredentials): void {
try {
@@ -57,8 +139,32 @@ export class CredentialStore {
fs.mkdirSync(this.config.configDir, { recursive: true, mode: 0o700 });
}
// Add timestamp
authData.savedAt = new Date().toISOString();
// Add timestamp without mutating caller's object
authData = { ...authData, savedAt: new Date().toISOString() };
// Validate and normalize expiresAt timestamp
if (authData.expiresAt !== undefined) {
let validTimestamp: number | undefined;
if (typeof authData.expiresAt === 'number') {
validTimestamp = Number.isFinite(authData.expiresAt)
? authData.expiresAt
: undefined;
} else if (typeof authData.expiresAt === 'string') {
const parsed = Date.parse(authData.expiresAt);
validTimestamp = Number.isNaN(parsed) ? undefined : parsed;
}
if (validTimestamp === undefined) {
throw new AuthenticationError(
`Invalid expiresAt format: ${authData.expiresAt}`,
'SAVE_FAILED'
);
}
// Store as ISO string for consistency
authData.expiresAt = new Date(validTimestamp).toISOString();
}
// Save credentials atomically with secure permissions
const tempFile = `${this.config.configFile}.tmp`;
@@ -106,4 +212,54 @@ export class CredentialStore {
getConfig(): AuthConfig {
return { ...this.config };
}
/**
* Clean up old corrupt auth files
* Removes corrupt files older than the specified age
*/
cleanupCorruptFiles(maxAgeMs: number = 7 * 24 * 60 * 60 * 1000): void {
try {
const dir = path.dirname(this.config.configFile);
const baseName = path.basename(this.config.configFile);
const prefix = `${baseName}.corrupt-`;
if (!fs.existsSync(dir)) {
return;
}
const entries = fs.readdirSync(dir, { withFileTypes: true });
const now = Date.now();
for (const entry of entries) {
if (!entry.isFile()) continue;
const file = entry.name;
// Check if file matches pattern: baseName.corrupt-{timestamp}
if (!file.startsWith(prefix)) continue;
const suffix = file.slice(prefix.length);
if (!/^\d+$/.test(suffix)) continue; // Fixed regex, not from variable input
const filePath = path.join(dir, file);
try {
const stats = fs.statSync(filePath);
const age = now - stats.mtimeMs;
if (age > maxAgeMs) {
fs.unlinkSync(filePath);
this.logger.debug(`Cleaned up old corrupt file: ${file}`);
}
} catch (error) {
// Ignore errors for individual file cleanup
this.logger.debug(
`Could not clean up corrupt file ${file}: ${(error as Error).message}`
);
}
}
} catch (error) {
// Log but don't throw - this is a cleanup operation
this.logger.debug(
`Error during corrupt file cleanup: ${(error as Error).message}`
);
}
}
}

View File

@@ -2,20 +2,27 @@
* Authentication module exports
*/
export { AuthManager } from './auth-manager';
export { CredentialStore } from './credential-store';
export { OAuthService } from './oauth-service';
export { AuthManager } from './auth-manager.js';
export { CredentialStore } from './credential-store.js';
export { OAuthService } from './oauth-service.js';
export { SupabaseSessionStorage } from './supabase-session-storage.js';
export type {
Organization,
Brief,
RemoteTask
} from '../services/organization.service.js';
export type {
AuthCredentials,
OAuthFlowOptions,
AuthConfig,
CliData
} from './types';
CliData,
UserContext
} from './types.js';
export { AuthenticationError } from './types';
export { AuthenticationError } from './types.js';
export {
DEFAULT_AUTH_CONFIG,
getAuthConfig
} from './config';
} from './config.js';

View File

@@ -12,11 +12,11 @@ import {
OAuthFlowOptions,
AuthConfig,
CliData
} from './types';
import { CredentialStore } from './credential-store';
import { SupabaseAuthClient } from '../clients/supabase-client';
import { getAuthConfig } from './config';
import { getLogger } from '../logger';
} from './types.js';
import { CredentialStore } from './credential-store.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
import { getAuthConfig } from './config.js';
import { getLogger } from '../logger/index.js';
import packageJson from '../../../../package.json' with { type: 'json' };
export class OAuthService {
@@ -181,8 +181,8 @@ export class OAuthService {
timestamp: Date.now()
};
// Build authorization URL for web app sign-in page
const authUrl = new URL(`${this.baseUrl}/auth/sign-in`);
// Build authorization URL for CLI-specific sign-in page
const authUrl = new URL(`${this.baseUrl}/auth/cli/sign-in`);
// Encode CLI data as base64
const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
@@ -272,7 +272,49 @@ export class OAuthService {
return;
}
// Handle direct token response from server
// Handle authorization code for PKCE flow
const code = url.searchParams.get('code');
if (code && type === 'pkce_callback') {
try {
this.logger.info('Received authorization code for PKCE flow');
// Exchange code for session using PKCE
const session = await this.supabaseClient.exchangeCodeForSession(code);
// Save authentication data
const authData: AuthCredentials = {
token: session.access_token,
refreshToken: session.refresh_token,
userId: session.user.id,
email: session.user.email,
expiresAt: session.expires_at
? new Date(session.expires_at * 1000).toISOString()
: undefined,
tokenType: 'standard',
savedAt: new Date().toISOString()
};
this.credentialStore.saveCredentials(authData);
if (server.listening) {
server.close();
}
// Clear timeout since authentication succeeded
if (timeoutId) {
clearTimeout(timeoutId);
}
resolve(authData);
return;
} catch (error) {
if (server.listening) {
server.close();
}
reject(error);
return;
}
}
// Handle direct token response from server (legacy flow)
if (
accessToken &&
(type === 'oauth_success' || type === 'session_transfer')
@@ -280,8 +322,23 @@ export class OAuthService {
try {
this.logger.info(`Received tokens via ${type}`);
// Get user info using the access token if possible
const user = await this.supabaseClient.getUser(accessToken);
// Create a session with the tokens and set it in Supabase client
const session = {
access_token: accessToken,
refresh_token: refreshToken || '',
expires_at: expiresIn
? Math.floor(Date.now() / 1000) + parseInt(expiresIn)
: undefined,
expires_in: expiresIn ? parseInt(expiresIn) : undefined,
token_type: 'bearer',
user: null as any // Will be populated by setSession
};
// Set the session in Supabase client
await this.supabaseClient.setSession(session as any);
// Get user info from the session
const user = await this.supabaseClient.getUser();
// Calculate expiration time
const expiresAt = expiresIn

View File

@@ -0,0 +1,155 @@
/**
* Custom storage adapter for Supabase Auth sessions in CLI environment
* Implements the SupportedStorage interface required by Supabase Auth
*
* This adapter bridges Supabase's session management with our existing
* auth.json credential storage, maintaining backward compatibility
*/
import { SupportedStorage } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store.js';
import { AuthCredentials } from './types.js';
import { getLogger } from '../logger/index.js';
const STORAGE_KEY = 'sb-taskmaster-auth-token';
export class SupabaseSessionStorage implements SupportedStorage {
private store: CredentialStore;
private logger = getLogger('SupabaseSessionStorage');
constructor(store: CredentialStore) {
this.store = store;
}
/**
* Build a Supabase session object from our credentials
*/
private buildSessionFromCredentials(credentials: AuthCredentials): any {
// Create a session object that Supabase expects
const session = {
access_token: credentials.token,
refresh_token: credentials.refreshToken || '',
expires_at: credentials.expiresAt
? Math.floor(new Date(credentials.expiresAt).getTime() / 1000)
: Math.floor(Date.now() / 1000) + 3600, // Default to 1 hour
token_type: 'bearer',
user: {
id: credentials.userId,
email: credentials.email || '',
aud: 'authenticated',
role: 'authenticated',
email_confirmed_at: new Date().toISOString(),
app_metadata: {},
user_metadata: {},
created_at: new Date().toISOString(),
updated_at: new Date().toISOString()
}
};
return session;
}
/**
* Parse a Supabase session back to our credentials
*/
private parseSessionToCredentials(
sessionData: any
): Partial<AuthCredentials> {
try {
const session = JSON.parse(sessionData);
return {
token: session.access_token,
refreshToken: session.refresh_token,
userId: session.user?.id || 'unknown',
email: session.user?.email,
expiresAt: session.expires_at
? new Date(session.expires_at * 1000).toISOString()
: undefined
};
} catch (error) {
this.logger.error('Error parsing session:', error);
return {};
}
}
/**
* Get item from storage - Supabase will request the session with a specific key
*/
getItem(key: string): string | null {
// Supabase uses a specific key pattern for sessions
if (key === STORAGE_KEY || key.includes('auth-token')) {
try {
const credentials = this.store.getCredentials({ allowExpired: true });
if (credentials && credentials.token) {
// Build and return a session object from our stored credentials
const session = this.buildSessionFromCredentials(credentials);
return JSON.stringify(session);
}
} catch (error) {
this.logger.error('Error getting session:', error);
}
}
return null;
}
/**
* Set item in storage - Supabase will store the session with a specific key
*/
setItem(key: string, value: string): void {
// Only handle Supabase session keys
if (key === STORAGE_KEY || key.includes('auth-token')) {
try {
// Parse the session and update our credentials
const sessionUpdates = this.parseSessionToCredentials(value);
const existingCredentials = this.store.getCredentials({
allowExpired: true
});
if (sessionUpdates.token) {
const updatedCredentials: AuthCredentials = {
...existingCredentials,
...sessionUpdates,
savedAt: new Date().toISOString(),
selectedContext: existingCredentials?.selectedContext
} as AuthCredentials;
this.store.saveCredentials(updatedCredentials);
}
} catch (error) {
this.logger.error('Error setting session:', error);
}
}
}
/**
* Remove item from storage - Called when signing out
*/
removeItem(key: string): void {
if (key === STORAGE_KEY || key.includes('auth-token')) {
// Don't actually remove credentials, just clear the tokens
// This preserves other data like selectedContext
try {
const credentials = this.store.getCredentials({ allowExpired: true });
if (credentials) {
// Keep context but clear auth tokens
const clearedCredentials: AuthCredentials = {
...credentials,
token: '',
refreshToken: undefined,
expiresAt: undefined
} as AuthCredentials;
this.store.saveCredentials(clearedCredentials);
}
} catch (error) {
this.logger.error('Error removing session:', error);
}
}
}
/**
* Clear all session data
*/
clear(): void {
// Clear auth tokens but preserve context
this.removeItem(STORAGE_KEY);
}
}

View File

@@ -7,9 +7,18 @@ export interface AuthCredentials {
refreshToken?: string;
userId: string;
email?: string;
expiresAt?: string;
tokenType?: 'standard' | 'api_key';
expiresAt?: string | number;
tokenType?: 'standard';
savedAt: string;
selectedContext?: UserContext;
}
export interface UserContext {
orgId?: string;
orgName?: string;
briefId?: string;
briefName?: string;
updatedAt: string;
}
export interface OAuthFlowOptions {
@@ -57,7 +66,6 @@ export type AuthErrorCode =
| 'INVALID_STATE'
| 'NO_TOKEN'
| 'TOKEN_EXCHANGE_FAILED'
| 'INVALID_API_KEY'
| 'INVALID_CREDENTIALS'
| 'NO_REFRESH_TOKEN'
| 'NOT_AUTHENTICATED'
@@ -65,7 +73,14 @@ export type AuthErrorCode =
| 'CONFIG_MISSING'
| 'SAVE_FAILED'
| 'CLEAR_FAILED'
| 'STORAGE_ERROR';
| 'STORAGE_ERROR'
| 'NOT_SUPPORTED'
| 'REFRESH_FAILED'
| 'INVALID_RESPONSE'
| 'PKCE_INIT_FAILED'
| 'PKCE_FAILED'
| 'CODE_EXCHANGE_FAILED'
| 'SESSION_SET_FAILED';
/**
* Authentication error class

View File

@@ -2,4 +2,4 @@
* Client exports
*/
export { SupabaseAuthClient } from './supabase-client';
export { SupabaseAuthClient } from './supabase-client.js';

View File

@@ -1,19 +1,32 @@
/**
* Supabase client for authentication
* Supabase authentication client for CLI auth flows
*/
import { createClient, SupabaseClient, User } from '@supabase/supabase-js';
import { AuthenticationError } from '../auth/types';
import { getLogger } from '../logger';
import {
createClient,
SupabaseClient as SupabaseJSClient,
User,
Session
} from '@supabase/supabase-js';
import { AuthenticationError } from '../auth/types.js';
import { getLogger } from '../logger/index.js';
import { SupabaseSessionStorage } from '../auth/supabase-session-storage.js';
import { CredentialStore } from '../auth/credential-store.js';
export class SupabaseAuthClient {
private client: SupabaseClient | null = null;
private client: SupabaseJSClient | null = null;
private sessionStorage: SupabaseSessionStorage;
private logger = getLogger('SupabaseAuthClient');
constructor() {
const credentialStore = CredentialStore.getInstance();
this.sessionStorage = new SupabaseSessionStorage(credentialStore);
}
/**
* Initialize Supabase client
* Get Supabase client with proper session management
*/
private getClient(): SupabaseClient {
getClient(): SupabaseJSClient {
if (!this.client) {
// Get Supabase configuration from environment - using TM_PUBLIC prefix
const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
@@ -26,10 +39,12 @@ export class SupabaseAuthClient {
);
}
// Create client with custom storage adapter (similar to React Native AsyncStorage)
this.client = createClient(supabaseUrl, supabaseAnonKey, {
auth: {
storage: this.sessionStorage,
autoRefreshToken: true,
persistSession: false, // We handle persistence ourselves
persistSession: true,
detectSessionInUrl: false
}
});
@@ -39,40 +54,159 @@ export class SupabaseAuthClient {
}
/**
* Note: Code exchange is now handled server-side
* The server returns tokens directly to avoid PKCE issues
* This method is kept for potential future use
* Initialize the client and restore session if available
*/
async exchangeCodeForSession(_code: string): Promise<{
token: string;
refreshToken?: string;
userId: string;
email?: string;
expiresAt?: string;
}> {
throw new AuthenticationError(
'Code exchange is handled server-side. CLI receives tokens directly.',
'NOT_SUPPORTED'
);
async initialize(): Promise<Session | null> {
const client = this.getClient();
try {
// Get the current session from storage
const {
data: { session },
error
} = await client.auth.getSession();
if (error) {
this.logger.warn('Failed to restore session:', error);
return null;
}
if (session) {
this.logger.info('Session restored successfully');
}
return session;
} catch (error) {
this.logger.error('Error initializing session:', error);
return null;
}
}
/**
* Refresh an access token
* Sign in with PKCE flow (for CLI auth)
*/
async refreshSession(refreshToken: string): Promise<{
token: string;
refreshToken?: string;
expiresAt?: string;
}> {
try {
const client = this.getClient();
async signInWithPKCE(): Promise<{ url: string; codeVerifier: string }> {
const client = this.getClient();
try {
// Generate PKCE challenge
const { data, error } = await client.auth.signInWithOAuth({
provider: 'github',
options: {
redirectTo:
process.env.TM_AUTH_CALLBACK_URL ||
'http://localhost:3421/auth/callback',
scopes: 'email'
}
});
if (error) {
throw new AuthenticationError(
`Failed to initiate PKCE flow: ${error.message}`,
'PKCE_INIT_FAILED'
);
}
if (!data?.url) {
throw new AuthenticationError(
'No authorization URL returned',
'INVALID_RESPONSE'
);
}
// Extract code_verifier from the URL or generate it
// Note: Supabase handles PKCE internally, we just need to handle the callback
return {
url: data.url,
codeVerifier: '' // Supabase manages this internally
};
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Failed to start PKCE flow: ${(error as Error).message}`,
'PKCE_FAILED'
);
}
}
/**
* Exchange authorization code for session (PKCE flow)
*/
async exchangeCodeForSession(code: string): Promise<Session> {
const client = this.getClient();
try {
const { data, error } = await client.auth.exchangeCodeForSession(code);
if (error) {
throw new AuthenticationError(
`Failed to exchange code: ${error.message}`,
'CODE_EXCHANGE_FAILED'
);
}
if (!data?.session) {
throw new AuthenticationError(
'No session returned from code exchange',
'INVALID_RESPONSE'
);
}
this.logger.info('Successfully exchanged code for session');
return data.session;
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Code exchange failed: ${(error as Error).message}`,
'CODE_EXCHANGE_FAILED'
);
}
}
/**
* Get the current session
*/
async getSession(): Promise<Session | null> {
const client = this.getClient();
try {
const {
data: { session },
error
} = await client.auth.getSession();
if (error) {
this.logger.warn('Failed to get session:', error);
return null;
}
return session;
} catch (error) {
this.logger.error('Error getting session:', error);
return null;
}
}
/**
* Refresh the current session
*/
async refreshSession(): Promise<Session | null> {
const client = this.getClient();
try {
this.logger.info('Refreshing session...');
// Set the session with refresh token
const { data, error } = await client.auth.refreshSession({
refresh_token: refreshToken
});
// Supabase will automatically use the stored refresh token
const {
data: { session },
error
} = await client.auth.refreshSession();
if (error) {
this.logger.error('Failed to refresh session:', error);
@@ -82,22 +216,11 @@ export class SupabaseAuthClient {
);
}
if (!data.session) {
throw new AuthenticationError(
'No session data returned',
'INVALID_RESPONSE'
);
if (session) {
this.logger.info('Successfully refreshed session');
}
this.logger.info('Successfully refreshed session');
return {
token: data.session.access_token,
refreshToken: data.session.refresh_token,
expiresAt: data.session.expires_at
? new Date(data.session.expires_at * 1000).toISOString()
: undefined
};
return session;
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
@@ -111,21 +234,23 @@ export class SupabaseAuthClient {
}
/**
* Get user details from token
* Get current user from session
*/
async getUser(token: string): Promise<User | null> {
try {
const client = this.getClient();
async getUser(): Promise<User | null> {
const client = this.getClient();
// Get user with the token
const { data, error } = await client.auth.getUser(token);
try {
const {
data: { user },
error
} = await client.auth.getUser();
if (error) {
this.logger.warn('Failed to get user:', error);
return null;
}
return data.user;
return user;
} catch (error) {
this.logger.error('Error getting user:', error);
return null;
@@ -133,22 +258,55 @@ export class SupabaseAuthClient {
}
/**
* Sign out (revoke tokens)
* Note: This requires the user to be authenticated with the current session.
* For remote token revocation, a server-side admin API with service_role key would be needed.
* Sign out and clear session
*/
async signOut(): Promise<void> {
try {
const client = this.getClient();
const client = this.getClient();
// Sign out the current session with global scope to revoke all refresh tokens
try {
// Sign out with global scope to revoke all refresh tokens
const { error } = await client.auth.signOut({ scope: 'global' });
if (error) {
this.logger.warn('Failed to sign out:', error);
}
// Clear cached session data
this.sessionStorage.clear();
} catch (error) {
this.logger.error('Error during sign out:', error);
}
}
/**
* Set session from external auth (e.g., from server callback)
*/
async setSession(session: Session): Promise<void> {
const client = this.getClient();
try {
const { error } = await client.auth.setSession({
access_token: session.access_token,
refresh_token: session.refresh_token
});
if (error) {
throw new AuthenticationError(
`Failed to set session: ${error.message}`,
'SESSION_SET_FAILED'
);
}
this.logger.info('Session set successfully');
} catch (error) {
if (error instanceof AuthenticationError) {
throw error;
}
throw new AuthenticationError(
`Failed to set session: ${(error as Error).message}`,
'SESSION_SET_FAILED'
);
}
}
}

View File

@@ -177,7 +177,7 @@ describe('ConfigManager', () => {
it('should return storage configuration', () => {
const storage = manager.getStorageConfig();
expect(storage).toEqual({ type: 'auto', apiConfigured: false });
expect(storage).toEqual({ type: 'file' });
});
it('should return API storage configuration when configured', async () => {
@@ -206,65 +206,7 @@ describe('ConfigManager', () => {
expect(storage).toEqual({
type: 'api',
apiEndpoint: 'https://api.example.com',
apiAccessToken: 'token123',
apiConfigured: true
});
});
it('should return auto storage configuration with apiConfigured flag', async () => {
// Create a new instance with auto storage config and partial API settings
vi.mocked(ConfigMerger).mockImplementationOnce(
() =>
({
addSource: vi.fn(),
clearSources: vi.fn(),
merge: vi.fn().mockReturnValue({
storage: {
type: 'auto',
apiEndpoint: 'https://api.example.com'
// No apiAccessToken - partial config
}
}),
getSources: vi.fn().mockReturnValue([])
}) as any
);
const autoManager = await ConfigManager.create(testProjectRoot);
const storage = autoManager.getStorageConfig();
expect(storage).toEqual({
type: 'auto',
apiEndpoint: 'https://api.example.com',
apiAccessToken: undefined,
apiConfigured: true // true because apiEndpoint is provided
});
});
it('should return auto storage with apiConfigured false when no API settings', async () => {
// Create a new instance with auto storage but no API settings
vi.mocked(ConfigMerger).mockImplementationOnce(
() =>
({
addSource: vi.fn(),
clearSources: vi.fn(),
merge: vi.fn().mockReturnValue({
storage: {
type: 'auto'
// No API settings at all
}
}),
getSources: vi.fn().mockReturnValue([])
}) as any
);
const autoManager = await ConfigManager.create(testProjectRoot);
const storage = autoManager.getStorageConfig();
expect(storage).toEqual({
type: 'auto',
apiEndpoint: undefined,
apiAccessToken: undefined,
apiConfigured: false // false because no API settings
apiAccessToken: 'token123'
});
});
@@ -309,11 +251,11 @@ describe('ConfigManager', () => {
expect(manager.getProjectRoot()).toBe(testProjectRoot);
});
it('should check if using API storage', () => {
expect(manager.isUsingApiStorage()).toBe(false);
it('should check if API is explicitly configured', () => {
expect(manager.isApiExplicitlyConfigured()).toBe(false);
});
it('should detect API storage', () => {
it('should detect when API is explicitly configured', () => {
// Update config for current instance
(manager as any).config = {
storage: {
@@ -323,7 +265,7 @@ describe('ConfigManager', () => {
}
};
expect(manager.isUsingApiStorage()).toBe(true);
expect(manager.isApiExplicitlyConfigured()).toBe(true);
});
});

View File

@@ -6,7 +6,10 @@
* maintainability, testability, and separation of concerns.
*/
import type { PartialConfiguration } from '../interfaces/configuration.interface.js';
import type {
PartialConfiguration,
RuntimeStorageConfig
} from '../interfaces/configuration.interface.js';
import { ConfigLoader } from './services/config-loader.service.js';
import {
ConfigMerger,
@@ -134,27 +137,28 @@ export class ConfigManager {
/**
* Get storage configuration
*/
getStorageConfig(): {
type: 'file' | 'api' | 'auto';
apiEndpoint?: string;
apiAccessToken?: string;
apiConfigured: boolean;
} {
getStorageConfig(): RuntimeStorageConfig {
const storage = this.config.storage;
// Return the configured type (including 'auto')
const storageType = storage?.type || 'auto';
const basePath = storage?.basePath ?? this.projectRoot;
if (storageType === 'api' || storageType === 'auto') {
return {
type: storageType,
basePath,
apiEndpoint: storage?.apiEndpoint,
apiAccessToken: storage?.apiAccessToken,
apiConfigured: Boolean(storage?.apiEndpoint || storage?.apiAccessToken)
};
}
return { type: storageType, apiConfigured: false };
return {
type: storageType,
basePath,
apiConfigured: false
};
}
/**
@@ -185,9 +189,10 @@ export class ConfigManager {
}
/**
* Check if using API storage
* Check if explicitly configured to use API storage
* Excludes 'auto' type
*/
isUsingApiStorage(): boolean {
isApiExplicitlyConfigured(): boolean {
return this.getStorageConfig().type === 'api';
}
@@ -220,6 +225,7 @@ export class ConfigManager {
await this.persistence.saveConfig(this.config);
// Re-initialize to respect precedence
this.initialized = false;
await this.initialize();
}

View File

@@ -85,11 +85,6 @@ describe('EnvironmentConfigProvider', () => {
provider = new EnvironmentConfigProvider(); // Reset provider
config = provider.loadConfig();
expect(config.storage?.type).toBe('api');
process.env.TASKMASTER_STORAGE_TYPE = 'auto';
provider = new EnvironmentConfigProvider(); // Reset provider
config = provider.loadConfig();
expect(config.storage?.type).toBe('auto');
});
it('should handle nested configuration paths', () => {

View File

@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
{
env: 'TASKMASTER_STORAGE_TYPE',
path: ['storage', 'type'],
validate: (v: string) => ['file', 'api', 'auto'].includes(v)
validate: (v: string) => ['file', 'api'].includes(v)
},
{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },

View File

@@ -53,6 +53,7 @@ export type OutputFormat = (typeof OUTPUT_FORMATS)[number];
*/
export const STATUS_ICONS: Record<TaskStatus, string> = {
done: '✓',
completed: '✓',
'in-progress': '►',
blocked: '⭕',
pending: '○',
@@ -71,5 +72,6 @@ export const STATUS_COLORS: Record<TaskStatus, string> = {
deferred: 'gray',
cancelled: 'red',
blocked: 'magenta',
review: 'cyan'
review: 'cyan',
completed: 'green'
} as const;

View File

@@ -9,19 +9,19 @@ export {
createTaskMasterCore,
type TaskMasterCoreOptions,
type ListTasksResult
} from './task-master-core';
} from './task-master-core.js';
// Re-export types
export type * from './types';
export type * from './types/index.js';
// Re-export interfaces (types only to avoid conflicts)
export type * from './interfaces';
export type * from './interfaces/index.js';
// Re-export constants
export * from './constants';
export * from './constants/index.js';
// Re-export providers
export * from './providers';
export * from './providers/index.js';
// Re-export storage (selectively to avoid conflicts)
export {
@@ -29,20 +29,20 @@ export {
ApiStorage,
StorageFactory,
type ApiStorageConfig
} from './storage';
export { PlaceholderStorage, type StorageAdapter } from './storage';
} from './storage/index.js';
export { PlaceholderStorage, type StorageAdapter } from './storage/index.js';
// Re-export parser
export * from './parser';
export * from './parser/index.js';
// Re-export utilities
export * from './utils';
export * from './utils/index.js';
// Re-export errors
export * from './errors';
export * from './errors/index.js';
// Re-export entities
export { TaskEntity } from './entities/task.entity';
export { TaskEntity } from './entities/task.entity.js';
// Re-export authentication
export {
@@ -51,7 +51,7 @@ export {
type AuthCredentials,
type OAuthFlowOptions,
type AuthConfig
} from './auth';
} from './auth/index.js';
// Re-export logger
export { getLogger, createLogger, setGlobalLogger } from './logger';
export { getLogger, createLogger, setGlobalLogger } from './logger/index.js';

View File

@@ -3,7 +3,11 @@
* This file defines the contract for configuration management
*/
import type { TaskComplexity, TaskPriority } from '../types/index';
import type {
TaskComplexity,
TaskPriority,
StorageType
} from '../types/index.js';
/**
* Model configuration for different AI roles
@@ -74,19 +78,40 @@ export interface TagSettings {
}
/**
* Storage and persistence settings
* Runtime storage configuration used for storage backend selection
* This is what getStorageConfig() returns and what StorageFactory expects
*/
export interface StorageSettings {
/** Storage backend type - 'auto' detects based on auth status */
type: 'file' | 'api' | 'auto';
/** Base path for file storage */
export interface RuntimeStorageConfig {
/** Storage backend type */
type: StorageType;
/** Base path for file storage (if configured) */
basePath?: string;
/** API endpoint for API storage (Hamster integration) */
apiEndpoint?: string;
/** Access token for API authentication */
apiAccessToken?: string;
/** Indicates whether API is configured (has endpoint or token) */
apiConfigured?: boolean;
/**
* Indicates whether API is configured (has endpoint or token)
* @computed Derived automatically from presence of apiEndpoint or apiAccessToken
* @internal Should not be set manually - computed by ConfigManager
*/
readonly apiConfigured: boolean;
}
/**
* Storage and persistence settings
* Extended storage settings including file operation preferences
*/
export interface StorageSettings
extends Omit<RuntimeStorageConfig, 'apiConfigured'> {
/** Base path for file storage */
basePath?: string;
/**
* Indicates whether API is configured
* @computed Derived automatically from presence of apiEndpoint or apiAccessToken
* @internal Should not be set manually in user config - computed by ConfigManager
*/
readonly apiConfigured?: boolean;
/** Enable automatic backups */
enableBackup: boolean;
/** Maximum number of backups to retain */

View File

@@ -4,13 +4,13 @@
*/
// Storage interfaces
export type * from './storage.interface';
export * from './storage.interface';
export type * from './storage.interface.js';
export * from './storage.interface.js';
// AI Provider interfaces
export type * from './ai-provider.interface';
export * from './ai-provider.interface';
export type * from './ai-provider.interface.js';
export * from './ai-provider.interface.js';
// Configuration interfaces
export type * from './configuration.interface';
export * from './configuration.interface';
export type * from './configuration.interface.js';
export * from './configuration.interface.js';

View File

@@ -3,7 +3,7 @@
* This file defines the contract for all storage implementations
*/
import type { Task, TaskMetadata } from '../types/index';
import type { Task, TaskMetadata } from '../types/index.js';
/**
* Interface for storage operations on tasks
@@ -17,6 +17,14 @@ export interface IStorage {
*/
loadTasks(tag?: string): Promise<Task[]>;
/**
* Load a single task by ID
* @param taskId - ID of the task to load
* @param tag - Optional tag context for the task
* @returns Promise that resolves to the task or null if not found
*/
loadTask(taskId: string, tag?: string): Promise<Task | null>;
/**
* Save tasks to storage, replacing existing tasks
* @param tasks - Array of tasks to save
@@ -175,6 +183,7 @@ export abstract class BaseStorage implements IStorage {
// Abstract methods that must be implemented by concrete classes
abstract loadTasks(tag?: string): Promise<Task[]>;
abstract loadTask(taskId: string, tag?: string): Promise<Task | null>;
abstract saveTasks(tasks: Task[], tag?: string): Promise<void>;
abstract appendTasks(tasks: Task[], tag?: string): Promise<void>;
abstract updateTask(

View File

@@ -2,7 +2,7 @@
* @fileoverview Logger factory and singleton management
*/
import { Logger, LoggerConfig } from './logger.js';
import { Logger, type LoggerConfig } from './logger.js';
// Global logger instance
let globalLogger: Logger | null = null;

View File

@@ -0,0 +1,170 @@
import { Task, Subtask } from '../types/index.js';
import { Database, Tables } from '../types/database.types.js';
type TaskRow = Tables<'tasks'>;
type DependencyRow = Tables<'task_dependencies'>;
export class TaskMapper {
/**
* Maps database tasks to internal Task format
*/
static mapDatabaseTasksToTasks(
dbTasks: TaskRow[],
dbDependencies: DependencyRow[]
): Task[] {
if (!dbTasks || dbTasks.length === 0) {
return [];
}
// Group dependencies by task_id
const dependenciesByTaskId = this.groupDependenciesByTaskId(dbDependencies);
// Separate parent tasks and subtasks
const parentTasks = dbTasks.filter((t) => !t.parent_task_id);
const subtasksByParentId = this.groupSubtasksByParentId(dbTasks);
// Map parent tasks with their subtasks
return parentTasks.map((taskRow) =>
this.mapDatabaseTaskToTask(
taskRow,
subtasksByParentId.get(taskRow.id) || [],
dependenciesByTaskId
)
);
}
/**
* Maps a single database task to internal Task format
*/
static mapDatabaseTaskToTask(
dbTask: TaskRow,
dbSubtasks: TaskRow[],
dependenciesByTaskId: Map<string, string[]>
): Task {
// Map subtasks
const subtasks: Subtask[] = dbSubtasks.map((subtask, index) => ({
id: index + 1, // Use numeric ID for subtasks
parentId: dbTask.id,
title: subtask.title,
description: subtask.description || '',
status: this.mapStatus(subtask.status),
priority: this.mapPriority(subtask.priority),
dependencies: dependenciesByTaskId.get(subtask.id) || [],
details: (subtask.metadata as any)?.details || '',
testStrategy: (subtask.metadata as any)?.testStrategy || '',
createdAt: subtask.created_at,
updatedAt: subtask.updated_at,
assignee: subtask.assignee_id || undefined,
complexity: subtask.complexity
? this.mapComplexityToInternal(subtask.complexity)
: undefined
}));
return {
id: dbTask.display_id || dbTask.id, // Use display_id if available
title: dbTask.title,
description: dbTask.description || '',
status: this.mapStatus(dbTask.status),
priority: this.mapPriority(dbTask.priority),
dependencies: dependenciesByTaskId.get(dbTask.id) || [],
details: (dbTask.metadata as any)?.details || '',
testStrategy: (dbTask.metadata as any)?.testStrategy || '',
subtasks,
createdAt: dbTask.created_at,
updatedAt: dbTask.updated_at,
assignee: dbTask.assignee_id || undefined,
complexity: dbTask.complexity
? this.mapComplexityToInternal(dbTask.complexity)
: undefined,
effort: dbTask.estimated_hours || undefined,
actualEffort: dbTask.actual_hours || undefined
};
}
/**
* Groups dependencies by task ID
*/
private static groupDependenciesByTaskId(
dependencies: DependencyRow[]
): Map<string, string[]> {
const dependenciesByTaskId = new Map<string, string[]>();
if (dependencies) {
for (const dep of dependencies) {
const deps = dependenciesByTaskId.get(dep.task_id) || [];
deps.push(dep.depends_on_task_id);
dependenciesByTaskId.set(dep.task_id, deps);
}
}
return dependenciesByTaskId;
}
/**
* Groups subtasks by their parent ID
*/
private static groupSubtasksByParentId(
tasks: TaskRow[]
): Map<string, TaskRow[]> {
const subtasksByParentId = new Map<string, TaskRow[]>();
for (const task of tasks) {
if (task.parent_task_id) {
const subtasks = subtasksByParentId.get(task.parent_task_id) || [];
subtasks.push(task);
subtasksByParentId.set(task.parent_task_id, subtasks);
}
}
// Sort subtasks by subtask_position for each parent
for (const subtasks of subtasksByParentId.values()) {
subtasks.sort((a, b) => a.subtask_position - b.subtask_position);
}
return subtasksByParentId;
}
/**
* Maps database status to internal status
*/
static mapStatus(
status: Database['public']['Enums']['task_status']
): Task['status'] {
switch (status) {
case 'todo':
return 'pending';
case 'in_progress':
return 'in-progress';
case 'done':
return 'done';
default:
return 'pending';
}
}
/**
* Maps database priority to internal priority
*/
private static mapPriority(
priority: Database['public']['Enums']['task_priority']
): Task['priority'] {
switch (priority) {
case 'urgent':
return 'critical';
default:
return priority as Task['priority'];
}
}
/**
* Maps numeric complexity to descriptive complexity
*/
private static mapComplexityToInternal(
complexity: number
): Task['complexity'] {
if (complexity <= 2) return 'simple';
if (complexity <= 5) return 'moderate';
if (complexity <= 8) return 'complex';
return 'very-complex';
}
}

View File

@@ -3,7 +3,7 @@
* This file exports all parsing-related classes and functions
*/
import type { PlaceholderTask } from '../types/index';
import type { PlaceholderTask } from '../types/index.js';
// Parser implementations will be defined here
// export * from './prd-parser.js';

View File

@@ -0,0 +1,224 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { Task } from '../types/index.js';
import { Database } from '../types/database.types.js';
import { TaskMapper } from '../mappers/TaskMapper.js';
import { AuthManager } from '../auth/auth-manager.js';
import { z } from 'zod';
// Zod schema for task status validation
const TaskStatusSchema = z.enum([
'pending',
'in-progress',
'done',
'review',
'deferred',
'cancelled',
'blocked'
]);
// Zod schema for task updates
const TaskUpdateSchema = z
.object({
title: z.string().min(1).optional(),
description: z.string().optional(),
status: TaskStatusSchema.optional(),
priority: z.enum(['low', 'medium', 'high', 'critical']).optional(),
details: z.string().optional(),
testStrategy: z.string().optional()
})
.partial();
export class SupabaseTaskRepository {
constructor(private supabase: SupabaseClient<Database>) {}
async getTasks(_projectId?: string): Promise<Task[]> {
// Get the current context to determine briefId
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!context || !context.briefId) {
throw new Error(
'No brief selected. Please select a brief first using: tm context brief'
);
}
// Get all tasks for the brief using the exact query structure
const { data: tasks, error } = await this.supabase
.from('tasks')
.select(`
*,
document:document_id (
id,
document_name,
title,
description
)
`)
.eq('brief_id', context.briefId)
.order('position', { ascending: true })
.order('subtask_position', { ascending: true })
.order('created_at', { ascending: true });
if (error) {
throw new Error(`Failed to fetch tasks: ${error.message}`);
}
if (!tasks || tasks.length === 0) {
return [];
}
// Get all dependencies for these tasks
const taskIds = tasks.map((t: any) => t.id);
const { data: depsData, error: depsError } = await this.supabase
.from('task_dependencies')
.select('*')
.in('task_id', taskIds);
if (depsError) {
throw new Error(
`Failed to fetch task dependencies: ${depsError.message}`
);
}
// Use mapper to convert to internal format
return TaskMapper.mapDatabaseTasksToTasks(tasks, depsData || []);
}
async getTask(_projectId: string, taskId: string): Promise<Task | null> {
// Get the current context to determine briefId (projectId not used in Supabase context)
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!context || !context.briefId) {
throw new Error(
'No brief selected. Please select a brief first using: tm context brief'
);
}
const { data, error } = await this.supabase
.from('tasks')
.select('*')
.eq('brief_id', context.briefId)
.eq('display_id', taskId.toUpperCase())
.single();
if (error) {
if (error.code === 'PGRST116') {
return null; // Not found
}
throw new Error(`Failed to fetch task: ${error.message}`);
}
// Get dependencies for this task
const { data: depsData } = await this.supabase
.from('task_dependencies')
.select('*')
.eq('task_id', taskId);
// Get subtasks if this is a parent task
const { data: subtasksData } = await this.supabase
.from('tasks')
.select('*')
.eq('parent_task_id', taskId)
.order('subtask_position', { ascending: true });
// Create dependency map
const dependenciesByTaskId = new Map<string, string[]>();
if (depsData) {
dependenciesByTaskId.set(
taskId,
depsData.map(
(d: Database['public']['Tables']['task_dependencies']['Row']) =>
d.depends_on_task_id
)
);
}
// Use mapper to convert single task
return TaskMapper.mapDatabaseTaskToTask(
data,
subtasksData || [],
dependenciesByTaskId
);
}
async updateTask(
projectId: string,
taskId: string,
updates: Partial<Task>
): Promise<Task> {
// Get the current context to determine briefId
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!context || !context.briefId) {
throw new Error(
'No brief selected. Please select a brief first using: tm context brief'
);
}
// Validate updates using Zod schema
try {
TaskUpdateSchema.parse(updates);
} catch (error) {
if (error instanceof z.ZodError) {
const errorMessages = error.errors
.map((err) => `${err.path.join('.')}: ${err.message}`)
.join(', ');
throw new Error(`Invalid task update data: ${errorMessages}`);
}
throw error;
}
// Convert Task fields to database fields - only include fields that actually exist in the database
const dbUpdates: any = {};
if (updates.title !== undefined) dbUpdates.title = updates.title;
if (updates.description !== undefined)
dbUpdates.description = updates.description;
if (updates.status !== undefined)
dbUpdates.status = this.mapStatusToDatabase(updates.status);
if (updates.priority !== undefined) dbUpdates.priority = updates.priority;
// Skip fields that don't exist in database schema: details, testStrategy, etc.
// Update the task
const { error } = await this.supabase
.from('tasks')
.update(dbUpdates)
.eq('brief_id', context.briefId)
.eq('display_id', taskId.toUpperCase());
if (error) {
throw new Error(`Failed to update task: ${error.message}`);
}
// Return the updated task by fetching it
const updatedTask = await this.getTask(projectId, taskId);
if (!updatedTask) {
throw new Error(`Failed to retrieve updated task ${taskId}`);
}
return updatedTask;
}
/**
* Maps internal status to database status
*/
private mapStatusToDatabase(
status: string
): Database['public']['Enums']['task_status'] {
switch (status) {
case 'pending':
return 'todo';
case 'in-progress':
case 'in_progress': // Accept both formats
return 'in_progress';
case 'done':
return 'done';
default:
throw new Error(
`Invalid task status: ${status}. Valid statuses are: pending, in-progress, done`
);
}
}
}

View File

@@ -0,0 +1,36 @@
import { Task, TaskTag } from '../types/index.js';
export interface TaskRepository {
// Task operations
getTasks(projectId: string): Promise<Task[]>;
getTask(projectId: string, taskId: string): Promise<Task | null>;
createTask(projectId: string, task: Omit<Task, 'id'>): Promise<Task>;
updateTask(
projectId: string,
taskId: string,
updates: Partial<Task>
): Promise<Task>;
deleteTask(projectId: string, taskId: string): Promise<void>;
// Tag operations
getTags(projectId: string): Promise<TaskTag[]>;
getTag(projectId: string, tagName: string): Promise<TaskTag | null>;
createTag(projectId: string, tag: TaskTag): Promise<TaskTag>;
updateTag(
projectId: string,
tagName: string,
updates: Partial<TaskTag>
): Promise<TaskTag>;
deleteTag(projectId: string, tagName: string): Promise<void>;
// Bulk operations
bulkCreateTasks(
projectId: string,
tasks: Omit<Task, 'id'>[]
): Promise<Task[]>;
bulkUpdateTasks(
projectId: string,
updates: Array<{ id: string; updates: Partial<Task> }>
): Promise<Task[]>;
bulkDeleteTasks(projectId: string, taskIds: string[]): Promise<void>;
}

View File

@@ -3,4 +3,6 @@
* Provides business logic and service layer functionality
*/
export { TaskService } from './task-service';
export { TaskService } from './task-service.js';
export { OrganizationService } from './organization.service.js';
export type { Organization, Brief } from './organization.service.js';

View File

@@ -0,0 +1,363 @@
/**
* @fileoverview Organization and Brief management service
* Handles fetching and managing organizations and briefs from the API
*/
import { SupabaseClient } from '@supabase/supabase-js';
import { Database } from '../types/database.types.js';
import { TaskMasterError, ERROR_CODES } from '../errors/task-master-error.js';
import { getLogger } from '../logger/index.js';
/**
* Organization data structure
*/
export interface Organization {
id: string;
name: string;
slug: string;
}
/**
* Brief data structure
*/
export interface Brief {
id: string;
accountId: string;
documentId: string;
status: string;
createdAt: string;
updatedAt: string;
}
/**
* Task data structure from the remote database
*/
export interface RemoteTask {
id: string;
briefId: string;
documentId: string;
position: number | null;
subtaskPosition: number | null;
status: string;
createdAt: string;
updatedAt: string;
// Document details from join
document?: {
id: string;
document_name: string;
title: string;
description: string;
};
}
/**
* Service for managing organizations and briefs
*/
export class OrganizationService {
private logger = getLogger('OrganizationService');
constructor(private supabaseClient: SupabaseClient<Database>) {}
/**
* Get all organizations for the authenticated user
*/
async getOrganizations(): Promise<Organization[]> {
try {
// The user is already authenticated via the Authorization header
// Query the user_accounts view/table (filtered by RLS for current user)
const { data, error } = await this.supabaseClient
.from('user_accounts')
.select(`
id,
name,
slug
`);
if (error) {
throw new TaskMasterError(
`Failed to fetch organizations: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getOrganizations' },
error
);
}
if (!data || data.length === 0) {
this.logger.debug('No organizations found for user');
return [];
}
// Map to our Organization interface
return data.map((org) => ({
id: org.id ?? '',
name: org.name ?? '',
slug: org.slug ?? org.id ?? '' // Use ID as fallback if slug is null
}));
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch organizations',
ERROR_CODES.API_ERROR,
{ operation: 'getOrganizations' },
error as Error
);
}
}
/**
* Get a specific organization by ID
*/
async getOrganization(orgId: string): Promise<Organization | null> {
try {
const { data, error } = await this.supabaseClient
.from('accounts')
.select(`
id,
name,
slug
`)
.eq('id', orgId)
.single();
if (error) {
if (error.code === 'PGRST116') {
// No rows found
return null;
}
throw new TaskMasterError(
`Failed to fetch organization: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getOrganization', orgId },
error
);
}
if (!data) {
return null;
}
const accountData =
data as Database['public']['Tables']['accounts']['Row'];
return {
id: accountData.id,
name: accountData.name,
slug: accountData.slug || accountData.id
};
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch organization',
ERROR_CODES.API_ERROR,
{ operation: 'getOrganization', orgId },
error as Error
);
}
}
/**
* Get all briefs for a specific organization
*/
async getBriefs(orgId: string): Promise<Brief[]> {
try {
const { data, error } = await this.supabaseClient
.from('brief')
.select(`
id,
account_id,
document_id,
status,
created_at,
updated_at
`)
.eq('account_id', orgId);
if (error) {
throw new TaskMasterError(
`Failed to fetch briefs: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getBriefs', orgId },
error
);
}
if (!data || data.length === 0) {
this.logger.debug(`No briefs found for organization ${orgId}`);
return [];
}
// Map to our Brief interface
return data.map((brief: any) => ({
id: brief.id,
accountId: brief.account_id,
documentId: brief.document_id,
status: brief.status,
createdAt: brief.created_at,
updatedAt: brief.updated_at
}));
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch briefs',
ERROR_CODES.API_ERROR,
{ operation: 'getBriefs', orgId },
error as Error
);
}
}
/**
* Get a specific brief by ID
*/
async getBrief(briefId: string): Promise<Brief | null> {
try {
const { data, error } = await this.supabaseClient
.from('brief')
.select(`
id,
account_id,
document_id,
status,
created_at,
updated_at
`)
.eq('id', briefId)
.single();
if (error) {
if (error.code === 'PGRST116') {
// No rows found
return null;
}
throw new TaskMasterError(
`Failed to fetch brief: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getBrief', briefId },
error
);
}
if (!data) {
return null;
}
const briefData = data as any;
return {
id: briefData.id,
accountId: briefData.account_id,
documentId: briefData.document_id,
status: briefData.status,
createdAt: briefData.created_at,
updatedAt: briefData.updated_at
};
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch brief',
ERROR_CODES.API_ERROR,
{ operation: 'getBrief', briefId },
error as Error
);
}
}
/**
* Validate that a user has access to an organization
*/
async validateOrgAccess(orgId: string): Promise<boolean> {
try {
const org = await this.getOrganization(orgId);
return org !== null;
} catch (error) {
this.logger.error(`Failed to validate org access: ${error}`);
return false;
}
}
/**
* Validate that a user has access to a brief
*/
async validateBriefAccess(briefId: string): Promise<boolean> {
try {
const brief = await this.getBrief(briefId);
return brief !== null;
} catch (error) {
this.logger.error(`Failed to validate brief access: ${error}`);
return false;
}
}
/**
* Get all tasks for a specific brief
*/
async getTasks(briefId: string): Promise<RemoteTask[]> {
try {
const { data, error } = await this.supabaseClient
.from('tasks')
.select(`
*,
document:document_id (
id,
document_name,
title,
description
)
`)
.eq('brief_id', briefId)
.order('position', { ascending: true })
.order('subtask_position', { ascending: true })
.order('created_at', { ascending: true });
if (error) {
throw new TaskMasterError(
`Failed to fetch tasks: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getTasks', briefId },
error
);
}
if (!data || data.length === 0) {
this.logger.debug(`No tasks found for brief ${briefId}`);
return [];
}
// Map to our RemoteTask interface
return data.map((task: any) => ({
id: task.id,
briefId: task.brief_id,
documentId: task.document_id,
position: task.position,
subtaskPosition: task.subtask_position,
status: task.status,
createdAt: task.created_at,
updatedAt: task.updated_at,
document: task.document
? {
id: task.document.id,
document_name: task.document.document_name,
title: task.document.title,
description: task.document.description
}
: undefined
}));
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch tasks',
ERROR_CODES.API_ERROR,
{ operation: 'getTasks', briefId },
error as Error
);
}
}
}

View File

@@ -3,7 +3,12 @@
* Core service for task operations - handles business logic between storage and API
*/
import type { Task, TaskFilter, TaskStatus } from '../types/index.js';
import type {
Task,
TaskFilter,
TaskStatus,
StorageType
} from '../types/index.js';
import type { IStorage } from '../interfaces/storage.interface.js';
import { ConfigManager } from '../config/config-manager.js';
import { StorageFactory } from '../storage/storage-factory.js';
@@ -22,8 +27,8 @@ export interface TaskListResult {
filtered: number;
/** The tag these tasks belong to (only present if explicitly provided) */
tag?: string;
/** Storage type being used - includes 'auto' for automatic detection */
storageType: 'file' | 'api' | 'auto';
/** Storage type being used */
storageType: StorageType;
}
/**
@@ -64,8 +69,8 @@ export class TaskService {
const storageConfig = this.configManager.getStorageConfig();
const projectRoot = this.configManager.getProjectRoot();
this.storage = StorageFactory.create(
{ storage: storageConfig } as any,
this.storage = StorageFactory.createFromStorageConfig(
storageConfig,
projectRoot
);
@@ -113,7 +118,7 @@ export class TaskService {
total: rawTasks.length,
filtered: filteredEntities.length,
tag: options.tag, // Only include tag if explicitly provided
storageType: this.configManager.getStorageConfig().type
storageType: this.getStorageType()
};
} catch (error) {
throw new TaskMasterError(
@@ -166,7 +171,7 @@ export class TaskService {
byStatus: Record<TaskStatus, number>;
withSubtasks: number;
blocked: number;
storageType: 'file' | 'api' | 'auto';
storageType: StorageType;
}> {
const result = await this.getTaskList({
tag,
@@ -334,8 +339,12 @@ export class TaskService {
/**
* Get current storage type
*/
getStorageType(): 'file' | 'api' | 'auto' {
return this.configManager.getStorageConfig().type;
getStorageType(): StorageType {
// Prefer the runtime storage type if available to avoid exposing 'auto'
const s = this.storage as { getType?: () => 'file' | 'api' } | null;
const runtimeType = s?.getType?.();
return (runtimeType ??
this.configManager.getStorageConfig().type) as StorageType;
}
/**
@@ -351,4 +360,73 @@ export class TaskService {
async setActiveTag(tag: string): Promise<void> {
await this.configManager.setActiveTag(tag);
}
/**
* Update task status
*/
async updateTaskStatus(
taskId: string | number,
newStatus: TaskStatus,
tag?: string
): Promise<{
success: boolean;
oldStatus: TaskStatus;
newStatus: TaskStatus;
taskId: string;
}> {
// Ensure we have storage
if (!this.storage) {
throw new TaskMasterError(
'Storage not initialized',
ERROR_CODES.STORAGE_ERROR
);
}
// Use provided tag or get active tag
const activeTag = tag || this.getActiveTag();
const taskIdStr = String(taskId);
// TODO: For now, assume it's a regular task and just try to update directly
// In the future, we can add subtask support if needed
if (taskIdStr.includes('.')) {
throw new TaskMasterError(
'Subtask status updates not yet supported in API storage',
ERROR_CODES.NOT_IMPLEMENTED
);
}
// Get the current task to get old status (simple, direct approach)
let currentTask: Task | null;
try {
// Try to get the task directly
currentTask = await this.storage.loadTask(taskIdStr, activeTag);
} catch (error) {
throw new TaskMasterError(
`Failed to load task ${taskIdStr}`,
ERROR_CODES.TASK_NOT_FOUND,
{ taskId: taskIdStr },
error as Error
);
}
if (!currentTask) {
throw new TaskMasterError(
`Task ${taskIdStr} not found`,
ERROR_CODES.TASK_NOT_FOUND
);
}
const oldStatus = currentTask.status;
// Simple, direct update - just change the status
await this.storage.updateTask(taskIdStr, { status: newStatus }, activeTag);
return {
success: true,
oldStatus,
newStatus,
taskId: taskIdStr
};
}
}

View File

@@ -1,27 +1,29 @@
/**
* @fileoverview API-based storage implementation for Hamster integration
* This provides storage via REST API instead of local file system
* @fileoverview API-based storage implementation using repository pattern
* This provides storage via repository abstraction for flexibility
*/
import type {
IStorage,
StorageStats
} from '../interfaces/storage.interface.js';
import type { Task, TaskMetadata } from '../types/index.js';
import type { Task, TaskMetadata, TaskTag } from '../types/index.js';
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
import { TaskRepository } from '../repositories/task-repository.interface.js';
import { SupabaseTaskRepository } from '../repositories/supabase-task-repository.js';
import { SupabaseClient } from '@supabase/supabase-js';
import { AuthManager } from '../auth/auth-manager.js';
/**
* API storage configuration
*/
export interface ApiStorageConfig {
/** API endpoint base URL */
endpoint: string;
/** Access token for authentication */
accessToken: string;
/** Optional project ID */
projectId?: string;
/** Request timeout in milliseconds */
timeout?: number;
/** Supabase client instance */
supabaseClient?: SupabaseClient;
/** Custom repository implementation */
repository?: TaskRepository;
/** Project ID for scoping */
projectId: string;
/** Enable request retries */
enableRetry?: boolean;
/** Maximum retry attempts */
@@ -29,64 +31,58 @@ export interface ApiStorageConfig {
}
/**
* API response wrapper
*/
interface ApiResponse<T> {
success: boolean;
data?: T;
error?: string;
message?: string;
}
/**
* ApiStorage implementation for Hamster integration
* Fetches and stores tasks via REST API
* ApiStorage implementation using repository pattern
* Provides flexibility to swap between different backend implementations
*/
export class ApiStorage implements IStorage {
private readonly config: Required<ApiStorageConfig>;
private readonly repository: TaskRepository;
private readonly projectId: string;
private readonly enableRetry: boolean;
private readonly maxRetries: number;
private initialized = false;
private tagsCache: Map<string, TaskTag> = new Map();
constructor(config: ApiStorageConfig) {
this.validateConfig(config);
this.config = {
endpoint: config.endpoint.replace(/\/$/, ''), // Remove trailing slash
accessToken: config.accessToken,
projectId: config.projectId || 'default',
timeout: config.timeout || 30000,
enableRetry: config.enableRetry ?? true,
maxRetries: config.maxRetries || 3
};
// Use provided repository or create Supabase repository
if (config.repository) {
this.repository = config.repository;
} else if (config.supabaseClient) {
// TODO: SupabaseTaskRepository doesn't implement all TaskRepository methods yet
// Cast for now until full implementation is complete
this.repository = new SupabaseTaskRepository(
config.supabaseClient
) as unknown as TaskRepository;
} else {
throw new TaskMasterError(
'Either repository or supabaseClient must be provided',
ERROR_CODES.MISSING_CONFIGURATION
);
}
this.projectId = config.projectId;
this.enableRetry = config.enableRetry ?? true;
this.maxRetries = config.maxRetries ?? 3;
}
/**
* Validate API storage configuration
*/
private validateConfig(config: ApiStorageConfig): void {
if (!config.endpoint) {
if (!config.projectId) {
throw new TaskMasterError(
'API endpoint is required for API storage',
'Project ID is required for API storage',
ERROR_CODES.MISSING_CONFIGURATION
);
}
if (!config.accessToken) {
if (!config.repository && !config.supabaseClient) {
throw new TaskMasterError(
'Access token is required for API storage',
'Either repository or supabaseClient must be provided',
ERROR_CODES.MISSING_CONFIGURATION
);
}
// Validate endpoint URL format
try {
new URL(config.endpoint);
} catch {
throw new TaskMasterError(
'Invalid API endpoint URL',
ERROR_CODES.INVALID_INPUT,
{ endpoint: config.endpoint }
);
}
}
/**
@@ -96,8 +92,8 @@ export class ApiStorage implements IStorage {
if (this.initialized) return;
try {
// Verify API connectivity
await this.verifyConnection();
// Load initial tags
await this.loadTagsIntoCache();
this.initialized = true;
} catch (error) {
throw new TaskMasterError(
@@ -110,39 +106,71 @@ export class ApiStorage implements IStorage {
}
/**
* Verify API connection
* Load tags into cache
* In our API-based system, "tags" represent briefs
*/
private async verifyConnection(): Promise<void> {
const response = await this.makeRequest<{ status: string }>('/health');
private async loadTagsIntoCache(): Promise<void> {
try {
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!response.success) {
throw new Error(`API health check failed: ${response.error}`);
// If we have a selected brief, create a virtual "tag" for it
if (context?.briefId) {
// Create a virtual tag representing the current brief
const briefTag: TaskTag = {
name: context.briefId,
tasks: [], // Will be populated when tasks are loaded
metadata: {
briefId: context.briefId,
briefName: context.briefName,
organizationId: context.orgId
}
};
this.tagsCache.clear();
this.tagsCache.set(context.briefId, briefTag);
}
} catch (error) {
// If no brief is selected, that's okay - user needs to select one first
console.debug('No brief selected, starting with empty cache');
}
}
/**
* Load tasks from API
* In our system, the tag parameter represents a brief ID
*/
async loadTasks(tag?: string): Promise<Task[]> {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/tasks`;
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
const response = await this.makeRequest<{ tasks: Task[] }>(endpoint);
if (!response.success) {
throw new Error(response.error || 'Failed to load tasks');
// If no brief is selected in context, throw an error
if (!context?.briefId) {
throw new Error(
'No brief selected. Please select a brief first using: tm context brief <brief-id>'
);
}
return response.data?.tasks || [];
// Load tasks from the current brief context
const tasks = await this.retryOperation(() =>
this.repository.getTasks(this.projectId)
);
// Update the tag cache with the loaded task IDs
const briefTag = this.tagsCache.get(context.briefId);
if (briefTag) {
briefTag.tasks = tasks.map((task) => task.id);
}
return tasks;
} catch (error) {
throw new TaskMasterError(
'Failed to load tasks from API',
ERROR_CODES.STORAGE_ERROR,
{ operation: 'loadTasks', tag },
{ operation: 'loadTasks', tag, context: 'brief-based loading' },
error as Error
);
}
@@ -155,15 +183,29 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/tasks`;
if (tag) {
// Update tag with task IDs
const tagData = this.tagsCache.get(tag) || {
name: tag,
tasks: [],
metadata: {}
};
tagData.tasks = tasks.map((t) => t.id);
const response = await this.makeRequest(endpoint, 'PUT', { tasks });
// Save or update tag
if (this.tagsCache.has(tag)) {
await this.repository.updateTag(this.projectId, tag, tagData);
} else {
await this.repository.createTag(this.projectId, tagData);
}
if (!response.success) {
throw new Error(response.error || 'Failed to save tasks');
this.tagsCache.set(tag, tagData);
}
// Save tasks using bulk operation
await this.retryOperation(() =>
this.repository.bulkCreateTasks(this.projectId, tasks)
);
} catch (error) {
throw new TaskMasterError(
'Failed to save tasks to API',
@@ -181,20 +223,9 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/tasks/${taskId}`;
const response = await this.makeRequest<{ task: Task }>(endpoint);
if (!response.success) {
if (response.error?.includes('not found')) {
return null;
}
throw new Error(response.error || 'Failed to load task');
}
return response.data?.task || null;
return await this.retryOperation(() =>
this.repository.getTask(this.projectId, taskId)
);
} catch (error) {
throw new TaskMasterError(
'Failed to load task from API',
@@ -212,14 +243,26 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/tasks/${task.id}?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/tasks/${task.id}`;
// Check if task exists
const existing = await this.repository.getTask(this.projectId, task.id);
const response = await this.makeRequest(endpoint, 'PUT', { task });
if (existing) {
await this.retryOperation(() =>
this.repository.updateTask(this.projectId, task.id, task)
);
} else {
await this.retryOperation(() =>
this.repository.createTask(this.projectId, task)
);
}
if (!response.success) {
throw new Error(response.error || 'Failed to save task');
// Update tag if specified
if (tag) {
const tagData = this.tagsCache.get(tag);
if (tagData && !tagData.tasks.includes(task.id)) {
tagData.tasks.push(task.id);
await this.repository.updateTag(this.projectId, tag, tagData);
}
}
} catch (error) {
throw new TaskMasterError(
@@ -238,14 +281,17 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/tasks/${taskId}`;
await this.retryOperation(() =>
this.repository.deleteTask(this.projectId, taskId)
);
const response = await this.makeRequest(endpoint, 'DELETE');
if (!response.success) {
throw new Error(response.error || 'Failed to delete task');
// Remove from tag if specified
if (tag) {
const tagData = this.tagsCache.get(tag);
if (tagData) {
tagData.tasks = tagData.tasks.filter((id) => id !== taskId);
await this.repository.updateTag(this.projectId, tag, tagData);
}
}
} catch (error) {
throw new TaskMasterError(
@@ -258,21 +304,24 @@ export class ApiStorage implements IStorage {
}
/**
* List available tags
* List available tags (briefs in our system)
*/
async listTags(): Promise<string[]> {
await this.ensureInitialized();
try {
const response = await this.makeRequest<{ tags: string[] }>(
`/projects/${this.config.projectId}/tags`
);
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!response.success) {
throw new Error(response.error || 'Failed to list tags');
// In our API-based system, we only have one "tag" at a time - the current brief
if (context?.briefId) {
// Ensure the current brief is in our cache
await this.loadTagsIntoCache();
return [context.briefId];
}
return response.data?.tags || [];
// No brief selected, return empty array
return [];
} catch (error) {
throw new TaskMasterError(
'Failed to list tags from API',
@@ -290,19 +339,15 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/metadata`;
const response = await this.makeRequest<{ metadata: TaskMetadata }>(
endpoint
);
if (!response.success) {
return null;
if (tag) {
const tagData = this.tagsCache.get(tag);
return (tagData?.metadata as TaskMetadata) || null;
}
return response.data?.metadata || null;
// Return global metadata if no tag specified
// This could be stored in a special system tag
const systemTag = await this.repository.getTag(this.projectId, '_system');
return (systemTag?.metadata as TaskMetadata) || null;
} catch (error) {
throw new TaskMasterError(
'Failed to load metadata from API',
@@ -320,14 +365,38 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const endpoint = tag
? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
: `/projects/${this.config.projectId}/metadata`;
if (tag) {
const tagData = this.tagsCache.get(tag) || {
name: tag,
tasks: [],
metadata: {}
};
tagData.metadata = metadata as any;
const response = await this.makeRequest(endpoint, 'PUT', { metadata });
if (this.tagsCache.has(tag)) {
await this.repository.updateTag(this.projectId, tag, tagData);
} else {
await this.repository.createTag(this.projectId, tagData);
}
if (!response.success) {
throw new Error(response.error || 'Failed to save metadata');
this.tagsCache.set(tag, tagData);
} else {
// Save to system tag
const systemTag: TaskTag = {
name: '_system',
tasks: [],
metadata: metadata as any
};
const existing = await this.repository.getTag(
this.projectId,
'_system'
);
if (existing) {
await this.repository.updateTag(this.projectId, '_system', systemTag);
} else {
await this.repository.createTag(this.projectId, systemTag);
}
}
} catch (error) {
throw new TaskMasterError(
@@ -358,14 +427,30 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
// First load existing tasks
const existingTasks = await this.loadTasks(tag);
// Use bulk create - repository should handle duplicates
await this.retryOperation(() =>
this.repository.bulkCreateTasks(this.projectId, tasks)
);
// Append new tasks
const allTasks = [...existingTasks, ...tasks];
// Update tag if specified
if (tag) {
const tagData = this.tagsCache.get(tag) || {
name: tag,
tasks: [],
metadata: {}
};
// Save all tasks
await this.saveTasks(allTasks, tag);
const newTaskIds = tasks.map((t) => t.id);
tagData.tasks = [...new Set([...tagData.tasks, ...newTaskIds])];
if (this.tagsCache.has(tag)) {
await this.repository.updateTag(this.projectId, tag, tagData);
} else {
await this.repository.createTag(this.projectId, tagData);
}
this.tagsCache.set(tag, tagData);
}
} catch (error) {
throw new TaskMasterError(
'Failed to append tasks to API',
@@ -387,18 +472,9 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
// Load the task
const task = await this.loadTask(taskId, tag);
if (!task) {
throw new Error(`Task ${taskId} not found`);
}
// Merge updates
const updatedTask = { ...task, ...updates, id: taskId };
// Save updated task
await this.saveTask(updatedTask, tag);
await this.retryOperation(() =>
this.repository.updateTask(this.projectId, taskId, updates)
);
} catch (error) {
throw new TaskMasterError(
'Failed to update task via API',
@@ -423,14 +499,11 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const response = await this.makeRequest(
`/projects/${this.config.projectId}/tags/${encodeURIComponent(tag)}`,
'DELETE'
await this.retryOperation(() =>
this.repository.deleteTag(this.projectId, tag)
);
if (!response.success) {
throw new Error(response.error || 'Failed to delete tag');
}
this.tagsCache.delete(tag);
} catch (error) {
throw new TaskMasterError(
'Failed to delete tag via API',
@@ -448,15 +521,21 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const response = await this.makeRequest(
`/projects/${this.config.projectId}/tags/${encodeURIComponent(oldTag)}/rename`,
'POST',
{ newTag }
);
if (!response.success) {
throw new Error(response.error || 'Failed to rename tag');
const tagData = this.tagsCache.get(oldTag);
if (!tagData) {
throw new Error(`Tag ${oldTag} not found`);
}
// Create new tag with same data
const newTagData = { ...tagData, name: newTag };
await this.repository.createTag(this.projectId, newTagData);
// Delete old tag
await this.repository.deleteTag(this.projectId, oldTag);
// Update cache
this.tagsCache.delete(oldTag);
this.tagsCache.set(newTag, newTagData);
} catch (error) {
throw new TaskMasterError(
'Failed to rename tag via API',
@@ -474,15 +553,17 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const response = await this.makeRequest(
`/projects/${this.config.projectId}/tags/${encodeURIComponent(sourceTag)}/copy`,
'POST',
{ targetTag }
);
if (!response.success) {
throw new Error(response.error || 'Failed to copy tag');
const sourceData = this.tagsCache.get(sourceTag);
if (!sourceData) {
throw new Error(`Source tag ${sourceTag} not found`);
}
// Create new tag with copied data
const targetData = { ...sourceData, name: targetTag };
await this.repository.createTag(this.projectId, targetData);
// Update cache
this.tagsCache.set(targetTag, targetData);
} catch (error) {
throw new TaskMasterError(
'Failed to copy tag via API',
@@ -500,24 +581,22 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const response = await this.makeRequest<{
stats: StorageStats;
}>(`/projects/${this.config.projectId}/stats`);
const tasks = await this.repository.getTasks(this.projectId);
const tags = await this.repository.getTags(this.projectId);
if (!response.success) {
throw new Error(response.error || 'Failed to get stats');
}
const tagStats = tags.map((tag) => ({
tag: tag.name,
taskCount: tag.tasks.length,
lastModified: new Date().toISOString() // TODO: Get actual last modified from tag data
}));
// Return stats or default values
return (
response.data?.stats || {
totalTasks: 0,
totalTags: 0,
storageSize: 0,
lastModified: new Date().toISOString(),
tagStats: []
}
);
return {
totalTasks: tasks.length,
totalTags: tags.length,
storageSize: 0, // Not applicable for API storage
lastModified: new Date().toISOString(),
tagStats
};
} catch (error) {
throw new TaskMasterError(
'Failed to get stats from API',
@@ -535,16 +614,15 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const response = await this.makeRequest<{ backupId: string }>(
`/projects/${this.config.projectId}/backup`,
'POST'
);
// Export all data
await this.repository.getTasks(this.projectId);
await this.repository.getTags(this.projectId);
if (!response.success) {
throw new Error(response.error || 'Failed to create backup');
}
return response.data?.backupId || 'unknown';
// TODO: In a real implementation, this would:
// 1. Create backup data structure with tasks and tags
// 2. Save the backup to a storage service
// For now, return a backup identifier
return `backup-${this.projectId}-${Date.now()}`;
} catch (error) {
throw new TaskMasterError(
'Failed to create backup via API',
@@ -558,27 +636,16 @@ export class ApiStorage implements IStorage {
/**
* Restore from backup
*/
async restore(backupPath: string): Promise<void> {
async restore(backupId: string): Promise<void> {
await this.ensureInitialized();
try {
const response = await this.makeRequest(
`/projects/${this.config.projectId}/restore`,
'POST',
{ backupId: backupPath }
);
if (!response.success) {
throw new Error(response.error || 'Failed to restore backup');
}
} catch (error) {
throw new TaskMasterError(
'Failed to restore backup via API',
ERROR_CODES.STORAGE_ERROR,
{ operation: 'restore', backupPath },
error as Error
);
}
// This would restore from a backup service
// Implementation depends on backup strategy
throw new TaskMasterError(
'Restore not implemented for API storage',
ERROR_CODES.NOT_IMPLEMENTED,
{ operation: 'restore', backupId }
);
}
/**
@@ -588,14 +655,23 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized();
try {
const response = await this.makeRequest(
`/projects/${this.config.projectId}/clear`,
'POST'
);
if (!response.success) {
throw new Error(response.error || 'Failed to clear data');
// Delete all tasks
const tasks = await this.repository.getTasks(this.projectId);
if (tasks.length > 0) {
await this.repository.bulkDeleteTasks(
this.projectId,
tasks.map((t) => t.id)
);
}
// Delete all tags
const tags = await this.repository.getTags(this.projectId);
for (const tag of tags) {
await this.repository.deleteTag(this.projectId, tag.name);
}
// Clear cache
this.tagsCache.clear();
} catch (error) {
throw new TaskMasterError(
'Failed to clear data via API',
@@ -611,6 +687,7 @@ export class ApiStorage implements IStorage {
*/
async close(): Promise<void> {
this.initialized = false;
this.tagsCache.clear();
}
/**
@@ -623,102 +700,21 @@ export class ApiStorage implements IStorage {
}
/**
* Make HTTP request to API
* Retry an operation with exponential backoff
*/
private async makeRequest<T>(
path: string,
method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',
body?: unknown
): Promise<ApiResponse<T>> {
const url = `${this.config.endpoint}${path}`;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
private async retryOperation<T>(
operation: () => Promise<T>,
attempt: number = 1
): Promise<T> {
try {
const options: RequestInit = {
method,
headers: {
Authorization: `Bearer ${this.config.accessToken}`,
'Content-Type': 'application/json',
Accept: 'application/json'
},
signal: controller.signal
};
if (body && (method === 'POST' || method === 'PUT')) {
options.body = JSON.stringify(body);
return await operation();
} catch (error) {
if (this.enableRetry && attempt < this.maxRetries) {
const delay = Math.pow(2, attempt) * 1000;
await new Promise((resolve) => setTimeout(resolve, delay));
return this.retryOperation(operation, attempt + 1);
}
let lastError: Error | null = null;
let attempt = 0;
while (attempt < this.config.maxRetries) {
attempt++;
try {
const response = await fetch(url, options);
const data = await response.json();
if (response.ok) {
return { success: true, data: data as T };
}
// Handle specific error codes
if (response.status === 401) {
return {
success: false,
error: 'Authentication failed - check access token'
};
}
if (response.status === 404) {
return {
success: false,
error: 'Resource not found'
};
}
if (response.status === 429) {
// Rate limited - retry with backoff
if (this.config.enableRetry && attempt < this.config.maxRetries) {
await this.delay(Math.pow(2, attempt) * 1000);
continue;
}
}
const errorData = data as any;
return {
success: false,
error:
errorData.error ||
errorData.message ||
`HTTP ${response.status}: ${response.statusText}`
};
} catch (error) {
lastError = error as Error;
// Retry on network errors
if (this.config.enableRetry && attempt < this.config.maxRetries) {
await this.delay(Math.pow(2, attempt) * 1000);
continue;
}
}
}
// All retries exhausted
return {
success: false,
error: lastError?.message || 'Request failed after retries'
};
} finally {
clearTimeout(timeoutId);
throw error;
}
}
/**
* Delay helper for retries
*/
private delay(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
}

View File

@@ -102,6 +102,14 @@ export class FileStorage implements IStorage {
}
}
/**
* Load a single task by ID from the tasks.json file
*/
async loadTask(taskId: string, tag?: string): Promise<Task | null> {
const tasks = await this.loadTasks(tag);
return tasks.find((task) => task.id === taskId) || null;
}
/**
* Save tasks for a specific tag in the single tasks.json file
*/

View File

@@ -3,17 +3,41 @@
*/
import type { IStorage } from '../interfaces/storage.interface.js';
import type { IConfiguration } from '../interfaces/configuration.interface.js';
import { FileStorage } from './file-storage';
import type {
IConfiguration,
RuntimeStorageConfig,
StorageSettings
} from '../interfaces/configuration.interface.js';
import { FileStorage } from './file-storage/index.js';
import { ApiStorage } from './api-storage.js';
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
import { AuthManager } from '../auth/auth-manager.js';
import { getLogger } from '../logger/index.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';
/**
* Factory for creating storage implementations based on configuration
*/
export class StorageFactory {
/**
* Create a storage implementation from runtime storage config
* This is the preferred method when you have a RuntimeStorageConfig
* @param storageConfig - Runtime storage configuration
* @param projectPath - Project root path (for file storage)
* @returns Storage implementation
*/
static createFromStorageConfig(
storageConfig: RuntimeStorageConfig,
projectPath: string
): IStorage {
// Wrap the storage config in the expected format, including projectPath
// This ensures ApiStorage receives the projectPath for projectId
return StorageFactory.create(
{ storage: storageConfig, projectPath } as Partial<IConfiguration>,
projectPath
);
}
/**
* Create a storage implementation based on configuration
* @param config - Configuration object
@@ -35,28 +59,33 @@ export class StorageFactory {
case 'api':
if (!StorageFactory.isHamsterAvailable(config)) {
const missing: string[] = [];
if (!config.storage?.apiEndpoint) missing.push('apiEndpoint');
if (!config.storage?.apiAccessToken) missing.push('apiAccessToken');
// Check if authenticated via AuthManager
const authManager = AuthManager.getInstance();
if (!authManager.isAuthenticated()) {
throw new TaskMasterError(
'API storage configured but not authenticated. Run: tm auth login',
`API storage not fully configured (${missing.join(', ') || 'credentials missing'}). Run: tm auth login, or set the missing field(s).`,
ERROR_CODES.MISSING_CONFIGURATION,
{ storageType: 'api' }
{ storageType: 'api', missing }
);
}
// Use auth token from AuthManager
const credentials = authManager.getCredentials();
if (credentials) {
// Merge with existing storage config, ensuring required fields
config.storage = {
...config.storage,
type: 'api' as const,
const nextStorage: StorageSettings = {
...(config.storage as StorageSettings),
type: 'api',
apiAccessToken: credentials.token,
apiEndpoint:
config.storage?.apiEndpoint ||
process.env.HAMSTER_API_URL ||
'https://tryhamster.com/api'
} as any; // Cast to any to bypass strict type checking for partial config
};
config.storage = nextStorage;
}
}
logger.info('☁️ Using API storage');
@@ -77,15 +106,16 @@ export class StorageFactory {
const credentials = authManager.getCredentials();
if (credentials) {
// Configure API storage with auth credentials
config.storage = {
...config.storage,
type: 'api' as const,
const nextStorage: StorageSettings = {
...(config.storage as StorageSettings),
type: 'api',
apiAccessToken: credentials.token,
apiEndpoint:
config.storage?.apiEndpoint ||
process.env.HAMSTER_API_URL ||
'https://tryhamster.com/api'
} as any; // Cast to any to bypass strict type checking for partial config
};
config.storage = nextStorage;
logger.info('☁️ Using API storage (authenticated)');
return StorageFactory.createApiStorage(config);
}
@@ -119,29 +149,13 @@ export class StorageFactory {
* Create API storage implementation
*/
private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
const { apiEndpoint, apiAccessToken } = config.storage || {};
if (!apiEndpoint) {
throw new TaskMasterError(
'API endpoint is required for API storage',
ERROR_CODES.MISSING_CONFIGURATION,
{ storageType: 'api' }
);
}
if (!apiAccessToken) {
throw new TaskMasterError(
'API access token is required for API storage',
ERROR_CODES.MISSING_CONFIGURATION,
{ storageType: 'api' }
);
}
// Use our SupabaseAuthClient instead of creating a raw Supabase client
const supabaseAuthClient = new SupabaseAuthClient();
const supabaseClient = supabaseAuthClient.getClient();
return new ApiStorage({
endpoint: apiEndpoint,
accessToken: apiAccessToken,
projectId: config.projectPath,
timeout: config.retry?.requestTimeout,
supabaseClient,
projectId: config.projectPath || '',
enableRetry: config.retry?.retryOnNetworkError,
maxRetries: config.retry?.retryAttempts
});
@@ -189,6 +203,11 @@ export class StorageFactory {
// File storage doesn't require additional config
break;
case 'auto':
// Auto storage is valid - it will determine the actual type at runtime
// No specific validation needed as it will fall back to file if API not configured
break;
default:
errors.push(`Unknown storage type: ${storageType}`);
}

View File

@@ -10,7 +10,12 @@ import {
} from './services/task-service.js';
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
import type { IConfiguration } from './interfaces/configuration.interface.js';
import type { Task, TaskStatus, TaskFilter } from './types/index.js';
import type {
Task,
TaskStatus,
TaskFilter,
StorageType
} from './types/index.js';
/**
* Options for creating TaskMasterCore instance
@@ -152,7 +157,7 @@ export class TaskMasterCore {
/**
* Get current storage type
*/
getStorageType(): 'file' | 'api' | 'auto' {
getStorageType(): StorageType {
return this.taskService.getStorageType();
}
@@ -170,6 +175,22 @@ export class TaskMasterCore {
await this.configManager.setActiveTag(tag);
}
/**
* Update task status
*/
async updateTaskStatus(
taskId: string | number,
newStatus: TaskStatus,
tag?: string
): Promise<{
success: boolean;
oldStatus: TaskStatus;
newStatus: TaskStatus;
taskId: string;
}> {
return this.taskService.updateTaskStatus(taskId, newStatus, tag);
}
/**
* Close and cleanup resources
*/

View File

@@ -0,0 +1,491 @@
export type Json =
| string
| number
| boolean
| null
| { [key: string]: Json | undefined }
| Json[];
export type Database = {
public: {
Tables: {
accounts: {
Row: {
created_at: string | null;
created_by: string | null;
email: string | null;
id: string;
is_personal_account: boolean;
name: string;
picture_url: string | null;
primary_owner_user_id: string;
public_data: Json;
slug: string | null;
updated_at: string | null;
updated_by: string | null;
};
Insert: {
created_at?: string | null;
created_by?: string | null;
email?: string | null;
id?: string;
is_personal_account?: boolean;
name: string;
picture_url?: string | null;
primary_owner_user_id?: string;
public_data?: Json;
slug?: string | null;
updated_at?: string | null;
updated_by?: string | null;
};
Update: {
created_at?: string | null;
created_by?: string | null;
email?: string | null;
id?: string;
is_personal_account?: boolean;
name?: string;
picture_url?: string | null;
primary_owner_user_id?: string;
public_data?: Json;
slug?: string | null;
updated_at?: string | null;
updated_by?: string | null;
};
Relationships: [];
};
brief: {
Row: {
account_id: string;
created_at: string;
created_by: string;
document_id: string;
id: string;
plan_generation_completed_at: string | null;
plan_generation_error: string | null;
plan_generation_started_at: string | null;
plan_generation_status: Database['public']['Enums']['plan_generation_status'];
status: Database['public']['Enums']['brief_status'];
updated_at: string;
};
Insert: {
account_id: string;
created_at?: string;
created_by: string;
document_id: string;
id?: string;
plan_generation_completed_at?: string | null;
plan_generation_error?: string | null;
plan_generation_started_at?: string | null;
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
status?: Database['public']['Enums']['brief_status'];
updated_at?: string;
};
Update: {
account_id?: string;
created_at?: string;
created_by?: string;
document_id?: string;
id?: string;
plan_generation_completed_at?: string | null;
plan_generation_error?: string | null;
plan_generation_started_at?: string | null;
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
status?: Database['public']['Enums']['brief_status'];
updated_at?: string;
};
Relationships: [
{
foreignKeyName: 'brief_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'brief_document_id_fkey';
columns: ['document_id'];
isOneToOne: false;
referencedRelation: 'document';
referencedColumns: ['id'];
}
];
};
document: {
Row: {
account_id: string;
created_at: string;
created_by: string;
description: string | null;
document_name: string;
document_type: Database['public']['Enums']['document_type'];
file_path: string | null;
file_size: number | null;
id: string;
metadata: Json | null;
mime_type: string | null;
processed_at: string | null;
processing_error: string | null;
processing_status:
| Database['public']['Enums']['document_processing_status']
| null;
source_id: string | null;
source_type: string | null;
title: string;
updated_at: string;
};
Insert: {
account_id: string;
created_at?: string;
created_by: string;
description?: string | null;
document_name: string;
document_type?: Database['public']['Enums']['document_type'];
file_path?: string | null;
file_size?: number | null;
id?: string;
metadata?: Json | null;
mime_type?: string | null;
processed_at?: string | null;
processing_error?: string | null;
processing_status?:
| Database['public']['Enums']['document_processing_status']
| null;
source_id?: string | null;
source_type?: string | null;
title: string;
updated_at?: string;
};
Update: {
account_id?: string;
created_at?: string;
created_by?: string;
description?: string | null;
document_name?: string;
document_type?: Database['public']['Enums']['document_type'];
file_path?: string | null;
file_size?: number | null;
id?: string;
metadata?: Json | null;
mime_type?: string | null;
processed_at?: string | null;
processing_error?: string | null;
processing_status?:
| Database['public']['Enums']['document_processing_status']
| null;
source_id?: string | null;
source_type?: string | null;
title?: string;
updated_at?: string;
};
Relationships: [
{
foreignKeyName: 'document_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
}
];
};
tasks: {
Row: {
account_id: string;
actual_hours: number;
assignee_id: string | null;
brief_id: string | null;
completed_subtasks: number;
complexity: number | null;
created_at: string;
created_by: string;
description: string | null;
display_id: string | null;
document_id: string | null;
due_date: string | null;
estimated_hours: number | null;
id: string;
metadata: Json;
parent_task_id: string | null;
position: number;
priority: Database['public']['Enums']['task_priority'];
status: Database['public']['Enums']['task_status'];
subtask_position: number;
title: string;
total_subtasks: number;
updated_at: string;
updated_by: string;
};
Insert: {
account_id: string;
actual_hours?: number;
assignee_id?: string | null;
brief_id?: string | null;
completed_subtasks?: number;
complexity?: number | null;
created_at?: string;
created_by: string;
description?: string | null;
display_id?: string | null;
document_id?: string | null;
due_date?: string | null;
estimated_hours?: number | null;
id?: string;
metadata?: Json;
parent_task_id?: string | null;
position?: number;
priority?: Database['public']['Enums']['task_priority'];
status?: Database['public']['Enums']['task_status'];
subtask_position?: number;
title: string;
total_subtasks?: number;
updated_at?: string;
updated_by: string;
};
Update: {
account_id?: string;
actual_hours?: number;
assignee_id?: string | null;
brief_id?: string | null;
completed_subtasks?: number;
complexity?: number | null;
created_at?: string;
created_by?: string;
description?: string | null;
display_id?: string | null;
document_id?: string | null;
due_date?: string | null;
estimated_hours?: number | null;
id?: string;
metadata?: Json;
parent_task_id?: string | null;
position?: number;
priority?: Database['public']['Enums']['task_priority'];
status?: Database['public']['Enums']['task_status'];
subtask_position?: number;
title?: string;
total_subtasks?: number;
updated_at?: string;
updated_by?: string;
};
Relationships: [
{
foreignKeyName: 'tasks_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_brief_id_fkey';
columns: ['brief_id'];
isOneToOne: false;
referencedRelation: 'brief';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_document_id_fkey';
columns: ['document_id'];
isOneToOne: false;
referencedRelation: 'document';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_parent_task_id_fkey';
columns: ['parent_task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
}
];
};
task_dependencies: {
Row: {
account_id: string;
created_at: string;
depends_on_task_id: string;
id: string;
task_id: string;
};
Insert: {
account_id: string;
created_at?: string;
depends_on_task_id: string;
id?: string;
task_id: string;
};
Update: {
account_id?: string;
created_at?: string;
depends_on_task_id?: string;
id?: string;
task_id?: string;
};
Relationships: [
{
foreignKeyName: 'task_dependencies_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'task_dependencies_depends_on_task_id_fkey';
columns: ['depends_on_task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
},
{
foreignKeyName: 'task_dependencies_task_id_fkey';
columns: ['task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
}
];
};
user_accounts: {
Row: {
id: string | null;
name: string | null;
picture_url: string | null;
role: string | null;
slug: string | null;
};
Insert: {
id?: string | null;
name?: string | null;
picture_url?: string | null;
role?: string | null;
slug?: string | null;
};
Update: {
id?: string | null;
name?: string | null;
picture_url?: string | null;
role?: string | null;
slug?: string | null;
};
Relationships: [];
};
};
Views: {
[_ in never]: never;
};
Functions: {
[_ in never]: never;
};
Enums: {
brief_status:
| 'draft'
| 'refining'
| 'aligned'
| 'delivering'
| 'delivered'
| 'done'
| 'archived';
document_processing_status: 'pending' | 'processing' | 'ready' | 'failed';
document_type:
| 'brief'
| 'blueprint'
| 'file'
| 'note'
| 'transcript'
| 'generated_plan'
| 'generated_task'
| 'generated_summary'
| 'method'
| 'task';
plan_generation_status:
| 'not_started'
| 'generating'
| 'completed'
| 'failed';
task_priority: 'low' | 'medium' | 'high' | 'urgent';
task_status: 'todo' | 'in_progress' | 'done';
};
CompositeTypes: {
[_ in never]: never;
};
};
};
export type Tables<
PublicTableNameOrOptions extends
| keyof (Database['public']['Tables'] & Database['public']['Views'])
| { schema: keyof Database },
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
Database[PublicTableNameOrOptions['schema']]['Views'])
: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
Row: infer R;
}
? R
: never
: PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
Database['public']['Views'])
? (Database['public']['Tables'] &
Database['public']['Views'])[PublicTableNameOrOptions] extends {
Row: infer R;
}
? R
: never
: never;
export type TablesInsert<
PublicTableNameOrOptions extends
| keyof Database['public']['Tables']
| { schema: keyof Database },
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
Insert: infer I;
}
? I
: never
: PublicTableNameOrOptions extends keyof Database['public']['Tables']
? Database['public']['Tables'][PublicTableNameOrOptions] extends {
Insert: infer I;
}
? I
: never
: never;
export type TablesUpdate<
PublicTableNameOrOptions extends
| keyof Database['public']['Tables']
| { schema: keyof Database },
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
Update: infer U;
}
? U
: never
: PublicTableNameOrOptions extends keyof Database['public']['Tables']
? Database['public']['Tables'][PublicTableNameOrOptions] extends {
Update: infer U;
}
? U
: never
: never;
export type Enums<
PublicEnumNameOrOptions extends
| keyof Database['public']['Enums']
| { schema: keyof Database },
EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
? keyof Database[PublicEnumNameOrOptions['schema']]['Enums']
: never = never
> = PublicEnumNameOrOptions extends { schema: keyof Database }
? Database[PublicEnumNameOrOptions['schema']]['Enums'][EnumName]
: PublicEnumNameOrOptions extends keyof Database['public']['Enums']
? Database['public']['Enums'][PublicEnumNameOrOptions]
: never;

View File

@@ -2,6 +2,14 @@
* Core type definitions for Task Master
*/
/**
* Storage type options
* - 'file': Local file system storage
* - 'api': Remote API storage (Hamster integration)
* - 'auto': Automatically detect based on auth status
*/
export type StorageType = 'file' | 'api' | 'auto';
// ============================================================================
// Type Literals
// ============================================================================
@@ -16,7 +24,8 @@ export type TaskStatus =
| 'deferred'
| 'cancelled'
| 'blocked'
| 'review';
| 'review'
| 'completed';
/**
* Task priority levels
@@ -96,6 +105,15 @@ export interface TaskCollection {
metadata: TaskMetadata;
}
/**
* Task tag for organizing tasks
*/
export interface TaskTag {
name: string;
tasks: string[]; // Task IDs belonging to this tag
metadata: Record<string, any>;
}
// ============================================================================
// Utility Types
// ============================================================================

View File

@@ -11,7 +11,7 @@ export {
isValidTaskId,
isValidSubtaskId,
getParentTaskId
} from './id-generator';
} from './id-generator.js';
// Additional utility exports

View File

@@ -1,12 +1,13 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"module": "NodeNext",
"lib": ["ES2022"],
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"outDir": "./dist",
"baseUrl": ".",
"rootDir": "./src",
"strict": true,
"noImplicitAny": true,
@@ -23,23 +24,12 @@
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"moduleResolution": "node",
"moduleResolution": "NodeNext",
"moduleDetection": "force",
"types": ["node"],
"resolveJsonModule": true,
"isolatedModules": true,
"paths": {
"@/*": ["./src/*"],
"@/auth": ["./src/auth"],
"@/config": ["./src/config"],
"@/errors": ["./src/errors"],
"@/interfaces": ["./src/interfaces"],
"@/logger": ["./src/logger"],
"@/parser": ["./src/parser"],
"@/providers": ["./src/providers"],
"@/services": ["./src/services"],
"@/storage": ["./src/storage"],
"@/types": ["./src/types"],
"@/utils": ["./src/utils"]
}
"allowImportingTsExtensions": false
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]

View File

@@ -1,52 +0,0 @@
import { defineConfig } from 'tsup';
import { dotenvLoad } from 'dotenv-mono';
dotenvLoad();
// Get all TM_PUBLIC_* env variables for build-time injection
const getBuildTimeEnvs = () => {
const envs: Record<string, string> = {};
for (const [key, value] of Object.entries(process.env)) {
if (key.startsWith('TM_PUBLIC_')) {
// Return the actual value, not JSON.stringify'd
envs[key] = value || '';
}
}
return envs;
};
export default defineConfig({
entry: {
index: 'src/index.ts',
'auth/index': 'src/auth/index.ts',
'config/index': 'src/config/index.ts',
'errors/index': 'src/errors/index.ts',
'interfaces/index': 'src/interfaces/index.ts',
'logger/index': 'src/logger/index.ts',
'parser/index': 'src/parser/index.ts',
'providers/index': 'src/providers/index.ts',
'services/index': 'src/services/index.ts',
'storage/index': 'src/storage/index.ts',
'types/index': 'src/types/index.ts',
'utils/index': 'src/utils/index.ts'
},
format: ['cjs', 'esm'],
dts: true,
sourcemap: true,
clean: true,
splitting: false,
treeshake: true,
minify: false,
target: 'es2022',
tsconfig: './tsconfig.json',
outDir: 'dist',
// Replace process.env.TM_PUBLIC_* with actual values at build time
env: getBuildTimeEnvs(),
// Auto-external all dependencies from package.json
external: [
// External all node_modules - everything not starting with . or /
/^[^./]/
],
esbuildOptions(options) {
options.conditions = ['module'];
}
});

View File

@@ -16,13 +16,21 @@ import ora from 'ora'; // Import ora
import { log, readJSON } from './utils.js';
// Import new commands from @tm/cli
import { ListTasksCommand, AuthCommand } from '@tm/cli';
import {
ListTasksCommand,
ShowCommand,
AuthCommand,
ContextCommand,
SetStatusCommand,
checkForUpdate,
performAutoUpdate,
displayUpgradeNotification
} from '@tm/cli';
import {
parsePRD,
updateTasks,
generateTaskFiles,
setTaskStatus,
listTasks,
expandTask,
expandAllTasks,
@@ -77,8 +85,7 @@ import {
isConfigFilePresent,
getAvailableModels,
getBaseUrlForRole,
getDefaultNumTasks,
getDefaultSubtasks
getDefaultNumTasks
} from './config-manager.js';
import { CUSTOM_PROVIDERS } from '../../src/constants/providers.js';
@@ -1679,63 +1686,9 @@ function registerCommands(programInstance) {
});
});
// set-status command
programInstance
.command('set-status')
.alias('mark')
.alias('set')
.description('Set the status of a task')
.option(
'-i, --id <id>',
'Task ID (can be comma-separated for multiple tasks)'
)
.option(
'-s, --status <status>',
`New status (one of: ${TASK_STATUS_OPTIONS.join(', ')})`
)
.option(
'-f, --file <file>',
'Path to the tasks file',
TASKMASTER_TASKS_FILE
)
.option('--tag <tag>', 'Specify tag context for task operations')
.action(async (options) => {
// Initialize TaskMaster
const taskMaster = initTaskMaster({
tasksPath: options.file || true,
tag: options.tag
});
const taskId = options.id;
const status = options.status;
if (!taskId || !status) {
console.error(chalk.red('Error: Both --id and --status are required'));
process.exit(1);
}
if (!isValidTaskStatus(status)) {
console.error(
chalk.red(
`Error: Invalid status value: ${status}. Use one of: ${TASK_STATUS_OPTIONS.join(', ')}`
)
);
process.exit(1);
}
const tag = taskMaster.getCurrentTag();
displayCurrentTagIndicator(tag);
console.log(
chalk.blue(`Setting status of task(s) ${taskId} to: ${status}`)
);
await setTaskStatus(taskMaster.getTasksPath(), taskId, status, {
projectRoot: taskMaster.getProjectRoot(),
tag
});
});
// Register the set-status command from @tm/cli
// Handles task status updates with proper error handling and validation
SetStatusCommand.registerOn(programInstance);
// NEW: Register the new list command from @tm/cli
// This command handles all its own configuration and logic
@@ -1745,6 +1698,14 @@ function registerCommands(programInstance) {
// Handles authentication with tryhamster.com
AuthCommand.registerOn(programInstance);
// Register the context command from @tm/cli
// Manages workspace context (org/brief selection)
ContextCommand.registerOn(programInstance);
// Register the show command from @tm/cli
// Displays detailed information about tasks
ShowCommand.registerOn(programInstance);
// expand command
programInstance
.command('expand')
@@ -2563,80 +2524,6 @@ ${result.result}
);
});
// show command
programInstance
.command('show')
.description(
`Display detailed information about one or more tasks${chalk.reset('')}`
)
.argument('[id]', 'Task ID(s) to show (comma-separated for multiple)')
.option(
'-i, --id <id>',
'Task ID(s) to show (comma-separated for multiple)'
)
.option('-s, --status <status>', 'Filter subtasks by status')
.option(
'-f, --file <file>',
'Path to the tasks file',
TASKMASTER_TASKS_FILE
)
.option(
'-r, --report <report>',
'Path to the complexity report file',
COMPLEXITY_REPORT_FILE
)
.option('--tag <tag>', 'Specify tag context for task operations')
.action(async (taskId, options) => {
// Initialize TaskMaster
const initOptions = {
tasksPath: options.file || true,
tag: options.tag
};
// Only pass complexityReportPath if user provided a custom path
if (options.report && options.report !== COMPLEXITY_REPORT_FILE) {
initOptions.complexityReportPath = options.report;
}
const taskMaster = initTaskMaster(initOptions);
const idArg = taskId || options.id;
const statusFilter = options.status;
const tag = taskMaster.getCurrentTag();
// Show current tag context
displayCurrentTagIndicator(tag);
if (!idArg) {
console.error(chalk.red('Error: Please provide a task ID'));
process.exit(1);
}
// Check if multiple IDs are provided (comma-separated)
const taskIds = idArg
.split(',')
.map((id) => id.trim())
.filter((id) => id.length > 0);
if (taskIds.length > 1) {
// Multiple tasks - use compact summary view with interactive drill-down
await displayMultipleTasksSummary(
taskMaster.getTasksPath(),
taskIds,
taskMaster.getComplexityReportPath(),
statusFilter,
{ projectRoot: taskMaster.getProjectRoot(), tag }
);
} else {
// Single task - use detailed view
await displayTaskById(
taskMaster.getTasksPath(),
taskIds[0],
taskMaster.getComplexityReportPath(),
statusFilter,
{ projectRoot: taskMaster.getProjectRoot(), tag }
);
}
});
// add-dependency command
programInstance
.command('add-dependency')
@@ -5228,122 +5115,6 @@ function setupCLI() {
return programInstance;
}
/**
* Check for newer version of task-master-ai
* @returns {Promise<{currentVersion: string, latestVersion: string, needsUpdate: boolean}>}
*/
async function checkForUpdate() {
// Get current version from package.json ONLY
const currentVersion = getTaskMasterVersion();
return new Promise((resolve) => {
// Get the latest version from npm registry
const options = {
hostname: 'registry.npmjs.org',
path: '/task-master-ai',
method: 'GET',
headers: {
Accept: 'application/vnd.npm.install-v1+json' // Lightweight response
}
};
const req = https.request(options, (res) => {
let data = '';
res.on('data', (chunk) => {
data += chunk;
});
res.on('end', () => {
try {
const npmData = JSON.parse(data);
const latestVersion = npmData['dist-tags']?.latest || currentVersion;
// Compare versions
const needsUpdate =
compareVersions(currentVersion, latestVersion) < 0;
resolve({
currentVersion,
latestVersion,
needsUpdate
});
} catch (error) {
log('debug', `Error parsing npm response: ${error.message}`);
resolve({
currentVersion,
latestVersion: currentVersion,
needsUpdate: false
});
}
});
});
req.on('error', (error) => {
log('debug', `Error checking for updates: ${error.message}`);
resolve({
currentVersion,
latestVersion: currentVersion,
needsUpdate: false
});
});
// Set a timeout to avoid hanging if npm is slow
req.setTimeout(3000, () => {
req.abort();
log('debug', 'Update check timed out');
resolve({
currentVersion,
latestVersion: currentVersion,
needsUpdate: false
});
});
req.end();
});
}
/**
* Compare semantic versions
* @param {string} v1 - First version
* @param {string} v2 - Second version
* @returns {number} -1 if v1 < v2, 0 if v1 = v2, 1 if v1 > v2
*/
function compareVersions(v1, v2) {
const v1Parts = v1.split('.').map((p) => parseInt(p, 10));
const v2Parts = v2.split('.').map((p) => parseInt(p, 10));
for (let i = 0; i < Math.max(v1Parts.length, v2Parts.length); i++) {
const v1Part = v1Parts[i] || 0;
const v2Part = v2Parts[i] || 0;
if (v1Part < v2Part) return -1;
if (v1Part > v2Part) return 1;
}
return 0;
}
/**
* Display upgrade notification message
* @param {string} currentVersion - Current version
* @param {string} latestVersion - Latest version
*/
function displayUpgradeNotification(currentVersion, latestVersion) {
const message = boxen(
`${chalk.blue.bold('Update Available!')} ${chalk.dim(currentVersion)}${chalk.green(latestVersion)}\n\n` +
`Run ${chalk.cyan('npm i task-master-ai@latest -g')} to update to the latest version with new features and bug fixes.`,
{
padding: 1,
margin: { top: 1, bottom: 1 },
borderColor: 'yellow',
borderStyle: 'round'
}
);
console.log(message);
}
/**
* Parse arguments and run the CLI
* @param {Array} argv - Command-line arguments
@@ -5362,7 +5133,8 @@ async function runCLI(argv = process.argv) {
}
// Start the update check in the background - don't await yet
const updateCheckPromise = checkForUpdate();
const currentVersion = getTaskMasterVersion();
const updateCheckPromise = checkForUpdate(currentVersion);
// Setup and parse
// NOTE: getConfig() might be called during setupCLI->registerCommands if commands need config
@@ -5373,10 +5145,18 @@ async function runCLI(argv = process.argv) {
// After command execution, check if an update is available
const updateInfo = await updateCheckPromise;
if (updateInfo.needsUpdate) {
// Display the upgrade notification first
displayUpgradeNotification(
updateInfo.currentVersion,
updateInfo.latestVersion
);
// Then automatically perform the update
const updateSuccess = await performAutoUpdate(updateInfo.latestVersion);
if (updateSuccess) {
// Exit gracefully after successful update
process.exit(0);
}
}
// Check if migration has occurred and show FYI notice once
@@ -5500,11 +5280,4 @@ export function resolveComplexityReportPath({
return tag !== 'master' ? base.replace('.json', `_${tag}.json`) : base;
}
export {
registerCommands,
setupCLI,
runCLI,
checkForUpdate,
compareVersions,
displayUpgradeNotification
};
export { registerCommands, setupCLI, runCLI };

View File

@@ -1,7 +1,4 @@
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { log } from '../../scripts/modules/utils.js';
import packageJson from '../../package.json' with { type: 'json' };
/**
* Reads the version from the nearest package.json relative to this file.
@@ -9,27 +6,5 @@ import { log } from '../../scripts/modules/utils.js';
* @returns {string} The version string or 'unknown'.
*/
export function getTaskMasterVersion() {
let version = 'unknown';
try {
// Get the directory of the current module (getPackageVersion.js)
const currentModuleFilename = fileURLToPath(import.meta.url);
const currentModuleDirname = path.dirname(currentModuleFilename);
// Construct the path to package.json relative to this file (../../package.json)
const packageJsonPath = path.join(
currentModuleDirname,
'..',
'..',
'package.json'
);
if (fs.existsSync(packageJsonPath)) {
const packageJsonContent = fs.readFileSync(packageJsonPath, 'utf8');
const packageJson = JSON.parse(packageJsonContent);
version = packageJson.version;
}
} catch (error) {
// Silently fall back to default version
log('warn', 'Could not read own package.json for version info.', error);
}
return version;
return packageJson.version || 'unknown';
}

View File

@@ -17,7 +17,7 @@ describe('Complex Cross-Tag Scenarios', () => {
'..',
'..',
'..',
'bin',
'dist',
'task-master.js'
);
@@ -25,6 +25,8 @@ describe('Complex Cross-Tag Scenarios', () => {
// Create test directory
testDir = fs.mkdtempSync(path.join(__dirname, 'test-'));
process.chdir(testDir);
// Keep integration timings deterministic
process.env.TASKMASTER_SKIP_AUTO_UPDATE = '1';
// Initialize task-master
execSync(`node ${binPath} init --yes`, {
@@ -137,6 +139,7 @@ describe('Complex Cross-Tag Scenarios', () => {
if (testDir && fs.existsSync(testDir)) {
fs.rmSync(testDir, { recursive: true, force: true });
}
delete process.env.TASKMASTER_SKIP_AUTO_UPDATE;
});
describe('Circular Dependency Detection', () => {
@@ -369,7 +372,7 @@ describe('Complex Cross-Tag Scenarios', () => {
fs.writeFileSync(tasksPath, JSON.stringify(largeTaskSet, null, 2));
// Should complete within reasonable time
const timeout = process.env.CI ? 10000 : 5000;
const timeout = process.env.CI ? 11000 : 6000;
const startTime = Date.now();
execSync(
`node ${binPath} move --from=50 --from-tag=master --to-tag=in-progress --with-dependencies`,

View File

@@ -280,8 +280,26 @@ describe('Version comparison utility', () => {
let compareVersions;
beforeAll(async () => {
const commandsModule = await import('../../scripts/modules/commands.js');
compareVersions = commandsModule.compareVersions;
// Import from @tm/cli instead of commands.js
const { compareVersions: cv } = await import(
'../../apps/cli/src/utils/auto-update.js'
);
// Create a local compareVersions function for testing
compareVersions = (v1, v2) => {
const v1Parts = v1.split('.').map((p) => parseInt(p, 10));
const v2Parts = v2.split('.').map((p) => parseInt(p, 10));
for (let i = 0; i < Math.max(v1Parts.length, v2Parts.length); i++) {
const v1Part = v1Parts[i] || 0;
const v2Part = v2Parts[i] || 0;
if (v1Part < v2Part) return -1;
if (v1Part > v2Part) return 1;
}
return 0;
};
});
test('compareVersions correctly compares semantic versions', () => {
@@ -303,8 +321,9 @@ describe('Update check functionality', () => {
let consoleLogSpy;
beforeAll(async () => {
const commandsModule = await import('../../scripts/modules/commands.js');
displayUpgradeNotification = commandsModule.displayUpgradeNotification;
// Import from @tm/cli instead of commands.js
const cliModule = await import('../../apps/cli/src/utils/auto-update.js');
displayUpgradeNotification = cliModule.displayUpgradeNotification;
});
beforeEach(() => {

View File

@@ -198,11 +198,13 @@ jest.unstable_mockModule('fs', () => ({
default: {
existsSync: jest.fn(() => false),
readFileSync: jest.fn(),
writeFileSync: mockWriteFileSync
writeFileSync: mockWriteFileSync,
unlinkSync: jest.fn()
},
existsSync: jest.fn(() => false),
readFileSync: jest.fn(),
writeFileSync: mockWriteFileSync
writeFileSync: mockWriteFileSync,
unlinkSync: jest.fn()
}));
jest.unstable_mockModule(

View File

@@ -12,7 +12,15 @@
"strict": true,
"skipLibCheck": true,
"noEmit": true,
"baseUrl": "."
"baseUrl": ".",
"paths": {
"@tm/core": ["./packages/tm-core/src/index.ts"],
"@tm/core/*": ["./packages/tm-core/src/*"],
"@tm/cli": ["./apps/cli/src/index.ts"],
"@tm/cli/*": ["./apps/cli/src/*"],
"@tm/build-config": ["./packages/build-config/src/index.ts"],
"@tm/build-config/*": ["./packages/build-config/src/*"]
}
},
"tsx": {
"tsconfig": {

32
tsdown.config.ts Normal file
View File

@@ -0,0 +1,32 @@
import { defineConfig } from 'tsdown';
import { baseConfig, mergeConfig } from '@tm/build-config';
import { load as dotenvLoad } from 'dotenv-mono';
dotenvLoad();
// Get all TM_PUBLIC_* env variables for build-time injection
const getBuildTimeEnvs = () => {
const envs: Record<string, string> = {};
for (const [key, value] of Object.entries(process.env)) {
if (key.startsWith('TM_PUBLIC_')) {
// Return the actual value, not JSON.stringify'd
envs[key] = value || '';
}
}
return envs;
};
export default defineConfig(
mergeConfig(baseConfig, {
entry: {
'task-master': 'scripts/dev.js',
'mcp-server': 'mcp-server/server.js'
},
outDir: 'dist',
copy: ['public'],
ignoreWatch: ['node_modules', 'dist', 'tests'],
// Bundle only our workspace packages, keep npm dependencies external
noExternal: [/^@tm\//],
env: getBuildTimeEnvs()
})
);

Some files were not shown because too many files have changed in this diff Show More