Compare commits


10 Commits

Author SHA1 Message Date
github-actions[bot]
3d11093732 docs: auto-update documentation based on changes in next branch
This PR was automatically generated to update documentation based on recent changes.

Original commit: feat: implement workflow (wip)

Co-authored-by: Claude <claude-assistant@anthropic.com>
2025-09-11 18:11:46 +00:00
Ralph Khreish
7c1d05958f feat: implement workflow (wip) 2025-09-11 11:03:31 -07:00
Ralph Khreish
3eeb19590a chore: fix CI with new typescript setup (#1194)
Co-authored-by: Ralph Khreish <Crunchyman-ralph@users.noreply.github.com>
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
2025-09-09 23:35:47 +02:00
Ralph Khreish
587745046f chore: fix format 2025-09-09 03:32:48 +02:00
Ralph Khreish
c61c73f827 feat: implement tm list remote (#1185) 2025-09-09 03:32:48 +02:00
Ralph Khreish
15900d9fd5 chore: address oauth PR concerns (#1184) 2025-09-09 03:32:48 +02:00
Ralph Khreish
7cf4004038 feat: add oauth with remote server (#1178) 2025-09-09 03:32:48 +02:00
Ralph Khreish
0f3ab00f26 feat: create tm-core and apps/cli (#1093)
- add typescript
- add npm workspaces
2025-09-09 03:32:48 +02:00
Ralph Khreish
a7ad4c8e92 chore: improve Claude documentation workflows (#1155) 2025-09-08 22:11:46 +02:00
Ralph Khreish
0d54747894 chore: fix CI 2025-09-08 12:46:07 -07:00
46 changed files with 13193 additions and 5581 deletions

View File

@@ -1,5 +1,5 @@
 {
-	"mode": "pre",
+	"mode": "exit",
 	"tag": "rc",
 	"initialVersions": {
 		"task-master-ai": "0.25.1",

View File

@@ -9,70 +9,109 @@ on:
   branches:
     - main
     - next
+  workflow_dispatch:
 permissions:
   contents: read
+env:
+  DO_NOT_TRACK: 1
+  NODE_ENV: development
 jobs:
-  setup:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-node@v4
-        with:
-          node-version: 20
-          cache: 'npm'
-      - name: Install Dependencies
-        id: install
-        run: npm ci
-        timeout-minutes: 2
-      - name: Cache node_modules
-        uses: actions/cache@v4
-        with:
-          path: node_modules
-          key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
+  # Fast checks that can run in parallel
   format-check:
-    needs: setup
+    name: Format Check
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
       - uses: actions/setup-node@v4
         with:
           node-version: 20
+          cache: "npm"
-      - name: Restore node_modules
-        uses: actions/cache@v4
-        with:
-          path: node_modules
-          key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
+      - name: Install dependencies
+        run: npm install --frozen-lockfile --prefer-offline
+        timeout-minutes: 5
       - name: Format Check
         run: npm run format-check
         env:
           FORCE_COLOR: 1
-  test:
-    needs: setup
+  typecheck:
+    name: Typecheck
+    timeout-minutes: 10
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
       - uses: actions/setup-node@v4
         with:
           node-version: 20
+          cache: "npm"
-      - name: Restore node_modules
-        uses: actions/cache@v4
-        with:
-          path: node_modules
-          key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
+      - name: Install dependencies
+        run: npm install --frozen-lockfile --prefer-offline
+        timeout-minutes: 5
+      - name: Typecheck
+        run: npm run typecheck
+        env:
+          FORCE_COLOR: 1
+  # Build job to ensure everything compiles
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "npm"
+      - name: Install dependencies
+        run: npm install --frozen-lockfile --prefer-offline
+        timeout-minutes: 5
+      - name: Build
+        run: npm run build
+        env:
+          NODE_ENV: production
+          FORCE_COLOR: 1
+  test:
+    name: Test
+    timeout-minutes: 15
+    runs-on: ubuntu-latest
+    needs: [format-check, typecheck, build]
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "npm"
+      - name: Install dependencies
+        run: npm install --frozen-lockfile --prefer-offline
+        timeout-minutes: 5
+      - name: Build packages (required for tests)
+        run: npm run build:packages
+        env:
+          NODE_ENV: production
       - name: Run Tests
         run: |
@@ -81,7 +120,6 @@ jobs:
           NODE_ENV: test
           CI: true
           FORCE_COLOR: 1
-        timeout-minutes: 10
       - name: Upload Test Results
         if: always()
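The four stages above map onto npm scripts defined at the repo root (see the root package.json diff below), so a rough local equivalent of the pipeline, assuming a checkout with the lockfile present, is:

```bash
npm ci
npm run format-check
npm run typecheck
npm run build
npm run build:packages && npm test
```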

View File

@@ -0,0 +1,57 @@
name: Trigger Claude Documentation Update
on:
push:
branches:
- next
paths-ignore:
- "apps/docs/**"
- "*.md"
- ".github/workflows/**"
jobs:
trigger-docs-update:
# Only run if changes were merged (not direct pushes from bots)
if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 2 # Need previous commit for comparison
- name: Get changed files
id: changed-files
run: |
echo "Changed files in this push:"
git diff --name-only HEAD^ HEAD | tee changed_files.txt
# Store changed files for Claude to analyze (escaped for JSON)
CHANGED_FILES=$(git diff --name-only HEAD^ HEAD | jq -Rs .)
echo "changed_files=$CHANGED_FILES" >> $GITHUB_OUTPUT
# Get the commit message (escaped for JSON)
COMMIT_MSG=$(git log -1 --pretty=%B | jq -Rs .)
echo "commit_message=$COMMIT_MSG" >> $GITHUB_OUTPUT
# Get diff for documentation context (escaped for JSON)
COMMIT_DIFF=$(git diff HEAD^ HEAD --stat | jq -Rs .)
echo "commit_diff=$COMMIT_DIFF" >> $GITHUB_OUTPUT
# Get commit SHA
echo "commit_sha=${{ github.sha }}" >> $GITHUB_OUTPUT
- name: Trigger Claude workflow
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Trigger the Claude docs updater workflow with the change information
gh workflow run claude-docs-updater.yml \
--ref next \
-f commit_sha="${{ steps.changed-files.outputs.commit_sha }}" \
-f commit_message=${{ steps.changed-files.outputs.commit_message }} \
-f changed_files=${{ steps.changed-files.outputs.changed_files }} \
-f commit_diff=${{ steps.changed-files.outputs.commit_diff }}
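The `jq -Rs` calls above matter because raw git output can span multiple lines and contain quotes, which would break `$GITHUB_OUTPUT` assignments and the `-f` inputs; `-R` reads raw text, `-s` slurps it into one value, and the result is a single JSON string literal. A quick illustration:

```bash
printf 'line one\nline "two"\n' | jq -Rs .
# "line one\nline \"two\"\n"
```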

View File

@@ -1,18 +1,27 @@
 name: Claude Documentation Updater
 on:
-  push:
-    branches:
-      - next
-    paths-ignore:
-      - "apps/docs/**"
-      - "*.md"
-      - ".github/workflows/**"
+  workflow_dispatch:
+    inputs:
+      commit_sha:
+        description: 'The commit SHA that triggered this update'
+        required: true
+        type: string
+      commit_message:
+        description: 'The commit message'
+        required: true
+        type: string
+      changed_files:
+        description: 'List of changed files'
+        required: true
+        type: string
+      commit_diff:
+        description: 'Diff summary of changes'
+        required: true
+        type: string
 jobs:
   update-docs:
-    # Only run if changes were merged (not direct pushes from bots)
-    if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
     runs-on: ubuntu-latest
     permissions:
       contents: write
@@ -22,28 +31,8 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
-          fetch-depth: 2 # Need previous commit for comparison
+          ref: next
+          fetch-depth: 0 # Need full history to checkout specific commit
-      - name: Get changed files
-        id: changed-files
-        run: |
-          echo "Changed files in this push:"
-          git diff --name-only HEAD^ HEAD | tee changed_files.txt
-          # Store changed files for Claude to analyze
-          echo "changed_files<<EOF" >> $GITHUB_OUTPUT
-          git diff --name-only HEAD^ HEAD >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
-          # Get the commit message and changes summary
-          echo "commit_message<<EOF" >> $GITHUB_OUTPUT
-          git log -1 --pretty=%B >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
-          # Get diff for documentation context
-          echo "commit_diff<<EOF" >> $GITHUB_OUTPUT
-          git diff HEAD^ HEAD --stat >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
       - name: Create docs update branch
         id: create-branch
@@ -71,12 +60,12 @@ jobs:
           You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.
           Recent changes:
-          - Commit: ${{ steps.changed-files.outputs.commit_message }}
+          - Commit: ${{ inputs.commit_message }}
           - Changed files:
-          ${{ steps.changed-files.outputs.changed_files }}
+          ${{ inputs.changed_files }}
           - Changes summary:
-          ${{ steps.changed-files.outputs.commit_diff }}
+          ${{ inputs.commit_diff }}
           Your task:
           1. Analyze the changes to understand what functionality was added, modified, or removed
@@ -113,7 +102,7 @@ jobs:
           This PR was automatically generated to update documentation based on recent changes.
-          Original commit: ${{ steps.changed-files.outputs.commit_message }}
+          Original commit: ${{ inputs.commit_message }}
           Co-authored-by: Claude <claude-assistant@anthropic.com>"
         fi
@@ -133,12 +122,12 @@ jobs:
           This PR automatically updates documentation based on recent changes merged to the \`next\` branch.
           ### Original Changes
-          **Commit:** ${{ github.sha }}
+          **Commit:** ${{ inputs.commit_sha }}
-          **Message:** ${{ steps.changed-files.outputs.commit_message }}
+          **Message:** ${{ inputs.commit_message }}
           ### Changed Files in Original Commit
           \`\`\`
-          ${{ steps.changed-files.outputs.changed_files }}
+          ${{ inputs.changed_files }}
           \`\`\`
           ### Documentation Updates
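Because the trigger is now `workflow_dispatch`, the updater can also be run by hand through the gh CLI, mirroring what the trigger workflow does; a minimal sketch with illustrative values:

```bash
gh workflow run claude-docs-updater.yml \
  --ref next \
  -f commit_sha="$(git rev-parse HEAD)" \
  -f commit_message="chore: manual docs refresh" \
  -f changed_files="src/example.ts" \
  -f commit_diff="1 file changed"
```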

View File

@@ -4,12 +4,11 @@
 	"description": "Task Master CLI - Command line interface for task management",
 	"type": "module",
 	"main": "./dist/index.js",
-	"types": "./dist/index.d.ts",
+	"types": "./src/index.ts",
 	"exports": {
 		".": {
 			"types": "./src/index.ts",
-			"import": "./dist/index.js",
-			"require": "./dist/index.js"
+			"import": "./dist/index.js"
 		}
 	},
 	"files": ["dist", "README.md"],
@@ -20,21 +19,26 @@
 		"lint": "biome check src",
 		"format": "biome format --write src",
 		"test": "vitest run",
-		"test:watch": "vitest"
+		"test:watch": "vitest",
+		"test:coverage": "vitest run --coverage",
+		"test:unit": "vitest run -t unit",
+		"test:integration": "vitest run -t integration",
+		"test:e2e": "vitest run --dir tests/e2e",
+		"test:ci": "vitest run --coverage --reporter=dot"
 	},
 	"dependencies": {
 		"@tm/core": "*",
 		"@tm/workflow-engine": "*",
 		"boxen": "^7.1.1",
-		"chalk": "^5.3.0",
+		"chalk": "5.6.2",
 		"cli-table3": "^0.6.5",
 		"commander": "^12.1.0",
 		"inquirer": "^9.2.10",
+		"open": "^10.2.0",
 		"ora": "^8.1.0"
 	},
 	"devDependencies": {
 		"@biomejs/biome": "^1.9.4",
+		"@tm/build-config": "*",
 		"@types/inquirer": "^9.0.3",
 		"@types/node": "^22.10.5",
 		"tsup": "^8.3.0",

View File

@@ -0,0 +1,570 @@
/**
* @fileoverview Context command for managing org/brief selection
* Provides a clean interface for workspace context management
*/
import { Command } from 'commander';
import chalk from 'chalk';
import inquirer from 'inquirer';
import ora from 'ora';
import {
AuthManager,
AuthenticationError,
type UserContext
} from '@tm/core/auth';
import * as ui from '../utils/ui.js';
/**
* Result type from context command
*/
export interface ContextResult {
success: boolean;
action: 'show' | 'select-org' | 'select-brief' | 'clear' | 'set';
context?: UserContext;
message?: string;
}
/**
* ContextCommand extending Commander's Command class
* Manages user's workspace context (org/brief selection)
*/
export class ContextCommand extends Command {
private authManager: AuthManager;
private lastResult?: ContextResult;
constructor(name?: string) {
super(name || 'context');
// Initialize auth manager
this.authManager = AuthManager.getInstance();
// Configure the command
this.description(
'Manage workspace context (organization and brief selection)'
);
// Add subcommands
this.addOrgCommand();
this.addBriefCommand();
this.addClearCommand();
this.addSetCommand();
// Default action shows current context
this.action(async () => {
await this.executeShow();
});
}
/**
* Add org selection subcommand
*/
private addOrgCommand(): void {
this.command('org')
.description('Select an organization')
.action(async () => {
await this.executeSelectOrg();
});
}
/**
* Add brief selection subcommand
*/
private addBriefCommand(): void {
this.command('brief')
.description('Select a brief within the current organization')
.action(async () => {
await this.executeSelectBrief();
});
}
/**
* Add clear subcommand
*/
private addClearCommand(): void {
this.command('clear')
.description('Clear all context selections')
.action(async () => {
await this.executeClear();
});
}
/**
* Add set subcommand for direct context setting
*/
private addSetCommand(): void {
this.command('set')
.description('Set context directly')
.option('--org <id>', 'Organization ID')
.option('--org-name <name>', 'Organization name')
.option('--brief <id>', 'Brief ID')
.option('--brief-name <name>', 'Brief name')
.action(async (options) => {
await this.executeSet(options);
});
}
/**
* Execute show current context
*/
private async executeShow(): Promise<void> {
try {
const result = this.displayContext();
this.setLastResult(result);
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Display current context
*/
private displayContext(): ContextResult {
// Check authentication first
if (!this.authManager.isAuthenticated()) {
console.log(chalk.yellow('✗ Not authenticated'));
console.log(chalk.gray('\n Run "tm auth login" to authenticate first'));
return {
success: false,
action: 'show',
message: 'Not authenticated'
};
}
const context = this.authManager.getContext();
console.log(chalk.cyan('\n🌍 Workspace Context\n'));
if (context && (context.orgId || context.briefId)) {
if (context.orgName || context.orgId) {
console.log(chalk.green('✓ Organization'));
if (context.orgName) {
console.log(chalk.white(` ${context.orgName}`));
}
if (context.orgId) {
console.log(chalk.gray(` ID: ${context.orgId}`));
}
}
if (context.briefName || context.briefId) {
console.log(chalk.green('\n✓ Brief'));
if (context.briefName) {
console.log(chalk.white(` ${context.briefName}`));
}
if (context.briefId) {
console.log(chalk.gray(` ID: ${context.briefId}`));
}
}
if (context.updatedAt) {
console.log(
chalk.gray(
`\n Last updated: ${new Date(context.updatedAt).toLocaleString()}`
)
);
}
return {
success: true,
action: 'show',
context,
message: 'Context loaded'
};
} else {
console.log(chalk.yellow('✗ No context selected'));
console.log(
chalk.gray('\n Run "tm context org" to select an organization')
);
console.log(chalk.gray(' Run "tm context brief" to select a brief'));
return {
success: true,
action: 'show',
message: 'No context selected'
};
}
}
/**
* Execute org selection
*/
private async executeSelectOrg(): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
const result = await this.selectOrganization();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Select an organization interactively
*/
private async selectOrganization(): Promise<ContextResult> {
const spinner = ora('Fetching organizations...').start();
try {
// Fetch organizations from API
const organizations = await this.authManager.getOrganizations();
spinner.stop();
if (organizations.length === 0) {
ui.displayWarning('No organizations available');
return {
success: false,
action: 'select-org',
message: 'No organizations available'
};
}
// Prompt for selection
const { selectedOrg } = await inquirer.prompt([
{
type: 'list',
name: 'selectedOrg',
message: 'Select an organization:',
choices: organizations.map((org) => ({
name: org.name,
value: org
}))
}
]);
// Update context
await this.authManager.updateContext({
orgId: selectedOrg.id,
orgName: selectedOrg.name,
// Clear brief when changing org
briefId: undefined,
briefName: undefined
});
ui.displaySuccess(`Selected organization: ${selectedOrg.name}`);
return {
success: true,
action: 'select-org',
context: this.authManager.getContext() || undefined,
message: `Selected organization: ${selectedOrg.name}`
};
} catch (error) {
spinner.fail('Failed to fetch organizations');
throw error;
}
}
/**
* Execute brief selection
*/
private async executeSelectBrief(): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
// Check if org is selected
const context = this.authManager.getContext();
if (!context?.orgId) {
ui.displayError(
'No organization selected. Run "tm context org" first.'
);
process.exit(1);
}
const result = await this.selectBrief(context.orgId);
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Select a brief within the current organization
*/
private async selectBrief(orgId: string): Promise<ContextResult> {
const spinner = ora('Fetching briefs...').start();
try {
// Fetch briefs from API
const briefs = await this.authManager.getBriefs(orgId);
spinner.stop();
if (briefs.length === 0) {
ui.displayWarning('No briefs available in this organization');
return {
success: false,
action: 'select-brief',
message: 'No briefs available'
};
}
// Prompt for selection
const { selectedBrief } = await inquirer.prompt([
{
type: 'list',
name: 'selectedBrief',
message: 'Select a brief:',
choices: [
{ name: '(No brief - organization level)', value: null },
...briefs.map((brief) => ({
name: `Brief ${brief.id.slice(0, 8)} (${new Date(brief.createdAt).toLocaleDateString()})`,
value: brief
}))
]
}
]);
if (selectedBrief) {
// Update context with brief
const briefName = `Brief ${selectedBrief.id.slice(0, 8)}`;
await this.authManager.updateContext({
briefId: selectedBrief.id,
briefName: briefName
});
ui.displaySuccess(`Selected brief: ${briefName}`);
return {
success: true,
action: 'select-brief',
context: this.authManager.getContext() || undefined,
message: `Selected brief: ${briefName}`
};
} else {
// Clear brief selection
await this.authManager.updateContext({
briefId: undefined,
briefName: undefined
});
ui.displaySuccess('Cleared brief selection (organization level)');
return {
success: true,
action: 'select-brief',
context: this.authManager.getContext() || undefined,
message: 'Cleared brief selection'
};
}
} catch (error) {
spinner.fail('Failed to fetch briefs');
throw error;
}
}
/**
* Execute clear context
*/
private async executeClear(): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
const result = await this.clearContext();
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Clear all context selections
*/
private async clearContext(): Promise<ContextResult> {
try {
await this.authManager.clearContext();
ui.displaySuccess('Context cleared');
return {
success: true,
action: 'clear',
message: 'Context cleared'
};
} catch (error) {
ui.displayError(`Failed to clear context: ${(error as Error).message}`);
return {
success: false,
action: 'clear',
message: `Failed to clear context: ${(error as Error).message}`
};
}
}
/**
* Execute set context with options
*/
private async executeSet(options: any): Promise<void> {
try {
// Check authentication
if (!this.authManager.isAuthenticated()) {
ui.displayError('Not authenticated. Run "tm auth login" first.');
process.exit(1);
}
const result = await this.setContext(options);
this.setLastResult(result);
if (!result.success) {
process.exit(1);
}
} catch (error: any) {
this.handleError(error);
process.exit(1);
}
}
/**
* Set context directly from options
*/
private async setContext(options: any): Promise<ContextResult> {
try {
const context: Partial<UserContext> = {};
if (options.org) {
context.orgId = options.org;
}
if (options.orgName) {
context.orgName = options.orgName;
}
if (options.brief) {
context.briefId = options.brief;
}
if (options.briefName) {
context.briefName = options.briefName;
}
if (Object.keys(context).length === 0) {
ui.displayWarning('No context options provided');
return {
success: false,
action: 'set',
message: 'No context options provided'
};
}
await this.authManager.updateContext(context);
ui.displaySuccess('Context updated');
// Display what was set
if (context.orgName || context.orgId) {
console.log(
chalk.gray(` Organization: ${context.orgName || context.orgId}`)
);
}
if (context.briefName || context.briefId) {
console.log(
chalk.gray(` Brief: ${context.briefName || context.briefId}`)
);
}
return {
success: true,
action: 'set',
context: this.authManager.getContext() || undefined,
message: 'Context updated'
};
} catch (error) {
ui.displayError(`Failed to set context: ${(error as Error).message}`);
return {
success: false,
action: 'set',
message: `Failed to set context: ${(error as Error).message}`
};
}
}
/**
* Handle errors
*/
private handleError(error: any): void {
if (error instanceof AuthenticationError) {
console.error(chalk.red(`\n✗ ${error.message}`));
if (error.code === 'NOT_AUTHENTICATED') {
ui.displayWarning('Please authenticate first: tm auth login');
}
} else {
const msg = error?.message ?? String(error);
console.error(chalk.red(`Error: ${msg}`));
if (error?.stack && process.env.DEBUG) {
console.error(chalk.gray(error.stack));
}
}
}
/**
* Set the last result for programmatic access
*/
private setLastResult(result: ContextResult): void {
this.lastResult = result;
}
/**
* Get the last result (for programmatic usage)
*/
getLastResult(): ContextResult | undefined {
return this.lastResult;
}
/**
* Get current context (for programmatic usage)
*/
getContext(): UserContext | null {
return this.authManager.getContext();
}
/**
* Clean up resources
*/
async cleanup(): Promise<void> {
// No resources to clean up for context command
}
/**
* Static method to register this command on an existing program
*/
static registerOn(program: Command): Command {
const contextCommand = new ContextCommand();
program.addCommand(contextCommand);
return contextCommand;
}
/**
* Alternative registration that returns the command for chaining
*/
static register(program: Command, name?: string): ContextCommand {
const contextCommand = new ContextCommand(name);
program.addCommand(contextCommand);
return contextCommand;
}
}
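A usage sketch for the command defined above, assuming the binary is exposed as `tm` as the help strings in the code suggest (IDs and names illustrative):

```bash
tm context                                      # show current org/brief selection
tm context org                                  # interactively select an organization
tm context brief                                # select a brief within that org
tm context set --org org_123 --org-name "Acme"  # set context directly
tm context clear                                # clear all selections
```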

View File

@@ -15,6 +15,7 @@ import {
 	STATUS_ICONS,
 	type OutputFormat
 } from '@tm/core';
+import type { StorageType } from '@tm/core/types';
 import * as ui from '../utils/ui.js';
 /**
@@ -37,7 +38,7 @@ export interface ListTasksResult {
 	total: number;
 	filtered: number;
 	tag?: string;
-	storageType: 'file' | 'api';
+	storageType: Exclude<StorageType, 'auto'>;
 }
 /**
@@ -172,6 +173,13 @@ export class ListTasksCommand extends Command {
 			includeSubtasks: options.withSubtasks
 		});
+		// Runtime guard to prevent 'auto' from reaching CLI consumers
+		if (result.storageType === 'auto') {
+			throw new Error(
+				'Internal error: unresolved storage type reached CLI. Please check TaskService.getStorageType() implementation.'
+			);
+		}
 		return result as ListTasksResult;
 	}

View File

@@ -7,6 +7,7 @@
 export { ListTasksCommand } from './commands/list.command.js';
 export { AuthCommand } from './commands/auth.command.js';
 export { WorkflowCommand } from './commands/workflow.command.js';
+export { ContextCommand } from './commands/context.command.js';
 // Command registry
 export { registerAllCommands } from './commands/index.js';

View File

@@ -6,7 +6,7 @@
 import chalk from 'chalk';
 import boxen from 'boxen';
 import Table from 'cli-table3';
-import type { Task, TaskStatus, TaskPriority } from '@tm/core';
+import type { Task, TaskStatus, TaskPriority } from '@tm/core/types';
 /**
  * Get colored status display with ASCII icons (matches scripts/modules/ui.js style)

View File

@@ -1,15 +1,8 @@
 import { defineConfig } from 'tsup';
+import { cliConfig, mergeConfig } from '@tm/build-config';
-export default defineConfig({
-	entry: ['src/index.ts'],
-	format: ['esm'],
-	target: 'node18',
-	splitting: false,
-	sourcemap: true,
-	clean: true,
-	dts: true,
-	shims: true,
-	esbuildOptions(options) {
-		options.platform = 'node';
-	}
-});
+export default defineConfig(
+	mergeConfig(cliConfig, {
+		entry: ['src/index.ts']
+	})
+);

View File

@@ -200,6 +200,34 @@ sidebarTitle: "CLI Commands"
   ```
   </Accordion>
+  <Accordion title="Workflow Management">
+  ```bash
+  # Start workflow execution for a task
+  task-master workflow start <task-id>
+  # or use alias
+  task-master workflow run <task-id>
+  # List all active workflows
+  task-master workflow list
+  # Check status of a specific workflow
+  task-master workflow status <workflow-id>
+  # or use alias
+  task-master workflow info <workflow-id>
+  # Stop a running workflow
+  task-master workflow stop <workflow-id>
+  # or use alias
+  task-master workflow kill <workflow-id>
+  ```
+  The workflow system executes tasks in isolated git worktrees with dedicated Claude Code processes, providing:
+  - **Isolated Execution**: Each task runs in its own git worktree
+  - **Process Management**: Spawns dedicated Claude Code processes
+  - **Real-time Monitoring**: Track progress and output
+  - **Parallel Execution**: Run multiple tasks concurrently
+  </Accordion>
   <Accordion title="Initialize a Project">
   ```bash
   # Initialize a new project with Task Master structure

View File

@@ -0,0 +1,221 @@
---
title: "Workflow Engine"
sidebarTitle: "Workflows"
---
The Task Master Workflow Engine provides advanced task execution capabilities with git worktree isolation and Claude Code process management.
## Overview
The workflow system extends Task Master with powerful execution features:
- **Git Worktree Isolation**: Each task runs in its own isolated git worktree
- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
- **Real-time Monitoring**: Track workflow progress and process output
- **State Management**: Persistent workflow state across sessions
- **Parallel Execution**: Run multiple tasks concurrently with resource limits
## Quick Start
### Starting a Workflow
```bash
# Start workflow for a specific task
task-master workflow start 1.2
# Using the alias
task-master workflow run 1.2
```
### Monitoring Workflows
```bash
# List all active workflows
task-master workflow list
# Check specific workflow status
task-master workflow status workflow-1.2-1234567890-abc123
# Using the alias
task-master workflow info workflow-1.2-1234567890-abc123
```
### Stopping Workflows
```bash
# Stop a running workflow
task-master workflow stop workflow-1.2-1234567890-abc123
# Force stop using alias
task-master workflow kill workflow-1.2-1234567890-abc123
```
## Workflow States
| State | Description |
|-------|-------------|
| `pending` | Created but not started |
| `initializing` | Setting up worktree and process |
| `running` | Active execution in progress |
| `paused` | Temporarily stopped |
| `completed` | Successfully finished |
| `failed` | Error occurred during execution |
| `cancelled` | User cancelled the workflow |
| `timeout` | Exceeded time limit |
## Environment Configuration
### Environment Variables
Set these environment variables to customize workflow behavior:
- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
### Example Configuration
```bash
# Enable debug mode
export TASKMASTER_WORKFLOW_DEBUG=true
# Set custom Claude path
export TASKMASTER_CLAUDE_PATH=/usr/local/bin/claude
# Set worktree base directory
export TASKMASTER_WORKTREE_BASE=./worktrees
# Limit concurrent workflows
export TASKMASTER_MAX_CONCURRENT=3
```
## Git Worktree Integration
### How It Works
When you start a workflow, the engine performs the following steps (sketched in shell form after this list):
1. **Worktree Creation**: A new git worktree is created for the task
2. **Process Spawn**: A dedicated Claude Code process is launched in the worktree
3. **Task Execution**: The task runs in complete isolation
4. **State Tracking**: Progress is monitored and persisted
5. **Cleanup**: Worktree is removed when workflow completes
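Conceptually, this lifecycle corresponds to ordinary git worktree operations; a rough sketch, where paths, branch names, and the exact Claude Code invocation are illustrative assumptions rather than the engine's literal commands:

```bash
# 1-2. Create an isolated worktree and launch Claude Code inside it
git worktree add ./worktrees/task-1.2 -b workflow/task-1.2
(cd ./worktrees/task-1.2 && claude "Implement task 1.2")

# 5. Clean up once the workflow completes
git worktree remove ./worktrees/task-1.2
git branch -D workflow/task-1.2
```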
### Worktree Structure
```
project/
├── .git/ # Main repository
├── src/ # Main working directory
└── worktrees/ # Workflow worktrees
├── task-1.2/ # Worktree for task 1.2
├── task-2.1/ # Worktree for task 2.1
└── task-3.4/ # Worktree for task 3.4
```
## Best Practices
### When to Use Workflows
Use workflows for tasks that:
- Require isolated development environments
- Need dedicated Claude Code attention
- Benefit from parallel execution
- Require process monitoring and state tracking
### Workflow Management
- **Start workflows for complex tasks** that need focused execution
- **Monitor progress** using `workflow status` command
- **Clean up completed workflows** to free resources
- **Use meaningful task descriptions** for better workflow tracking
### Resource Management
- **Limit concurrent workflows** based on system resources
- **Monitor workflow output** for debugging and progress tracking
- **Stop unnecessary workflows** to free up resources
## Troubleshooting
### Common Issues
**Worktree Creation Fails**
```bash
# Check git version (requires 2.5+)
git --version
# Verify project is a git repository
git status
```
**Claude Code Not Found**
```bash
# Check Claude installation
which claude
# Set custom path
export TASKMASTER_CLAUDE_PATH=/path/to/claude
```
**Permission Errors**
```bash
# Check worktree directory permissions
chmod -R 755 ./worktrees
```
### Debug Mode
Enable debug logging for troubleshooting:
```bash
export TASKMASTER_WORKFLOW_DEBUG=true
task-master workflow start 1.2
```
## Integration Examples
### With VS Code Extension
The workflow engine integrates with the Task Master VS Code extension to provide:
- **Workflow Tree View**: Visual workflow management
- **Process Monitoring**: Real-time output streaming
- **Worktree Navigation**: Quick access to isolated workspaces
- **Status Indicators**: Visual workflow state tracking
### With Task Management
```bash
# Typical workflow
task-master next # Find next task
task-master workflow start 1.2 # Start workflow
task-master workflow status <id> # Monitor progress
task-master set-status --id=1.2 --status=done # Mark complete
```
## Advanced Features
### Parallel Execution
Run multiple workflows simultaneously:
```bash
# Start multiple workflows
task-master workflow start 1.2
task-master workflow start 2.1
task-master workflow start 3.4
# Monitor all active workflows
task-master workflow list
```
### Process Monitoring
Each workflow provides real-time output monitoring and process management through the workflow engine's event system.
### State Persistence
Workflow state is automatically persisted across sessions, allowing you to resume monitoring workflows after restarting the CLI.

View File

@@ -49,6 +49,7 @@
 			"pages": [
 				"capabilities/mcp",
 				"capabilities/cli-root-commands",
+				"capabilities/workflows",
 				"capabilities/task-structure"
 			]
 		}

View File

@@ -3,4 +3,38 @@ title: "What's New"
 sidebarTitle: "What's New"
 ---
+## New Workflow Engine (Latest)
+Task Master now includes a powerful workflow engine that revolutionizes how tasks are executed:
+### 🚀 Key Features
+- **Git Worktree Isolation**: Each task runs in its own isolated git worktree
+- **Claude Code Integration**: Spawns dedicated Claude Code processes for task execution
+- **Real-time Monitoring**: Track workflow progress and process output
+- **Parallel Execution**: Run multiple tasks concurrently with resource management
+- **State Persistence**: Workflow state is maintained across sessions
+### 🔧 New CLI Commands
+```bash
+# Start workflow execution
+task-master workflow start <task-id>
+# Monitor active workflows
+task-master workflow list
+# Check workflow status
+task-master workflow status <workflow-id>
+# Stop running workflow
+task-master workflow stop <workflow-id>
+```
+### 📖 Learn More
+Check out the new [Workflow Documentation](/capabilities/workflows) for comprehensive usage guides and best practices.
+---
 An easy way to see the latest releases

output.txt (new file, 40 lines)
File diff suppressed because one or more lines are too long

package-lock.json (generated, 14583 lines)
File diff suppressed because it is too large

View File

@@ -21,10 +21,16 @@
 		"build:core": "cd packages/tm-core && npm run build",
 		"build:workflow": "cd packages/workflow-engine && npm run build",
 		"build:cli": "cd apps/cli && npm run build",
+		"typecheck": "npm run typecheck:core && npm run typecheck:cli",
+		"typecheck:core": "cd packages/tm-core && npm run typecheck",
+		"typecheck:cli": "cd apps/cli && npm run typecheck",
 		"test": "node --experimental-vm-modules node_modules/.bin/jest",
+		"test:unit": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=unit",
+		"test:integration": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=integration",
 		"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
 		"test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
 		"test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
+		"test:ci": "node --experimental-vm-modules node_modules/.bin/jest --coverage --ci",
 		"test:e2e": "./tests/e2e/run_e2e.sh",
 		"test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
 		"postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
@@ -69,7 +75,7 @@
 		"ajv": "^8.17.1",
 		"ajv-formats": "^3.0.1",
 		"boxen": "^8.0.1",
-		"chalk": "^5.4.1",
+		"chalk": "5.6.2",
 		"cli-highlight": "^2.1.11",
 		"cli-progress": "^3.12.0",
 		"cli-table3": "^0.6.5",

View File

@@ -0,0 +1,31 @@
{
"name": "@tm/build-config",
"version": "1.0.0",
"description": "Shared build configuration for Task Master monorepo",
"type": "module",
"main": "./dist/tsup.base.js",
"types": "./dist/tsup.base.d.ts",
"exports": {
".": {
"types": "./src/tsup.base.ts",
"import": "./dist/tsup.base.js",
"require": "./dist/tsup.base.cjs"
}
},
"files": ["dist", "src"],
"keywords": ["build-config", "tsup", "monorepo"],
"author": "",
"license": "MIT",
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"tsup": "^8.5.0",
"typescript": "^5.7.3"
},
"peerDependencies": {
"tsup": "^8.0.0"
}
}

View File

@@ -0,0 +1,151 @@
/**
* Base tsup configuration for Task Master monorepo
* Provides shared configuration that can be extended by individual packages
*/
import type { Options } from 'tsup';
const isProduction = process.env.NODE_ENV === 'production';
const isDevelopment = !isProduction;
/**
* Base configuration for library packages (tm-core, etc.)
*/
export const libraryConfig: Partial<Options> = {
format: ['cjs', 'esm'],
target: 'es2022',
// Sourcemaps only in development to reduce production bundle size
sourcemap: isDevelopment,
clean: true,
dts: true,
// Enable optimizations in production
splitting: isProduction,
treeshake: isProduction,
minify: isProduction,
bundle: true,
esbuildOptions(options) {
options.conditions = ['module'];
// Better source mapping in development only
options.sourcesContent = isDevelopment;
// Keep original names for better debugging in development
options.keepNames = isDevelopment;
},
// Watch mode configuration for development
watch: isDevelopment ? ['src'] : false
};
/**
* Base configuration for CLI packages
*/
export const cliConfig: Partial<Options> = {
format: ['esm'],
target: 'node18',
splitting: false,
// Sourcemaps only in development to reduce production bundle size
sourcemap: isDevelopment,
clean: true,
dts: true,
shims: true,
// Enable minification in production for smaller bundles
minify: isProduction,
treeshake: isProduction,
esbuildOptions(options) {
options.platform = 'node';
// Better source mapping in development only
options.sourcesContent = isDevelopment;
// Keep original names for better debugging in development
options.keepNames = isDevelopment;
}
};
/**
* Base configuration for executable bundles (root level)
*/
export const executableConfig: Partial<Options> = {
format: ['esm'],
target: 'node18',
splitting: false,
// Sourcemaps only in development to reduce production bundle size
sourcemap: isDevelopment,
clean: true,
bundle: true, // Bundle everything into one file
// Minify in production for smaller executables
minify: isProduction,
// Handle TypeScript imports transparently
loader: {
'.js': 'jsx',
'.ts': 'ts'
},
esbuildOptions(options) {
options.platform = 'node';
// Allow importing TypeScript from JavaScript
options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
// Better source mapping in development only
options.sourcesContent = isDevelopment;
// Keep original names for better debugging in development
options.keepNames = isDevelopment;
}
};
/**
* Common external modules that should not be bundled
*/
export const commonExternals = [
// Native Node.js modules
'fs',
'path',
'child_process',
'crypto',
'os',
'url',
'util',
'stream',
'http',
'https',
'events',
'assert',
'buffer',
'querystring',
'readline',
'zlib',
'tty',
'net',
'dgram',
'dns',
'tls',
'cluster',
'process',
'module'
];
/**
* Utility function to merge configurations
*/
export function mergeConfig(
baseConfig: Partial<Options>,
overrides: Partial<Options>
): Options {
return {
...baseConfig,
...overrides,
// Merge arrays instead of overwriting
external: [...(baseConfig.external || []), ...(overrides.external || [])],
// Merge esbuildOptions
esbuildOptions(options, context) {
if (baseConfig.esbuildOptions) {
baseConfig.esbuildOptions(options, context);
}
if (overrides.esbuildOptions) {
overrides.esbuildOptions(options, context);
}
}
} as Options;
}
/**
* Environment helpers
*/
export const env = {
isProduction,
isDevelopment,
NODE_ENV: process.env.NODE_ENV || 'development'
};
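Every exported config branches on `NODE_ENV`, so the same build script produces different artifacts depending on the environment:

```bash
# Development: sourcemaps and keepNames on, no minification
npm run build

# Production: minified and treeshaken, no sourcemaps
NODE_ENV=production npm run build
```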

View File

@@ -0,0 +1,20 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022"],
"module": "ESNext",
"moduleResolution": "bundler",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"noEmit": true,
"resolveJsonModule": true,
"isolatedModules": true,
"declaration": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View File

@@ -0,0 +1,23 @@
import { defineConfig } from 'tsup';
const isProduction = process.env.NODE_ENV === 'production';
export default defineConfig({
entry: ['src/tsup.base.ts'],
format: ['esm', 'cjs'],
target: 'node18',
// Sourcemaps only in development
sourcemap: !isProduction,
clean: true,
dts: true,
// Enable minification in production
minify: isProduction,
treeshake: isProduction,
external: ['tsup'],
esbuildOptions(options) {
// Better source mapping in development only
options.sourcesContent = !isProduction;
// Keep original names for better debugging in development
options.keepNames = !isProduction;
}
});

View File

@@ -1,60 +1,51 @@
 {
 	"name": "@tm/core",
 	"version": "1.0.0",
+	"private": true,
 	"description": "Core library for Task Master - TypeScript task management system",
 	"type": "module",
-	"types": "./dist/index.d.ts",
+	"types": "./src/index.ts",
 	"main": "./dist/index.js",
 	"exports": {
 		".": {
 			"types": "./src/index.ts",
-			"import": "./dist/index.js",
-			"require": "./dist/index.js"
+			"import": "./dist/index.js"
 		},
 		"./auth": {
 			"types": "./src/auth/index.ts",
-			"import": "./dist/auth/index.js",
-			"require": "./dist/auth/index.js"
+			"import": "./dist/auth/index.js"
 		},
 		"./storage": {
 			"types": "./src/storage/index.ts",
-			"import": "./dist/storage/index.js",
-			"require": "./dist/storage/index.js"
+			"import": "./dist/storage/index.js"
 		},
 		"./config": {
 			"types": "./src/config/index.ts",
-			"import": "./dist/config/index.js",
-			"require": "./dist/config/index.js"
+			"import": "./dist/config/index.js"
 		},
 		"./providers": {
 			"types": "./src/providers/index.ts",
-			"import": "./dist/providers/index.js",
-			"require": "./dist/providers/index.js"
+			"import": "./dist/providers/index.js"
 		},
 		"./services": {
 			"types": "./src/services/index.ts",
-			"import": "./dist/services/index.js",
-			"require": "./dist/services/index.js"
+			"import": "./dist/services/index.js"
 		},
 		"./errors": {
 			"types": "./src/errors/index.ts",
-			"import": "./dist/errors/index.js",
-			"require": "./dist/errors/index.js"
+			"import": "./dist/errors/index.js"
 		},
 		"./logger": {
 			"types": "./src/logger/index.ts",
-			"import": "./dist/logger/index.js",
-			"require": "./dist/logger/index.js"
+			"import": "./dist/logger/index.js"
 		},
 		"./types": {
 			"types": "./src/types/index.ts",
-			"import": "./dist/types/index.js",
-			"require": "./dist/types/index.js"
+			"import": "./dist/types/index.js"
 		},
 		"./interfaces": {
 			"types": "./src/interfaces/index.ts",
-			"import": "./dist/interfaces/index.js",
-			"require": "./dist/interfaces/index.js"
+			"import": "./dist/interfaces/index.js"
 		},
 		"./utils": {
 			"types": "./src/utils/index.ts",
@@ -89,9 +80,9 @@
 	},
 	"devDependencies": {
 		"@biomejs/biome": "^1.9.4",
+		"@tm/build-config": "*",
 		"@types/node": "^20.11.30",
 		"@vitest/coverage-v8": "^2.0.5",
-		"dotenv-mono": "^1.5.1",
 		"ts-node": "^10.9.2",
 		"tsup": "^8.0.2",
 		"typescript": "^5.4.3",

View File

@@ -6,11 +6,18 @@ import {
 	AuthCredentials,
 	OAuthFlowOptions,
 	AuthenticationError,
-	AuthConfig
+	AuthConfig,
+	UserContext
 } from './types.js';
 import { CredentialStore } from './credential-store.js';
 import { OAuthService } from './oauth-service.js';
 import { SupabaseAuthClient } from '../clients/supabase-client.js';
+import {
+	OrganizationService,
+	type Organization,
+	type Brief,
+	type RemoteTask
+} from '../services/organization.service.js';
 import { getLogger } from '../logger/index.js';
 /**
@@ -21,11 +28,28 @@ export class AuthManager {
 	private credentialStore: CredentialStore;
 	private oauthService: OAuthService;
 	private supabaseClient: SupabaseAuthClient;
+	private organizationService?: OrganizationService;
 	private constructor(config?: Partial<AuthConfig>) {
 		this.credentialStore = new CredentialStore(config);
 		this.supabaseClient = new SupabaseAuthClient();
 		this.oauthService = new OAuthService(this.credentialStore, config);
+		// Initialize Supabase client with session restoration
+		this.initializeSupabaseSession();
+	}
+
+	/**
+	 * Initialize Supabase session from stored credentials
+	 */
+	private async initializeSupabaseSession(): Promise<void> {
+		try {
+			await this.supabaseClient.initialize();
+		} catch (error) {
+			// Log but don't throw - session might not exist yet
+			const logger = getLogger('AuthManager');
+			logger.debug('No existing session to restore');
+		}
 	}
 	/**
@@ -75,39 +99,48 @@ AuthManager {
 	}
 	/**
-	 * Refresh authentication token
+	 * Refresh authentication token using Supabase session
 	 */
 	async refreshToken(): Promise<AuthCredentials> {
-		const authData = this.credentialStore.getCredentials({
-			allowExpired: true
-		});
-		if (!authData || !authData.refreshToken) {
-			throw new AuthenticationError(
-				'No refresh token available',
-				'NO_REFRESH_TOKEN'
-			);
-		}
 		try {
-			// Use Supabase client to refresh the token
-			const response = await this.supabaseClient.refreshSession(
-				authData.refreshToken
-			);
-			// Update authentication data
+			// Use Supabase's built-in session refresh
+			const session = await this.supabaseClient.refreshSession();
+			if (!session) {
+				throw new AuthenticationError(
+					'Failed to refresh session',
+					'REFRESH_FAILED'
+				);
+			}
+			// Get existing credentials to preserve context
+			const existingCredentials = this.credentialStore.getCredentials({
+				allowExpired: true
+			});
+			// Update authentication data from session
 			const newAuthData: AuthCredentials = {
-				...authData,
-				token: response.token,
-				refreshToken: response.refreshToken,
-				expiresAt: response.expiresAt,
-				savedAt: new Date().toISOString()
+				token: session.access_token,
+				refreshToken: session.refresh_token,
+				userId: session.user.id,
+				email: session.user.email,
+				expiresAt: session.expires_at
+					? new Date(session.expires_at * 1000).toISOString()
+					: undefined,
+				savedAt: new Date().toISOString(),
+				selectedContext: existingCredentials?.selectedContext
 			};
 			this.credentialStore.saveCredentials(newAuthData);
 			return newAuthData;
 		} catch (error) {
-			throw error;
+			if (error instanceof AuthenticationError) {
+				throw error;
+			}
+			throw new AuthenticationError(
+				`Token refresh failed: ${(error as Error).message}`,
+				'REFRESH_FAILED'
+			);
 		}
 	}
@@ -133,4 +166,114 @@
 	isAuthenticated(): boolean {
 		return this.credentialStore.hasValidCredentials();
 	}
+	/**
+	 * Get the current user context (org/brief selection)
+	 */
+	getContext(): UserContext | null {
+		const credentials = this.getCredentials();
+		return credentials?.selectedContext || null;
+	}
+
+	/**
+	 * Update the user context (org/brief selection)
+	 */
+	async updateContext(context: Partial<UserContext>): Promise<void> {
+		const credentials = this.getCredentials();
+		if (!credentials) {
+			throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
+		}
+		// Merge with existing context
+		const existingContext = credentials.selectedContext || {};
+		const newContext: UserContext = {
+			...existingContext,
+			...context,
+			updatedAt: new Date().toISOString()
+		};
+		// Save updated credentials with new context
+		const updatedCredentials: AuthCredentials = {
+			...credentials,
+			selectedContext: newContext
+		};
+		this.credentialStore.saveCredentials(updatedCredentials);
+	}
+
+	/**
+	 * Clear the user context
+	 */
+	async clearContext(): Promise<void> {
+		const credentials = this.getCredentials();
+		if (!credentials) {
+			throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
+		}
+		// Remove context from credentials
+		const { selectedContext, ...credentialsWithoutContext } = credentials;
+		this.credentialStore.saveCredentials(credentialsWithoutContext);
+	}
+
+	/**
+	 * Get the organization service instance
+	 * Uses the Supabase client with the current session or token
+	 */
+	private async getOrganizationService(): Promise<OrganizationService> {
+		if (!this.organizationService) {
+			// First check if we have credentials with a token
+			const credentials = this.getCredentials();
+			if (!credentials || !credentials.token) {
+				throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
+			}
+			// Initialize session if needed (this will load from our storage adapter)
+			await this.supabaseClient.initialize();
+			// Use the SupabaseAuthClient which now has the session
+			const supabaseClient = this.supabaseClient.getClient();
+			this.organizationService = new OrganizationService(supabaseClient as any);
+		}
+		return this.organizationService;
+	}
+
+	/**
+	 * Get all organizations for the authenticated user
+	 */
+	async getOrganizations(): Promise<Organization[]> {
+		const service = await this.getOrganizationService();
+		return service.getOrganizations();
+	}
+
+	/**
+	 * Get all briefs for a specific organization
+	 */
+	async getBriefs(orgId: string): Promise<Brief[]> {
+		const service = await this.getOrganizationService();
+		return service.getBriefs(orgId);
+	}
+
+	/**
+	 * Get a specific organization by ID
+	 */
+	async getOrganization(orgId: string): Promise<Organization | null> {
+		const service = await this.getOrganizationService();
+		return service.getOrganization(orgId);
+	}
+
+	/**
+	 * Get a specific brief by ID
+	 */
+	async getBrief(briefId: string): Promise<Brief | null> {
+		const service = await this.getOrganizationService();
+		return service.getBrief(briefId);
+	}
+
+	/**
+	 * Get all tasks for a specific brief
+	 */
+	async getTasks(briefId: string): Promise<RemoteTask[]> {
+		const service = await this.getOrganizationService();
+		return service.getTasks(briefId);
+	}
 }

View File

@@ -5,12 +5,19 @@
 export { AuthManager } from './auth-manager.js';
 export { CredentialStore } from './credential-store.js';
 export { OAuthService } from './oauth-service.js';
+export { SupabaseSessionStorage } from './supabase-session-storage';
+export type {
+	Organization,
+	Brief,
+	RemoteTask
+} from '../services/organization.service.js';
 export type {
 	AuthCredentials,
 	OAuthFlowOptions,
 	AuthConfig,
-	CliData
+	CliData,
+	UserContext
 } from './types.js';
 export { AuthenticationError } from './types.js';

View File

@@ -181,8 +181,8 @@ export class OAuthService {
 			timestamp: Date.now()
 		};
-		// Build authorization URL for web app sign-in page
-		const authUrl = new URL(`${this.baseUrl}/auth/sign-in`);
+		// Build authorization URL for CLI-specific sign-in page
+		const authUrl = new URL(`${this.baseUrl}/auth/cli/sign-in`);
 		// Encode CLI data as base64
 		const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
@@ -272,7 +272,49 @@
 				return;
 			}
-			// Handle direct token response from server
+			// Handle authorization code for PKCE flow
+			const code = url.searchParams.get('code');
+			if (code && type === 'pkce_callback') {
+				try {
+					this.logger.info('Received authorization code for PKCE flow');
+					// Exchange code for session using PKCE
+					const session = await this.supabaseClient.exchangeCodeForSession(code);
+					// Save authentication data
+					const authData: AuthCredentials = {
+						token: session.access_token,
+						refreshToken: session.refresh_token,
+						userId: session.user.id,
+						email: session.user.email,
+						expiresAt: session.expires_at
+							? new Date(session.expires_at * 1000).toISOString()
+							: undefined,
+						tokenType: 'standard',
+						savedAt: new Date().toISOString()
+					};
+					this.credentialStore.saveCredentials(authData);
+					if (server.listening) {
+						server.close();
+					}
+					// Clear timeout since authentication succeeded
+					if (timeoutId) {
+						clearTimeout(timeoutId);
+					}
+					resolve(authData);
+					return;
+				} catch (error) {
+					if (server.listening) {
+						server.close();
+					}
+					reject(error);
+					return;
+				}
+			}
+			// Handle direct token response from server (legacy flow)
 			if (
 				accessToken &&
 				(type === 'oauth_success' || type === 'session_transfer')
@@ -280,8 +322,23 @@
 			) {
 				try {
 					this.logger.info(`Received tokens via ${type}`);
-					// Get user info using the access token if possible
-					const user = await this.supabaseClient.getUser(accessToken);
+					// Create a session with the tokens and set it in Supabase client
+					const session = {
+						access_token: accessToken,
+						refresh_token: refreshToken || '',
+						expires_at: expiresIn
+							? Math.floor(Date.now() / 1000) + parseInt(expiresIn)
+							: undefined,
+						expires_in: expiresIn ? parseInt(expiresIn) : undefined,
+						token_type: 'bearer',
+						user: null as any // Will be populated by setSession
+					};
+					// Set the session in Supabase client
+					await this.supabaseClient.setSession(session as any);
+					// Get user info from the session
+					const user = await this.supabaseClient.getUser();
 					// Calculate expiration time
 					const expiresAt = expiresIn

View File

@@ -0,0 +1,155 @@
/**
* Custom storage adapter for Supabase Auth sessions in CLI environment
* Implements the SupportedStorage interface required by Supabase Auth
*
* This adapter bridges Supabase's session management with our existing
* auth.json credential storage, maintaining backward compatibility
*/
import { SupportedStorage } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store';
import { AuthCredentials } from './types';
import { getLogger } from '../logger';
const STORAGE_KEY = 'sb-taskmaster-auth-token';
export class SupabaseSessionStorage implements SupportedStorage {
private store: CredentialStore;
private logger = getLogger('SupabaseSessionStorage');
constructor(store: CredentialStore) {
this.store = store;
}
/**
* Build a Supabase session object from our credentials
*/
private buildSessionFromCredentials(credentials: AuthCredentials): any {
// Create a session object that Supabase expects
const session = {
access_token: credentials.token,
refresh_token: credentials.refreshToken || '',
expires_at: credentials.expiresAt
? Math.floor(new Date(credentials.expiresAt).getTime() / 1000)
: Math.floor(Date.now() / 1000) + 3600, // Default to 1 hour
token_type: 'bearer',
user: {
id: credentials.userId,
email: credentials.email || '',
aud: 'authenticated',
role: 'authenticated',
email_confirmed_at: new Date().toISOString(),
app_metadata: {},
user_metadata: {},
created_at: new Date().toISOString(),
updated_at: new Date().toISOString()
}
};
return session;
}
/**
* Parse a Supabase session back to our credentials
*/
private parseSessionToCredentials(
sessionData: any
): Partial<AuthCredentials> {
try {
const session = JSON.parse(sessionData);
return {
token: session.access_token,
refreshToken: session.refresh_token,
userId: session.user?.id || 'unknown',
email: session.user?.email,
expiresAt: session.expires_at
? new Date(session.expires_at * 1000).toISOString()
: undefined
};
} catch (error) {
this.logger.error('Error parsing session:', error);
return {};
}
}
/**
* Get item from storage - Supabase will request the session with a specific key
*/
getItem(key: string): string | null {
// Supabase uses a specific key pattern for sessions
if (key === STORAGE_KEY || key.includes('auth-token')) {
try {
const credentials = this.store.getCredentials({ allowExpired: true });
if (credentials && credentials.token) {
// Build and return a session object from our stored credentials
const session = this.buildSessionFromCredentials(credentials);
return JSON.stringify(session);
}
} catch (error) {
this.logger.error('Error getting session:', error);
}
}
return null;
}
/**
* Set item in storage - Supabase will store the session with a specific key
*/
setItem(key: string, value: string): void {
// Only handle Supabase session keys
if (key === STORAGE_KEY || key.includes('auth-token')) {
try {
// Parse the session and update our credentials
const sessionUpdates = this.parseSessionToCredentials(value);
const existingCredentials = this.store.getCredentials({
allowExpired: true
});
if (sessionUpdates.token) {
const updatedCredentials: AuthCredentials = {
...existingCredentials,
...sessionUpdates,
savedAt: new Date().toISOString(),
selectedContext: existingCredentials?.selectedContext
} as AuthCredentials;
this.store.saveCredentials(updatedCredentials);
}
} catch (error) {
this.logger.error('Error setting session:', error);
}
}
}
/**
* Remove item from storage - Called when signing out
*/
removeItem(key: string): void {
if (key === STORAGE_KEY || key.includes('auth-token')) {
// Don't actually remove credentials, just clear the tokens
// This preserves other data like selectedContext
try {
const credentials = this.store.getCredentials({ allowExpired: true });
if (credentials) {
// Keep context but clear auth tokens
const clearedCredentials: AuthCredentials = {
...credentials,
token: '',
refreshToken: undefined,
expiresAt: undefined
} as AuthCredentials;
this.store.saveCredentials(clearedCredentials);
}
} catch (error) {
this.logger.error('Error removing session:', error);
}
}
}
/**
* Clear all session data
*/
clear(): void {
// Clear auth tokens but preserve context
this.removeItem(STORAGE_KEY);
}
}
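To see how the adapter is meant to be wired, here is a hedged sketch. TM_PUBLIC_SUPABASE_URL appears elsewhere in this diff; the anon-key variable name is an assumption.

// Sketch: plugging the storage adapter into the Supabase client so sessions
// round-trip through the existing auth.json credential store.
import { createClient } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store';
import { SupabaseSessionStorage } from './supabase-session-storage';

const storage = new SupabaseSessionStorage(new CredentialStore());

const client = createClient(
	process.env.TM_PUBLIC_SUPABASE_URL!,
	process.env.TM_PUBLIC_SUPABASE_ANON_KEY!, // assumed variable name
	{
		auth: {
			storage, // sessions now persist via auth.json
			persistSession: true,
			autoRefreshToken: true,
			detectSessionInUrl: false
		}
	}
);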

View File

@@ -10,6 +10,15 @@ export interface AuthCredentials {
 	expiresAt?: string | number;
 	tokenType?: 'standard';
 	savedAt: string;
+	selectedContext?: UserContext;
+}
+
+export interface UserContext {
+	orgId?: string;
+	orgName?: string;
+	briefId?: string;
+	briefName?: string;
+	updatedAt: string;
 }

 export interface OAuthFlowOptions {
@@ -67,7 +76,11 @@ export type AuthErrorCode =
 	| 'STORAGE_ERROR'
 	| 'NOT_SUPPORTED'
 	| 'REFRESH_FAILED'
-	| 'INVALID_RESPONSE';
+	| 'INVALID_RESPONSE'
+	| 'PKCE_INIT_FAILED'
+	| 'PKCE_FAILED'
+	| 'CODE_EXCHANGE_FAILED'
+	| 'SESSION_SET_FAILED';

 /**
  * Authentication error class

View File

@@ -1,19 +1,32 @@
 /**
- * Supabase client for authentication
+ * Supabase authentication client for CLI auth flows
  */
-import { createClient, SupabaseClient, User } from '@supabase/supabase-js';
+import {
+	createClient,
+	SupabaseClient as SupabaseJSClient,
+	User,
+	Session
+} from '@supabase/supabase-js';
 import { AuthenticationError } from '../auth/types.js';
 import { getLogger } from '../logger/index.js';
+import { SupabaseSessionStorage } from '../auth/supabase-session-storage';
+import { CredentialStore } from '../auth/credential-store';

 export class SupabaseAuthClient {
-	private client: SupabaseClient | null = null;
+	private client: SupabaseJSClient | null = null;
+	private sessionStorage: SupabaseSessionStorage;
 	private logger = getLogger('SupabaseAuthClient');

+	constructor() {
+		const credentialStore = new CredentialStore();
+		this.sessionStorage = new SupabaseSessionStorage(credentialStore);
+	}
+
 	/**
-	 * Initialize Supabase client
+	 * Get Supabase client with proper session management
 	 */
-	private getClient(): SupabaseClient {
+	getClient(): SupabaseJSClient {
 		if (!this.client) {
 			// Get Supabase configuration from environment - using TM_PUBLIC prefix
 			const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
@@ -26,10 +39,12 @@ export class SupabaseAuthClient {
 				);
 			}

+			// Create client with custom storage adapter (similar to React Native AsyncStorage)
 			this.client = createClient(supabaseUrl, supabaseAnonKey, {
 				auth: {
+					storage: this.sessionStorage,
 					autoRefreshToken: true,
-					persistSession: false, // We handle persistence ourselves
+					persistSession: true,
 					detectSessionInUrl: false
 				}
 			});
@@ -39,40 +54,159 @@ export class SupabaseAuthClient {
 	}

 	/**
-	 * Note: Code exchange is now handled server-side
-	 * The server returns tokens directly to avoid PKCE issues
-	 * This method is kept for potential future use
+	 * Initialize the client and restore session if available
 	 */
-	async exchangeCodeForSession(_code: string): Promise<{
-		token: string;
-		refreshToken?: string;
-		userId: string;
-		email?: string;
-		expiresAt?: string;
-	}> {
-		throw new AuthenticationError(
-			'Code exchange is handled server-side. CLI receives tokens directly.',
-			'NOT_SUPPORTED'
-		);
+	async initialize(): Promise<Session | null> {
+		const client = this.getClient();
+
+		try {
+			// Get the current session from storage
+			const {
+				data: { session },
+				error
+			} = await client.auth.getSession();
+
+			if (error) {
+				this.logger.warn('Failed to restore session:', error);
+				return null;
+			}
+
+			if (session) {
+				this.logger.info('Session restored successfully');
+			}
+
+			return session;
+		} catch (error) {
+			this.logger.error('Error initializing session:', error);
+			return null;
+		}
 	}

 	/**
-	 * Refresh an access token
+	 * Sign in with PKCE flow (for CLI auth)
 	 */
-	async refreshSession(refreshToken: string): Promise<{
-		token: string;
-		refreshToken?: string;
-		expiresAt?: string;
-	}> {
-		try {
-			const client = this.getClient();
+	async signInWithPKCE(): Promise<{ url: string; codeVerifier: string }> {
+		const client = this.getClient();
+
+		try {
+			// Generate PKCE challenge
+			const { data, error } = await client.auth.signInWithOAuth({
+				provider: 'github',
+				options: {
+					redirectTo:
+						process.env.TM_AUTH_CALLBACK_URL ||
+						'http://localhost:3421/auth/callback',
+					scopes: 'email'
+				}
+			});
+
+			if (error) {
+				throw new AuthenticationError(
+					`Failed to initiate PKCE flow: ${error.message}`,
+					'PKCE_INIT_FAILED'
+				);
+			}
+
+			if (!data?.url) {
+				throw new AuthenticationError(
+					'No authorization URL returned',
+					'INVALID_RESPONSE'
+				);
+			}
+
+			// Extract code_verifier from the URL or generate it
+			// Note: Supabase handles PKCE internally, we just need to handle the callback
+			return {
+				url: data.url,
+				codeVerifier: '' // Supabase manages this internally
+			};
+		} catch (error) {
+			if (error instanceof AuthenticationError) {
+				throw error;
+			}
+			throw new AuthenticationError(
+				`Failed to start PKCE flow: ${(error as Error).message}`,
+				'PKCE_FAILED'
+			);
+		}
+	}
+
+	/**
+	 * Exchange authorization code for session (PKCE flow)
+	 */
+	async exchangeCodeForSession(code: string): Promise<Session> {
+		const client = this.getClient();
+
+		try {
+			const { data, error } = await client.auth.exchangeCodeForSession(code);
+
+			if (error) {
+				throw new AuthenticationError(
+					`Failed to exchange code: ${error.message}`,
+					'CODE_EXCHANGE_FAILED'
+				);
+			}
+
+			if (!data?.session) {
+				throw new AuthenticationError(
+					'No session returned from code exchange',
+					'INVALID_RESPONSE'
+				);
+			}
+
+			this.logger.info('Successfully exchanged code for session');
+			return data.session;
+		} catch (error) {
+			if (error instanceof AuthenticationError) {
+				throw error;
+			}
+			throw new AuthenticationError(
+				`Code exchange failed: ${(error as Error).message}`,
+				'CODE_EXCHANGE_FAILED'
+			);
+		}
+	}
+
+	/**
+	 * Get the current session
+	 */
+	async getSession(): Promise<Session | null> {
+		const client = this.getClient();
+
+		try {
+			const {
+				data: { session },
+				error
+			} = await client.auth.getSession();
+
+			if (error) {
+				this.logger.warn('Failed to get session:', error);
+				return null;
+			}
+
+			return session;
+		} catch (error) {
+			this.logger.error('Error getting session:', error);
+			return null;
+		}
+	}
+
+	/**
+	 * Refresh the current session
+	 */
+	async refreshSession(): Promise<Session | null> {
+		const client = this.getClient();
+
+		try {
 			this.logger.info('Refreshing session...');

-			// Set the session with refresh token
-			const { data, error } = await client.auth.refreshSession({
-				refresh_token: refreshToken
-			});
+			// Supabase will automatically use the stored refresh token
+			const {
+				data: { session },
+				error
+			} = await client.auth.refreshSession();

 			if (error) {
 				this.logger.error('Failed to refresh session:', error);
@@ -82,22 +216,11 @@ export class SupabaseAuthClient {
 				);
 			}

-			if (!data.session) {
-				throw new AuthenticationError(
-					'No session data returned',
-					'INVALID_RESPONSE'
-				);
+			if (session) {
+				this.logger.info('Successfully refreshed session');
 			}

-			this.logger.info('Successfully refreshed session');
-
-			return {
-				token: data.session.access_token,
-				refreshToken: data.session.refresh_token,
-				expiresAt: data.session.expires_at
-					? new Date(data.session.expires_at * 1000).toISOString()
-					: undefined
-			};
+			return session;
 		} catch (error) {
 			if (error instanceof AuthenticationError) {
 				throw error;
@@ -111,21 +234,23 @@ export class SupabaseAuthClient {
 	}

 	/**
-	 * Get user details from token
+	 * Get current user from session
 	 */
-	async getUser(token: string): Promise<User | null> {
-		try {
-			const client = this.getClient();
+	async getUser(): Promise<User | null> {
+		const client = this.getClient();

-			// Get user with the token
-			const { data, error } = await client.auth.getUser(token);
+		try {
+			const {
+				data: { user },
+				error
+			} = await client.auth.getUser();

 			if (error) {
 				this.logger.warn('Failed to get user:', error);
 				return null;
 			}

-			return data.user;
+			return user;
 		} catch (error) {
 			this.logger.error('Error getting user:', error);
 			return null;
@@ -133,22 +258,55 @@ export class SupabaseAuthClient {
 	}

 	/**
-	 * Sign out (revoke tokens)
-	 * Note: This requires the user to be authenticated with the current session.
-	 * For remote token revocation, a server-side admin API with service_role key would be needed.
+	 * Sign out and clear session
 	 */
 	async signOut(): Promise<void> {
-		try {
-			const client = this.getClient();
+		const client = this.getClient();

-			// Sign out the current session with global scope to revoke all refresh tokens
+		try {
+			// Sign out with global scope to revoke all refresh tokens
 			const { error } = await client.auth.signOut({ scope: 'global' });

 			if (error) {
 				this.logger.warn('Failed to sign out:', error);
 			}
+
+			// Clear cached session data
+			this.sessionStorage.clear();
 		} catch (error) {
 			this.logger.error('Error during sign out:', error);
 		}
 	}
+
+	/**
+	 * Set session from external auth (e.g., from server callback)
+	 */
+	async setSession(session: Session): Promise<void> {
+		const client = this.getClient();
+
+		try {
+			const { error } = await client.auth.setSession({
+				access_token: session.access_token,
+				refresh_token: session.refresh_token
+			});
+
+			if (error) {
+				throw new AuthenticationError(
+					`Failed to set session: ${error.message}`,
+					'SESSION_SET_FAILED'
+				);
+			}
+
+			this.logger.info('Session set successfully');
+		} catch (error) {
+			if (error instanceof AuthenticationError) {
+				throw error;
+			}
+			throw new AuthenticationError(
+				`Failed to set session: ${(error as Error).message}`,
+				'SESSION_SET_FAILED'
+			);
+		}
+	}
 }
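Taken together, these methods support a CLI login flow along these lines — a sketch only, with hypothetical openBrowser/waitForCallback helpers standing in for the CLI's actual plumbing:

// Sketch of the login sequence these methods enable; openBrowser and
// waitForCallback are hypothetical placeholders, not part of this diff.
declare function openBrowser(url: string): Promise<void>;
declare function waitForCallback(): Promise<string>; // resolves with the ?code value

async function login(auth: SupabaseAuthClient): Promise<void> {
	// Reuse a stored session when one can be restored
	const existing = await auth.initialize();
	if (existing) return;

	// Otherwise run the PKCE flow end to end
	const { url } = await auth.signInWithPKCE();
	await openBrowser(url);
	const code = await waitForCallback();
	await auth.exchangeCodeForSession(code);
}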

View File

@@ -177,7 +177,7 @@ describe('ConfigManager', () => {
 		it('should return storage configuration', () => {
 			const storage = manager.getStorageConfig();

-			expect(storage).toEqual({ type: 'auto', apiConfigured: false });
+			expect(storage).toEqual({ type: 'file' });
 		});

 		it('should return API storage configuration when configured', async () => {
@@ -206,65 +206,7 @@ describe('ConfigManager', () => {
 			expect(storage).toEqual({
 				type: 'api',
 				apiEndpoint: 'https://api.example.com',
-				apiAccessToken: 'token123',
-				apiConfigured: true
-			});
-		});
-
-		it('should return auto storage configuration with apiConfigured flag', async () => {
-			// Create a new instance with auto storage config and partial API settings
-			vi.mocked(ConfigMerger).mockImplementationOnce(
-				() =>
-					({
-						addSource: vi.fn(),
-						clearSources: vi.fn(),
-						merge: vi.fn().mockReturnValue({
-							storage: {
-								type: 'auto',
-								apiEndpoint: 'https://api.example.com'
-								// No apiAccessToken - partial config
-							}
-						}),
-						getSources: vi.fn().mockReturnValue([])
-					}) as any
-			);
-			const autoManager = await ConfigManager.create(testProjectRoot);
-			const storage = autoManager.getStorageConfig();
-
-			expect(storage).toEqual({
-				type: 'auto',
-				apiEndpoint: 'https://api.example.com',
-				apiAccessToken: undefined,
-				apiConfigured: true // true because apiEndpoint is provided
-			});
-		});
-
-		it('should return auto storage with apiConfigured false when no API settings', async () => {
-			// Create a new instance with auto storage but no API settings
-			vi.mocked(ConfigMerger).mockImplementationOnce(
-				() =>
-					({
-						addSource: vi.fn(),
-						clearSources: vi.fn(),
-						merge: vi.fn().mockReturnValue({
-							storage: {
-								type: 'auto'
-								// No API settings at all
-							}
-						}),
-						getSources: vi.fn().mockReturnValue([])
-					}) as any
-			);
-			const autoManager = await ConfigManager.create(testProjectRoot);
-			const storage = autoManager.getStorageConfig();
-
-			expect(storage).toEqual({
-				type: 'auto',
-				apiEndpoint: undefined,
-				apiAccessToken: undefined,
-				apiConfigured: false // false because no API settings
+				apiAccessToken: 'token123'
 			});
 		});

View File

@@ -85,11 +85,6 @@ describe('EnvironmentConfigProvider', () => {
 			provider = new EnvironmentConfigProvider(); // Reset provider
 			config = provider.loadConfig();
 			expect(config.storage?.type).toBe('api');
-
-			process.env.TASKMASTER_STORAGE_TYPE = 'auto';
-			provider = new EnvironmentConfigProvider(); // Reset provider
-			config = provider.loadConfig();
-			expect(config.storage?.type).toBe('auto');
 		});

 		it('should handle nested configuration paths', () => {

View File

@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
 		{
 			env: 'TASKMASTER_STORAGE_TYPE',
 			path: ['storage', 'type'],
-			validate: (v: string) => ['file', 'api', 'auto'].includes(v)
+			validate: (v: string) => ['file', 'api'].includes(v)
 		},
 		{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
 		{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },
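With that mapping in place, the behavior pinned down by the test above looks like this in practice ('auto' now fails the validator and is ignored):

// Mirrors the updated test: with these variables set, loadConfig()
// yields an 'api' storage config.
process.env.TASKMASTER_STORAGE_TYPE = 'api';
process.env.TASKMASTER_API_ENDPOINT = 'https://api.example.com';
process.env.TASKMASTER_API_TOKEN = 'token123';

const provider = new EnvironmentConfigProvider();
const config = provider.loadConfig();
console.log(config.storage?.type); // 'api'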

View File

@@ -3,7 +3,11 @@
  * This file defines the contract for configuration management
  */

-import type { TaskComplexity, TaskPriority } from '../types/index.js';
+import type {
+	TaskComplexity,
+	TaskPriority,
+	StorageType
+} from '../types/index.js';

 /**
  * Model configuration for different AI roles
@@ -73,14 +77,6 @@ export interface TagSettings {
 	tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case';
 }

-/**
- * Storage type options
- * - 'file': Local file system storage
- * - 'api': Remote API storage (Hamster integration)
- * - 'auto': Automatically detect based on auth status
- */
-export type StorageType = 'file' | 'api' | 'auto';
-
 /**
  * Runtime storage configuration used for storage backend selection
  * This is what getStorageConfig() returns and what StorageFactory expects

View File

@@ -0,0 +1,170 @@
import { Task, Subtask } from '../types/index.js';
import { Database, Tables } from '../types/database.types.js';
type TaskRow = Tables<'tasks'>;
type DependencyRow = Tables<'task_dependencies'>;
export class TaskMapper {
/**
* Maps database tasks to internal Task format
*/
static mapDatabaseTasksToTasks(
dbTasks: TaskRow[],
dbDependencies: DependencyRow[]
): Task[] {
if (!dbTasks || dbTasks.length === 0) {
return [];
}
// Group dependencies by task_id
const dependenciesByTaskId = this.groupDependenciesByTaskId(dbDependencies);
// Separate parent tasks and subtasks
const parentTasks = dbTasks.filter((t) => !t.parent_task_id);
const subtasksByParentId = this.groupSubtasksByParentId(dbTasks);
// Map parent tasks with their subtasks
return parentTasks.map((taskRow) =>
this.mapDatabaseTaskToTask(
taskRow,
subtasksByParentId.get(taskRow.id) || [],
dependenciesByTaskId
)
);
}
/**
* Maps a single database task to internal Task format
*/
static mapDatabaseTaskToTask(
dbTask: TaskRow,
dbSubtasks: TaskRow[],
dependenciesByTaskId: Map<string, string[]>
): Task {
// Map subtasks
const subtasks: Subtask[] = dbSubtasks.map((subtask, index) => ({
id: index + 1, // Use numeric ID for subtasks
parentId: dbTask.id,
title: subtask.title,
description: subtask.description || '',
status: this.mapStatus(subtask.status),
priority: this.mapPriority(subtask.priority),
dependencies: dependenciesByTaskId.get(subtask.id) || [],
details: (subtask.metadata as any)?.details || '',
testStrategy: (subtask.metadata as any)?.testStrategy || '',
createdAt: subtask.created_at,
updatedAt: subtask.updated_at,
assignee: subtask.assignee_id || undefined,
complexity: subtask.complexity
? this.mapComplexityToInternal(subtask.complexity)
: undefined
}));
return {
id: dbTask.display_id || dbTask.id, // Use display_id if available
title: dbTask.title,
description: dbTask.description || '',
status: this.mapStatus(dbTask.status),
priority: this.mapPriority(dbTask.priority),
dependencies: dependenciesByTaskId.get(dbTask.id) || [],
details: (dbTask.metadata as any)?.details || '',
testStrategy: (dbTask.metadata as any)?.testStrategy || '',
subtasks,
createdAt: dbTask.created_at,
updatedAt: dbTask.updated_at,
assignee: dbTask.assignee_id || undefined,
complexity: dbTask.complexity
? this.mapComplexityToInternal(dbTask.complexity)
: undefined,
effort: dbTask.estimated_hours || undefined,
actualEffort: dbTask.actual_hours || undefined
};
}
/**
* Groups dependencies by task ID
*/
private static groupDependenciesByTaskId(
dependencies: DependencyRow[]
): Map<string, string[]> {
const dependenciesByTaskId = new Map<string, string[]>();
if (dependencies) {
for (const dep of dependencies) {
const deps = dependenciesByTaskId.get(dep.task_id) || [];
deps.push(dep.depends_on_task_id);
dependenciesByTaskId.set(dep.task_id, deps);
}
}
return dependenciesByTaskId;
}
/**
* Groups subtasks by their parent ID
*/
private static groupSubtasksByParentId(
tasks: TaskRow[]
): Map<string, TaskRow[]> {
const subtasksByParentId = new Map<string, TaskRow[]>();
for (const task of tasks) {
if (task.parent_task_id) {
const subtasks = subtasksByParentId.get(task.parent_task_id) || [];
subtasks.push(task);
subtasksByParentId.set(task.parent_task_id, subtasks);
}
}
// Sort subtasks by subtask_position for each parent
for (const subtasks of subtasksByParentId.values()) {
subtasks.sort((a, b) => a.subtask_position - b.subtask_position);
}
return subtasksByParentId;
}
/**
* Maps database status to internal status
*/
private static mapStatus(
status: Database['public']['Enums']['task_status']
): Task['status'] {
switch (status) {
case 'todo':
return 'pending';
case 'in_progress':
return 'in-progress';
case 'done':
return 'done';
default:
return 'pending';
}
}
/**
* Maps database priority to internal priority
*/
private static mapPriority(
priority: Database['public']['Enums']['task_priority']
): Task['priority'] {
switch (priority) {
case 'urgent':
return 'critical';
default:
return priority as Task['priority'];
}
}
/**
* Maps numeric complexity to descriptive complexity
*/
private static mapComplexityToInternal(
complexity: number
): Task['complexity'] {
if (complexity <= 2) return 'simple';
if (complexity <= 5) return 'moderate';
if (complexity <= 8) return 'complex';
return 'very-complex';
}
}
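A worked example of the mapper, using two abridged rows (loosely typed for brevity) to show the parent/subtask grouping and enum translation:

// Illustrative rows only (abridged, cast loosely): a parent with one subtask.
const parentRow = {
	id: 'a1', parent_task_id: null, display_id: 'TAS-1', title: 'Ship CLI',
	description: null, status: 'in_progress', priority: 'urgent',
	complexity: 7, metadata: { details: 'See brief' }, position: 0,
	subtask_position: 0, created_at: '2025-09-11T00:00:00Z',
	updated_at: '2025-09-11T00:00:00Z', assignee_id: null,
	estimated_hours: 8, actual_hours: null
} as any;
const subtaskRow = { ...parentRow, id: 'b2', parent_task_id: 'a1', title: 'Add tests' } as any;

const [task] = TaskMapper.mapDatabaseTasksToTasks([parentRow, subtaskRow], []);
console.log(task.id); // 'TAS-1' - display_id wins over the raw UUID
console.log(task.status); // 'in-progress' - 'in_progress' is translated
console.log(task.priority); // 'critical' - 'urgent' is mapped
console.log(task.complexity); // 'complex' - 7 falls in the 6-8 band
console.log(task.subtasks[0].id); // 1 - subtasks get sequential numeric ids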

View File

@@ -0,0 +1,110 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { Task } from '../types/index.js';
import { Database } from '../types/database.types.js';
import { TaskMapper } from '../mappers/TaskMapper.js';
import { AuthManager } from '../auth/auth-manager.js';
export class SupabaseTaskRepository {
constructor(private supabase: SupabaseClient<Database>) {}
async getTasks(_projectId?: string): Promise<Task[]> {
// Get the current context to determine briefId
const authManager = AuthManager.getInstance();
const context = authManager.getContext();
if (!context || !context.briefId) {
throw new Error(
'No brief selected. Please select a brief first using: tm context brief'
);
}
// Get all tasks for the brief using the exact query structure
const { data: tasks, error } = await this.supabase
.from('tasks')
.select(`
*,
document:document_id (
id,
document_name,
title,
description
)
`)
.eq('brief_id', context.briefId)
.order('position', { ascending: true })
.order('subtask_position', { ascending: true })
.order('created_at', { ascending: true });
if (error) {
throw new Error(`Failed to fetch tasks: ${error.message}`);
}
if (!tasks || tasks.length === 0) {
return [];
}
// Get all dependencies for these tasks
const taskIds = tasks.map((t: any) => t.id);
const { data: depsData, error: depsError } = await this.supabase
.from('task_dependencies')
.select('*')
.in('task_id', taskIds);
if (depsError) {
throw new Error(
`Failed to fetch task dependencies: ${depsError.message}`
);
}
// Use mapper to convert to internal format
return TaskMapper.mapDatabaseTasksToTasks(tasks, depsData || []);
}
async getTask(accountId: string, taskId: string): Promise<Task | null> {
const { data, error } = await this.supabase
.from('tasks')
.select('*')
.eq('account_id', accountId)
.eq('id', taskId)
.single();
if (error) {
if (error.code === 'PGRST116') {
return null; // Not found
}
throw new Error(`Failed to fetch task: ${error.message}`);
}
// Get dependencies for this task
const { data: depsData } = await this.supabase
.from('task_dependencies')
.select('*')
.eq('task_id', taskId);
// Get subtasks if this is a parent task
const { data: subtasksData } = await this.supabase
.from('tasks')
.select('*')
.eq('parent_task_id', taskId)
.order('subtask_position', { ascending: true });
// Create dependency map
const dependenciesByTaskId = new Map<string, string[]>();
if (depsData) {
dependenciesByTaskId.set(
taskId,
depsData.map(
(d: Database['public']['Tables']['task_dependencies']['Row']) =>
d.depends_on_task_id
)
);
}
// Use mapper to convert single task
return TaskMapper.mapDatabaseTaskToTask(
data,
subtasksData || [],
dependenciesByTaskId
);
}
}
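A hedged usage sketch — it assumes the user has already authenticated and selected a brief, since getTasks() resolves the brief from AuthManager's context; the URL and key are placeholders:

// Sketch: fetching tasks for the currently selected brief.
import { createClient } from '@supabase/supabase-js';

async function listBriefTasks(): Promise<void> {
	const supabase = createClient('https://example.supabase.co', 'anon-key'); // placeholders
	const repo = new SupabaseTaskRepository(supabase);

	const tasks = await repo.getTasks();
	console.log(`${tasks.length} tasks in the selected brief`);
}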

View File

@@ -0,0 +1,36 @@
import { Task, TaskTag } from '../types/index.js';
export interface TaskRepository {
// Task operations
getTasks(projectId: string): Promise<Task[]>;
getTask(projectId: string, taskId: string): Promise<Task | null>;
createTask(projectId: string, task: Omit<Task, 'id'>): Promise<Task>;
updateTask(
projectId: string,
taskId: string,
updates: Partial<Task>
): Promise<Task>;
deleteTask(projectId: string, taskId: string): Promise<void>;
// Tag operations
getTags(projectId: string): Promise<TaskTag[]>;
getTag(projectId: string, tagName: string): Promise<TaskTag | null>;
createTag(projectId: string, tag: TaskTag): Promise<TaskTag>;
updateTag(
projectId: string,
tagName: string,
updates: Partial<TaskTag>
): Promise<TaskTag>;
deleteTag(projectId: string, tagName: string): Promise<void>;
// Bulk operations
bulkCreateTasks(
projectId: string,
tasks: Omit<Task, 'id'>[]
): Promise<Task[]>;
bulkUpdateTasks(
projectId: string,
updates: Array<{ id: string; updates: Partial<Task> }>
): Promise<Task[]>;
bulkDeleteTasks(projectId: string, taskIds: string[]): Promise<void>;
}
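For tests, or until SupabaseTaskRepository covers the full contract, a minimal in-memory implementation of this interface might look like the following sketch (not part of this diff; id generation and casts are simplifications):

// Minimal in-memory TaskRepository sketch, useful as a test double.
export class InMemoryTaskRepository implements TaskRepository {
	private tasks = new Map<string, Task>();
	private tags = new Map<string, TaskTag>();

	async getTasks(): Promise<Task[]> {
		return [...this.tasks.values()];
	}
	async getTask(_p: string, id: string): Promise<Task | null> {
		return this.tasks.get(id) ?? null;
	}
	async createTask(_p: string, task: Omit<Task, 'id'>): Promise<Task> {
		const created = { ...task, id: `${this.tasks.size + 1}` } as Task;
		this.tasks.set(String(created.id), created);
		return created;
	}
	async updateTask(_p: string, id: string, updates: Partial<Task>): Promise<Task> {
		const existing = this.tasks.get(id);
		if (!existing) throw new Error(`Task ${id} not found`);
		const updated = { ...existing, ...updates, id } as Task;
		this.tasks.set(id, updated);
		return updated;
	}
	async deleteTask(_p: string, id: string): Promise<void> {
		this.tasks.delete(id);
	}
	async getTags(): Promise<TaskTag[]> {
		return [...this.tags.values()];
	}
	async getTag(_p: string, name: string): Promise<TaskTag | null> {
		return this.tags.get(name) ?? null;
	}
	async createTag(_p: string, tag: TaskTag): Promise<TaskTag> {
		this.tags.set(tag.name, tag);
		return tag;
	}
	async updateTag(_p: string, name: string, updates: Partial<TaskTag>): Promise<TaskTag> {
		const merged = { ...(this.tags.get(name) ?? { name, tasks: [] }), ...updates } as TaskTag;
		this.tags.set(name, merged);
		return merged;
	}
	async deleteTag(_p: string, name: string): Promise<void> {
		this.tags.delete(name);
	}
	async bulkCreateTasks(p: string, tasks: Omit<Task, 'id'>[]): Promise<Task[]> {
		return Promise.all(tasks.map((t) => this.createTask(p, t)));
	}
	async bulkUpdateTasks(p: string, updates: Array<{ id: string; updates: Partial<Task> }>): Promise<Task[]> {
		return Promise.all(updates.map((u) => this.updateTask(p, u.id, u.updates)));
	}
	async bulkDeleteTasks(_p: string, taskIds: string[]): Promise<void> {
		taskIds.forEach((id) => this.tasks.delete(id));
	}
}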

View File

@@ -4,3 +4,5 @@
  */
 export { TaskService } from './task-service.js';
+export { OrganizationService } from './organization.service.js';
+export type { Organization, Brief } from './organization.service.js';

View File

@@ -0,0 +1,363 @@
/**
* @fileoverview Organization and Brief management service
* Handles fetching and managing organizations and briefs from the API
*/
import { SupabaseClient } from '@supabase/supabase-js';
import { Database } from '../types/database.types.js';
import { TaskMasterError, ERROR_CODES } from '../errors/task-master-error.js';
import { getLogger } from '../logger/index.js';
/**
* Organization data structure
*/
export interface Organization {
id: string;
name: string;
slug: string;
}
/**
* Brief data structure
*/
export interface Brief {
id: string;
accountId: string;
documentId: string;
status: string;
createdAt: string;
updatedAt: string;
}
/**
* Task data structure from the remote database
*/
export interface RemoteTask {
id: string;
briefId: string;
documentId: string;
position: number | null;
subtaskPosition: number | null;
status: string;
createdAt: string;
updatedAt: string;
// Document details from join
document?: {
id: string;
document_name: string;
title: string;
description: string;
};
}
/**
* Service for managing organizations and briefs
*/
export class OrganizationService {
private logger = getLogger('OrganizationService');
constructor(private supabaseClient: SupabaseClient<Database>) {}
/**
* Get all organizations for the authenticated user
*/
async getOrganizations(): Promise<Organization[]> {
try {
// The user is already authenticated via the Authorization header
// Query the user_accounts view/table (filtered by RLS for current user)
const { data, error } = await this.supabaseClient
.from('user_accounts')
.select(`
id,
name,
slug
`);
if (error) {
throw new TaskMasterError(
`Failed to fetch organizations: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getOrganizations' },
error
);
}
if (!data || data.length === 0) {
this.logger.debug('No organizations found for user');
return [];
}
// Map to our Organization interface
return data.map((org) => ({
id: org.id ?? '',
name: org.name ?? '',
slug: org.slug ?? org.id ?? '' // Use ID as fallback if slug is null
}));
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch organizations',
ERROR_CODES.API_ERROR,
{ operation: 'getOrganizations' },
error as Error
);
}
}
/**
* Get a specific organization by ID
*/
async getOrganization(orgId: string): Promise<Organization | null> {
try {
const { data, error } = await this.supabaseClient
.from('accounts')
.select(`
id,
name,
slug
`)
.eq('id', orgId)
.single();
if (error) {
if (error.code === 'PGRST116') {
// No rows found
return null;
}
throw new TaskMasterError(
`Failed to fetch organization: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getOrganization', orgId },
error
);
}
if (!data) {
return null;
}
const accountData =
data as Database['public']['Tables']['accounts']['Row'];
return {
id: accountData.id,
name: accountData.name,
slug: accountData.slug || accountData.id
};
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch organization',
ERROR_CODES.API_ERROR,
{ operation: 'getOrganization', orgId },
error as Error
);
}
}
/**
* Get all briefs for a specific organization
*/
async getBriefs(orgId: string): Promise<Brief[]> {
try {
const { data, error } = await this.supabaseClient
.from('brief')
.select(`
id,
account_id,
document_id,
status,
created_at,
updated_at
`)
.eq('account_id', orgId);
if (error) {
throw new TaskMasterError(
`Failed to fetch briefs: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getBriefs', orgId },
error
);
}
if (!data || data.length === 0) {
this.logger.debug(`No briefs found for organization ${orgId}`);
return [];
}
// Map to our Brief interface
return data.map((brief: any) => ({
id: brief.id,
accountId: brief.account_id,
documentId: brief.document_id,
status: brief.status,
createdAt: brief.created_at,
updatedAt: brief.updated_at
}));
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch briefs',
ERROR_CODES.API_ERROR,
{ operation: 'getBriefs', orgId },
error as Error
);
}
}
/**
* Get a specific brief by ID
*/
async getBrief(briefId: string): Promise<Brief | null> {
try {
const { data, error } = await this.supabaseClient
.from('brief')
.select(`
id,
account_id,
document_id,
status,
created_at,
updated_at
`)
.eq('id', briefId)
.single();
if (error) {
if (error.code === 'PGRST116') {
// No rows found
return null;
}
throw new TaskMasterError(
`Failed to fetch brief: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getBrief', briefId },
error
);
}
if (!data) {
return null;
}
const briefData = data as any;
return {
id: briefData.id,
accountId: briefData.account_id,
documentId: briefData.document_id,
status: briefData.status,
createdAt: briefData.created_at,
updatedAt: briefData.updated_at
};
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch brief',
ERROR_CODES.API_ERROR,
{ operation: 'getBrief', briefId },
error as Error
);
}
}
/**
* Validate that a user has access to an organization
*/
async validateOrgAccess(orgId: string): Promise<boolean> {
try {
const org = await this.getOrganization(orgId);
return org !== null;
} catch (error) {
this.logger.error(`Failed to validate org access: ${error}`);
return false;
}
}
/**
* Validate that a user has access to a brief
*/
async validateBriefAccess(briefId: string): Promise<boolean> {
try {
const brief = await this.getBrief(briefId);
return brief !== null;
} catch (error) {
this.logger.error(`Failed to validate brief access: ${error}`);
return false;
}
}
/**
* Get all tasks for a specific brief
*/
async getTasks(briefId: string): Promise<RemoteTask[]> {
try {
const { data, error } = await this.supabaseClient
.from('tasks')
.select(`
*,
document:document_id (
id,
document_name,
title,
description
)
`)
.eq('brief_id', briefId)
.order('position', { ascending: true })
.order('subtask_position', { ascending: true })
.order('created_at', { ascending: true });
if (error) {
throw new TaskMasterError(
`Failed to fetch tasks: ${error.message}`,
ERROR_CODES.API_ERROR,
{ operation: 'getTasks', briefId },
error
);
}
if (!data || data.length === 0) {
this.logger.debug(`No tasks found for brief ${briefId}`);
return [];
}
// Map to our RemoteTask interface
return data.map((task: any) => ({
id: task.id,
briefId: task.brief_id,
documentId: task.document_id,
position: task.position,
subtaskPosition: task.subtask_position,
status: task.status,
createdAt: task.created_at,
updatedAt: task.updated_at,
document: task.document
? {
id: task.document.id,
document_name: task.document.document_name,
title: task.document.title,
description: task.document.description
}
: undefined
}));
} catch (error) {
if (error instanceof TaskMasterError) {
throw error;
}
throw new TaskMasterError(
'Failed to fetch tasks',
ERROR_CODES.API_ERROR,
{ operation: 'getTasks', briefId },
error as Error
);
}
}
}
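A hedged end-to-end walkthrough of the service: list organizations, take the first brief, fetch its tasks. It assumes an authenticated Supabase client is already behind the service instance:

// Sketch: org -> brief -> tasks, using only methods defined above.
async function showFirstBrief(service: OrganizationService): Promise<void> {
	const [org] = await service.getOrganizations();
	if (!org) return;

	const [brief] = await service.getBriefs(org.id);
	if (!brief) return;

	const tasks = await service.getTasks(brief.id);
	console.log(`${org.name} / brief ${brief.id}: ${tasks.length} tasks`);
}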

View File

@@ -3,7 +3,12 @@
  * Core service for task operations - handles business logic between storage and API
  */

-import type { Task, TaskFilter, TaskStatus } from '../types/index.js';
+import type {
+	Task,
+	TaskFilter,
+	TaskStatus,
+	StorageType
+} from '../types/index.js';
 import type { IStorage } from '../interfaces/storage.interface.js';
 import { ConfigManager } from '../config/config-manager.js';
 import { StorageFactory } from '../storage/storage-factory.js';
@@ -22,8 +27,8 @@ export interface TaskListResult {
 	filtered: number;
 	/** The tag these tasks belong to (only present if explicitly provided) */
 	tag?: string;
-	/** Storage type being used - includes 'auto' for automatic detection */
-	storageType: 'file' | 'api' | 'auto';
+	/** Storage type being used */
+	storageType: StorageType;
 }

 /**
@@ -113,7 +118,7 @@ export class TaskService {
 				total: rawTasks.length,
 				filtered: filteredEntities.length,
 				tag: options.tag, // Only include tag if explicitly provided
-				storageType: this.configManager.getStorageConfig().type
+				storageType: this.getStorageType()
 			};
 		} catch (error) {
 			throw new TaskMasterError(
@@ -166,7 +171,7 @@ export class TaskService {
 		byStatus: Record<TaskStatus, number>;
 		withSubtasks: number;
 		blocked: number;
-		storageType: 'file' | 'api' | 'auto';
+		storageType: StorageType;
 	}> {
 		const result = await this.getTaskList({
 			tag,
@@ -334,8 +339,12 @@ export class TaskService {
 	/**
 	 * Get current storage type
 	 */
-	getStorageType(): 'file' | 'api' | 'auto' {
-		return this.configManager.getStorageConfig().type;
+	getStorageType(): StorageType {
+		// Prefer the runtime storage type if available to avoid exposing 'auto'
+		const s = this.storage as { getType?: () => 'file' | 'api' } | null;
+		const runtimeType = s?.getType?.();
+		return (runtimeType ??
+			this.configManager.getStorageConfig().type) as StorageType;
 	}

 	/**
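The duck-typing in getStorageType() amounts to this contract — a small illustrative sketch, not code from this diff:

// A runtime storage object that knows what 'auto' resolved to wins over config.
const runtimeStorage = {
	getType: (): 'file' | 'api' => 'api'
};
const configuredType = 'auto'; // what the config may still say
const effectiveType = runtimeStorage.getType?.() ?? configuredType;
console.log(effectiveType); // 'api'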

View File

@@ -1,27 +1,29 @@
 /**
- * @fileoverview API-based storage implementation for Hamster integration
- * This provides storage via REST API instead of local file system
+ * @fileoverview API-based storage implementation using repository pattern
+ * This provides storage via repository abstraction for flexibility
  */

 import type {
 	IStorage,
 	StorageStats
 } from '../interfaces/storage.interface.js';
-import type { Task, TaskMetadata } from '../types/index.js';
+import type { Task, TaskMetadata, TaskTag } from '../types/index.js';
 import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
+import { TaskRepository } from '../repositories/task-repository.interface.js';
+import { SupabaseTaskRepository } from '../repositories/supabase-task-repository.js';
+import { SupabaseClient } from '@supabase/supabase-js';
+import { AuthManager } from '../auth/auth-manager.js';

 /**
  * API storage configuration
  */
 export interface ApiStorageConfig {
-	/** API endpoint base URL */
-	endpoint: string;
-	/** Access token for authentication */
-	accessToken: string;
-	/** Optional project ID */
-	projectId?: string;
-	/** Request timeout in milliseconds */
-	timeout?: number;
+	/** Supabase client instance */
+	supabaseClient?: SupabaseClient;
+	/** Custom repository implementation */
+	repository?: TaskRepository;
+	/** Project ID for scoping */
+	projectId: string;
 	/** Enable request retries */
 	enableRetry?: boolean;
 	/** Maximum retry attempts */
@@ -29,64 +31,58 @@ export interface ApiStorageConfig {
 }

 /**
- * API response wrapper
+ * ApiStorage implementation using repository pattern
+ * Provides flexibility to swap between different backend implementations
  */
-interface ApiResponse<T> {
-	success: boolean;
-	data?: T;
-	error?: string;
-	message?: string;
-}
-
-/**
- * ApiStorage implementation for Hamster integration
- * Fetches and stores tasks via REST API
- */
 export class ApiStorage implements IStorage {
-	private readonly config: Required<ApiStorageConfig>;
+	private readonly repository: TaskRepository;
+	private readonly projectId: string;
+	private readonly enableRetry: boolean;
+	private readonly maxRetries: number;
 	private initialized = false;
+	private tagsCache: Map<string, TaskTag> = new Map();

 	constructor(config: ApiStorageConfig) {
 		this.validateConfig(config);

-		this.config = {
-			endpoint: config.endpoint.replace(/\/$/, ''), // Remove trailing slash
-			accessToken: config.accessToken,
-			projectId: config.projectId || 'default',
-			timeout: config.timeout || 30000,
-			enableRetry: config.enableRetry ?? true,
-			maxRetries: config.maxRetries || 3
-		};
+		// Use provided repository or create Supabase repository
+		if (config.repository) {
+			this.repository = config.repository;
+		} else if (config.supabaseClient) {
+			// TODO: SupabaseTaskRepository doesn't implement all TaskRepository methods yet
+			// Cast for now until full implementation is complete
+			this.repository = new SupabaseTaskRepository(
+				config.supabaseClient
+			) as unknown as TaskRepository;
+		} else {
+			throw new TaskMasterError(
+				'Either repository or supabaseClient must be provided',
+				ERROR_CODES.MISSING_CONFIGURATION
+			);
+		}
+
+		this.projectId = config.projectId;
+		this.enableRetry = config.enableRetry ?? true;
+		this.maxRetries = config.maxRetries ?? 3;
 	}

 	/**
 	 * Validate API storage configuration
 	 */
 	private validateConfig(config: ApiStorageConfig): void {
-		if (!config.endpoint) {
+		if (!config.projectId) {
 			throw new TaskMasterError(
-				'API endpoint is required for API storage',
+				'Project ID is required for API storage',
 				ERROR_CODES.MISSING_CONFIGURATION
 			);
 		}

-		if (!config.accessToken) {
+		if (!config.repository && !config.supabaseClient) {
 			throw new TaskMasterError(
-				'Access token is required for API storage',
+				'Either repository or supabaseClient must be provided',
 				ERROR_CODES.MISSING_CONFIGURATION
 			);
 		}
-
-		// Validate endpoint URL format
-		try {
-			new URL(config.endpoint);
-		} catch {
-			throw new TaskMasterError(
-				'Invalid API endpoint URL',
-				ERROR_CODES.INVALID_INPUT,
-				{ endpoint: config.endpoint }
-			);
-		}
 	}

 	/**
@@ -96,8 +92,8 @@ export class ApiStorage implements IStorage {
 		if (this.initialized) return;

 		try {
-			// Verify API connectivity
-			await this.verifyConnection();
+			// Load initial tags
+			await this.loadTagsIntoCache();
 			this.initialized = true;
 		} catch (error) {
 			throw new TaskMasterError(
@@ -110,39 +106,71 @@ export class ApiStorage implements IStorage {
 	}

 	/**
-	 * Verify API connection
+	 * Load tags into cache
+	 * In our API-based system, "tags" represent briefs
 	 */
-	private async verifyConnection(): Promise<void> {
-		const response = await this.makeRequest<{ status: string }>('/health');
-
-		if (!response.success) {
-			throw new Error(`API health check failed: ${response.error}`);
+	private async loadTagsIntoCache(): Promise<void> {
+		try {
+			const authManager = AuthManager.getInstance();
+			const context = authManager.getContext();
+
+			// If we have a selected brief, create a virtual "tag" for it
+			if (context?.briefId) {
+				// Create a virtual tag representing the current brief
+				const briefTag: TaskTag = {
+					name: context.briefId,
+					tasks: [], // Will be populated when tasks are loaded
+					metadata: {
+						briefId: context.briefId,
+						briefName: context.briefName,
+						organizationId: context.orgId
+					}
+				};
+				this.tagsCache.clear();
+				this.tagsCache.set(context.briefId, briefTag);
+			}
+		} catch (error) {
+			// If no brief is selected, that's okay - user needs to select one first
+			console.debug('No brief selected, starting with empty cache');
 		}
 	}

 	/**
 	 * Load tasks from API
+	 * In our system, the tag parameter represents a brief ID
 	 */
 	async loadTasks(tag?: string): Promise<Task[]> {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/tasks`;
-
-			const response = await this.makeRequest<{ tasks: Task[] }>(endpoint);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to load tasks');
+			const authManager = AuthManager.getInstance();
+			const context = authManager.getContext();
+
+			// If no brief is selected in context, throw an error
+			if (!context?.briefId) {
+				throw new Error(
+					'No brief selected. Please select a brief first using: tm context brief <brief-id>'
+				);
 			}

-			return response.data?.tasks || [];
+			// Load tasks from the current brief context
+			const tasks = await this.retryOperation(() =>
+				this.repository.getTasks(this.projectId)
+			);
+
+			// Update the tag cache with the loaded task IDs
+			const briefTag = this.tagsCache.get(context.briefId);
+			if (briefTag) {
+				briefTag.tasks = tasks.map((task) => task.id);
+			}
+
+			return tasks;
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to load tasks from API',
 				ERROR_CODES.STORAGE_ERROR,
-				{ operation: 'loadTasks', tag },
+				{ operation: 'loadTasks', tag, context: 'brief-based loading' },
 				error as Error
 			);
 		}
@@ -155,15 +183,29 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/tasks`;
-
-			const response = await this.makeRequest(endpoint, 'PUT', { tasks });
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to save tasks');
+			if (tag) {
+				// Update tag with task IDs
+				const tagData = this.tagsCache.get(tag) || {
+					name: tag,
+					tasks: [],
+					metadata: {}
+				};
+				tagData.tasks = tasks.map((t) => t.id);
+
+				// Save or update tag
+				if (this.tagsCache.has(tag)) {
+					await this.repository.updateTag(this.projectId, tag, tagData);
+				} else {
+					await this.repository.createTag(this.projectId, tagData);
+				}
+
+				this.tagsCache.set(tag, tagData);
 			}
+
+			// Save tasks using bulk operation
+			await this.retryOperation(() =>
+				this.repository.bulkCreateTasks(this.projectId, tasks)
+			);
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to save tasks to API',
@@ -181,20 +223,17 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/tasks/${taskId}`;
-
-			const response = await this.makeRequest<{ task: Task }>(endpoint);
-
-			if (!response.success) {
-				if (response.error?.includes('not found')) {
+			if (tag) {
+				// Check if task is in tag
+				const tagData = this.tagsCache.get(tag);
+				if (!tagData || !tagData.tasks.includes(taskId)) {
 					return null;
 				}
-				throw new Error(response.error || 'Failed to load task');
 			}

-			return response.data?.task || null;
+			return await this.retryOperation(() =>
+				this.repository.getTask(this.projectId, taskId)
+			);
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to load task from API',
@@ -212,14 +251,26 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/tasks/${task.id}?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/tasks/${task.id}`;
-
-			const response = await this.makeRequest(endpoint, 'PUT', { task });
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to save task');
+			// Check if task exists
+			const existing = await this.repository.getTask(this.projectId, task.id);
+
+			if (existing) {
+				await this.retryOperation(() =>
+					this.repository.updateTask(this.projectId, task.id, task)
+				);
+			} else {
+				await this.retryOperation(() =>
+					this.repository.createTask(this.projectId, task)
+				);
+			}
+
+			// Update tag if specified
+			if (tag) {
+				const tagData = this.tagsCache.get(tag);
+				if (tagData && !tagData.tasks.includes(task.id)) {
+					tagData.tasks.push(task.id);
+					await this.repository.updateTag(this.projectId, tag, tagData);
+				}
 			}
 		} catch (error) {
 			throw new TaskMasterError(
@@ -238,14 +289,17 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/tasks/${taskId}`;
-
-			const response = await this.makeRequest(endpoint, 'DELETE');
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to delete task');
+			await this.retryOperation(() =>
+				this.repository.deleteTask(this.projectId, taskId)
+			);
+
+			// Remove from tag if specified
+			if (tag) {
+				const tagData = this.tagsCache.get(tag);
+				if (tagData) {
+					tagData.tasks = tagData.tasks.filter((id) => id !== taskId);
+					await this.repository.updateTag(this.projectId, tag, tagData);
+				}
 			}
 		} catch (error) {
 			throw new TaskMasterError(
@@ -258,21 +312,24 @@ export class ApiStorage implements IStorage {
 	}

 	/**
-	 * List available tags
+	 * List available tags (briefs in our system)
 	 */
 	async listTags(): Promise<string[]> {
 		await this.ensureInitialized();

 		try {
-			const response = await this.makeRequest<{ tags: string[] }>(
-				`/projects/${this.config.projectId}/tags`
-			);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to list tags');
+			const authManager = AuthManager.getInstance();
+			const context = authManager.getContext();
+
+			// In our API-based system, we only have one "tag" at a time - the current brief
+			if (context?.briefId) {
+				// Ensure the current brief is in our cache
+				await this.loadTagsIntoCache();
+				return [context.briefId];
 			}

-			return response.data?.tags || [];
+			// No brief selected, return empty array
+			return [];
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to list tags from API',
@@ -290,19 +347,15 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/metadata`;
-
-			const response = await this.makeRequest<{ metadata: TaskMetadata }>(
-				endpoint
-			);
-
-			if (!response.success) {
-				return null;
+			if (tag) {
+				const tagData = this.tagsCache.get(tag);
+				return (tagData?.metadata as TaskMetadata) || null;
 			}

-			return response.data?.metadata || null;
+			// Return global metadata if no tag specified
+			// This could be stored in a special system tag
+			const systemTag = await this.repository.getTag(this.projectId, '_system');
+			return (systemTag?.metadata as TaskMetadata) || null;
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to load metadata from API',
@@ -320,14 +373,38 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const endpoint = tag
-				? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
-				: `/projects/${this.config.projectId}/metadata`;
-
-			const response = await this.makeRequest(endpoint, 'PUT', { metadata });
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to save metadata');
+			if (tag) {
+				const tagData = this.tagsCache.get(tag) || {
+					name: tag,
+					tasks: [],
+					metadata: {}
+				};
+				tagData.metadata = metadata as any;
+
+				if (this.tagsCache.has(tag)) {
+					await this.repository.updateTag(this.projectId, tag, tagData);
+				} else {
+					await this.repository.createTag(this.projectId, tagData);
+				}
+
+				this.tagsCache.set(tag, tagData);
+			} else {
+				// Save to system tag
+				const systemTag: TaskTag = {
+					name: '_system',
+					tasks: [],
+					metadata: metadata as any
+				};
+
+				const existing = await this.repository.getTag(
+					this.projectId,
+					'_system'
+				);
+
+				if (existing) {
+					await this.repository.updateTag(this.projectId, '_system', systemTag);
+				} else {
+					await this.repository.createTag(this.projectId, systemTag);
+				}
 			}
 		} catch (error) {
 			throw new TaskMasterError(
@@ -358,14 +435,30 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			// First load existing tasks
-			const existingTasks = await this.loadTasks(tag);
-
-			// Append new tasks
-			const allTasks = [...existingTasks, ...tasks];
-
-			// Save all tasks
-			await this.saveTasks(allTasks, tag);
+			// Use bulk create - repository should handle duplicates
+			await this.retryOperation(() =>
+				this.repository.bulkCreateTasks(this.projectId, tasks)
+			);
+
+			// Update tag if specified
+			if (tag) {
+				const tagData = this.tagsCache.get(tag) || {
+					name: tag,
+					tasks: [],
+					metadata: {}
+				};
+
+				const newTaskIds = tasks.map((t) => t.id);
+				tagData.tasks = [...new Set([...tagData.tasks, ...newTaskIds])];
+
+				if (this.tagsCache.has(tag)) {
+					await this.repository.updateTag(this.projectId, tag, tagData);
+				} else {
+					await this.repository.createTag(this.projectId, tagData);
+				}
+
+				this.tagsCache.set(tag, tagData);
+			}
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to append tasks to API',
@@ -387,18 +480,9 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			// Load the task
-			const task = await this.loadTask(taskId, tag);
-
-			if (!task) {
-				throw new Error(`Task ${taskId} not found`);
-			}
-
-			// Merge updates
-			const updatedTask = { ...task, ...updates, id: taskId };
-
-			// Save updated task
-			await this.saveTask(updatedTask, tag);
+			await this.retryOperation(() =>
+				this.repository.updateTask(this.projectId, taskId, updates)
+			);
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to update task via API',
@@ -423,14 +507,11 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const response = await this.makeRequest(
-				`/projects/${this.config.projectId}/tags/${encodeURIComponent(tag)}`,
-				'DELETE'
+			await this.retryOperation(() =>
+				this.repository.deleteTag(this.projectId, tag)
 			);

-			if (!response.success) {
-				throw new Error(response.error || 'Failed to delete tag');
-			}
+			this.tagsCache.delete(tag);
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to delete tag via API',
@@ -448,15 +529,21 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const response = await this.makeRequest(
-				`/projects/${this.config.projectId}/tags/${encodeURIComponent(oldTag)}/rename`,
-				'POST',
-				{ newTag }
-			);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to rename tag');
+			const tagData = this.tagsCache.get(oldTag);
+			if (!tagData) {
+				throw new Error(`Tag ${oldTag} not found`);
 			}
+
+			// Create new tag with same data
+			const newTagData = { ...tagData, name: newTag };
+			await this.repository.createTag(this.projectId, newTagData);
+
+			// Delete old tag
+			await this.repository.deleteTag(this.projectId, oldTag);
+
+			// Update cache
+			this.tagsCache.delete(oldTag);
+			this.tagsCache.set(newTag, newTagData);
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to rename tag via API',
@@ -474,15 +561,17 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const response = await this.makeRequest(
-				`/projects/${this.config.projectId}/tags/${encodeURIComponent(sourceTag)}/copy`,
-				'POST',
-				{ targetTag }
-			);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to copy tag');
+			const sourceData = this.tagsCache.get(sourceTag);
+			if (!sourceData) {
+				throw new Error(`Source tag ${sourceTag} not found`);
 			}
+
+			// Create new tag with copied data
+			const targetData = { ...sourceData, name: targetTag };
+			await this.repository.createTag(this.projectId, targetData);
+
+			// Update cache
+			this.tagsCache.set(targetTag, targetData);
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to copy tag via API',
@@ -500,24 +589,22 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();

 		try {
-			const response = await this.makeRequest<{
-				stats: StorageStats;
-			}>(`/projects/${this.config.projectId}/stats`);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to get stats');
-			}
-
-			// Return stats or default values
-			return (
-				response.data?.stats || {
-					totalTasks: 0,
-					totalTags: 0,
-					storageSize: 0,
-					lastModified: new Date().toISOString(),
-					tagStats: []
-				}
-			);
+			const tasks = await this.repository.getTasks(this.projectId);
+			const tags = await this.repository.getTags(this.projectId);
+
+			const tagStats = tags.map((tag) => ({
+				tag: tag.name,
+				taskCount: tag.tasks.length,
+				lastModified: new Date().toISOString() // TODO: Get actual last modified from tag data
+			}));
+
+			return {
+				totalTasks: tasks.length,
+				totalTags: tags.length,
+				storageSize: 0, // Not applicable for API storage
+				lastModified: new Date().toISOString(),
+				tagStats
+			};
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to get stats from API',
@@ -535,16 +622,15 @@ export class ApiStorage implements IStorage {
await this.ensureInitialized(); await this.ensureInitialized();
try { try {
const response = await this.makeRequest<{ backupId: string }>( // Export all data
`/projects/${this.config.projectId}/backup`, await this.repository.getTasks(this.projectId);
'POST' await this.repository.getTags(this.projectId);
);
if (!response.success) { // TODO: In a real implementation, this would:
throw new Error(response.error || 'Failed to create backup'); // 1. Create backup data structure with tasks and tags
} // 2. Save the backup to a storage service
// For now, return a backup identifier
return response.data?.backupId || 'unknown'; return `backup-${this.projectId}-${Date.now()}`;
} catch (error) { } catch (error) {
throw new TaskMasterError( throw new TaskMasterError(
'Failed to create backup via API', 'Failed to create backup via API',
@@ -558,27 +644,16 @@ export class ApiStorage implements IStorage {
 	/**
 	 * Restore from backup
 	 */
-	async restore(backupPath: string): Promise<void> {
+	async restore(backupId: string): Promise<void> {
 		await this.ensureInitialized();
-		try {
-			const response = await this.makeRequest(
-				`/projects/${this.config.projectId}/restore`,
-				'POST',
-				{ backupId: backupPath }
-			);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to restore backup');
-			}
-		} catch (error) {
-			throw new TaskMasterError(
-				'Failed to restore backup via API',
-				ERROR_CODES.STORAGE_ERROR,
-				{ operation: 'restore', backupPath },
-				error as Error
-			);
-		}
+		// This would restore from a backup service
+		// Implementation depends on backup strategy
+		throw new TaskMasterError(
+			'Restore not implemented for API storage',
+			ERROR_CODES.NOT_IMPLEMENTED,
+			{ operation: 'restore', backupId }
+		);
 	}

 	/**
@@ -588,14 +663,23 @@ export class ApiStorage implements IStorage {
 		await this.ensureInitialized();
 		try {
-			const response = await this.makeRequest(
-				`/projects/${this.config.projectId}/clear`,
-				'POST'
-			);
-
-			if (!response.success) {
-				throw new Error(response.error || 'Failed to clear data');
-			}
+			// Delete all tasks
+			const tasks = await this.repository.getTasks(this.projectId);
+			if (tasks.length > 0) {
+				await this.repository.bulkDeleteTasks(
+					this.projectId,
+					tasks.map((t) => t.id)
+				);
+			}
+
+			// Delete all tags
+			const tags = await this.repository.getTags(this.projectId);
+			for (const tag of tags) {
+				await this.repository.deleteTag(this.projectId, tag.name);
+			}
+
+			// Clear cache
+			this.tagsCache.clear();
 		} catch (error) {
 			throw new TaskMasterError(
 				'Failed to clear data via API',
@@ -611,6 +695,7 @@ export class ApiStorage implements IStorage {
 	 */
 	async close(): Promise<void> {
 		this.initialized = false;
+		this.tagsCache.clear();
 	}

 	/**
@@ -623,102 +708,21 @@ export class ApiStorage implements IStorage {
 	}

 	/**
-	 * Make HTTP request to API
+	 * Retry an operation with exponential backoff
 	 */
-	private async makeRequest<T>(
-		path: string,
-		method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',
-		body?: unknown
-	): Promise<ApiResponse<T>> {
-		const url = `${this.config.endpoint}${path}`;
-		const controller = new AbortController();
-		const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
-
-		try {
-			const options: RequestInit = {
-				method,
-				headers: {
-					Authorization: `Bearer ${this.config.accessToken}`,
-					'Content-Type': 'application/json',
-					Accept: 'application/json'
-				},
-				signal: controller.signal
-			};
-
-			if (body && (method === 'POST' || method === 'PUT')) {
-				options.body = JSON.stringify(body);
-			}
-
-			let lastError: Error | null = null;
-			let attempt = 0;
-
-			while (attempt < this.config.maxRetries) {
-				attempt++;
-
-				try {
-					const response = await fetch(url, options);
-					const data = await response.json();
-
-					if (response.ok) {
-						return { success: true, data: data as T };
-					}
-
-					// Handle specific error codes
-					if (response.status === 401) {
-						return {
-							success: false,
-							error: 'Authentication failed - check access token'
-						};
-					}
-
-					if (response.status === 404) {
-						return {
-							success: false,
-							error: 'Resource not found'
-						};
-					}
-
-					if (response.status === 429) {
-						// Rate limited - retry with backoff
-						if (this.config.enableRetry && attempt < this.config.maxRetries) {
-							await this.delay(Math.pow(2, attempt) * 1000);
-							continue;
-						}
-					}
-
-					const errorData = data as any;
-					return {
-						success: false,
-						error:
-							errorData.error ||
-							errorData.message ||
-							`HTTP ${response.status}: ${response.statusText}`
-					};
-				} catch (error) {
-					lastError = error as Error;
-
-					// Retry on network errors
-					if (this.config.enableRetry && attempt < this.config.maxRetries) {
-						await this.delay(Math.pow(2, attempt) * 1000);
-						continue;
-					}
-				}
-			}
-
-			// All retries exhausted
-			return {
-				success: false,
-				error: lastError?.message || 'Request failed after retries'
-			};
-		} finally {
-			clearTimeout(timeoutId);
-		}
-	}
-
-	/**
-	 * Delay helper for retries
-	 */
-	private delay(ms: number): Promise<void> {
-		return new Promise((resolve) => setTimeout(resolve, ms));
-	}
+	private async retryOperation<T>(
+		operation: () => Promise<T>,
+		attempt: number = 1
+	): Promise<T> {
+		try {
+			return await operation();
+		} catch (error) {
+			if (this.enableRetry && attempt < this.maxRetries) {
+				const delay = Math.pow(2, attempt) * 1000;
+				await new Promise((resolve) => setTimeout(resolve, delay));
+				return this.retryOperation(operation, attempt + 1);
+			}
+			throw error;
+		}
+	}
 }
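Note: the new retryOperation keeps the same exponential backoff that makeRequest used for 429s and network errors, just factored out of the HTTP layer. A minimal standalone sketch of the pattern, with illustrative names that are not part of this diff:

// Standalone sketch of the exponential-backoff pattern used by
// retryOperation above; names here are illustrative only.
async function withBackoff<T>(
	operation: () => Promise<T>,
	maxRetries = 3,
	attempt = 1
): Promise<T> {
	try {
		return await operation();
	} catch (error) {
		if (attempt < maxRetries) {
			// Wait 2^attempt seconds: 2s after the first failure, then 4s, 8s, ...
			await new Promise((resolve) => setTimeout(resolve, 2 ** attempt * 1000));
			return withBackoff(operation, maxRetries, attempt + 1);
		}
		throw error;
	}
}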

View File

@@ -13,6 +13,7 @@ import { ApiStorage } from './api-storage.js';
 import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
 import { AuthManager } from '../auth/auth-manager.js';
 import { getLogger } from '../logger/index.js';
+import { SupabaseAuthClient } from '../clients/supabase-client.js';
 /**
  * Factory for creating storage implementations based on configuration
@@ -148,29 +149,13 @@ export class StorageFactory {
 	 * Create API storage implementation
 	 */
 	private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
-		const { apiEndpoint, apiAccessToken } = config.storage || {};
-
-		if (!apiEndpoint) {
-			throw new TaskMasterError(
-				'API endpoint is required for API storage',
-				ERROR_CODES.MISSING_CONFIGURATION,
-				{ storageType: 'api' }
-			);
-		}
-
-		if (!apiAccessToken) {
-			throw new TaskMasterError(
-				'API access token is required for API storage',
-				ERROR_CODES.MISSING_CONFIGURATION,
-				{ storageType: 'api' }
-			);
-		}
+		// Use our SupabaseAuthClient instead of creating a raw Supabase client
+		const supabaseAuthClient = new SupabaseAuthClient();
+		const supabaseClient = supabaseAuthClient.getClient();

 		return new ApiStorage({
-			endpoint: apiEndpoint,
-			accessToken: apiAccessToken,
-			projectId: config.projectPath,
-			timeout: config.retry?.requestTimeout,
+			supabaseClient,
+			projectId: config.projectPath || '',
 			enableRetry: config.retry?.retryOnNetworkError,
 			maxRetries: config.retry?.retryAttempts
 		});
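The factory no longer validates apiEndpoint/apiAccessToken; authentication now flows through SupabaseAuthClient. A hedged sketch of what such a wrapper might look like — the real implementation lives in src/clients/supabase-client.ts and may differ, and the env variable names below are assumptions, not taken from this diff:

// Hedged sketch only; createClient is the real @supabase/supabase-js API,
// everything else here is assumed for illustration.
import { createClient, type SupabaseClient } from '@supabase/supabase-js';

class SupabaseAuthClientSketch {
	private client: SupabaseClient;

	constructor() {
		// TM_PUBLIC_* values would be injected at build time; names assumed.
		this.client = createClient(
			process.env.TM_PUBLIC_SUPABASE_URL ?? '',
			process.env.TM_PUBLIC_SUPABASE_ANON_KEY ?? ''
		);
	}

	// getClient() is the method the factory above actually calls.
	getClient(): SupabaseClient {
		return this.client;
	}
}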

View File

@@ -10,8 +10,16 @@ import {
 } from './services/task-service.js';
 import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
 import type { IConfiguration } from './interfaces/configuration.interface.js';
-import type { Task, TaskStatus, TaskFilter } from './types/index.js';
-import { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';
+import type {
+	Task,
+	TaskStatus,
+	TaskFilter,
+	StorageType
+} from './types/index.js';
+import {
+	WorkflowService,
+	type WorkflowServiceConfig
+} from './workflow/index.js';
 /**
  * Options for creating TaskMasterCore instance
@@ -91,10 +99,10 @@ export class TaskMasterCore {
 			projectRoot: options.projectPath,
 			...options.workflow
 		};

 		// Pass task retrieval function to workflow service
 		this.workflowService = new WorkflowService(
 			workflowConfig,
 			async (taskId: string) => {
 				const task = await this.getTask(taskId);
 				if (!task) {
@@ -178,7 +186,7 @@ export class TaskMasterCore {
 	/**
 	 * Get current storage type
 	 */
-	getStorageType(): 'file' | 'api' | 'auto' {
+	getStorageType(): StorageType {
 		return this.taskService.getStorageType();
 	}
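The hunk above also shows WorkflowService being constructed with a task-retrieval callback instead of a reference back to TaskMasterCore. A simplified sketch of that dependency-injection shape — the real WorkflowService and Task types are richer than this:

// Simplified sketch of the callback-injection pattern; names reduced
// for illustration.
type TaskSketch = { id: string; title: string };

class WorkflowServiceSketch {
	constructor(
		private config: { projectRoot: string },
		private getTask: (taskId: string) => Promise<TaskSketch>
	) {}

	async startTask(taskId: string): Promise<TaskSketch> {
		// The injected loader throws when the task is missing, so the
		// workflow layer never touches storage directly.
		return this.getTask(taskId);
	}
}

// Wiring, roughly as TaskMasterCore does it:
const workflow = new WorkflowServiceSketch(
	{ projectRoot: process.cwd() },
	async (id) => ({ id, title: `stub task ${id}` })
);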

View File

@@ -0,0 +1,491 @@
export type Json =
| string
| number
| boolean
| null
| { [key: string]: Json | undefined }
| Json[];
export type Database = {
public: {
Tables: {
accounts: {
Row: {
created_at: string | null;
created_by: string | null;
email: string | null;
id: string;
is_personal_account: boolean;
name: string;
picture_url: string | null;
primary_owner_user_id: string;
public_data: Json;
slug: string | null;
updated_at: string | null;
updated_by: string | null;
};
Insert: {
created_at?: string | null;
created_by?: string | null;
email?: string | null;
id?: string;
is_personal_account?: boolean;
name: string;
picture_url?: string | null;
primary_owner_user_id?: string;
public_data?: Json;
slug?: string | null;
updated_at?: string | null;
updated_by?: string | null;
};
Update: {
created_at?: string | null;
created_by?: string | null;
email?: string | null;
id?: string;
is_personal_account?: boolean;
name?: string;
picture_url?: string | null;
primary_owner_user_id?: string;
public_data?: Json;
slug?: string | null;
updated_at?: string | null;
updated_by?: string | null;
};
Relationships: [];
};
brief: {
Row: {
account_id: string;
created_at: string;
created_by: string;
document_id: string;
id: string;
plan_generation_completed_at: string | null;
plan_generation_error: string | null;
plan_generation_started_at: string | null;
plan_generation_status: Database['public']['Enums']['plan_generation_status'];
status: Database['public']['Enums']['brief_status'];
updated_at: string;
};
Insert: {
account_id: string;
created_at?: string;
created_by: string;
document_id: string;
id?: string;
plan_generation_completed_at?: string | null;
plan_generation_error?: string | null;
plan_generation_started_at?: string | null;
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
status?: Database['public']['Enums']['brief_status'];
updated_at?: string;
};
Update: {
account_id?: string;
created_at?: string;
created_by?: string;
document_id?: string;
id?: string;
plan_generation_completed_at?: string | null;
plan_generation_error?: string | null;
plan_generation_started_at?: string | null;
plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
status?: Database['public']['Enums']['brief_status'];
updated_at?: string;
};
Relationships: [
{
foreignKeyName: 'brief_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'brief_document_id_fkey';
columns: ['document_id'];
isOneToOne: false;
referencedRelation: 'document';
referencedColumns: ['id'];
}
];
};
document: {
Row: {
account_id: string;
created_at: string;
created_by: string;
description: string | null;
document_name: string;
document_type: Database['public']['Enums']['document_type'];
file_path: string | null;
file_size: number | null;
id: string;
metadata: Json | null;
mime_type: string | null;
processed_at: string | null;
processing_error: string | null;
processing_status:
| Database['public']['Enums']['document_processing_status']
| null;
source_id: string | null;
source_type: string | null;
title: string;
updated_at: string;
};
Insert: {
account_id: string;
created_at?: string;
created_by: string;
description?: string | null;
document_name: string;
document_type?: Database['public']['Enums']['document_type'];
file_path?: string | null;
file_size?: number | null;
id?: string;
metadata?: Json | null;
mime_type?: string | null;
processed_at?: string | null;
processing_error?: string | null;
processing_status?:
| Database['public']['Enums']['document_processing_status']
| null;
source_id?: string | null;
source_type?: string | null;
title: string;
updated_at?: string;
};
Update: {
account_id?: string;
created_at?: string;
created_by?: string;
description?: string | null;
document_name?: string;
document_type?: Database['public']['Enums']['document_type'];
file_path?: string | null;
file_size?: number | null;
id?: string;
metadata?: Json | null;
mime_type?: string | null;
processed_at?: string | null;
processing_error?: string | null;
processing_status?:
| Database['public']['Enums']['document_processing_status']
| null;
source_id?: string | null;
source_type?: string | null;
title?: string;
updated_at?: string;
};
Relationships: [
{
foreignKeyName: 'document_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
}
];
};
tasks: {
Row: {
account_id: string;
actual_hours: number;
assignee_id: string | null;
brief_id: string | null;
completed_subtasks: number;
complexity: number | null;
created_at: string;
created_by: string;
description: string | null;
display_id: string | null;
document_id: string | null;
due_date: string | null;
estimated_hours: number | null;
id: string;
metadata: Json;
parent_task_id: string | null;
position: number;
priority: Database['public']['Enums']['task_priority'];
status: Database['public']['Enums']['task_status'];
subtask_position: number;
title: string;
total_subtasks: number;
updated_at: string;
updated_by: string;
};
Insert: {
account_id: string;
actual_hours?: number;
assignee_id?: string | null;
brief_id?: string | null;
completed_subtasks?: number;
complexity?: number | null;
created_at?: string;
created_by: string;
description?: string | null;
display_id?: string | null;
document_id?: string | null;
due_date?: string | null;
estimated_hours?: number | null;
id?: string;
metadata?: Json;
parent_task_id?: string | null;
position?: number;
priority?: Database['public']['Enums']['task_priority'];
status?: Database['public']['Enums']['task_status'];
subtask_position?: number;
title: string;
total_subtasks?: number;
updated_at?: string;
updated_by: string;
};
Update: {
account_id?: string;
actual_hours?: number;
assignee_id?: string | null;
brief_id?: string | null;
completed_subtasks?: number;
complexity?: number | null;
created_at?: string;
created_by?: string;
description?: string | null;
display_id?: string | null;
document_id?: string | null;
due_date?: string | null;
estimated_hours?: number | null;
id?: string;
metadata?: Json;
parent_task_id?: string | null;
position?: number;
priority?: Database['public']['Enums']['task_priority'];
status?: Database['public']['Enums']['task_status'];
subtask_position?: number;
title?: string;
total_subtasks?: number;
updated_at?: string;
updated_by?: string;
};
Relationships: [
{
foreignKeyName: 'tasks_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_brief_id_fkey';
columns: ['brief_id'];
isOneToOne: false;
referencedRelation: 'brief';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_document_id_fkey';
columns: ['document_id'];
isOneToOne: false;
referencedRelation: 'document';
referencedColumns: ['id'];
},
{
foreignKeyName: 'tasks_parent_task_id_fkey';
columns: ['parent_task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
}
];
};
task_dependencies: {
Row: {
account_id: string;
created_at: string;
depends_on_task_id: string;
id: string;
task_id: string;
};
Insert: {
account_id: string;
created_at?: string;
depends_on_task_id: string;
id?: string;
task_id: string;
};
Update: {
account_id?: string;
created_at?: string;
depends_on_task_id?: string;
id?: string;
task_id?: string;
};
Relationships: [
{
foreignKeyName: 'task_dependencies_account_id_fkey';
columns: ['account_id'];
isOneToOne: false;
referencedRelation: 'accounts';
referencedColumns: ['id'];
},
{
foreignKeyName: 'task_dependencies_depends_on_task_id_fkey';
columns: ['depends_on_task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
},
{
foreignKeyName: 'task_dependencies_task_id_fkey';
columns: ['task_id'];
isOneToOne: false;
referencedRelation: 'tasks';
referencedColumns: ['id'];
}
];
};
user_accounts: {
Row: {
id: string | null;
name: string | null;
picture_url: string | null;
role: string | null;
slug: string | null;
};
Insert: {
id?: string | null;
name?: string | null;
picture_url?: string | null;
role?: string | null;
slug?: string | null;
};
Update: {
id?: string | null;
name?: string | null;
picture_url?: string | null;
role?: string | null;
slug?: string | null;
};
Relationships: [];
};
};
Views: {
[_ in never]: never;
};
Functions: {
[_ in never]: never;
};
Enums: {
brief_status:
| 'draft'
| 'refining'
| 'aligned'
| 'delivering'
| 'delivered'
| 'done'
| 'archived';
document_processing_status: 'pending' | 'processing' | 'ready' | 'failed';
document_type:
| 'brief'
| 'blueprint'
| 'file'
| 'note'
| 'transcript'
| 'generated_plan'
| 'generated_task'
| 'generated_summary'
| 'method'
| 'task';
plan_generation_status:
| 'not_started'
| 'generating'
| 'completed'
| 'failed';
task_priority: 'low' | 'medium' | 'high' | 'urgent';
task_status: 'todo' | 'in_progress' | 'done';
};
CompositeTypes: {
[_ in never]: never;
};
};
};
export type Tables<
PublicTableNameOrOptions extends
| keyof (Database['public']['Tables'] & Database['public']['Views'])
| { schema: keyof Database },
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
Database[PublicTableNameOrOptions['schema']]['Views'])
: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
Row: infer R;
}
? R
: never
: PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
Database['public']['Views'])
? (Database['public']['Tables'] &
Database['public']['Views'])[PublicTableNameOrOptions] extends {
Row: infer R;
}
? R
: never
: never;
export type TablesInsert<
PublicTableNameOrOptions extends
| keyof Database['public']['Tables']
| { schema: keyof Database },
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
Insert: infer I;
}
? I
: never
: PublicTableNameOrOptions extends keyof Database['public']['Tables']
? Database['public']['Tables'][PublicTableNameOrOptions] extends {
Insert: infer I;
}
? I
: never
: never;
export type TablesUpdate<
PublicTableNameOrOptions extends
| keyof Database['public']['Tables']
| { schema: keyof Database },
TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
Update: infer U;
}
? U
: never
: PublicTableNameOrOptions extends keyof Database['public']['Tables']
? Database['public']['Tables'][PublicTableNameOrOptions] extends {
Update: infer U;
}
? U
: never
: never;
export type Enums<
PublicEnumNameOrOptions extends
| keyof Database['public']['Enums']
| { schema: keyof Database },
EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
? keyof Database[PublicEnumNameOrOptions['schema']]['Enums']
: never = never
> = PublicEnumNameOrOptions extends { schema: keyof Database }
? Database[PublicEnumNameOrOptions['schema']]['Enums'][EnumName]
: PublicEnumNameOrOptions extends keyof Database['public']['Enums']
? Database['public']['Enums'][PublicEnumNameOrOptions]
: never;
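Illustrative use of the generated helpers above; the import path is an assumption about where this file lands in the package:

// Tables<> resolves Row shapes, TablesInsert<> the insert payloads,
// Enums<> the enum unions defined in the Database type.
import type { Tables, TablesInsert, Enums } from './database.types.js';

type TaskRow = Tables<'tasks'>; // Row shape of public.tasks
type NewTask = TablesInsert<'tasks'>; // Shape accepted on insert
type TaskStatusEnum = Enums<'task_status'>; // 'todo' | 'in_progress' | 'done'

// A minimal insert payload; most other columns are optional or defaulted.
const draft: Pick<
	NewTask,
	'title' | 'status' | 'account_id' | 'created_by' | 'updated_by'
> = {
	title: 'Wire up API storage',
	status: 'todo',
	account_id: '00000000-0000-0000-0000-000000000000',
	created_by: '00000000-0000-0000-0000-000000000000',
	updated_by: '00000000-0000-0000-0000-000000000000'
};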

View File

@@ -2,6 +2,14 @@
  * Core type definitions for Task Master
  */

+/**
+ * Storage type options
+ * - 'file': Local file system storage
+ * - 'api': Remote API storage (Hamster integration)
+ * - 'auto': Automatically detect based on auth status
+ */
+export type StorageType = 'file' | 'api' | 'auto';
+
 // ============================================================================
 // Type Literals
 // ============================================================================
@@ -96,6 +104,15 @@ export interface TaskCollection {
 	metadata: TaskMetadata;
 }

+/**
+ * Task tag for organizing tasks
+ */
+export interface TaskTag {
+	name: string;
+	tasks: string[]; // Task IDs belonging to this tag
+	metadata: Record<string, any>;
+}
+
 // ============================================================================
 // Utility Types
 // ============================================================================

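A quick illustration of the two additions; the values match the unions defined above:

import type { StorageType, TaskTag } from './types/index.js';

// Exhaustive narrowing over the new alias:
function describeStorage(type: StorageType): string {
	switch (type) {
		case 'file':
			return 'local file system';
		case 'api':
			return 'remote API (Hamster)';
		case 'auto':
			return 'detected from auth status';
	}
}

// A tag is a named set of task IDs plus free-form metadata:
const backlog: TaskTag = {
	name: 'backlog',
	tasks: ['1', '2'],
	metadata: {}
};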
View File

@@ -7,6 +7,7 @@
"declarationMap": true, "declarationMap": true,
"sourceMap": true, "sourceMap": true,
"outDir": "./dist", "outDir": "./dist",
"baseUrl": ".",
"rootDir": "./src", "rootDir": "./src",
"strict": true, "strict": true,
"noImplicitAny": true, "noImplicitAny": true,
@@ -27,21 +28,7 @@
"moduleDetection": "force", "moduleDetection": "force",
"types": ["node"], "types": ["node"],
"resolveJsonModule": true, "resolveJsonModule": true,
"isolatedModules": true, "isolatedModules": true
"paths": {
"@/*": ["./src/*"],
"@/auth": ["./src/auth"],
"@/config": ["./src/config"],
"@/errors": ["./src/errors"],
"@/interfaces": ["./src/interfaces"],
"@/logger": ["./src/logger"],
"@/parser": ["./src/parser"],
"@/providers": ["./src/providers"],
"@/services": ["./src/services"],
"@/storage": ["./src/storage"],
"@/types": ["./src/types"],
"@/utils": ["./src/utils"]
}
}, },
"include": ["src/**/*"], "include": ["src/**/*"],
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"] "exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]

View File

@@ -198,11 +198,13 @@ jest.unstable_mockModule('fs', () => ({
 	default: {
 		existsSync: jest.fn(() => false),
 		readFileSync: jest.fn(),
-		writeFileSync: mockWriteFileSync
+		writeFileSync: mockWriteFileSync,
+		unlinkSync: jest.fn()
 	},
 	existsSync: jest.fn(() => false),
 	readFileSync: jest.fn(),
-	writeFileSync: mockWriteFileSync
+	writeFileSync: mockWriteFileSync,
+	unlinkSync: jest.fn()
 }));

 jest.unstable_mockModule(

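With unlinkSync stubbed, tests can assert file deletions. A minimal sketch, assuming Jest's ESM unstable_mockModule mode as used in this test file (the test body is illustrative, not from the diff):

import { jest, test, expect } from '@jest/globals';

const mockUnlink = jest.fn();
jest.unstable_mockModule('fs', () => ({
	default: { unlinkSync: mockUnlink },
	unlinkSync: mockUnlink
}));

test('removes the file', async () => {
	// Dynamic import so the mock above is picked up (ESM requirement).
	const fs = await import('fs');
	fs.unlinkSync('/tmp/example');
	expect(mockUnlink).toHaveBeenCalledWith('/tmp/example');
});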
View File

@@ -1,51 +1,20 @@
 import { defineConfig } from 'tsup';
-import { dotenvLoad } from 'dotenv-mono';
-
-// Load .env from root level (monorepo support)
-dotenvLoad();
-
-// Get all TM_PUBLIC_* env variables for build-time injection
-const getBuildTimeEnvs = () => {
-	const envs: Record<string, string> = {};
-	for (const [key, value] of Object.entries(process.env)) {
-		if (key.startsWith('TM_PUBLIC_')) {
-			// Return the actual value, not JSON.stringify'd
-			envs[key] = value || '';
-		}
-	}
-	return envs;
-};
-
-export default defineConfig({
-	entry: {
-		'task-master': 'bin/task-master.js',
-		'mcp-server': 'mcp-server/server.js'
-	},
-	format: ['esm'],
-	target: 'node18',
-	splitting: false,
-	sourcemap: true,
-	clean: true,
-	bundle: true, // Bundle everything into one file
-	outDir: 'dist',
-	publicDir: 'public',
-	// Handle TypeScript imports transparently
-	loader: {
-		'.js': 'jsx',
-		'.ts': 'ts'
-	},
-	// Replace process.env.TM_PUBLIC_* with actual values at build time
-	env: getBuildTimeEnvs(),
-	esbuildOptions(options) {
-		options.platform = 'node';
-		// Allow importing TypeScript from JavaScript
-		options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
-	},
-	// Bundle our monorepo packages but keep node_modules external
-	noExternal: [/@tm\/.*/],
-	// Don't bundle any other dependencies (auto-external all node_modules)
-	// This regex matches anything that doesn't start with . or /
-	external: [/^[^./]/],
-	// Add success message for debugging
-	onSuccess: 'echo "✅ Build completed successfully"'
-});
+import {
+	executableConfig,
+	mergeConfig,
+	commonExternals
+} from '@tm/build-config';
+
+export default defineConfig(
+	mergeConfig(executableConfig, {
+		entry: {
+			'task-master': 'bin/task-master.js',
+			'mcp-server': 'mcp-server/server.js'
+		},
+		outDir: 'dist',
+		publicDir: 'public',
+		// Bundle our monorepo packages but keep node_modules external
+		noExternal: [/@tm\/.*/],
+		external: commonExternals
+	})
+);
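For readers unfamiliar with @tm/build-config: the shared defaults (format, target, sourcemaps, and so on) presumably move into executableConfig, and mergeConfig overlays the package-specific options on top. A naive sketch of such a merge, purely as an assumption about its behavior — the real helper may merge nested fields more carefully:

// Pure assumption about @tm/build-config's mergeConfig; Options is tsup's
// real exported config type.
import type { Options } from 'tsup';

function mergeConfigSketch(base: Options, overrides: Options): Options {
	// Shallow merge: package-level keys win over shared defaults.
	return { ...base, ...overrides };
}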