Compare commits

4 Commits: docs/auto-...ralph/feat

| Author | SHA1 | Date |
|---|---|---|
| | d4826e0258 | |
| | b9e3eecafe | |
| | 6dd910fc52 | |
| | 19ec52181d | |
@@ -1,5 +1,5 @@
 {
-  "mode": "exit",
+  "mode": "pre",
   "tag": "rc",
   "initialVersions": {
     "task-master-ai": "0.25.1",
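The hunk above matches the shape of a changesets pre-release manifest (typically `.changeset/pre.json`; the filename was not captured, so treat that identification as an assumption). A minimal sketch of the full file shape:

```json
{
  "mode": "pre",
  "tag": "rc",
  "initialVersions": {
    "task-master-ai": "0.25.1"
  },
  "changesets": []
}
```

In changesets, `"mode": "pre"` with `"tag": "rc"` publishes versions like `x.y.z-rc.N` under the `rc` dist-tag, while `"mode": "exit"` records that pre-release mode has been left.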
.github/workflows/ci.yml (vendored) — 110 changes

@@ -9,109 +9,70 @@ on:
     branches:
       - main
       - next
   workflow_dispatch:

 permissions:
   contents: read

 env:
   DO_NOT_TRACK: 1
   NODE_ENV: development

 jobs:
-  # Fast checks that can run in parallel
-  format-check:
-    name: Format Check
+  setup:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
         with:
-          fetch-depth: 2
+          fetch-depth: 0

       - uses: actions/setup-node@v4
         with:
           node-version: 20
-          cache: "npm"
+          cache: 'npm'

-      - name: Install dependencies
-        run: npm install --frozen-lockfile --prefer-offline
-        timeout-minutes: 5
+      - name: Install Dependencies
+        id: install
+        run: npm ci
+        timeout-minutes: 2
+
+      - name: Cache node_modules
+        uses: actions/cache@v4
+        with:
+          path: node_modules
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}
+
+  format-check:
+    needs: setup
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+
+      - name: Restore node_modules
+        uses: actions/cache@v4
+        with:
+          path: node_modules
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}

       - name: Format Check
         run: npm run format-check
         env:
           FORCE_COLOR: 1

-  typecheck:
-    name: Typecheck
-    timeout-minutes: 10
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 2
-
-      - uses: actions/setup-node@v4
-        with:
-          node-version: 20
-          cache: "npm"
-
-      - name: Install dependencies
-        run: npm install --frozen-lockfile --prefer-offline
-        timeout-minutes: 5
-
-      - name: Typecheck
-        run: npm run typecheck
-        env:
-          FORCE_COLOR: 1
-
-  # Build job to ensure everything compiles
-  build:
-    name: Build
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 2
-
-      - uses: actions/setup-node@v4
-        with:
-          node-version: 20
-          cache: "npm"
-
-      - name: Install dependencies
-        run: npm install --frozen-lockfile --prefer-offline
-        timeout-minutes: 5
-
-      - name: Build
-        run: npm run build
-        env:
-          NODE_ENV: production
-          FORCE_COLOR: 1
-
   test:
-    name: Test
+    timeout-minutes: 15
+    needs: setup
     runs-on: ubuntu-latest
-    needs: [format-check, typecheck, build]
     steps:
       - uses: actions/checkout@v4
-        with:
-          fetch-depth: 2

       - uses: actions/setup-node@v4
         with:
           node-version: 20
-          cache: "npm"
-
-      - name: Install dependencies
-        run: npm install --frozen-lockfile --prefer-offline
-        timeout-minutes: 5
-
-      - name: Build packages (required for tests)
-        run: npm run build:packages
-        env:
-          NODE_ENV: production
+      - name: Restore node_modules
+        uses: actions/cache@v4
+        with:
+          path: node_modules
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }}

       - name: Run Tests
         run: |
@@ -120,6 +81,7 @@ jobs:
           NODE_ENV: test
           CI: true
           FORCE_COLOR: 1
+        timeout-minutes: 10

       - name: Upload Test Results
         if: always()
.github/workflows/claude-docs-trigger.yml (vendored) — 57 changes

@@ -1,57 +0,0 @@
-name: Trigger Claude Documentation Update
-
-on:
-  push:
-    branches:
-      - next
-    paths-ignore:
-      - "apps/docs/**"
-      - "*.md"
-      - ".github/workflows/**"
-
-jobs:
-  trigger-docs-update:
-    # Only run if changes were merged (not direct pushes from bots)
-    if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      actions: write
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 2 # Need previous commit for comparison
-
-      - name: Get changed files
-        id: changed-files
-        run: |
-          echo "Changed files in this push:"
-          git diff --name-only HEAD^ HEAD | tee changed_files.txt
-
-          # Store changed files for Claude to analyze (escaped for JSON)
-          CHANGED_FILES=$(git diff --name-only HEAD^ HEAD | jq -Rs .)
-          echo "changed_files=$CHANGED_FILES" >> $GITHUB_OUTPUT
-
-          # Get the commit message (escaped for JSON)
-          COMMIT_MSG=$(git log -1 --pretty=%B | jq -Rs .)
-          echo "commit_message=$COMMIT_MSG" >> $GITHUB_OUTPUT
-
-          # Get diff for documentation context (escaped for JSON)
-          COMMIT_DIFF=$(git diff HEAD^ HEAD --stat | jq -Rs .)
-          echo "commit_diff=$COMMIT_DIFF" >> $GITHUB_OUTPUT
-
-          # Get commit SHA
-          echo "commit_sha=${{ github.sha }}" >> $GITHUB_OUTPUT
-
-      - name: Trigger Claude workflow
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          # Trigger the Claude docs updater workflow with the change information
-          gh workflow run claude-docs-updater.yml \
-            --ref next \
-            -f commit_sha="${{ steps.changed-files.outputs.commit_sha }}" \
-            -f commit_message=${{ steps.changed-files.outputs.commit_message }} \
-            -f changed_files=${{ steps.changed-files.outputs.changed_files }} \
-            -f commit_diff=${{ steps.changed-files.outputs.commit_diff }}
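The deleted trigger leans on `jq -Rs .` to JSON-escape multi-line values before handing them to `gh workflow run -f`. A minimal standalone illustration of that filter (not tied to this repo):

```bash
# -R reads raw text rather than JSON, -s slurps all input into one string,
# and the identity filter (.) prints it back as a JSON string literal.
printf 'line1\nline2\n' | jq -Rs .
# => "line1\nline2\n"
```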
.github/workflows/claude-docs-updater.yml (vendored) — 65 changes

@@ -1,27 +1,18 @@
 name: Claude Documentation Updater

 on:
-  workflow_dispatch:
-    inputs:
-      commit_sha:
-        description: 'The commit SHA that triggered this update'
-        required: true
-        type: string
-      commit_message:
-        description: 'The commit message'
-        required: true
-        type: string
-      changed_files:
-        description: 'List of changed files'
-        required: true
-        type: string
-      commit_diff:
-        description: 'Diff summary of changes'
-        required: true
-        type: string
+  push:
+    branches:
+      - next
+    paths-ignore:
+      - "apps/docs/**"
+      - "*.md"
+      - ".github/workflows/**"

 jobs:
   update-docs:
     # Only run if changes were merged (not direct pushes from bots)
     if: github.actor != 'github-actions[bot]' && github.actor != 'dependabot[bot]'
     runs-on: ubuntu-latest
     permissions:
       contents: write
@@ -31,8 +22,28 @@
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           ref: next
-          fetch-depth: 0 # Need full history to checkout specific commit
+          fetch-depth: 2 # Need previous commit for comparison
+
+      - name: Get changed files
+        id: changed-files
+        run: |
+          echo "Changed files in this push:"
+          git diff --name-only HEAD^ HEAD | tee changed_files.txt
+
+          # Store changed files for Claude to analyze
+          echo "changed_files<<EOF" >> $GITHUB_OUTPUT
+          git diff --name-only HEAD^ HEAD >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+
+          # Get the commit message and changes summary
+          echo "commit_message<<EOF" >> $GITHUB_OUTPUT
+          git log -1 --pretty=%B >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+
+          # Get diff for documentation context
+          echo "commit_diff<<EOF" >> $GITHUB_OUTPUT
+          git diff HEAD^ HEAD --stat >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT

       - name: Create docs update branch
         id: create-branch
@@ -60,12 +71,12 @@
           You are a documentation specialist. Analyze the recent changes pushed to the 'next' branch and update the documentation accordingly.

           Recent changes:
-          - Commit: ${{ inputs.commit_message }}
+          - Commit: ${{ steps.changed-files.outputs.commit_message }}
           - Changed files:
-          ${{ inputs.changed_files }}
+          ${{ steps.changed-files.outputs.changed_files }}

           - Changes summary:
-          ${{ inputs.commit_diff }}
+          ${{ steps.changed-files.outputs.commit_diff }}

           Your task:
           1. Analyze the changes to understand what functionality was added, modified, or removed
@@ -102,7 +113,7 @@

           This PR was automatically generated to update documentation based on recent changes.

-          Original commit: ${{ inputs.commit_message }}
+          Original commit: ${{ steps.changed-files.outputs.commit_message }}

           Co-authored-by: Claude <claude-assistant@anthropic.com>"
           fi
@@ -122,12 +133,12 @@
           This PR automatically updates documentation based on recent changes merged to the \`next\` branch.

           ### Original Changes
-          **Commit:** ${{ inputs.commit_sha }}
-          **Message:** ${{ inputs.commit_message }}
+          **Commit:** ${{ github.sha }}
+          **Message:** ${{ steps.changed-files.outputs.commit_message }}

           ### Changed Files in Original Commit
           \`\`\`
-          ${{ inputs.changed_files }}
+          ${{ steps.changed-files.outputs.changed_files }}
           \`\`\`

           ### Documentation Updates
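The inlined "Get changed files" step uses the heredoc form of `$GITHUB_OUTPUT`, which is GitHub Actions' documented syntax for multi-line step outputs (the plain `name=value` form only works for single-line values):

```bash
# The value is wrapped between "name<<DELIMITER" and a line holding DELIMITER.
echo "changed_files<<EOF" >> "$GITHUB_OUTPUT"
git diff --name-only HEAD^ HEAD >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
```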
@@ -4,11 +4,12 @@
   "description": "Task Master CLI - Command line interface for task management",
   "type": "module",
   "main": "./dist/index.js",
-  "types": "./src/index.ts",
+  "types": "./dist/index.d.ts",
   "exports": {
     ".": {
       "types": "./src/index.ts",
-      "import": "./dist/index.js"
+      "import": "./dist/index.js",
+      "require": "./dist/index.js"
     }
   },
   "files": ["dist", "README.md"],
@@ -19,26 +20,21 @@
     "lint": "biome check src",
     "format": "biome format --write src",
     "test": "vitest run",
-    "test:watch": "vitest",
-    "test:coverage": "vitest run --coverage",
-    "test:unit": "vitest run -t unit",
-    "test:integration": "vitest run -t integration",
-    "test:e2e": "vitest run --dir tests/e2e",
-    "test:ci": "vitest run --coverage --reporter=dot"
+    "test:watch": "vitest"
   },
   "dependencies": {
     "@tm/core": "*",
-    "@tm/workflow-engine": "*",
     "boxen": "^7.1.1",
-    "chalk": "5.6.2",
+    "chalk": "^5.3.0",
     "cli-table3": "^0.6.5",
     "commander": "^12.1.0",
     "inquirer": "^9.2.10",
+    "open": "^10.2.0",
     "ora": "^8.1.0"
   },
   "devDependencies": {
     "@biomejs/biome": "^1.9.4",
-    "@tm/build-config": "*",
     "@types/inquirer": "^9.0.3",
     "@types/node": "^22.10.5",
     "tsup": "^8.3.0",
@@ -1,570 +0,0 @@
-/**
- * @fileoverview Context command for managing org/brief selection
- * Provides a clean interface for workspace context management
- */
-
-import { Command } from 'commander';
-import chalk from 'chalk';
-import inquirer from 'inquirer';
-import ora from 'ora';
-import {
-  AuthManager,
-  AuthenticationError,
-  type UserContext
-} from '@tm/core/auth';
-import * as ui from '../utils/ui.js';
-
-/**
- * Result type from context command
- */
-export interface ContextResult {
-  success: boolean;
-  action: 'show' | 'select-org' | 'select-brief' | 'clear' | 'set';
-  context?: UserContext;
-  message?: string;
-}
-
-/**
- * ContextCommand extending Commander's Command class
- * Manages user's workspace context (org/brief selection)
- */
-export class ContextCommand extends Command {
-  private authManager: AuthManager;
-  private lastResult?: ContextResult;
-
-  constructor(name?: string) {
-    super(name || 'context');
-
-    // Initialize auth manager
-    this.authManager = AuthManager.getInstance();
-
-    // Configure the command
-    this.description(
-      'Manage workspace context (organization and brief selection)'
-    );
-
-    // Add subcommands
-    this.addOrgCommand();
-    this.addBriefCommand();
-    this.addClearCommand();
-    this.addSetCommand();
-
-    // Default action shows current context
-    this.action(async () => {
-      await this.executeShow();
-    });
-  }
-
-  /**
-   * Add org selection subcommand
-   */
-  private addOrgCommand(): void {
-    this.command('org')
-      .description('Select an organization')
-      .action(async () => {
-        await this.executeSelectOrg();
-      });
-  }
-
-  /**
-   * Add brief selection subcommand
-   */
-  private addBriefCommand(): void {
-    this.command('brief')
-      .description('Select a brief within the current organization')
-      .action(async () => {
-        await this.executeSelectBrief();
-      });
-  }
-
-  /**
-   * Add clear subcommand
-   */
-  private addClearCommand(): void {
-    this.command('clear')
-      .description('Clear all context selections')
-      .action(async () => {
-        await this.executeClear();
-      });
-  }
-
-  /**
-   * Add set subcommand for direct context setting
-   */
-  private addSetCommand(): void {
-    this.command('set')
-      .description('Set context directly')
-      .option('--org <id>', 'Organization ID')
-      .option('--org-name <name>', 'Organization name')
-      .option('--brief <id>', 'Brief ID')
-      .option('--brief-name <name>', 'Brief name')
-      .action(async (options) => {
-        await this.executeSet(options);
-      });
-  }
-
-  /**
-   * Execute show current context
-   */
-  private async executeShow(): Promise<void> {
-    try {
-      const result = this.displayContext();
-      this.setLastResult(result);
-    } catch (error: any) {
-      this.handleError(error);
-      process.exit(1);
-    }
-  }
-
-  /**
-   * Display current context
-   */
-  private displayContext(): ContextResult {
-    // Check authentication first
-    if (!this.authManager.isAuthenticated()) {
-      console.log(chalk.yellow('✗ Not authenticated'));
-      console.log(chalk.gray('\n  Run "tm auth login" to authenticate first'));
-
-      return {
-        success: false,
-        action: 'show',
-        message: 'Not authenticated'
-      };
-    }
-
-    const context = this.authManager.getContext();
-
-    console.log(chalk.cyan('\n🌍 Workspace Context\n'));
-
-    if (context && (context.orgId || context.briefId)) {
-      if (context.orgName || context.orgId) {
-        console.log(chalk.green('✓ Organization'));
-        if (context.orgName) {
-          console.log(chalk.white(`  ${context.orgName}`));
-        }
-        if (context.orgId) {
-          console.log(chalk.gray(`  ID: ${context.orgId}`));
-        }
-      }
-
-      if (context.briefName || context.briefId) {
-        console.log(chalk.green('\n✓ Brief'));
-        if (context.briefName) {
-          console.log(chalk.white(`  ${context.briefName}`));
-        }
-        if (context.briefId) {
-          console.log(chalk.gray(`  ID: ${context.briefId}`));
-        }
-      }
-
-      if (context.updatedAt) {
-        console.log(
-          chalk.gray(
-            `\n  Last updated: ${new Date(context.updatedAt).toLocaleString()}`
-          )
-        );
-      }
-
-      return {
-        success: true,
-        action: 'show',
-        context,
-        message: 'Context loaded'
-      };
-    } else {
-      console.log(chalk.yellow('✗ No context selected'));
-      console.log(
-        chalk.gray('\n  Run "tm context org" to select an organization')
-      );
-      console.log(chalk.gray('  Run "tm context brief" to select a brief'));
-
-      return {
-        success: true,
-        action: 'show',
-        message: 'No context selected'
-      };
-    }
-  }
-
-  /**
-   * Execute org selection
-   */
-  private async executeSelectOrg(): Promise<void> {
-    try {
-      // Check authentication
-      if (!this.authManager.isAuthenticated()) {
-        ui.displayError('Not authenticated. Run "tm auth login" first.');
-        process.exit(1);
-      }
-
-      const result = await this.selectOrganization();
-      this.setLastResult(result);
-
-      if (!result.success) {
-        process.exit(1);
-      }
-    } catch (error: any) {
-      this.handleError(error);
-      process.exit(1);
-    }
-  }
-
-  /**
-   * Select an organization interactively
-   */
-  private async selectOrganization(): Promise<ContextResult> {
-    const spinner = ora('Fetching organizations...').start();
-
-    try {
-      // Fetch organizations from API
-      const organizations = await this.authManager.getOrganizations();
-      spinner.stop();
-
-      if (organizations.length === 0) {
-        ui.displayWarning('No organizations available');
-        return {
-          success: false,
-          action: 'select-org',
-          message: 'No organizations available'
-        };
-      }
-
-      // Prompt for selection
-      const { selectedOrg } = await inquirer.prompt([
-        {
-          type: 'list',
-          name: 'selectedOrg',
-          message: 'Select an organization:',
-          choices: organizations.map((org) => ({
-            name: org.name,
-            value: org
-          }))
-        }
-      ]);
-
-      // Update context
-      await this.authManager.updateContext({
-        orgId: selectedOrg.id,
-        orgName: selectedOrg.name,
-        // Clear brief when changing org
-        briefId: undefined,
-        briefName: undefined
-      });
-
-      ui.displaySuccess(`Selected organization: ${selectedOrg.name}`);
-
-      return {
-        success: true,
-        action: 'select-org',
-        context: this.authManager.getContext() || undefined,
-        message: `Selected organization: ${selectedOrg.name}`
-      };
-    } catch (error) {
-      spinner.fail('Failed to fetch organizations');
-      throw error;
-    }
-  }
-
-  /**
-   * Execute brief selection
-   */
-  private async executeSelectBrief(): Promise<void> {
-    try {
-      // Check authentication
-      if (!this.authManager.isAuthenticated()) {
-        ui.displayError('Not authenticated. Run "tm auth login" first.');
-        process.exit(1);
-      }
-
-      // Check if org is selected
-      const context = this.authManager.getContext();
-      if (!context?.orgId) {
-        ui.displayError(
-          'No organization selected. Run "tm context org" first.'
-        );
-        process.exit(1);
-      }
-
-      const result = await this.selectBrief(context.orgId);
-      this.setLastResult(result);
-
-      if (!result.success) {
-        process.exit(1);
-      }
-    } catch (error: any) {
-      this.handleError(error);
-      process.exit(1);
-    }
-  }
-
-  /**
-   * Select a brief within the current organization
-   */
-  private async selectBrief(orgId: string): Promise<ContextResult> {
-    const spinner = ora('Fetching briefs...').start();
-
-    try {
-      // Fetch briefs from API
-      const briefs = await this.authManager.getBriefs(orgId);
-      spinner.stop();
-
-      if (briefs.length === 0) {
-        ui.displayWarning('No briefs available in this organization');
-        return {
-          success: false,
-          action: 'select-brief',
-          message: 'No briefs available'
-        };
-      }
-
-      // Prompt for selection
-      const { selectedBrief } = await inquirer.prompt([
-        {
-          type: 'list',
-          name: 'selectedBrief',
-          message: 'Select a brief:',
-          choices: [
-            { name: '(No brief - organization level)', value: null },
-            ...briefs.map((brief) => ({
-              name: `Brief ${brief.id.slice(0, 8)} (${new Date(brief.createdAt).toLocaleDateString()})`,
-              value: brief
-            }))
-          ]
-        }
-      ]);
-
-      if (selectedBrief) {
-        // Update context with brief
-        const briefName = `Brief ${selectedBrief.id.slice(0, 8)}`;
-        await this.authManager.updateContext({
-          briefId: selectedBrief.id,
-          briefName: briefName
-        });
-
-        ui.displaySuccess(`Selected brief: ${briefName}`);
-
-        return {
-          success: true,
-          action: 'select-brief',
-          context: this.authManager.getContext() || undefined,
-          message: `Selected brief: ${selectedBrief.name}`
-        };
-      } else {
-        // Clear brief selection
-        await this.authManager.updateContext({
-          briefId: undefined,
-          briefName: undefined
-        });
-
-        ui.displaySuccess('Cleared brief selection (organization level)');
-
-        return {
-          success: true,
-          action: 'select-brief',
-          context: this.authManager.getContext() || undefined,
-          message: 'Cleared brief selection'
-        };
-      }
-    } catch (error) {
-      spinner.fail('Failed to fetch briefs');
-      throw error;
-    }
-  }
-
-  /**
-   * Execute clear context
-   */
-  private async executeClear(): Promise<void> {
-    try {
-      // Check authentication
-      if (!this.authManager.isAuthenticated()) {
-        ui.displayError('Not authenticated. Run "tm auth login" first.');
-        process.exit(1);
-      }
-
-      const result = await this.clearContext();
-      this.setLastResult(result);
-
-      if (!result.success) {
-        process.exit(1);
-      }
-    } catch (error: any) {
-      this.handleError(error);
-      process.exit(1);
-    }
-  }
-
-  /**
-   * Clear all context selections
-   */
-  private async clearContext(): Promise<ContextResult> {
-    try {
-      await this.authManager.clearContext();
-      ui.displaySuccess('Context cleared');
-
-      return {
-        success: true,
-        action: 'clear',
-        message: 'Context cleared'
-      };
-    } catch (error) {
-      ui.displayError(`Failed to clear context: ${(error as Error).message}`);
-
-      return {
-        success: false,
-        action: 'clear',
-        message: `Failed to clear context: ${(error as Error).message}`
-      };
-    }
-  }
-
-  /**
-   * Execute set context with options
-   */
-  private async executeSet(options: any): Promise<void> {
-    try {
-      // Check authentication
-      if (!this.authManager.isAuthenticated()) {
-        ui.displayError('Not authenticated. Run "tm auth login" first.');
-        process.exit(1);
-      }
-
-      const result = await this.setContext(options);
-      this.setLastResult(result);
-
-      if (!result.success) {
-        process.exit(1);
-      }
-    } catch (error: any) {
-      this.handleError(error);
-      process.exit(1);
-    }
-  }
-
-  /**
-   * Set context directly from options
-   */
-  private async setContext(options: any): Promise<ContextResult> {
-    try {
-      const context: Partial<UserContext> = {};
-
-      if (options.org) {
-        context.orgId = options.org;
-      }
-      if (options.orgName) {
-        context.orgName = options.orgName;
-      }
-      if (options.brief) {
-        context.briefId = options.brief;
-      }
-      if (options.briefName) {
-        context.briefName = options.briefName;
-      }
-
-      if (Object.keys(context).length === 0) {
-        ui.displayWarning('No context options provided');
-        return {
-          success: false,
-          action: 'set',
-          message: 'No context options provided'
-        };
-      }
-
-      await this.authManager.updateContext(context);
-      ui.displaySuccess('Context updated');
-
-      // Display what was set
-      if (context.orgName || context.orgId) {
-        console.log(
-          chalk.gray(`  Organization: ${context.orgName || context.orgId}`)
-        );
-      }
-      if (context.briefName || context.briefId) {
-        console.log(
-          chalk.gray(`  Brief: ${context.briefName || context.briefId}`)
-        );
-      }
-
-      return {
-        success: true,
-        action: 'set',
-        context: this.authManager.getContext() || undefined,
-        message: 'Context updated'
-      };
-    } catch (error) {
-      ui.displayError(`Failed to set context: ${(error as Error).message}`);
-
-      return {
-        success: false,
-        action: 'set',
-        message: `Failed to set context: ${(error as Error).message}`
-      };
-    }
-  }
-
-  /**
-   * Handle errors
-   */
-  private handleError(error: any): void {
-    if (error instanceof AuthenticationError) {
-      console.error(chalk.red(`\n✗ ${error.message}`));
-
-      if (error.code === 'NOT_AUTHENTICATED') {
-        ui.displayWarning('Please authenticate first: tm auth login');
-      }
-    } else {
-      const msg = error?.message ?? String(error);
-      console.error(chalk.red(`Error: ${msg}`));
-
-      if (error.stack && process.env.DEBUG) {
-        console.error(chalk.gray(error.stack));
-      }
-    }
-  }
-
-  /**
-   * Set the last result for programmatic access
-   */
-  private setLastResult(result: ContextResult): void {
-    this.lastResult = result;
-  }
-
-  /**
-   * Get the last result (for programmatic usage)
-   */
-  getLastResult(): ContextResult | undefined {
-    return this.lastResult;
-  }
-
-  /**
-   * Get current context (for programmatic usage)
-   */
-  getContext(): UserContext | null {
-    return this.authManager.getContext();
-  }
-
-  /**
-   * Clean up resources
-   */
-  async cleanup(): Promise<void> {
-    // No resources to clean up for context command
-  }
-
-  /**
-   * Static method to register this command on an existing program
-   */
-  static registerOn(program: Command): Command {
-    const contextCommand = new ContextCommand();
-    program.addCommand(contextCommand);
-    return contextCommand;
-  }
-
-  /**
-   * Alternative registration that returns the command for chaining
-   */
-  static register(program: Command, name?: string): ContextCommand {
-    const contextCommand = new ContextCommand(name);
-    program.addCommand(contextCommand);
-    return contextCommand;
-  }
-}
@@ -15,7 +15,6 @@ import {
   STATUS_ICONS,
   type OutputFormat
 } from '@tm/core';
-import type { StorageType } from '@tm/core/types';
 import * as ui from '../utils/ui.js';

 /**
@@ -38,7 +37,7 @@ export interface ListTasksResult {
   total: number;
   filtered: number;
   tag?: string;
-  storageType: Exclude<StorageType, 'auto'>;
+  storageType: 'file' | 'api';
 }

 /**
@@ -173,13 +172,6 @@ export class ListTasksCommand extends Command {
       includeSubtasks: options.withSubtasks
     });

-    // Runtime guard to prevent 'auto' from reaching CLI consumers
-    if (result.storageType === 'auto') {
-      throw new Error(
-        'Internal error: unresolved storage type reached CLI. Please check TaskService.getStorageType() implementation.'
-      );
-    }
-
     return result as ListTasksResult;
   }
@@ -7,7 +7,6 @@
 export { ListTasksCommand } from './commands/list.command.js';
 export { AuthCommand } from './commands/auth.command.js';
 export { WorkflowCommand } from './commands/workflow.command.js';
-export { ContextCommand } from './commands/context.command.js';

 // Command registry
 export { registerAllCommands } from './commands/index.js';
@@ -6,7 +6,7 @@
 import chalk from 'chalk';
 import boxen from 'boxen';
 import Table from 'cli-table3';
-import type { Task, TaskStatus, TaskPriority } from '@tm/core/types';
+import type { Task, TaskStatus, TaskPriority } from '@tm/core';

 /**
  * Get colored status display with ASCII icons (matches scripts/modules/ui.js style)
@@ -1,8 +1,15 @@
 import { defineConfig } from 'tsup';
-import { cliConfig, mergeConfig } from '@tm/build-config';

-export default defineConfig(
-  mergeConfig(cliConfig, {
-    entry: ['src/index.ts']
-  })
-);
+export default defineConfig({
+  entry: ['src/index.ts'],
+  format: ['esm'],
+  target: 'node18',
+  splitting: false,
+  sourcemap: true,
+  clean: true,
+  dts: true,
+  shims: true,
+  esbuildOptions(options) {
+    options.platform = 'node';
+  }
+});
@@ -200,34 +200,6 @@ sidebarTitle: "CLI Commands"
 ```
 </Accordion>

-<Accordion title="Workflow Management">
-  ```bash
-  # Start workflow execution for a task
-  task-master workflow start <task-id>
-  # or use alias
-  task-master workflow run <task-id>
-
-  # List all active workflows
-  task-master workflow list
-
-  # Check status of a specific workflow
-  task-master workflow status <workflow-id>
-  # or use alias
-  task-master workflow info <workflow-id>
-
-  # Stop a running workflow
-  task-master workflow stop <workflow-id>
-  # or use alias
-  task-master workflow kill <workflow-id>
-  ```
-
-  The workflow system executes tasks in isolated git worktrees with dedicated Claude Code processes, providing:
-  - **Isolated Execution**: Each task runs in its own git worktree
-  - **Process Management**: Spawns dedicated Claude Code processes
-  - **Real-time Monitoring**: Track progress and output
-  - **Parallel Execution**: Run multiple tasks concurrently
-</Accordion>
-
 <Accordion title="Initialize a Project">
 ```bash
 # Initialize a new project with Task Master structure
@@ -1,221 +0,0 @@
----
-title: "Workflow Engine"
-sidebarTitle: "Workflows"
----
-
-The Task Master Workflow Engine provides advanced task execution capabilities with git worktree isolation and Claude Code process management.
-
-## Overview
-
-The workflow system extends Task Master with powerful execution features:
-
-- **Git Worktree Isolation**: Each task runs in its own isolated git worktree
-- **Process Sandboxing**: Spawns dedicated Claude Code processes for task execution
-- **Real-time Monitoring**: Track workflow progress and process output
-- **State Management**: Persistent workflow state across sessions
-- **Parallel Execution**: Run multiple tasks concurrently with resource limits
-
-## Quick Start
-
-### Starting a Workflow
-
-```bash
-# Start workflow for a specific task
-task-master workflow start 1.2
-
-# Using the alias
-task-master workflow run 1.2
-```
-
-### Monitoring Workflows
-
-```bash
-# List all active workflows
-task-master workflow list
-
-# Check specific workflow status
-task-master workflow status workflow-1.2-1234567890-abc123
-
-# Using the alias
-task-master workflow info workflow-1.2-1234567890-abc123
-```
-
-### Stopping Workflows
-
-```bash
-# Stop a running workflow
-task-master workflow stop workflow-1.2-1234567890-abc123
-
-# Force stop using alias
-task-master workflow kill workflow-1.2-1234567890-abc123
-```
-
-## Workflow States
-
-| State | Description |
-|-------|-------------|
-| `pending` | Created but not started |
-| `initializing` | Setting up worktree and process |
-| `running` | Active execution in progress |
-| `paused` | Temporarily stopped |
-| `completed` | Successfully finished |
-| `failed` | Error occurred during execution |
-| `cancelled` | User cancelled the workflow |
-| `timeout` | Exceeded time limit |
-
-## Environment Configuration
-
-### Environment Variables
-
-Set these environment variables to customize workflow behavior:
-
-- `TASKMASTER_WORKFLOW_DEBUG`: Enable debug logging
-- `TASKMASTER_CLAUDE_PATH`: Custom Claude Code executable path
-- `TASKMASTER_WORKTREE_BASE`: Base directory for worktrees
-- `TASKMASTER_MAX_CONCURRENT`: Maximum concurrent workflows
-
-### Example Configuration
-
-```bash
-# Enable debug mode
-export TASKMASTER_WORKFLOW_DEBUG=true
-
-# Set custom Claude path
-export TASKMASTER_CLAUDE_PATH=/usr/local/bin/claude
-
-# Set worktree base directory
-export TASKMASTER_WORKTREE_BASE=./worktrees
-
-# Limit concurrent workflows
-export TASKMASTER_MAX_CONCURRENT=3
-```
-
-## Git Worktree Integration
-
-### How It Works
-
-When you start a workflow:
-
-1. **Worktree Creation**: A new git worktree is created for the task
-2. **Process Spawn**: A dedicated Claude Code process is launched in the worktree
-3. **Task Execution**: The task runs in complete isolation
-4. **State Tracking**: Progress is monitored and persisted
-5. **Cleanup**: Worktree is removed when workflow completes
-
-### Worktree Structure
-
-```
-project/
-├── .git/          # Main repository
-├── src/           # Main working directory
-└── worktrees/     # Workflow worktrees
-    ├── task-1.2/  # Worktree for task 1.2
-    ├── task-2.1/  # Worktree for task 2.1
-    └── task-3.4/  # Worktree for task 3.4
-```
-
-## Best Practices
-
-### When to Use Workflows
-
-Use workflows for tasks that:
-
-- Require isolated development environments
-- Need dedicated Claude Code attention
-- Benefit from parallel execution
-- Require process monitoring and state tracking
-
-### Workflow Management
-
-- **Start workflows for complex tasks** that need focused execution
-- **Monitor progress** using `workflow status` command
-- **Clean up completed workflows** to free resources
-- **Use meaningful task descriptions** for better workflow tracking
-
-### Resource Management
-
-- **Limit concurrent workflows** based on system resources
-- **Monitor workflow output** for debugging and progress tracking
-- **Stop unnecessary workflows** to free up resources
-
-## Troubleshooting
-
-### Common Issues
-
-**Worktree Creation Fails**
-```bash
-# Check git version (requires 2.5+)
-git --version
-
-# Verify project is a git repository
-git status
-```
-
-**Claude Code Not Found**
-```bash
-# Check Claude installation
-which claude
-
-# Set custom path
-export TASKMASTER_CLAUDE_PATH=/path/to/claude
-```
-
-**Permission Errors**
-```bash
-# Check worktree directory permissions
-chmod -R 755 ./worktrees
-```
-
-### Debug Mode
-
-Enable debug logging for troubleshooting:
-
-```bash
-export TASKMASTER_WORKFLOW_DEBUG=true
-task-master workflow start 1.2
-```
-
-## Integration Examples
-
-### With VS Code Extension
-
-The workflow engine integrates with the Task Master VS Code extension to provide:
-
-- **Workflow Tree View**: Visual workflow management
-- **Process Monitoring**: Real-time output streaming
-- **Worktree Navigation**: Quick access to isolated workspaces
-- **Status Indicators**: Visual workflow state tracking
-
-### With Task Management
-
-```bash
-# Typical workflow
-task-master next                    # Find next task
-task-master workflow start 1.2     # Start workflow
-task-master workflow status <id>   # Monitor progress
-task-master set-status --id=1.2 --status=done  # Mark complete
-```
-
-## Advanced Features
-
-### Parallel Execution
-
-Run multiple workflows simultaneously:
-
-```bash
-# Start multiple workflows
-task-master workflow start 1.2
-task-master workflow start 2.1
-task-master workflow start 3.4
-
-# Monitor all active workflows
-task-master workflow list
-```
-
-### Process Monitoring
-
-Each workflow provides real-time output monitoring and process management through the workflow engine's event system.
-
-### State Persistence
-
-Workflow state is automatically persisted across sessions, allowing you to resume monitoring workflows after restarting the CLI.
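The deleted page's "How It Works" steps map onto plain `git worktree` operations; a minimal sketch of that lifecycle (paths and branch names are illustrative, not taken from the engine's code):

```bash
# 1. Worktree creation: isolate the task on its own branch
git worktree add ./worktrees/task-1.2 -b task-1.2

# 2-4. The engine would spawn its dedicated process with the worktree as cwd
(cd ./worktrees/task-1.2 && echo "task executes in isolation")

# 5. Cleanup once the workflow completes
git worktree remove ./worktrees/task-1.2
git branch -D task-1.2
```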
@@ -49,7 +49,6 @@
       "pages": [
         "capabilities/mcp",
         "capabilities/cli-root-commands",
-        "capabilities/workflows",
         "capabilities/task-structure"
       ]
     }
@@ -3,38 +3,4 @@ title: "What's New"
 sidebarTitle: "What's New"
 ---

-## New Workflow Engine (Latest)
-
-Task Master now includes a powerful workflow engine that revolutionizes how tasks are executed:
-
-### 🚀 Key Features
-
-- **Git Worktree Isolation**: Each task runs in its own isolated git worktree
-- **Claude Code Integration**: Spawns dedicated Claude Code processes for task execution
-- **Real-time Monitoring**: Track workflow progress and process output
-- **Parallel Execution**: Run multiple tasks concurrently with resource management
-- **State Persistence**: Workflow state is maintained across sessions
-
-### 🔧 New CLI Commands
-
-```bash
-# Start workflow execution
-task-master workflow start <task-id>
-
-# Monitor active workflows
-task-master workflow list
-
-# Check workflow status
-task-master workflow status <workflow-id>
-
-# Stop running workflow
-task-master workflow stop <workflow-id>
-```
-
-### 📖 Learn More
-
-Check out the new [Workflow Documentation](/capabilities/workflows) for comprehensive usage guides and best practices.
-
----
-
 An easy way to see the latest releases
output.txt — 40 changes (file diff suppressed because one or more lines are too long)
package-lock.json (generated) — 14567 changes (file diff suppressed because it is too large)
@@ -21,16 +21,10 @@
     "build:core": "cd packages/tm-core && npm run build",
     "build:workflow": "cd packages/workflow-engine && npm run build",
     "build:cli": "cd apps/cli && npm run build",
     "typecheck": "npm run typecheck:core && npm run typecheck:cli",
     "typecheck:core": "cd packages/tm-core && npm run typecheck",
     "typecheck:cli": "cd apps/cli && npm run typecheck",
     "test": "node --experimental-vm-modules node_modules/.bin/jest",
-    "test:unit": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=unit",
-    "test:integration": "node --experimental-vm-modules node_modules/.bin/jest --testPathPattern=integration",
-    "test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
-    "test:watch": "node --experimental-vm-modules node_modules/.bin/jest --watch",
-    "test:coverage": "node --experimental-vm-modules node_modules/.bin/jest --coverage",
-    "test:ci": "node --experimental-vm-modules node_modules/.bin/jest --coverage --ci",
     "test:e2e": "./tests/e2e/run_e2e.sh",
     "test:e2e-report": "./tests/e2e/run_e2e.sh --analyze-log",
     "postpack": "chmod +x dist/task-master.js dist/mcp-server.js",
@@ -75,7 +69,7 @@
     "ajv": "^8.17.1",
     "ajv-formats": "^3.0.1",
     "boxen": "^8.0.1",
-    "chalk": "5.6.2",
+    "chalk": "^5.4.1",
     "cli-highlight": "^2.1.11",
     "cli-progress": "^3.12.0",
     "cli-table3": "^0.6.5",
@@ -1,31 +0,0 @@
-{
-  "name": "@tm/build-config",
-  "version": "1.0.0",
-  "description": "Shared build configuration for Task Master monorepo",
-  "type": "module",
-  "main": "./dist/tsup.base.js",
-  "types": "./dist/tsup.base.d.ts",
-  "exports": {
-    ".": {
-      "types": "./src/tsup.base.ts",
-      "import": "./dist/tsup.base.js",
-      "require": "./dist/tsup.base.cjs"
-    }
-  },
-  "files": ["dist", "src"],
-  "keywords": ["build-config", "tsup", "monorepo"],
-  "author": "",
-  "license": "MIT",
-  "scripts": {
-    "build": "tsup",
-    "dev": "tsup --watch",
-    "typecheck": "tsc --noEmit"
-  },
-  "devDependencies": {
-    "tsup": "^8.5.0",
-    "typescript": "^5.7.3"
-  },
-  "peerDependencies": {
-    "tsup": "^8.0.0"
-  }
-}
@@ -1,151 +0,0 @@
-/**
- * Base tsup configuration for Task Master monorepo
- * Provides shared configuration that can be extended by individual packages
- */
-import type { Options } from 'tsup';
-
-const isProduction = process.env.NODE_ENV === 'production';
-const isDevelopment = !isProduction;
-
-/**
- * Base configuration for library packages (tm-core, etc.)
- */
-export const libraryConfig: Partial<Options> = {
-  format: ['cjs', 'esm'],
-  target: 'es2022',
-  // Sourcemaps only in development to reduce production bundle size
-  sourcemap: isDevelopment,
-  clean: true,
-  dts: true,
-  // Enable optimizations in production
-  splitting: isProduction,
-  treeshake: isProduction,
-  minify: isProduction,
-  bundle: true,
-  esbuildOptions(options) {
-    options.conditions = ['module'];
-    // Better source mapping in development only
-    options.sourcesContent = isDevelopment;
-    // Keep original names for better debugging in development
-    options.keepNames = isDevelopment;
-  },
-  // Watch mode configuration for development
-  watch: isDevelopment ? ['src'] : false
-};
-
-/**
- * Base configuration for CLI packages
- */
-export const cliConfig: Partial<Options> = {
-  format: ['esm'],
-  target: 'node18',
-  splitting: false,
-  // Sourcemaps only in development to reduce production bundle size
-  sourcemap: isDevelopment,
-  clean: true,
-  dts: true,
-  shims: true,
-  // Enable minification in production for smaller bundles
-  minify: isProduction,
-  treeshake: isProduction,
-  esbuildOptions(options) {
-    options.platform = 'node';
-    // Better source mapping in development only
-    options.sourcesContent = isDevelopment;
-    // Keep original names for better debugging in development
-    options.keepNames = isDevelopment;
-  }
-};
-
-/**
- * Base configuration for executable bundles (root level)
- */
-export const executableConfig: Partial<Options> = {
-  format: ['esm'],
-  target: 'node18',
-  splitting: false,
-  // Sourcemaps only in development to reduce production bundle size
-  sourcemap: isDevelopment,
-  clean: true,
-  bundle: true, // Bundle everything into one file
-  // Minify in production for smaller executables
-  minify: isProduction,
-  // Handle TypeScript imports transparently
-  loader: {
-    '.js': 'jsx',
-    '.ts': 'ts'
-  },
-  esbuildOptions(options) {
-    options.platform = 'node';
-    // Allow importing TypeScript from JavaScript
-    options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
-    // Better source mapping in development only
-    options.sourcesContent = isDevelopment;
-    // Keep original names for better debugging in development
-    options.keepNames = isDevelopment;
-  }
-};
-
-/**
- * Common external modules that should not be bundled
- */
-export const commonExternals = [
-  // Native Node.js modules
-  'fs',
-  'path',
-  'child_process',
-  'crypto',
-  'os',
-  'url',
-  'util',
-  'stream',
-  'http',
-  'https',
-  'events',
-  'assert',
-  'buffer',
-  'querystring',
-  'readline',
-  'zlib',
-  'tty',
-  'net',
-  'dgram',
-  'dns',
-  'tls',
-  'cluster',
-  'process',
-  'module'
-];
-
-/**
- * Utility function to merge configurations
- */
-export function mergeConfig(
-  baseConfig: Partial<Options>,
-  overrides: Partial<Options>
-): Options {
-  return {
-    ...baseConfig,
-    ...overrides,
-    // Merge arrays instead of overwriting
-    external: [...(baseConfig.external || []), ...(overrides.external || [])],
-    // Merge esbuildOptions
-    esbuildOptions(options, context) {
-      if (baseConfig.esbuildOptions) {
-        baseConfig.esbuildOptions(options, context);
-      }
-      if (overrides.esbuildOptions) {
-        overrides.esbuildOptions(options, context);
-      }
-    }
-  } as Options;
-}
-
-/**
- * Environment helpers
- */
-export const env = {
-  isProduction,
-  isDevelopment,
-  NODE_ENV: process.env.NODE_ENV || 'development'
-};
@@ -1,20 +0,0 @@
-{
-  "compilerOptions": {
-    "target": "ES2022",
-    "lib": ["ES2022"],
-    "module": "ESNext",
-    "moduleResolution": "bundler",
-    "allowSyntheticDefaultImports": true,
-    "esModuleInterop": true,
-    "allowJs": true,
-    "strict": true,
-    "noEmit": true,
-    "resolveJsonModule": true,
-    "isolatedModules": true,
-    "declaration": true,
-    "skipLibCheck": true,
-    "forceConsistentCasingInFileNames": true
-  },
-  "include": ["src/**/*"],
-  "exclude": ["node_modules", "dist"]
-}
@@ -1,23 +0,0 @@
-import { defineConfig } from 'tsup';
-
-const isProduction = process.env.NODE_ENV === 'production';
-
-export default defineConfig({
-  entry: ['src/tsup.base.ts'],
-  format: ['esm', 'cjs'],
-  target: 'node18',
-  // Sourcemaps only in development
-  sourcemap: !isProduction,
-  clean: true,
-  dts: true,
-  // Enable minification in production
-  minify: isProduction,
-  treeshake: isProduction,
-  external: ['tsup'],
-  esbuildOptions(options) {
-    // Better source mapping in development only
-    options.sourcesContent = !isProduction;
-    // Keep original names for better debugging in development
-    options.keepNames = !isProduction;
-  }
-});
@@ -1,51 +1,60 @@
 {
   "name": "@tm/core",
   "version": "1.0.0",
   "private": true,
   "description": "Core library for Task Master - TypeScript task management system",
   "type": "module",
-  "types": "./src/index.ts",
+  "types": "./dist/index.d.ts",
   "main": "./dist/index.js",
   "exports": {
     ".": {
       "types": "./src/index.ts",
-      "import": "./dist/index.js"
+      "import": "./dist/index.js",
+      "require": "./dist/index.js"
     },
     "./auth": {
       "types": "./src/auth/index.ts",
-      "import": "./dist/auth/index.js"
+      "import": "./dist/auth/index.js",
+      "require": "./dist/auth/index.js"
     },
     "./storage": {
       "types": "./src/storage/index.ts",
-      "import": "./dist/storage/index.js"
+      "import": "./dist/storage/index.js",
+      "require": "./dist/storage/index.js"
     },
     "./config": {
       "types": "./src/config/index.ts",
-      "import": "./dist/config/index.js"
+      "import": "./dist/config/index.js",
+      "require": "./dist/config/index.js"
     },
     "./providers": {
       "types": "./src/providers/index.ts",
-      "import": "./dist/providers/index.js"
+      "import": "./dist/providers/index.js",
+      "require": "./dist/providers/index.js"
     },
     "./services": {
       "types": "./src/services/index.ts",
-      "import": "./dist/services/index.js"
+      "import": "./dist/services/index.js",
+      "require": "./dist/services/index.js"
     },
     "./errors": {
       "types": "./src/errors/index.ts",
-      "import": "./dist/errors/index.js"
+      "import": "./dist/errors/index.js",
+      "require": "./dist/errors/index.js"
     },
     "./logger": {
       "types": "./src/logger/index.ts",
-      "import": "./dist/logger/index.js"
+      "import": "./dist/logger/index.js",
+      "require": "./dist/logger/index.js"
     },
     "./types": {
       "types": "./src/types/index.ts",
-      "import": "./dist/types/index.js"
+      "import": "./dist/types/index.js",
+      "require": "./dist/types/index.js"
     },
     "./interfaces": {
       "types": "./src/interfaces/index.ts",
-      "import": "./dist/interfaces/index.js"
+      "import": "./dist/interfaces/index.js",
+      "require": "./dist/interfaces/index.js"
     },
     "./utils": {
       "types": "./src/utils/index.ts",
@@ -80,9 +89,9 @@
   },
   "devDependencies": {
     "@biomejs/biome": "^1.9.4",
-    "@tm/build-config": "*",
+    "@types/node": "^20.11.30",
     "@vitest/coverage-v8": "^2.0.5",
     "dotenv-mono": "^1.5.1",
     "ts-node": "^10.9.2",
     "tsup": "^8.0.2",
     "typescript": "^5.4.3",
@@ -6,18 +6,11 @@ import {
|
||||
AuthCredentials,
|
||||
OAuthFlowOptions,
|
||||
AuthenticationError,
|
||||
AuthConfig,
|
||||
UserContext
|
||||
AuthConfig
|
||||
} from './types.js';
|
||||
import { CredentialStore } from './credential-store.js';
|
||||
import { OAuthService } from './oauth-service.js';
|
||||
import { SupabaseAuthClient } from '../clients/supabase-client.js';
|
||||
import {
|
||||
OrganizationService,
|
||||
type Organization,
|
||||
type Brief,
|
||||
type RemoteTask
|
||||
} from '../services/organization.service.js';
|
||||
import { getLogger } from '../logger/index.js';
|
||||
|
||||
/**
|
||||
@@ -28,28 +21,11 @@ export class AuthManager {
|
||||
private credentialStore: CredentialStore;
|
||||
private oauthService: OAuthService;
|
||||
private supabaseClient: SupabaseAuthClient;
|
||||
private organizationService?: OrganizationService;
|
||||
|
||||
private constructor(config?: Partial<AuthConfig>) {
|
||||
this.credentialStore = new CredentialStore(config);
|
||||
this.supabaseClient = new SupabaseAuthClient();
|
||||
this.oauthService = new OAuthService(this.credentialStore, config);
|
||||
|
||||
// Initialize Supabase client with session restoration
|
||||
this.initializeSupabaseSession();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize Supabase session from stored credentials
|
||||
*/
|
||||
private async initializeSupabaseSession(): Promise<void> {
|
||||
try {
|
||||
await this.supabaseClient.initialize();
|
||||
} catch (error) {
|
||||
// Log but don't throw - session might not exist yet
|
||||
const logger = getLogger('AuthManager');
|
||||
logger.debug('No existing session to restore');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -99,48 +75,39 @@ export class AuthManager {
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh authentication token using Supabase session
|
||||
* Refresh authentication token
|
||||
*/
|
||||
async refreshToken(): Promise<AuthCredentials> {
|
||||
const authData = this.credentialStore.getCredentials({
|
||||
allowExpired: true
|
||||
});
|
||||
|
||||
if (!authData || !authData.refreshToken) {
|
||||
throw new AuthenticationError(
|
||||
'No refresh token available',
|
||||
'NO_REFRESH_TOKEN'
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
// Use Supabase's built-in session refresh
|
||||
const session = await this.supabaseClient.refreshSession();
|
||||
// Use Supabase client to refresh the token
|
||||
const response = await this.supabaseClient.refreshSession(
|
||||
authData.refreshToken
|
||||
);
|
||||
|
||||
if (!session) {
|
||||
throw new AuthenticationError(
|
||||
'Failed to refresh session',
|
||||
'REFRESH_FAILED'
|
||||
);
|
||||
}
|
||||
|
||||
// Get existing credentials to preserve context
|
||||
const existingCredentials = this.credentialStore.getCredentials({
|
||||
allowExpired: true
|
||||
});
|
||||
|
||||
// Update authentication data from session
|
||||
// Update authentication data
|
||||
const newAuthData: AuthCredentials = {
|
||||
token: session.access_token,
|
||||
refreshToken: session.refresh_token,
|
||||
userId: session.user.id,
|
||||
email: session.user.email,
|
||||
expiresAt: session.expires_at
|
||||
? new Date(session.expires_at * 1000).toISOString()
|
||||
: undefined,
|
||||
savedAt: new Date().toISOString(),
|
||||
selectedContext: existingCredentials?.selectedContext
|
||||
...authData,
|
||||
token: response.token,
|
||||
refreshToken: response.refreshToken,
|
||||
expiresAt: response.expiresAt,
|
||||
savedAt: new Date().toISOString()
|
||||
};
|
||||
|
||||
this.credentialStore.saveCredentials(newAuthData);
|
||||
return newAuthData;
|
||||
} catch (error) {
|
||||
if (error instanceof AuthenticationError) {
|
||||
throw error;
|
||||
}
|
||||
throw new AuthenticationError(
|
||||
`Token refresh failed: ${(error as Error).message}`,
|
||||
'REFRESH_FAILED'
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -166,114 +133,4 @@ export class AuthManager {
	isAuthenticated(): boolean {
		return this.credentialStore.hasValidCredentials();
	}

	/**
	 * Get the current user context (org/brief selection)
	 */
	getContext(): UserContext | null {
		const credentials = this.getCredentials();
		return credentials?.selectedContext || null;
	}

	/**
	 * Update the user context (org/brief selection)
	 */
	async updateContext(context: Partial<UserContext>): Promise<void> {
		const credentials = this.getCredentials();
		if (!credentials) {
			throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
		}

		// Merge with existing context
		const existingContext = credentials.selectedContext || {};
		const newContext: UserContext = {
			...existingContext,
			...context,
			updatedAt: new Date().toISOString()
		};

		// Save updated credentials with new context
		const updatedCredentials: AuthCredentials = {
			...credentials,
			selectedContext: newContext
		};

		this.credentialStore.saveCredentials(updatedCredentials);
	}

	/**
	 * Clear the user context
	 */
	async clearContext(): Promise<void> {
		const credentials = this.getCredentials();
		if (!credentials) {
			throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
		}

		// Remove context from credentials
		const { selectedContext, ...credentialsWithoutContext } = credentials;
		this.credentialStore.saveCredentials(credentialsWithoutContext);
	}

	/**
	 * Get the organization service instance
	 * Uses the Supabase client with the current session or token
	 */
	private async getOrganizationService(): Promise<OrganizationService> {
		if (!this.organizationService) {
			// First check if we have credentials with a token
			const credentials = this.getCredentials();
			if (!credentials || !credentials.token) {
				throw new AuthenticationError('Not authenticated', 'NOT_AUTHENTICATED');
			}

			// Initialize session if needed (this will load from our storage adapter)
			await this.supabaseClient.initialize();

			// Use the SupabaseAuthClient which now has the session
			const supabaseClient = this.supabaseClient.getClient();
			this.organizationService = new OrganizationService(supabaseClient as any);
		}
		return this.organizationService;
	}

	/**
	 * Get all organizations for the authenticated user
	 */
	async getOrganizations(): Promise<Organization[]> {
		const service = await this.getOrganizationService();
		return service.getOrganizations();
	}

	/**
	 * Get all briefs for a specific organization
	 */
	async getBriefs(orgId: string): Promise<Brief[]> {
		const service = await this.getOrganizationService();
		return service.getBriefs(orgId);
	}

	/**
	 * Get a specific organization by ID
	 */
	async getOrganization(orgId: string): Promise<Organization | null> {
		const service = await this.getOrganizationService();
		return service.getOrganization(orgId);
	}

	/**
	 * Get a specific brief by ID
	 */
	async getBrief(briefId: string): Promise<Brief | null> {
		const service = await this.getOrganizationService();
		return service.getBrief(briefId);
	}

	/**
	 * Get all tasks for a specific brief
	 */
	async getTasks(briefId: string): Promise<RemoteTask[]> {
		const service = await this.getOrganizationService();
		return service.getTasks(briefId);
	}
}

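For orientation, a minimal usage sketch of the context API above. This is illustrative only and not part of the changeset; the IDs are placeholders and error handling is elided. `AuthManager.getInstance()` is the singleton accessor used elsewhere in this diff.

// Illustrative sketch - not part of this changeset.
import { AuthManager } from './auth-manager.js';

async function pickBrief(orgId: string, briefId: string): Promise<void> {
	const auth = AuthManager.getInstance();
	if (!auth.isAuthenticated()) {
		throw new Error('Not authenticated - run the auth flow first');
	}
	// Persist the org/brief selection alongside the stored credentials.
	await auth.updateContext({ orgId, briefId });
	console.log('Current context:', auth.getContext());
}
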
@@ -5,19 +5,12 @@
export { AuthManager } from './auth-manager.js';
export { CredentialStore } from './credential-store.js';
export { OAuthService } from './oauth-service.js';
export { SupabaseSessionStorage } from './supabase-session-storage';
export type {
	Organization,
	Brief,
	RemoteTask
} from '../services/organization.service.js';

export type {
	AuthCredentials,
	OAuthFlowOptions,
	AuthConfig,
	CliData,
	UserContext
	CliData
} from './types.js';

export { AuthenticationError } from './types.js';

@@ -181,8 +181,8 @@ export class OAuthService {
			timestamp: Date.now()
		};

		// Build authorization URL for CLI-specific sign-in page
		const authUrl = new URL(`${this.baseUrl}/auth/cli/sign-in`);
		// Build authorization URL for web app sign-in page
		const authUrl = new URL(`${this.baseUrl}/auth/sign-in`);

		// Encode CLI data as base64
		const cliParam = Buffer.from(JSON.stringify(cliData)).toString(
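The hunk above is cut off mid-call; per the comment, the CLI data is base64-encoded into a query parameter. A sketch of the implied round trip follows, where the `cliData` fields are assumptions (the full `CliData` shape is not visible in this diff):

// Illustrative sketch - field names are assumed, not taken from this diff.
const cliData = { port: 3421, timestamp: Date.now() };
const cliParam = Buffer.from(JSON.stringify(cliData)).toString('base64');

// Hypothetical decode on the web-app side:
const decoded = JSON.parse(Buffer.from(cliParam, 'base64').toString('utf8'));
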
@@ -272,49 +272,7 @@ export class OAuthService {
				return;
			}

			// Handle authorization code for PKCE flow
			const code = url.searchParams.get('code');
			if (code && type === 'pkce_callback') {
				try {
					this.logger.info('Received authorization code for PKCE flow');

					// Exchange code for session using PKCE
					const session = await this.supabaseClient.exchangeCodeForSession(code);

					// Save authentication data
					const authData: AuthCredentials = {
						token: session.access_token,
						refreshToken: session.refresh_token,
						userId: session.user.id,
						email: session.user.email,
						expiresAt: session.expires_at
							? new Date(session.expires_at * 1000).toISOString()
							: undefined,
						tokenType: 'standard',
						savedAt: new Date().toISOString()
					};

					this.credentialStore.saveCredentials(authData);

					if (server.listening) {
						server.close();
					}
					// Clear timeout since authentication succeeded
					if (timeoutId) {
						clearTimeout(timeoutId);
					}
					resolve(authData);
					return;
				} catch (error) {
					if (server.listening) {
						server.close();
					}
					reject(error);
					return;
				}
			}

			// Handle direct token response from server (legacy flow)
			// Handle direct token response from server
			if (
				accessToken &&
				(type === 'oauth_success' || type === 'session_transfer')
@@ -322,23 +280,8 @@ export class OAuthService {
				try {
					this.logger.info(`Received tokens via ${type}`);

					// Create a session with the tokens and set it in Supabase client
					const session = {
						access_token: accessToken,
						refresh_token: refreshToken || '',
						expires_at: expiresIn
							? Math.floor(Date.now() / 1000) + parseInt(expiresIn)
							: undefined,
						expires_in: expiresIn ? parseInt(expiresIn) : undefined,
						token_type: 'bearer',
						user: null as any // Will be populated by setSession
					};

					// Set the session in Supabase client
					await this.supabaseClient.setSession(session as any);

					// Get user info from the session
					const user = await this.supabaseClient.getUser();
					// Get user info using the access token if possible
					const user = await this.supabaseClient.getUser(accessToken);

					// Calculate expiration time
					const expiresAt = expiresIn

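The hunk truncates mid-expression. Given the `expires_at` computation a few lines up, a plausible completion is sketched below; this is an assumption, not recovered text:

// Sketch only: mirrors the seconds-based math used for session.expires_at above.
const expiresAt = expiresIn
	? new Date(Date.now() + parseInt(expiresIn) * 1000).toISOString()
	: undefined;
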
@@ -1,155 +0,0 @@
/**
 * Custom storage adapter for Supabase Auth sessions in CLI environment
 * Implements the SupportedStorage interface required by Supabase Auth
 *
 * This adapter bridges Supabase's session management with our existing
 * auth.json credential storage, maintaining backward compatibility
 */

import { SupportedStorage } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store';
import { AuthCredentials } from './types';
import { getLogger } from '../logger';

const STORAGE_KEY = 'sb-taskmaster-auth-token';

export class SupabaseSessionStorage implements SupportedStorage {
	private store: CredentialStore;
	private logger = getLogger('SupabaseSessionStorage');

	constructor(store: CredentialStore) {
		this.store = store;
	}

	/**
	 * Build a Supabase session object from our credentials
	 */
	private buildSessionFromCredentials(credentials: AuthCredentials): any {
		// Create a session object that Supabase expects
		const session = {
			access_token: credentials.token,
			refresh_token: credentials.refreshToken || '',
			expires_at: credentials.expiresAt
				? Math.floor(new Date(credentials.expiresAt).getTime() / 1000)
				: Math.floor(Date.now() / 1000) + 3600, // Default to 1 hour
			token_type: 'bearer',
			user: {
				id: credentials.userId,
				email: credentials.email || '',
				aud: 'authenticated',
				role: 'authenticated',
				email_confirmed_at: new Date().toISOString(),
				app_metadata: {},
				user_metadata: {},
				created_at: new Date().toISOString(),
				updated_at: new Date().toISOString()
			}
		};
		return session;
	}

	/**
	 * Parse a Supabase session back to our credentials
	 */
	private parseSessionToCredentials(
		sessionData: any
	): Partial<AuthCredentials> {
		try {
			const session = JSON.parse(sessionData);
			return {
				token: session.access_token,
				refreshToken: session.refresh_token,
				userId: session.user?.id || 'unknown',
				email: session.user?.email,
				expiresAt: session.expires_at
					? new Date(session.expires_at * 1000).toISOString()
					: undefined
			};
		} catch (error) {
			this.logger.error('Error parsing session:', error);
			return {};
		}
	}

	/**
	 * Get item from storage - Supabase will request the session with a specific key
	 */
	getItem(key: string): string | null {
		// Supabase uses a specific key pattern for sessions
		if (key === STORAGE_KEY || key.includes('auth-token')) {
			try {
				const credentials = this.store.getCredentials({ allowExpired: true });
				if (credentials && credentials.token) {
					// Build and return a session object from our stored credentials
					const session = this.buildSessionFromCredentials(credentials);
					return JSON.stringify(session);
				}
			} catch (error) {
				this.logger.error('Error getting session:', error);
			}
		}
		return null;
	}

	/**
	 * Set item in storage - Supabase will store the session with a specific key
	 */
	setItem(key: string, value: string): void {
		// Only handle Supabase session keys
		if (key === STORAGE_KEY || key.includes('auth-token')) {
			try {
				// Parse the session and update our credentials
				const sessionUpdates = this.parseSessionToCredentials(value);
				const existingCredentials = this.store.getCredentials({
					allowExpired: true
				});

				if (sessionUpdates.token) {
					const updatedCredentials: AuthCredentials = {
						...existingCredentials,
						...sessionUpdates,
						savedAt: new Date().toISOString(),
						selectedContext: existingCredentials?.selectedContext
					} as AuthCredentials;

					this.store.saveCredentials(updatedCredentials);
				}
			} catch (error) {
				this.logger.error('Error setting session:', error);
			}
		}
	}

	/**
	 * Remove item from storage - Called when signing out
	 */
	removeItem(key: string): void {
		if (key === STORAGE_KEY || key.includes('auth-token')) {
			// Don't actually remove credentials, just clear the tokens
			// This preserves other data like selectedContext
			try {
				const credentials = this.store.getCredentials({ allowExpired: true });
				if (credentials) {
					// Keep context but clear auth tokens
					const clearedCredentials: AuthCredentials = {
						...credentials,
						token: '',
						refreshToken: undefined,
						expiresAt: undefined
					} as AuthCredentials;
					this.store.saveCredentials(clearedCredentials);
				}
			} catch (error) {
				this.logger.error('Error removing session:', error);
			}
		}
	}

	/**
	 * Clear all session data
	 */
	clear(): void {
		// Clear auth tokens but preserve context
		this.removeItem(STORAGE_KEY);
	}
}
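For context, a compact sketch of how this adapter would be handed to `createClient`, mirroring the client construction shown later in this diff. The anon-key variable name is an assumption; only TM_PUBLIC_SUPABASE_URL is visible in this changeset.

// Illustrative sketch - not part of this changeset.
import { createClient } from '@supabase/supabase-js';
import { CredentialStore } from './credential-store';
import { SupabaseSessionStorage } from './supabase-session-storage';

const storage = new SupabaseSessionStorage(new CredentialStore());
const client = createClient(
	process.env.TM_PUBLIC_SUPABASE_URL!,
	process.env.TM_PUBLIC_SUPABASE_ANON_KEY!, // variable name assumed
	{
		auth: {
			storage,
			autoRefreshToken: true,
			persistSession: true,
			detectSessionInUrl: false
		}
	}
);
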
@@ -10,15 +10,6 @@ export interface AuthCredentials {
	expiresAt?: string | number;
	tokenType?: 'standard';
	savedAt: string;
	selectedContext?: UserContext;
}

export interface UserContext {
	orgId?: string;
	orgName?: string;
	briefId?: string;
	briefName?: string;
	updatedAt: string;
}

export interface OAuthFlowOptions {
@@ -76,11 +67,7 @@ export type AuthErrorCode =
	| 'STORAGE_ERROR'
	| 'NOT_SUPPORTED'
	| 'REFRESH_FAILED'
	| 'INVALID_RESPONSE'
	| 'PKCE_INIT_FAILED'
	| 'PKCE_FAILED'
	| 'CODE_EXCHANGE_FAILED'
	| 'SESSION_SET_FAILED';
	| 'INVALID_RESPONSE';

/**
 * Authentication error class

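A tiny sketch of how the narrowed `AuthErrorCode` union is consumed by callers; purely illustrative, and the `code` property name is an assumption since the class body is cut off in this hunk.

// Illustrative sketch - not part of this changeset.
import { AuthenticationError } from './types.js';

function describeError(err: unknown): string {
	if (err instanceof AuthenticationError) {
		// Inspect the error code ('REFRESH_FAILED', 'NOT_SUPPORTED', ...) if exposed.
		return 'auth error';
	}
	return 'unknown error';
}
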
@@ -1,32 +1,19 @@
/**
 * Supabase authentication client for CLI auth flows
 * Supabase client for authentication
 */

import {
	createClient,
	SupabaseClient as SupabaseJSClient,
	User,
	Session
} from '@supabase/supabase-js';
import { createClient, SupabaseClient, User } from '@supabase/supabase-js';
import { AuthenticationError } from '../auth/types.js';
import { getLogger } from '../logger/index.js';
import { SupabaseSessionStorage } from '../auth/supabase-session-storage';
import { CredentialStore } from '../auth/credential-store';

export class SupabaseAuthClient {
	private client: SupabaseJSClient | null = null;
	private sessionStorage: SupabaseSessionStorage;
	private client: SupabaseClient | null = null;
	private logger = getLogger('SupabaseAuthClient');

	constructor() {
		const credentialStore = new CredentialStore();
		this.sessionStorage = new SupabaseSessionStorage(credentialStore);
	}

	/**
	 * Get Supabase client with proper session management
	 * Initialize Supabase client
	 */
	getClient(): SupabaseJSClient {
	private getClient(): SupabaseClient {
		if (!this.client) {
			// Get Supabase configuration from environment - using TM_PUBLIC prefix
			const supabaseUrl = process.env.TM_PUBLIC_SUPABASE_URL;
@@ -39,12 +26,10 @@ export class SupabaseAuthClient {
			);
		}

		// Create client with custom storage adapter (similar to React Native AsyncStorage)
		this.client = createClient(supabaseUrl, supabaseAnonKey, {
			auth: {
				storage: this.sessionStorage,
				autoRefreshToken: true,
				persistSession: true,
				persistSession: false, // We handle persistence ourselves
				detectSessionInUrl: false
			}
		});
@@ -54,159 +39,40 @@ export class SupabaseAuthClient {
	}

	/**
	 * Initialize the client and restore session if available
	 * Note: Code exchange is now handled server-side
	 * The server returns tokens directly to avoid PKCE issues
	 * This method is kept for potential future use
	 */
	async initialize(): Promise<Session | null> {
		const client = this.getClient();

		try {
			// Get the current session from storage
			const {
				data: { session },
				error
			} = await client.auth.getSession();

			if (error) {
				this.logger.warn('Failed to restore session:', error);
				return null;
			}

			if (session) {
				this.logger.info('Session restored successfully');
			}

			return session;
		} catch (error) {
			this.logger.error('Error initializing session:', error);
			return null;
		}
	async exchangeCodeForSession(_code: string): Promise<{
		token: string;
		refreshToken?: string;
		userId: string;
		email?: string;
		expiresAt?: string;
	}> {
		throw new AuthenticationError(
			'Code exchange is handled server-side. CLI receives tokens directly.',
			'NOT_SUPPORTED'
		);
	}

	/**
	 * Sign in with PKCE flow (for CLI auth)
	 * Refresh an access token
	 */
	async signInWithPKCE(): Promise<{ url: string; codeVerifier: string }> {
		const client = this.getClient();

	async refreshSession(refreshToken: string): Promise<{
		token: string;
		refreshToken?: string;
		expiresAt?: string;
	}> {
		try {
			// Generate PKCE challenge
			const { data, error } = await client.auth.signInWithOAuth({
				provider: 'github',
				options: {
					redirectTo:
						process.env.TM_AUTH_CALLBACK_URL ||
						'http://localhost:3421/auth/callback',
					scopes: 'email'
				}
			});
			const client = this.getClient();

			if (error) {
				throw new AuthenticationError(
					`Failed to initiate PKCE flow: ${error.message}`,
					'PKCE_INIT_FAILED'
				);
			}

			if (!data?.url) {
				throw new AuthenticationError(
					'No authorization URL returned',
					'INVALID_RESPONSE'
				);
			}

			// Extract code_verifier from the URL or generate it
			// Note: Supabase handles PKCE internally, we just need to handle the callback
			return {
				url: data.url,
				codeVerifier: '' // Supabase manages this internally
			};
		} catch (error) {
			if (error instanceof AuthenticationError) {
				throw error;
			}

			throw new AuthenticationError(
				`Failed to start PKCE flow: ${(error as Error).message}`,
				'PKCE_FAILED'
			);
		}
	}

	/**
	 * Exchange authorization code for session (PKCE flow)
	 */
	async exchangeCodeForSession(code: string): Promise<Session> {
		const client = this.getClient();

		try {
			const { data, error } = await client.auth.exchangeCodeForSession(code);

			if (error) {
				throw new AuthenticationError(
					`Failed to exchange code: ${error.message}`,
					'CODE_EXCHANGE_FAILED'
				);
			}

			if (!data?.session) {
				throw new AuthenticationError(
					'No session returned from code exchange',
					'INVALID_RESPONSE'
				);
			}

			this.logger.info('Successfully exchanged code for session');
			return data.session;
		} catch (error) {
			if (error instanceof AuthenticationError) {
				throw error;
			}

			throw new AuthenticationError(
				`Code exchange failed: ${(error as Error).message}`,
				'CODE_EXCHANGE_FAILED'
			);
		}
	}

	/**
	 * Get the current session
	 */
	async getSession(): Promise<Session | null> {
		const client = this.getClient();

		try {
			const {
				data: { session },
				error
			} = await client.auth.getSession();

			if (error) {
				this.logger.warn('Failed to get session:', error);
				return null;
			}

			return session;
		} catch (error) {
			this.logger.error('Error getting session:', error);
			return null;
		}
	}

	/**
	 * Refresh the current session
	 */
	async refreshSession(): Promise<Session | null> {
		const client = this.getClient();

		try {
			this.logger.info('Refreshing session...');

			// Supabase will automatically use the stored refresh token
			const {
				data: { session },
				error
			} = await client.auth.refreshSession();
			// Set the session with refresh token
			const { data, error } = await client.auth.refreshSession({
				refresh_token: refreshToken
			});

			if (error) {
				this.logger.error('Failed to refresh session:', error);
@@ -216,11 +82,22 @@ export class SupabaseAuthClient {
				);
			}

			if (session) {
				this.logger.info('Successfully refreshed session');
			if (!data.session) {
				throw new AuthenticationError(
					'No session data returned',
					'INVALID_RESPONSE'
				);
			}

			return session;
			this.logger.info('Successfully refreshed session');

			return {
				token: data.session.access_token,
				refreshToken: data.session.refresh_token,
				expiresAt: data.session.expires_at
					? new Date(data.session.expires_at * 1000).toISOString()
					: undefined
			};
		} catch (error) {
			if (error instanceof AuthenticationError) {
				throw error;
@@ -234,23 +111,21 @@ export class SupabaseAuthClient {
	}

	/**
	 * Get current user from session
	 * Get user details from token
	 */
	async getUser(): Promise<User | null> {
		const client = this.getClient();

	async getUser(token: string): Promise<User | null> {
		try {
			const {
				data: { user },
				error
			} = await client.auth.getUser();
			const client = this.getClient();

			// Get user with the token
			const { data, error } = await client.auth.getUser(token);

			if (error) {
				this.logger.warn('Failed to get user:', error);
				return null;
			}

			return user;
			return data.user;
		} catch (error) {
			this.logger.error('Error getting user:', error);
			return null;
@@ -258,55 +133,22 @@ export class SupabaseAuthClient {
	}

	/**
	 * Sign out and clear session
	 * Sign out (revoke tokens)
	 * Note: This requires the user to be authenticated with the current session.
	 * For remote token revocation, a server-side admin API with service_role key would be needed.
	 */
	async signOut(): Promise<void> {
		const client = this.getClient();

		try {
			// Sign out with global scope to revoke all refresh tokens
			const client = this.getClient();

			// Sign out the current session with global scope to revoke all refresh tokens
			const { error } = await client.auth.signOut({ scope: 'global' });

			if (error) {
				this.logger.warn('Failed to sign out:', error);
			}

			// Clear cached session data
			this.sessionStorage.clear();
		} catch (error) {
			this.logger.error('Error during sign out:', error);
		}
	}

	/**
	 * Set session from external auth (e.g., from server callback)
	 */
	async setSession(session: Session): Promise<void> {
		const client = this.getClient();

		try {
			const { error } = await client.auth.setSession({
				access_token: session.access_token,
				refresh_token: session.refresh_token
			});

			if (error) {
				throw new AuthenticationError(
					`Failed to set session: ${error.message}`,
					'SESSION_SET_FAILED'
				);
			}

			this.logger.info('Session set successfully');
		} catch (error) {
			if (error instanceof AuthenticationError) {
				throw error;
			}

			throw new AuthenticationError(
				`Failed to set session: ${(error as Error).message}`,
				'SESSION_SET_FAILED'
			);
		}
	}
}

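A sketch of the reworked, token-based refresh path above; purely illustrative, assuming the refresh token was loaded from the credential store as in AuthManager.

// Illustrative sketch - not part of this changeset.
async function refreshTokens(client: SupabaseAuthClient, refreshToken: string) {
	// Returns { token, refreshToken?, expiresAt? } per the signature above.
	const refreshed = await client.refreshSession(refreshToken);
	return refreshed;
}
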
@@ -177,7 +177,7 @@ describe('ConfigManager', () => {

	it('should return storage configuration', () => {
		const storage = manager.getStorageConfig();
		expect(storage).toEqual({ type: 'file' });
		expect(storage).toEqual({ type: 'auto', apiConfigured: false });
	});

	it('should return API storage configuration when configured', async () => {
@@ -206,7 +206,65 @@ describe('ConfigManager', () => {
		expect(storage).toEqual({
			type: 'api',
			apiEndpoint: 'https://api.example.com',
			apiAccessToken: 'token123'
			apiAccessToken: 'token123',
			apiConfigured: true
		});
	});

	it('should return auto storage configuration with apiConfigured flag', async () => {
		// Create a new instance with auto storage config and partial API settings
		vi.mocked(ConfigMerger).mockImplementationOnce(
			() =>
				({
					addSource: vi.fn(),
					clearSources: vi.fn(),
					merge: vi.fn().mockReturnValue({
						storage: {
							type: 'auto',
							apiEndpoint: 'https://api.example.com'
							// No apiAccessToken - partial config
						}
					}),
					getSources: vi.fn().mockReturnValue([])
				}) as any
		);

		const autoManager = await ConfigManager.create(testProjectRoot);

		const storage = autoManager.getStorageConfig();
		expect(storage).toEqual({
			type: 'auto',
			apiEndpoint: 'https://api.example.com',
			apiAccessToken: undefined,
			apiConfigured: true // true because apiEndpoint is provided
		});
	});

	it('should return auto storage with apiConfigured false when no API settings', async () => {
		// Create a new instance with auto storage but no API settings
		vi.mocked(ConfigMerger).mockImplementationOnce(
			() =>
				({
					addSource: vi.fn(),
					clearSources: vi.fn(),
					merge: vi.fn().mockReturnValue({
						storage: {
							type: 'auto'
							// No API settings at all
						}
					}),
					getSources: vi.fn().mockReturnValue([])
				}) as any
		);

		const autoManager = await ConfigManager.create(testProjectRoot);

		const storage = autoManager.getStorageConfig();
		expect(storage).toEqual({
			type: 'auto',
			apiEndpoint: undefined,
			apiAccessToken: undefined,
			apiConfigured: false // false because no API settings
		});
	});

@@ -85,6 +85,11 @@ describe('EnvironmentConfigProvider', () => {
		provider = new EnvironmentConfigProvider(); // Reset provider
		config = provider.loadConfig();
		expect(config.storage?.type).toBe('api');

		process.env.TASKMASTER_STORAGE_TYPE = 'auto';
		provider = new EnvironmentConfigProvider(); // Reset provider
		config = provider.loadConfig();
		expect(config.storage?.type).toBe('auto');
	});

	it('should handle nested configuration paths', () => {

@@ -31,7 +31,7 @@ export class EnvironmentConfigProvider {
		{
			env: 'TASKMASTER_STORAGE_TYPE',
			path: ['storage', 'type'],
			validate: (v: string) => ['file', 'api'].includes(v)
			validate: (v: string) => ['file', 'api', 'auto'].includes(v)
		},
		{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
		{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },

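The mapping above in action, as a sketch; `loadConfig()` and the environment variable names come straight from this diff and the tests above.

// Illustrative sketch - not part of this changeset.
process.env.TASKMASTER_STORAGE_TYPE = 'auto';
process.env.TASKMASTER_API_ENDPOINT = 'https://api.example.com';

const provider = new EnvironmentConfigProvider();
const config = provider.loadConfig();
// config.storage -> { type: 'auto', apiEndpoint: 'https://api.example.com' }
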
@@ -3,11 +3,7 @@
 * This file defines the contract for configuration management
 */

import type {
	TaskComplexity,
	TaskPriority,
	StorageType
} from '../types/index.js';
import type { TaskComplexity, TaskPriority } from '../types/index.js';

/**
 * Model configuration for different AI roles
@@ -77,6 +73,14 @@ export interface TagSettings {
	tagNamingConvention: 'kebab-case' | 'camelCase' | 'snake_case';
}

/**
 * Storage type options
 * - 'file': Local file system storage
 * - 'api': Remote API storage (Hamster integration)
 * - 'auto': Automatically detect based on auth status
 */
export type StorageType = 'file' | 'api' | 'auto';

/**
 * Runtime storage configuration used for storage backend selection
 * This is what getStorageConfig() returns and what StorageFactory expects

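One plausible way 'auto' resolves at runtime, shown purely as an assumption; the actual selection logic lives in StorageFactory, outside this hunk.

// Sketch under assumptions - the real StorageFactory logic is not in this diff.
import type { StorageType } from './types.js'; // import path assumed

function resolveStorageType(
	type: StorageType,
	apiConfigured: boolean
): 'file' | 'api' {
	// 'auto' picks the API backend only when API settings are present.
	return type === 'auto' ? (apiConfigured ? 'api' : 'file') : type;
}
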
@@ -1,170 +0,0 @@
import { Task, Subtask } from '../types/index.js';
import { Database, Tables } from '../types/database.types.js';

type TaskRow = Tables<'tasks'>;
type DependencyRow = Tables<'task_dependencies'>;

export class TaskMapper {
	/**
	 * Maps database tasks to internal Task format
	 */
	static mapDatabaseTasksToTasks(
		dbTasks: TaskRow[],
		dbDependencies: DependencyRow[]
	): Task[] {
		if (!dbTasks || dbTasks.length === 0) {
			return [];
		}

		// Group dependencies by task_id
		const dependenciesByTaskId = this.groupDependenciesByTaskId(dbDependencies);

		// Separate parent tasks and subtasks
		const parentTasks = dbTasks.filter((t) => !t.parent_task_id);
		const subtasksByParentId = this.groupSubtasksByParentId(dbTasks);

		// Map parent tasks with their subtasks
		return parentTasks.map((taskRow) =>
			this.mapDatabaseTaskToTask(
				taskRow,
				subtasksByParentId.get(taskRow.id) || [],
				dependenciesByTaskId
			)
		);
	}

	/**
	 * Maps a single database task to internal Task format
	 */
	static mapDatabaseTaskToTask(
		dbTask: TaskRow,
		dbSubtasks: TaskRow[],
		dependenciesByTaskId: Map<string, string[]>
	): Task {
		// Map subtasks
		const subtasks: Subtask[] = dbSubtasks.map((subtask, index) => ({
			id: index + 1, // Use numeric ID for subtasks
			parentId: dbTask.id,
			title: subtask.title,
			description: subtask.description || '',
			status: this.mapStatus(subtask.status),
			priority: this.mapPriority(subtask.priority),
			dependencies: dependenciesByTaskId.get(subtask.id) || [],
			details: (subtask.metadata as any)?.details || '',
			testStrategy: (subtask.metadata as any)?.testStrategy || '',
			createdAt: subtask.created_at,
			updatedAt: subtask.updated_at,
			assignee: subtask.assignee_id || undefined,
			complexity: subtask.complexity
				? this.mapComplexityToInternal(subtask.complexity)
				: undefined
		}));

		return {
			id: dbTask.display_id || dbTask.id, // Use display_id if available
			title: dbTask.title,
			description: dbTask.description || '',
			status: this.mapStatus(dbTask.status),
			priority: this.mapPriority(dbTask.priority),
			dependencies: dependenciesByTaskId.get(dbTask.id) || [],
			details: (dbTask.metadata as any)?.details || '',
			testStrategy: (dbTask.metadata as any)?.testStrategy || '',
			subtasks,
			createdAt: dbTask.created_at,
			updatedAt: dbTask.updated_at,
			assignee: dbTask.assignee_id || undefined,
			complexity: dbTask.complexity
				? this.mapComplexityToInternal(dbTask.complexity)
				: undefined,
			effort: dbTask.estimated_hours || undefined,
			actualEffort: dbTask.actual_hours || undefined
		};
	}

	/**
	 * Groups dependencies by task ID
	 */
	private static groupDependenciesByTaskId(
		dependencies: DependencyRow[]
	): Map<string, string[]> {
		const dependenciesByTaskId = new Map<string, string[]>();

		if (dependencies) {
			for (const dep of dependencies) {
				const deps = dependenciesByTaskId.get(dep.task_id) || [];
				deps.push(dep.depends_on_task_id);
				dependenciesByTaskId.set(dep.task_id, deps);
			}
		}

		return dependenciesByTaskId;
	}

	/**
	 * Groups subtasks by their parent ID
	 */
	private static groupSubtasksByParentId(
		tasks: TaskRow[]
	): Map<string, TaskRow[]> {
		const subtasksByParentId = new Map<string, TaskRow[]>();

		for (const task of tasks) {
			if (task.parent_task_id) {
				const subtasks = subtasksByParentId.get(task.parent_task_id) || [];
				subtasks.push(task);
				subtasksByParentId.set(task.parent_task_id, subtasks);
			}
		}

		// Sort subtasks by subtask_position for each parent
		for (const subtasks of subtasksByParentId.values()) {
			subtasks.sort((a, b) => a.subtask_position - b.subtask_position);
		}

		return subtasksByParentId;
	}

	/**
	 * Maps database status to internal status
	 */
	private static mapStatus(
		status: Database['public']['Enums']['task_status']
	): Task['status'] {
		switch (status) {
			case 'todo':
				return 'pending';
			case 'in_progress':
				return 'in-progress';
			case 'done':
				return 'done';
			default:
				return 'pending';
		}
	}

	/**
	 * Maps database priority to internal priority
	 */
	private static mapPriority(
		priority: Database['public']['Enums']['task_priority']
	): Task['priority'] {
		switch (priority) {
			case 'urgent':
				return 'critical';
			default:
				return priority as Task['priority'];
		}
	}

	/**
	 * Maps numeric complexity to descriptive complexity
	 */
	private static mapComplexityToInternal(
		complexity: number
	): Task['complexity'] {
		if (complexity <= 2) return 'simple';
		if (complexity <= 5) return 'moderate';
		if (complexity <= 8) return 'complex';
		return 'very-complex';
	}
}
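The complexity buckets above, spelled out as a quick reference (illustrative spot checks derived directly from the thresholds in mapComplexityToInternal):

// 0-2 -> 'simple', 3-5 -> 'moderate', 6-8 -> 'complex', 9+ -> 'very-complex'
const boundaryChecks: Array<[number, string]> = [
	[2, 'simple'],
	[5, 'moderate'],
	[8, 'complex'],
	[9, 'very-complex']
];
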
@@ -1,110 +0,0 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { Task } from '../types/index.js';
import { Database } from '../types/database.types.js';
import { TaskMapper } from '../mappers/TaskMapper.js';
import { AuthManager } from '../auth/auth-manager.js';

export class SupabaseTaskRepository {
	constructor(private supabase: SupabaseClient<Database>) {}

	async getTasks(_projectId?: string): Promise<Task[]> {
		// Get the current context to determine briefId
		const authManager = AuthManager.getInstance();
		const context = authManager.getContext();

		if (!context || !context.briefId) {
			throw new Error(
				'No brief selected. Please select a brief first using: tm context brief'
			);
		}

		// Get all tasks for the brief using the exact query structure
		const { data: tasks, error } = await this.supabase
			.from('tasks')
			.select(`
				*,
				document:document_id (
					id,
					document_name,
					title,
					description
				)
			`)
			.eq('brief_id', context.briefId)
			.order('position', { ascending: true })
			.order('subtask_position', { ascending: true })
			.order('created_at', { ascending: true });

		if (error) {
			throw new Error(`Failed to fetch tasks: ${error.message}`);
		}

		if (!tasks || tasks.length === 0) {
			return [];
		}

		// Get all dependencies for these tasks
		const taskIds = tasks.map((t: any) => t.id);
		const { data: depsData, error: depsError } = await this.supabase
			.from('task_dependencies')
			.select('*')
			.in('task_id', taskIds);

		if (depsError) {
			throw new Error(
				`Failed to fetch task dependencies: ${depsError.message}`
			);
		}

		// Use mapper to convert to internal format
		return TaskMapper.mapDatabaseTasksToTasks(tasks, depsData || []);
	}

	async getTask(accountId: string, taskId: string): Promise<Task | null> {
		const { data, error } = await this.supabase
			.from('tasks')
			.select('*')
			.eq('account_id', accountId)
			.eq('id', taskId)
			.single();

		if (error) {
			if (error.code === 'PGRST116') {
				return null; // Not found
			}
			throw new Error(`Failed to fetch task: ${error.message}`);
		}

		// Get dependencies for this task
		const { data: depsData } = await this.supabase
			.from('task_dependencies')
			.select('*')
			.eq('task_id', taskId);

		// Get subtasks if this is a parent task
		const { data: subtasksData } = await this.supabase
			.from('tasks')
			.select('*')
			.eq('parent_task_id', taskId)
			.order('subtask_position', { ascending: true });

		// Create dependency map
		const dependenciesByTaskId = new Map<string, string[]>();
		if (depsData) {
			dependenciesByTaskId.set(
				taskId,
				depsData.map(
					(d: Database['public']['Tables']['task_dependencies']['Row']) =>
						d.depends_on_task_id
				)
			);
		}

		// Use mapper to convert single task
		return TaskMapper.mapDatabaseTaskToTask(
			data,
			subtasksData || [],
			dependenciesByTaskId
		);
	}
}
@@ -1,36 +0,0 @@
import { Task, TaskTag } from '../types/index.js';

export interface TaskRepository {
	// Task operations
	getTasks(projectId: string): Promise<Task[]>;
	getTask(projectId: string, taskId: string): Promise<Task | null>;
	createTask(projectId: string, task: Omit<Task, 'id'>): Promise<Task>;
	updateTask(
		projectId: string,
		taskId: string,
		updates: Partial<Task>
	): Promise<Task>;
	deleteTask(projectId: string, taskId: string): Promise<void>;

	// Tag operations
	getTags(projectId: string): Promise<TaskTag[]>;
	getTag(projectId: string, tagName: string): Promise<TaskTag | null>;
	createTag(projectId: string, tag: TaskTag): Promise<TaskTag>;
	updateTag(
		projectId: string,
		tagName: string,
		updates: Partial<TaskTag>
	): Promise<TaskTag>;
	deleteTag(projectId: string, tagName: string): Promise<void>;

	// Bulk operations
	bulkCreateTasks(
		projectId: string,
		tasks: Omit<Task, 'id'>[]
	): Promise<Task[]>;
	bulkUpdateTasks(
		projectId: string,
		updates: Array<{ id: string; updates: Partial<Task> }>
	): Promise<Task[]>;
	bulkDeleteTasks(projectId: string, taskIds: string[]): Promise<void>;
}
@@ -4,5 +4,3 @@
 */

export { TaskService } from './task-service.js';
export { OrganizationService } from './organization.service.js';
export type { Organization, Brief } from './organization.service.js';

@@ -1,363 +0,0 @@
/**
 * @fileoverview Organization and Brief management service
 * Handles fetching and managing organizations and briefs from the API
 */

import { SupabaseClient } from '@supabase/supabase-js';
import { Database } from '../types/database.types.js';
import { TaskMasterError, ERROR_CODES } from '../errors/task-master-error.js';
import { getLogger } from '../logger/index.js';

/**
 * Organization data structure
 */
export interface Organization {
	id: string;
	name: string;
	slug: string;
}

/**
 * Brief data structure
 */
export interface Brief {
	id: string;
	accountId: string;
	documentId: string;
	status: string;
	createdAt: string;
	updatedAt: string;
}

/**
 * Task data structure from the remote database
 */
export interface RemoteTask {
	id: string;
	briefId: string;
	documentId: string;
	position: number | null;
	subtaskPosition: number | null;
	status: string;
	createdAt: string;
	updatedAt: string;
	// Document details from join
	document?: {
		id: string;
		document_name: string;
		title: string;
		description: string;
	};
}

/**
 * Service for managing organizations and briefs
 */
export class OrganizationService {
	private logger = getLogger('OrganizationService');

	constructor(private supabaseClient: SupabaseClient<Database>) {}

	/**
	 * Get all organizations for the authenticated user
	 */
	async getOrganizations(): Promise<Organization[]> {
		try {
			// The user is already authenticated via the Authorization header
			// Query the user_accounts view/table (filtered by RLS for current user)
			const { data, error } = await this.supabaseClient
				.from('user_accounts')
				.select(`
					id,
					name,
					slug
				`);

			if (error) {
				throw new TaskMasterError(
					`Failed to fetch organizations: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getOrganizations' },
					error
				);
			}

			if (!data || data.length === 0) {
				this.logger.debug('No organizations found for user');
				return [];
			}

			// Map to our Organization interface
			return data.map((org) => ({
				id: org.id ?? '',
				name: org.name ?? '',
				slug: org.slug ?? org.id ?? '' // Use ID as fallback if slug is null
			}));
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch organizations',
				ERROR_CODES.API_ERROR,
				{ operation: 'getOrganizations' },
				error as Error
			);
		}
	}

	/**
	 * Get a specific organization by ID
	 */
	async getOrganization(orgId: string): Promise<Organization | null> {
		try {
			const { data, error } = await this.supabaseClient
				.from('accounts')
				.select(`
					id,
					name,
					slug
				`)
				.eq('id', orgId)
				.single();

			if (error) {
				if (error.code === 'PGRST116') {
					// No rows found
					return null;
				}
				throw new TaskMasterError(
					`Failed to fetch organization: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getOrganization', orgId },
					error
				);
			}

			if (!data) {
				return null;
			}

			const accountData =
				data as Database['public']['Tables']['accounts']['Row'];
			return {
				id: accountData.id,
				name: accountData.name,
				slug: accountData.slug || accountData.id
			};
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch organization',
				ERROR_CODES.API_ERROR,
				{ operation: 'getOrganization', orgId },
				error as Error
			);
		}
	}

	/**
	 * Get all briefs for a specific organization
	 */
	async getBriefs(orgId: string): Promise<Brief[]> {
		try {
			const { data, error } = await this.supabaseClient
				.from('brief')
				.select(`
					id,
					account_id,
					document_id,
					status,
					created_at,
					updated_at
				`)
				.eq('account_id', orgId);

			if (error) {
				throw new TaskMasterError(
					`Failed to fetch briefs: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getBriefs', orgId },
					error
				);
			}

			if (!data || data.length === 0) {
				this.logger.debug(`No briefs found for organization ${orgId}`);
				return [];
			}

			// Map to our Brief interface
			return data.map((brief: any) => ({
				id: brief.id,
				accountId: brief.account_id,
				documentId: brief.document_id,
				status: brief.status,
				createdAt: brief.created_at,
				updatedAt: brief.updated_at
			}));
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch briefs',
				ERROR_CODES.API_ERROR,
				{ operation: 'getBriefs', orgId },
				error as Error
			);
		}
	}

	/**
	 * Get a specific brief by ID
	 */
	async getBrief(briefId: string): Promise<Brief | null> {
		try {
			const { data, error } = await this.supabaseClient
				.from('brief')
				.select(`
					id,
					account_id,
					document_id,
					status,
					created_at,
					updated_at
				`)
				.eq('id', briefId)
				.single();

			if (error) {
				if (error.code === 'PGRST116') {
					// No rows found
					return null;
				}
				throw new TaskMasterError(
					`Failed to fetch brief: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getBrief', briefId },
					error
				);
			}

			if (!data) {
				return null;
			}

			const briefData = data as any;
			return {
				id: briefData.id,
				accountId: briefData.account_id,
				documentId: briefData.document_id,
				status: briefData.status,
				createdAt: briefData.created_at,
				updatedAt: briefData.updated_at
			};
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch brief',
				ERROR_CODES.API_ERROR,
				{ operation: 'getBrief', briefId },
				error as Error
			);
		}
	}

	/**
	 * Validate that a user has access to an organization
	 */
	async validateOrgAccess(orgId: string): Promise<boolean> {
		try {
			const org = await this.getOrganization(orgId);
			return org !== null;
		} catch (error) {
			this.logger.error(`Failed to validate org access: ${error}`);
			return false;
		}
	}

	/**
	 * Validate that a user has access to a brief
	 */
	async validateBriefAccess(briefId: string): Promise<boolean> {
		try {
			const brief = await this.getBrief(briefId);
			return brief !== null;
		} catch (error) {
			this.logger.error(`Failed to validate brief access: ${error}`);
			return false;
		}
	}

	/**
	 * Get all tasks for a specific brief
	 */
	async getTasks(briefId: string): Promise<RemoteTask[]> {
		try {
			const { data, error } = await this.supabaseClient
				.from('tasks')
				.select(`
					*,
					document:document_id (
						id,
						document_name,
						title,
						description
					)
				`)
				.eq('brief_id', briefId)
				.order('position', { ascending: true })
				.order('subtask_position', { ascending: true })
				.order('created_at', { ascending: true });

			if (error) {
				throw new TaskMasterError(
					`Failed to fetch tasks: ${error.message}`,
					ERROR_CODES.API_ERROR,
					{ operation: 'getTasks', briefId },
					error
				);
			}

			if (!data || data.length === 0) {
				this.logger.debug(`No tasks found for brief ${briefId}`);
				return [];
			}

			// Map to our RemoteTask interface
			return data.map((task: any) => ({
				id: task.id,
				briefId: task.brief_id,
				documentId: task.document_id,
				position: task.position,
				subtaskPosition: task.subtask_position,
				status: task.status,
				createdAt: task.created_at,
				updatedAt: task.updated_at,
				document: task.document
					? {
							id: task.document.id,
							document_name: task.document.document_name,
							title: task.document.title,
							description: task.document.description
						}
					: undefined
			}));
		} catch (error) {
			if (error instanceof TaskMasterError) {
				throw error;
			}
			throw new TaskMasterError(
				'Failed to fetch tasks',
				ERROR_CODES.API_ERROR,
				{ operation: 'getTasks', briefId },
				error as Error
			);
		}
	}
}
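End-to-end, the service removed here composed like this (illustrative only; client construction is elided and empty-result handling is omitted):

// Illustrative sketch - not part of this changeset.
async function listEverything(service: OrganizationService) {
	const orgs = await service.getOrganizations();
	const briefs = await service.getBriefs(orgs[0].id);
	return service.getTasks(briefs[0].id);
}
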
@@ -3,12 +3,7 @@
 * Core service for task operations - handles business logic between storage and API
 */

import type {
	Task,
	TaskFilter,
	TaskStatus,
	StorageType
} from '../types/index.js';
import type { Task, TaskFilter, TaskStatus } from '../types/index.js';
import type { IStorage } from '../interfaces/storage.interface.js';
import { ConfigManager } from '../config/config-manager.js';
import { StorageFactory } from '../storage/storage-factory.js';
@@ -27,8 +22,8 @@ export interface TaskListResult {
	filtered: number;
	/** The tag these tasks belong to (only present if explicitly provided) */
	tag?: string;
	/** Storage type being used */
	storageType: StorageType;
	/** Storage type being used - includes 'auto' for automatic detection */
	storageType: 'file' | 'api' | 'auto';
}

/**
@@ -118,7 +113,7 @@ export class TaskService {
				total: rawTasks.length,
				filtered: filteredEntities.length,
				tag: options.tag, // Only include tag if explicitly provided
				storageType: this.getStorageType()
				storageType: this.configManager.getStorageConfig().type
			};
		} catch (error) {
			throw new TaskMasterError(
@@ -171,7 +166,7 @@ export class TaskService {
		byStatus: Record<TaskStatus, number>;
		withSubtasks: number;
		blocked: number;
		storageType: StorageType;
		storageType: 'file' | 'api' | 'auto';
	}> {
		const result = await this.getTaskList({
			tag,
@@ -339,12 +334,8 @@ export class TaskService {
	/**
	 * Get current storage type
	 */
	getStorageType(): StorageType {
		// Prefer the runtime storage type if available to avoid exposing 'auto'
		const s = this.storage as { getType?: () => 'file' | 'api' } | null;
		const runtimeType = s?.getType?.();
		return (runtimeType ??
			this.configManager.getStorageConfig().type) as StorageType;
	getStorageType(): 'file' | 'api' | 'auto' {
		return this.configManager.getStorageConfig().type;
	}

	/**

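In miniature, the behavioural change above: the old getter preferred the runtime storage's concrete type, while the new one returns the configured type verbatim, so callers may now see 'auto'. A caller-side normalisation is sketched below; the fallback rule is an assumption, not taken from this diff.

// Sketch under assumptions - the real resolution may differ.
function effectiveStorageType(
	service: TaskService,
	isAuthenticated: boolean
): 'file' | 'api' {
	const type = service.getStorageType(); // may now return 'auto'
	return type === 'auto' ? (isAuthenticated ? 'api' : 'file') : type;
}
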
@@ -1,29 +1,27 @@
|
||||
/**
|
||||
* @fileoverview API-based storage implementation using repository pattern
|
||||
* This provides storage via repository abstraction for flexibility
|
||||
* @fileoverview API-based storage implementation for Hamster integration
|
||||
* This provides storage via REST API instead of local file system
|
||||
*/
|
||||
|
||||
import type {
|
||||
IStorage,
|
||||
StorageStats
|
||||
} from '../interfaces/storage.interface.js';
|
||||
import type { Task, TaskMetadata, TaskTag } from '../types/index.js';
|
||||
import type { Task, TaskMetadata } from '../types/index.js';
|
||||
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
||||
import { TaskRepository } from '../repositories/task-repository.interface.js';
|
||||
import { SupabaseTaskRepository } from '../repositories/supabase-task-repository.js';
|
||||
import { SupabaseClient } from '@supabase/supabase-js';
|
||||
import { AuthManager } from '../auth/auth-manager.js';
|
||||
|
||||
/**
|
||||
* API storage configuration
|
||||
*/
|
||||
export interface ApiStorageConfig {
|
||||
/** Supabase client instance */
|
||||
supabaseClient?: SupabaseClient;
|
||||
/** Custom repository implementation */
|
||||
repository?: TaskRepository;
|
||||
/** Project ID for scoping */
|
||||
projectId: string;
|
||||
/** API endpoint base URL */
|
||||
endpoint: string;
|
||||
/** Access token for authentication */
|
||||
accessToken: string;
|
||||
/** Optional project ID */
|
||||
projectId?: string;
|
||||
/** Request timeout in milliseconds */
|
||||
timeout?: number;
|
||||
/** Enable request retries */
|
||||
enableRetry?: boolean;
|
||||
/** Maximum retry attempts */
|
||||
@@ -31,58 +29,64 @@ export interface ApiStorageConfig {
|
||||
}
|
||||
|
||||
/**
|
||||
* ApiStorage implementation using repository pattern
|
||||
* Provides flexibility to swap between different backend implementations
|
||||
* API response wrapper
|
||||
*/
|
||||
interface ApiResponse<T> {
|
||||
success: boolean;
|
||||
data?: T;
|
||||
error?: string;
|
||||
message?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* ApiStorage implementation for Hamster integration
|
||||
* Fetches and stores tasks via REST API
|
||||
*/
|
||||
export class ApiStorage implements IStorage {
|
||||
private readonly repository: TaskRepository;
|
||||
private readonly projectId: string;
|
||||
private readonly enableRetry: boolean;
|
||||
private readonly maxRetries: number;
|
||||
private readonly config: Required<ApiStorageConfig>;
|
||||
private initialized = false;
|
||||
private tagsCache: Map<string, TaskTag> = new Map();
|
||||
|
||||
constructor(config: ApiStorageConfig) {
|
||||
this.validateConfig(config);
|
||||
|
||||
// Use provided repository or create Supabase repository
|
||||
if (config.repository) {
|
||||
this.repository = config.repository;
|
||||
} else if (config.supabaseClient) {
|
||||
// TODO: SupabaseTaskRepository doesn't implement all TaskRepository methods yet
|
||||
// Cast for now until full implementation is complete
|
||||
this.repository = new SupabaseTaskRepository(
|
||||
config.supabaseClient
|
||||
) as unknown as TaskRepository;
|
||||
} else {
|
||||
throw new TaskMasterError(
|
||||
'Either repository or supabaseClient must be provided',
|
||||
ERROR_CODES.MISSING_CONFIGURATION
|
||||
);
|
||||
}
|
||||
|
||||
this.projectId = config.projectId;
|
||||
this.enableRetry = config.enableRetry ?? true;
|
||||
this.maxRetries = config.maxRetries ?? 3;
|
||||
this.config = {
|
||||
endpoint: config.endpoint.replace(/\/$/, ''), // Remove trailing slash
|
||||
accessToken: config.accessToken,
|
||||
projectId: config.projectId || 'default',
|
||||
timeout: config.timeout || 30000,
|
||||
enableRetry: config.enableRetry ?? true,
|
||||
maxRetries: config.maxRetries || 3
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate API storage configuration
|
||||
*/
|
||||
private validateConfig(config: ApiStorageConfig): void {
|
||||
if (!config.projectId) {
|
||||
if (!config.endpoint) {
|
||||
throw new TaskMasterError(
|
||||
'Project ID is required for API storage',
|
||||
'API endpoint is required for API storage',
|
||||
ERROR_CODES.MISSING_CONFIGURATION
|
||||
);
|
||||
}
|
||||
|
||||
if (!config.repository && !config.supabaseClient) {
|
||||
if (!config.accessToken) {
|
||||
throw new TaskMasterError(
|
||||
'Either repository or supabaseClient must be provided',
|
||||
'Access token is required for API storage',
|
||||
ERROR_CODES.MISSING_CONFIGURATION
|
||||
);
|
||||
}
|
||||
|
||||
// Validate endpoint URL format
|
||||
try {
|
||||
new URL(config.endpoint);
|
||||
} catch {
|
||||
throw new TaskMasterError(
|
||||
'Invalid API endpoint URL',
|
||||
ERROR_CODES.INVALID_INPUT,
|
||||
{ endpoint: config.endpoint }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -92,8 +96,8 @@ export class ApiStorage implements IStorage {
|
||||
if (this.initialized) return;
|
||||
|
||||
try {
|
||||
// Load initial tags
|
||||
await this.loadTagsIntoCache();
|
||||
// Verify API connectivity
|
||||
await this.verifyConnection();
|
||||
this.initialized = true;
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
@@ -106,71 +110,39 @@ export class ApiStorage implements IStorage {
|
||||
}
|
||||
|
||||
/**
|
||||
* Load tags into cache
|
||||
* In our API-based system, "tags" represent briefs
|
||||
* Verify API connection
|
||||
*/
|
||||
private async loadTagsIntoCache(): Promise<void> {
|
||||
try {
|
||||
const authManager = AuthManager.getInstance();
|
||||
const context = authManager.getContext();
|
||||
private async verifyConnection(): Promise<void> {
|
||||
const response = await this.makeRequest<{ status: string }>('/health');
|
||||
|
||||
// If we have a selected brief, create a virtual "tag" for it
|
||||
if (context?.briefId) {
|
||||
// Create a virtual tag representing the current brief
|
||||
const briefTag: TaskTag = {
|
||||
name: context.briefId,
|
||||
tasks: [], // Will be populated when tasks are loaded
|
||||
metadata: {
|
||||
briefId: context.briefId,
|
||||
briefName: context.briefName,
|
||||
organizationId: context.orgId
|
||||
}
|
||||
};
|
||||
|
||||
this.tagsCache.clear();
|
||||
this.tagsCache.set(context.briefId, briefTag);
|
||||
}
|
||||
} catch (error) {
|
||||
// If no brief is selected, that's okay - user needs to select one first
|
||||
console.debug('No brief selected, starting with empty cache');
|
||||
if (!response.success) {
|
||||
throw new Error(`API health check failed: ${response.error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load tasks from API
|
||||
* In our system, the tag parameter represents a brief ID
|
||||
*/
|
||||
async loadTasks(tag?: string): Promise<Task[]> {
|
||||
await this.ensureInitialized();
|
||||
|
||||
try {
|
||||
const authManager = AuthManager.getInstance();
|
||||
const context = authManager.getContext();
|
||||
const endpoint = tag
|
||||
? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
|
||||
: `/projects/${this.config.projectId}/tasks`;
|
||||
|
||||
// If no brief is selected in context, throw an error
|
||||
if (!context?.briefId) {
|
||||
throw new Error(
|
||||
'No brief selected. Please select a brief first using: tm context brief <brief-id>'
|
||||
);
|
||||
const response = await this.makeRequest<{ tasks: Task[] }>(endpoint);
|
||||
|
||||
if (!response.success) {
|
||||
throw new Error(response.error || 'Failed to load tasks');
|
||||
}
|
||||
|
||||
// Load tasks from the current brief context
|
||||
const tasks = await this.retryOperation(() =>
|
||||
this.repository.getTasks(this.projectId)
|
||||
);
|
||||
|
||||
// Update the tag cache with the loaded task IDs
|
||||
const briefTag = this.tagsCache.get(context.briefId);
|
||||
if (briefTag) {
|
||||
briefTag.tasks = tasks.map((task) => task.id);
|
||||
}
|
||||
|
||||
return tasks;
|
||||
return response.data?.tasks || [];
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to load tasks from API',
|
||||
ERROR_CODES.STORAGE_ERROR,
|
||||
{ operation: 'loadTasks', tag, context: 'brief-based loading' },
|
||||
{ operation: 'loadTasks', tag },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
@@ -183,29 +155,15 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			if (tag) {
				// Update tag with task IDs
				const tagData = this.tagsCache.get(tag) || {
					name: tag,
					tasks: [],
					metadata: {}
				};
				tagData.tasks = tasks.map((t) => t.id);
			const endpoint = tag
				? `/projects/${this.config.projectId}/tasks?tag=${encodeURIComponent(tag)}`
				: `/projects/${this.config.projectId}/tasks`;

				// Save or update tag
				if (this.tagsCache.has(tag)) {
					await this.repository.updateTag(this.projectId, tag, tagData);
				} else {
					await this.repository.createTag(this.projectId, tagData);
				}
			const response = await this.makeRequest(endpoint, 'PUT', { tasks });

				this.tagsCache.set(tag, tagData);
			if (!response.success) {
				throw new Error(response.error || 'Failed to save tasks');
			}

			// Save tasks using bulk operation
			await this.retryOperation(() =>
				this.repository.bulkCreateTasks(this.projectId, tasks)
			);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to save tasks to API',
@@ -223,17 +181,20 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			if (tag) {
				// Check if task is in tag
				const tagData = this.tagsCache.get(tag);
				if (!tagData || !tagData.tasks.includes(taskId)) {
			const endpoint = tag
				? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
				: `/projects/${this.config.projectId}/tasks/${taskId}`;

			const response = await this.makeRequest<{ task: Task }>(endpoint);

			if (!response.success) {
				if (response.error?.includes('not found')) {
					return null;
				}
				throw new Error(response.error || 'Failed to load task');
			}

			return await this.retryOperation(() =>
				this.repository.getTask(this.projectId, taskId)
			);
			return response.data?.task || null;
		} catch (error) {
			throw new TaskMasterError(
				'Failed to load task from API',
@@ -251,26 +212,14 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			// Check if task exists
			const existing = await this.repository.getTask(this.projectId, task.id);
			const endpoint = tag
				? `/projects/${this.config.projectId}/tasks/${task.id}?tag=${encodeURIComponent(tag)}`
				: `/projects/${this.config.projectId}/tasks/${task.id}`;

			if (existing) {
				await this.retryOperation(() =>
					this.repository.updateTask(this.projectId, task.id, task)
				);
			} else {
				await this.retryOperation(() =>
					this.repository.createTask(this.projectId, task)
				);
			}
			const response = await this.makeRequest(endpoint, 'PUT', { task });

			// Update tag if specified
			if (tag) {
				const tagData = this.tagsCache.get(tag);
				if (tagData && !tagData.tasks.includes(task.id)) {
					tagData.tasks.push(task.id);
					await this.repository.updateTag(this.projectId, tag, tagData);
				}
			if (!response.success) {
				throw new Error(response.error || 'Failed to save task');
			}
		} catch (error) {
			throw new TaskMasterError(
@@ -289,17 +238,14 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			await this.retryOperation(() =>
				this.repository.deleteTask(this.projectId, taskId)
			);
			const endpoint = tag
				? `/projects/${this.config.projectId}/tasks/${taskId}?tag=${encodeURIComponent(tag)}`
				: `/projects/${this.config.projectId}/tasks/${taskId}`;

			// Remove from tag if specified
			if (tag) {
				const tagData = this.tagsCache.get(tag);
				if (tagData) {
					tagData.tasks = tagData.tasks.filter((id) => id !== taskId);
					await this.repository.updateTag(this.projectId, tag, tagData);
				}
			const response = await this.makeRequest(endpoint, 'DELETE');

			if (!response.success) {
				throw new Error(response.error || 'Failed to delete task');
			}
		} catch (error) {
			throw new TaskMasterError(
@@ -312,24 +258,21 @@ export class ApiStorage implements IStorage {
	}

	/**
	 * List available tags (briefs in our system)
	 * List available tags
	 */
	async listTags(): Promise<string[]> {
		await this.ensureInitialized();

		try {
			const authManager = AuthManager.getInstance();
			const context = authManager.getContext();
			const response = await this.makeRequest<{ tags: string[] }>(
				`/projects/${this.config.projectId}/tags`
			);

			// In our API-based system, we only have one "tag" at a time - the current brief
			if (context?.briefId) {
				// Ensure the current brief is in our cache
				await this.loadTagsIntoCache();
				return [context.briefId];
			if (!response.success) {
				throw new Error(response.error || 'Failed to list tags');
			}

			// No brief selected, return empty array
			return [];
			return response.data?.tags || [];
		} catch (error) {
			throw new TaskMasterError(
				'Failed to list tags from API',
@@ -347,15 +290,19 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			if (tag) {
				const tagData = this.tagsCache.get(tag);
				return (tagData?.metadata as TaskMetadata) || null;
			const endpoint = tag
				? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
				: `/projects/${this.config.projectId}/metadata`;

			const response = await this.makeRequest<{ metadata: TaskMetadata }>(
				endpoint
			);

			if (!response.success) {
				return null;
			}

			// Return global metadata if no tag specified
			// This could be stored in a special system tag
			const systemTag = await this.repository.getTag(this.projectId, '_system');
			return (systemTag?.metadata as TaskMetadata) || null;
			return response.data?.metadata || null;
		} catch (error) {
			throw new TaskMasterError(
				'Failed to load metadata from API',
@@ -373,38 +320,14 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			if (tag) {
				const tagData = this.tagsCache.get(tag) || {
					name: tag,
					tasks: [],
					metadata: {}
				};
				tagData.metadata = metadata as any;
			const endpoint = tag
				? `/projects/${this.config.projectId}/metadata?tag=${encodeURIComponent(tag)}`
				: `/projects/${this.config.projectId}/metadata`;

				if (this.tagsCache.has(tag)) {
					await this.repository.updateTag(this.projectId, tag, tagData);
				} else {
					await this.repository.createTag(this.projectId, tagData);
				}
			const response = await this.makeRequest(endpoint, 'PUT', { metadata });

				this.tagsCache.set(tag, tagData);
			} else {
				// Save to system tag
				const systemTag: TaskTag = {
					name: '_system',
					tasks: [],
					metadata: metadata as any
				};

				const existing = await this.repository.getTag(
					this.projectId,
					'_system'
				);
				if (existing) {
					await this.repository.updateTag(this.projectId, '_system', systemTag);
				} else {
					await this.repository.createTag(this.projectId, systemTag);
				}
			if (!response.success) {
				throw new Error(response.error || 'Failed to save metadata');
			}
		} catch (error) {
			throw new TaskMasterError(
@@ -435,30 +358,14 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			// Use bulk create - repository should handle duplicates
			await this.retryOperation(() =>
				this.repository.bulkCreateTasks(this.projectId, tasks)
			);
			// First load existing tasks
			const existingTasks = await this.loadTasks(tag);

			// Update tag if specified
			if (tag) {
				const tagData = this.tagsCache.get(tag) || {
					name: tag,
					tasks: [],
					metadata: {}
				};
			// Append new tasks
			const allTasks = [...existingTasks, ...tasks];

				const newTaskIds = tasks.map((t) => t.id);
				tagData.tasks = [...new Set([...tagData.tasks, ...newTaskIds])];

				if (this.tagsCache.has(tag)) {
					await this.repository.updateTag(this.projectId, tag, tagData);
				} else {
					await this.repository.createTag(this.projectId, tagData);
				}

				this.tagsCache.set(tag, tagData);
			}
			// Save all tasks
			await this.saveTasks(allTasks, tag);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to append tasks to API',
@@ -480,9 +387,18 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			await this.retryOperation(() =>
				this.repository.updateTask(this.projectId, taskId, updates)
			);
			// Load the task
			const task = await this.loadTask(taskId, tag);

			if (!task) {
				throw new Error(`Task ${taskId} not found`);
			}

			// Merge updates
			const updatedTask = { ...task, ...updates, id: taskId };

			// Save updated task
			await this.saveTask(updatedTask, tag);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to update task via API',
@@ -507,11 +423,14 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			await this.retryOperation(() =>
				this.repository.deleteTag(this.projectId, tag)
			const response = await this.makeRequest(
				`/projects/${this.config.projectId}/tags/${encodeURIComponent(tag)}`,
				'DELETE'
			);

			this.tagsCache.delete(tag);
			if (!response.success) {
				throw new Error(response.error || 'Failed to delete tag');
			}
		} catch (error) {
			throw new TaskMasterError(
				'Failed to delete tag via API',
@@ -529,21 +448,15 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			const tagData = this.tagsCache.get(oldTag);
			if (!tagData) {
				throw new Error(`Tag ${oldTag} not found`);
			const response = await this.makeRequest(
				`/projects/${this.config.projectId}/tags/${encodeURIComponent(oldTag)}/rename`,
				'POST',
				{ newTag }
			);

			if (!response.success) {
				throw new Error(response.error || 'Failed to rename tag');
			}

			// Create new tag with same data
			const newTagData = { ...tagData, name: newTag };
			await this.repository.createTag(this.projectId, newTagData);

			// Delete old tag
			await this.repository.deleteTag(this.projectId, oldTag);

			// Update cache
			this.tagsCache.delete(oldTag);
			this.tagsCache.set(newTag, newTagData);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to rename tag via API',
@@ -561,17 +474,15 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			const sourceData = this.tagsCache.get(sourceTag);
			if (!sourceData) {
				throw new Error(`Source tag ${sourceTag} not found`);
			const response = await this.makeRequest(
				`/projects/${this.config.projectId}/tags/${encodeURIComponent(sourceTag)}/copy`,
				'POST',
				{ targetTag }
			);

			if (!response.success) {
				throw new Error(response.error || 'Failed to copy tag');
			}

			// Create new tag with copied data
			const targetData = { ...sourceData, name: targetTag };
			await this.repository.createTag(this.projectId, targetData);

			// Update cache
			this.tagsCache.set(targetTag, targetData);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to copy tag via API',
@@ -589,22 +500,24 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			const tasks = await this.repository.getTasks(this.projectId);
			const tags = await this.repository.getTags(this.projectId);
			const response = await this.makeRequest<{
				stats: StorageStats;
			}>(`/projects/${this.config.projectId}/stats`);

			const tagStats = tags.map((tag) => ({
				tag: tag.name,
				taskCount: tag.tasks.length,
				lastModified: new Date().toISOString() // TODO: Get actual last modified from tag data
			}));
			if (!response.success) {
				throw new Error(response.error || 'Failed to get stats');
			}

			return {
				totalTasks: tasks.length,
				totalTags: tags.length,
				storageSize: 0, // Not applicable for API storage
				lastModified: new Date().toISOString(),
				tagStats
			};
			// Return stats or default values
			return (
				response.data?.stats || {
					totalTasks: 0,
					totalTags: 0,
					storageSize: 0,
					lastModified: new Date().toISOString(),
					tagStats: []
				}
			);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to get stats from API',
@@ -622,15 +535,16 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			// Export all data
			await this.repository.getTasks(this.projectId);
			await this.repository.getTags(this.projectId);
			const response = await this.makeRequest<{ backupId: string }>(
				`/projects/${this.config.projectId}/backup`,
				'POST'
			);

			// TODO: In a real implementation, this would:
			// 1. Create backup data structure with tasks and tags
			// 2. Save the backup to a storage service
			// For now, return a backup identifier
			return `backup-${this.projectId}-${Date.now()}`;
			if (!response.success) {
				throw new Error(response.error || 'Failed to create backup');
			}

			return response.data?.backupId || 'unknown';
		} catch (error) {
			throw new TaskMasterError(
				'Failed to create backup via API',
@@ -644,16 +558,27 @@ export class ApiStorage implements IStorage {
	/**
	 * Restore from backup
	 */
	async restore(backupId: string): Promise<void> {
	async restore(backupPath: string): Promise<void> {
		await this.ensureInitialized();

		// This would restore from a backup service
		// Implementation depends on backup strategy
		throw new TaskMasterError(
			'Restore not implemented for API storage',
			ERROR_CODES.NOT_IMPLEMENTED,
			{ operation: 'restore', backupId }
		);
		try {
			const response = await this.makeRequest(
				`/projects/${this.config.projectId}/restore`,
				'POST',
				{ backupId: backupPath }
			);

			if (!response.success) {
				throw new Error(response.error || 'Failed to restore backup');
			}
		} catch (error) {
			throw new TaskMasterError(
				'Failed to restore backup via API',
				ERROR_CODES.STORAGE_ERROR,
				{ operation: 'restore', backupPath },
				error as Error
			);
		}
	}

	/**
@@ -663,23 +588,14 @@ export class ApiStorage implements IStorage {
		await this.ensureInitialized();

		try {
			// Delete all tasks
			const tasks = await this.repository.getTasks(this.projectId);
			if (tasks.length > 0) {
				await this.repository.bulkDeleteTasks(
					this.projectId,
					tasks.map((t) => t.id)
				);
			}
			const response = await this.makeRequest(
				`/projects/${this.config.projectId}/clear`,
				'POST'
			);

			// Delete all tags
			const tags = await this.repository.getTags(this.projectId);
			for (const tag of tags) {
				await this.repository.deleteTag(this.projectId, tag.name);
			if (!response.success) {
				throw new Error(response.error || 'Failed to clear data');
			}

			// Clear cache
			this.tagsCache.clear();
		} catch (error) {
			throw new TaskMasterError(
				'Failed to clear data via API',
@@ -695,7 +611,6 @@ export class ApiStorage implements IStorage {
	 */
	async close(): Promise<void> {
		this.initialized = false;
		this.tagsCache.clear();
	}

	/**
@@ -708,21 +623,102 @@ export class ApiStorage implements IStorage {
	}

	/**
	 * Retry an operation with exponential backoff
	 * Make HTTP request to API
	 */
	private async retryOperation<T>(
		operation: () => Promise<T>,
		attempt: number = 1
	): Promise<T> {
	private async makeRequest<T>(
		path: string,
		method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',
		body?: unknown
	): Promise<ApiResponse<T>> {
		const url = `${this.config.endpoint}${path}`;
		const controller = new AbortController();
		const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);

		try {
			return await operation();
		} catch (error) {
			if (this.enableRetry && attempt < this.maxRetries) {
				const delay = Math.pow(2, attempt) * 1000;
				await new Promise((resolve) => setTimeout(resolve, delay));
				return this.retryOperation(operation, attempt + 1);
			const options: RequestInit = {
				method,
				headers: {
					Authorization: `Bearer ${this.config.accessToken}`,
					'Content-Type': 'application/json',
					Accept: 'application/json'
				},
				signal: controller.signal
			};

			if (body && (method === 'POST' || method === 'PUT')) {
				options.body = JSON.stringify(body);
			}
			throw error;

			let lastError: Error | null = null;
			let attempt = 0;

			while (attempt < this.config.maxRetries) {
				attempt++;

				try {
					const response = await fetch(url, options);
					const data = await response.json();

					if (response.ok) {
						return { success: true, data: data as T };
					}

					// Handle specific error codes
					if (response.status === 401) {
						return {
							success: false,
							error: 'Authentication failed - check access token'
						};
					}

					if (response.status === 404) {
						return {
							success: false,
							error: 'Resource not found'
						};
					}

					if (response.status === 429) {
						// Rate limited - retry with backoff
						if (this.config.enableRetry && attempt < this.config.maxRetries) {
							await this.delay(Math.pow(2, attempt) * 1000);
							continue;
						}
					}

					const errorData = data as any;
					return {
						success: false,
						error:
							errorData.error ||
							errorData.message ||
							`HTTP ${response.status}: ${response.statusText}`
					};
				} catch (error) {
					lastError = error as Error;

					// Retry on network errors
					if (this.config.enableRetry && attempt < this.config.maxRetries) {
						await this.delay(Math.pow(2, attempt) * 1000);
						continue;
					}
				}
			}

			// All retries exhausted
			return {
				success: false,
				error: lastError?.message || 'Request failed after retries'
			};
		} finally {
			clearTimeout(timeoutId);
		}
	}

	/**
	 * Delay helper for retries
	 */
	private delay(ms: number): Promise<void> {
		return new Promise((resolve) => setTimeout(resolve, ms));
	}
}
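The retry behaviour added above (429s and network errors, with a delay of 2^attempt seconds, i.e. 2s, 4s, 8s for attempts 1–3) can be read as this standalone sketch; the names here are illustrative, not from the changeset.

```ts
// Minimal sketch of the same exponential backoff schedule makeRequest uses.
async function withBackoff<T>(
	op: () => Promise<T>,
	maxRetries = 3
): Promise<T> {
	let lastError: unknown;
	for (let attempt = 1; attempt <= maxRetries; attempt++) {
		try {
			return await op();
		} catch (error) {
			lastError = error;
			if (attempt < maxRetries) {
				// 2^attempt seconds: 2s after the first failure, then 4s, 8s, ...
				await new Promise((resolve) =>
					setTimeout(resolve, Math.pow(2, attempt) * 1000)
				);
			}
		}
	}
	throw lastError;
}
```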
@@ -13,7 +13,6 @@ import { ApiStorage } from './api-storage.js';
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
import { AuthManager } from '../auth/auth-manager.js';
import { getLogger } from '../logger/index.js';
import { SupabaseAuthClient } from '../clients/supabase-client.js';

/**
 * Factory for creating storage implementations based on configuration

@@ -149,13 +148,29 @@ export class StorageFactory {
	 * Create API storage implementation
	 */
	private static createApiStorage(config: Partial<IConfiguration>): ApiStorage {
		// Use our SupabaseAuthClient instead of creating a raw Supabase client
		const supabaseAuthClient = new SupabaseAuthClient();
		const supabaseClient = supabaseAuthClient.getClient();
		const { apiEndpoint, apiAccessToken } = config.storage || {};

		if (!apiEndpoint) {
			throw new TaskMasterError(
				'API endpoint is required for API storage',
				ERROR_CODES.MISSING_CONFIGURATION,
				{ storageType: 'api' }
			);
		}

		if (!apiAccessToken) {
			throw new TaskMasterError(
				'API access token is required for API storage',
				ERROR_CODES.MISSING_CONFIGURATION,
				{ storageType: 'api' }
			);
		}

		return new ApiStorage({
			supabaseClient,
			projectId: config.projectPath || '',
			endpoint: apiEndpoint,
			accessToken: apiAccessToken,
			projectId: config.projectPath,
			timeout: config.retry?.requestTimeout,
			enableRetry: config.retry?.retryOnNetworkError,
			maxRetries: config.retry?.retryAttempts
		});
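For orientation, this is the configuration shape the rewritten factory reads from; a sketch only, since the full `IConfiguration` interface lives elsewhere in the codebase and the values below are placeholders.

```ts
// Hypothetical config that would satisfy createApiStorage after this change.
const config = {
	projectPath: '/path/to/project',
	storage: {
		type: 'api',
		apiEndpoint: 'https://api.example.com/v1',
		apiAccessToken: process.env.TM_API_TOKEN // must be set, or the factory throws
	},
	retry: {
		requestTimeout: 30_000, // ms, passed through as `timeout`
		retryOnNetworkError: true, // passed through as `enableRetry`
		retryAttempts: 3 // passed through as `maxRetries`
	}
};
```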
@@ -10,16 +10,8 @@ import {
} from './services/task-service.js';
import { ERROR_CODES, TaskMasterError } from './errors/task-master-error.js';
import type { IConfiguration } from './interfaces/configuration.interface.js';
import type {
	Task,
	TaskStatus,
	TaskFilter,
	StorageType
} from './types/index.js';
import {
	WorkflowService,
	type WorkflowServiceConfig
} from './workflow/index.js';
import type { Task, TaskStatus, TaskFilter } from './types/index.js';
import { WorkflowService, type WorkflowServiceConfig } from './workflow/index.js';

/**
 * Options for creating TaskMasterCore instance

@@ -99,10 +91,10 @@ export class TaskMasterCore {
			projectRoot: options.projectPath,
			...options.workflow
		};


		// Pass task retrieval function to workflow service
		this.workflowService = new WorkflowService(
			workflowConfig,
			workflowConfig,
			async (taskId: string) => {
				const task = await this.getTask(taskId);
				if (!task) {

@@ -186,7 +178,7 @@ export class TaskMasterCore {
	/**
	 * Get current storage type
	 */
	getStorageType(): StorageType {
	getStorageType(): 'file' | 'api' | 'auto' {
		return this.taskService.getStorageType();
	}
@@ -1,491 +0,0 @@
export type Json =
	| string
	| number
	| boolean
	| null
	| { [key: string]: Json | undefined }
	| Json[];

export type Database = {
	public: {
		Tables: {
			accounts: {
				Row: {
					created_at: string | null;
					created_by: string | null;
					email: string | null;
					id: string;
					is_personal_account: boolean;
					name: string;
					picture_url: string | null;
					primary_owner_user_id: string;
					public_data: Json;
					slug: string | null;
					updated_at: string | null;
					updated_by: string | null;
				};
				Insert: {
					created_at?: string | null;
					created_by?: string | null;
					email?: string | null;
					id?: string;
					is_personal_account?: boolean;
					name: string;
					picture_url?: string | null;
					primary_owner_user_id?: string;
					public_data?: Json;
					slug?: string | null;
					updated_at?: string | null;
					updated_by?: string | null;
				};
				Update: {
					created_at?: string | null;
					created_by?: string | null;
					email?: string | null;
					id?: string;
					is_personal_account?: boolean;
					name?: string;
					picture_url?: string | null;
					primary_owner_user_id?: string;
					public_data?: Json;
					slug?: string | null;
					updated_at?: string | null;
					updated_by?: string | null;
				};
				Relationships: [];
			};
			brief: {
				Row: {
					account_id: string;
					created_at: string;
					created_by: string;
					document_id: string;
					id: string;
					plan_generation_completed_at: string | null;
					plan_generation_error: string | null;
					plan_generation_started_at: string | null;
					plan_generation_status: Database['public']['Enums']['plan_generation_status'];
					status: Database['public']['Enums']['brief_status'];
					updated_at: string;
				};
				Insert: {
					account_id: string;
					created_at?: string;
					created_by: string;
					document_id: string;
					id?: string;
					plan_generation_completed_at?: string | null;
					plan_generation_error?: string | null;
					plan_generation_started_at?: string | null;
					plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
					status?: Database['public']['Enums']['brief_status'];
					updated_at?: string;
				};
				Update: {
					account_id?: string;
					created_at?: string;
					created_by?: string;
					document_id?: string;
					id?: string;
					plan_generation_completed_at?: string | null;
					plan_generation_error?: string | null;
					plan_generation_started_at?: string | null;
					plan_generation_status?: Database['public']['Enums']['plan_generation_status'];
					status?: Database['public']['Enums']['brief_status'];
					updated_at?: string;
				};
				Relationships: [
					{
						foreignKeyName: 'brief_account_id_fkey';
						columns: ['account_id'];
						isOneToOne: false;
						referencedRelation: 'accounts';
						referencedColumns: ['id'];
					},
					{
						foreignKeyName: 'brief_document_id_fkey';
						columns: ['document_id'];
						isOneToOne: false;
						referencedRelation: 'document';
						referencedColumns: ['id'];
					}
				];
			};
			document: {
				Row: {
					account_id: string;
					created_at: string;
					created_by: string;
					description: string | null;
					document_name: string;
					document_type: Database['public']['Enums']['document_type'];
					file_path: string | null;
					file_size: number | null;
					id: string;
					metadata: Json | null;
					mime_type: string | null;
					processed_at: string | null;
					processing_error: string | null;
					processing_status:
						| Database['public']['Enums']['document_processing_status']
						| null;
					source_id: string | null;
					source_type: string | null;
					title: string;
					updated_at: string;
				};
				Insert: {
					account_id: string;
					created_at?: string;
					created_by: string;
					description?: string | null;
					document_name: string;
					document_type?: Database['public']['Enums']['document_type'];
					file_path?: string | null;
					file_size?: number | null;
					id?: string;
					metadata?: Json | null;
					mime_type?: string | null;
					processed_at?: string | null;
					processing_error?: string | null;
					processing_status?:
						| Database['public']['Enums']['document_processing_status']
						| null;
					source_id?: string | null;
					source_type?: string | null;
					title: string;
					updated_at?: string;
				};
				Update: {
					account_id?: string;
					created_at?: string;
					created_by?: string;
					description?: string | null;
					document_name?: string;
					document_type?: Database['public']['Enums']['document_type'];
					file_path?: string | null;
					file_size?: number | null;
					id?: string;
					metadata?: Json | null;
					mime_type?: string | null;
					processed_at?: string | null;
					processing_error?: string | null;
					processing_status?:
						| Database['public']['Enums']['document_processing_status']
						| null;
					source_id?: string | null;
					source_type?: string | null;
					title?: string;
					updated_at?: string;
				};
				Relationships: [
					{
						foreignKeyName: 'document_account_id_fkey';
						columns: ['account_id'];
						isOneToOne: false;
						referencedRelation: 'accounts';
						referencedColumns: ['id'];
					}
				];
			};
			tasks: {
				Row: {
					account_id: string;
					actual_hours: number;
					assignee_id: string | null;
					brief_id: string | null;
					completed_subtasks: number;
					complexity: number | null;
					created_at: string;
					created_by: string;
					description: string | null;
					display_id: string | null;
					document_id: string | null;
					due_date: string | null;
					estimated_hours: number | null;
					id: string;
					metadata: Json;
					parent_task_id: string | null;
					position: number;
					priority: Database['public']['Enums']['task_priority'];
					status: Database['public']['Enums']['task_status'];
					subtask_position: number;
					title: string;
					total_subtasks: number;
					updated_at: string;
					updated_by: string;
				};
				Insert: {
					account_id: string;
					actual_hours?: number;
					assignee_id?: string | null;
					brief_id?: string | null;
					completed_subtasks?: number;
					complexity?: number | null;
					created_at?: string;
					created_by: string;
					description?: string | null;
					display_id?: string | null;
					document_id?: string | null;
					due_date?: string | null;
					estimated_hours?: number | null;
					id?: string;
					metadata?: Json;
					parent_task_id?: string | null;
					position?: number;
					priority?: Database['public']['Enums']['task_priority'];
					status?: Database['public']['Enums']['task_status'];
					subtask_position?: number;
					title: string;
					total_subtasks?: number;
					updated_at?: string;
					updated_by: string;
				};
				Update: {
					account_id?: string;
					actual_hours?: number;
					assignee_id?: string | null;
					brief_id?: string | null;
					completed_subtasks?: number;
					complexity?: number | null;
					created_at?: string;
					created_by?: string;
					description?: string | null;
					display_id?: string | null;
					document_id?: string | null;
					due_date?: string | null;
					estimated_hours?: number | null;
					id?: string;
					metadata?: Json;
					parent_task_id?: string | null;
					position?: number;
					priority?: Database['public']['Enums']['task_priority'];
					status?: Database['public']['Enums']['task_status'];
					subtask_position?: number;
					title?: string;
					total_subtasks?: number;
					updated_at?: string;
					updated_by?: string;
				};
				Relationships: [
					{
						foreignKeyName: 'tasks_account_id_fkey';
						columns: ['account_id'];
						isOneToOne: false;
						referencedRelation: 'accounts';
						referencedColumns: ['id'];
					},
					{
						foreignKeyName: 'tasks_brief_id_fkey';
						columns: ['brief_id'];
						isOneToOne: false;
						referencedRelation: 'brief';
						referencedColumns: ['id'];
					},
					{
						foreignKeyName: 'tasks_document_id_fkey';
						columns: ['document_id'];
						isOneToOne: false;
						referencedRelation: 'document';
						referencedColumns: ['id'];
					},
					{
						foreignKeyName: 'tasks_parent_task_id_fkey';
						columns: ['parent_task_id'];
						isOneToOne: false;
						referencedRelation: 'tasks';
						referencedColumns: ['id'];
					}
				];
			};
			task_dependencies: {
				Row: {
					account_id: string;
					created_at: string;
					depends_on_task_id: string;
					id: string;
					task_id: string;
				};
				Insert: {
					account_id: string;
					created_at?: string;
					depends_on_task_id: string;
					id?: string;
					task_id: string;
				};
				Update: {
					account_id?: string;
					created_at?: string;
					depends_on_task_id?: string;
					id?: string;
					task_id?: string;
				};
				Relationships: [
					{
						foreignKeyName: 'task_dependencies_account_id_fkey';
						columns: ['account_id'];
						isOneToOne: false;
						referencedRelation: 'accounts';
						referencedColumns: ['id'];
					},
					{
						foreignKeyName: 'task_dependencies_depends_on_task_id_fkey';
						columns: ['depends_on_task_id'];
						isOneToOne: false;
						referencedRelation: 'tasks';
						referencedColumns: ['id'];
					},
					{
						foreignKeyName: 'task_dependencies_task_id_fkey';
						columns: ['task_id'];
						isOneToOne: false;
						referencedRelation: 'tasks';
						referencedColumns: ['id'];
					}
				];
			};
			user_accounts: {
				Row: {
					id: string | null;
					name: string | null;
					picture_url: string | null;
					role: string | null;
					slug: string | null;
				};
				Insert: {
					id?: string | null;
					name?: string | null;
					picture_url?: string | null;
					role?: string | null;
					slug?: string | null;
				};
				Update: {
					id?: string | null;
					name?: string | null;
					picture_url?: string | null;
					role?: string | null;
					slug?: string | null;
				};
				Relationships: [];
			};
		};
		Views: {
			[_ in never]: never;
		};
		Functions: {
			[_ in never]: never;
		};
		Enums: {
			brief_status:
				| 'draft'
				| 'refining'
				| 'aligned'
				| 'delivering'
				| 'delivered'
				| 'done'
				| 'archived';
			document_processing_status: 'pending' | 'processing' | 'ready' | 'failed';
			document_type:
				| 'brief'
				| 'blueprint'
				| 'file'
				| 'note'
				| 'transcript'
				| 'generated_plan'
				| 'generated_task'
				| 'generated_summary'
				| 'method'
				| 'task';
			plan_generation_status:
				| 'not_started'
				| 'generating'
				| 'completed'
				| 'failed';
			task_priority: 'low' | 'medium' | 'high' | 'urgent';
			task_status: 'todo' | 'in_progress' | 'done';
		};
		CompositeTypes: {
			[_ in never]: never;
		};
	};
};

export type Tables<
	PublicTableNameOrOptions extends
		| keyof (Database['public']['Tables'] & Database['public']['Views'])
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof (Database[PublicTableNameOrOptions['schema']]['Tables'] &
				Database[PublicTableNameOrOptions['schema']]['Views'])
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? (Database[PublicTableNameOrOptions['schema']]['Tables'] &
			Database[PublicTableNameOrOptions['schema']]['Views'])[TableName] extends {
			Row: infer R;
		}
		? R
		: never
	: PublicTableNameOrOptions extends keyof (Database['public']['Tables'] &
				Database['public']['Views'])
		? (Database['public']['Tables'] &
				Database['public']['Views'])[PublicTableNameOrOptions] extends {
				Row: infer R;
			}
			? R
			: never
		: never;

export type TablesInsert<
	PublicTableNameOrOptions extends
		| keyof Database['public']['Tables']
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
			Insert: infer I;
		}
		? I
		: never
	: PublicTableNameOrOptions extends keyof Database['public']['Tables']
		? Database['public']['Tables'][PublicTableNameOrOptions] extends {
				Insert: infer I;
			}
			? I
			: never
		: never;

export type TablesUpdate<
	PublicTableNameOrOptions extends
		| keyof Database['public']['Tables']
		| { schema: keyof Database },
	TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicTableNameOrOptions['schema']]['Tables']
		: never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
	? Database[PublicTableNameOrOptions['schema']]['Tables'][TableName] extends {
			Update: infer U;
		}
		? U
		: never
	: PublicTableNameOrOptions extends keyof Database['public']['Tables']
		? Database['public']['Tables'][PublicTableNameOrOptions] extends {
				Update: infer U;
			}
			? U
			: never
		: never;

export type Enums<
	PublicEnumNameOrOptions extends
		| keyof Database['public']['Enums']
		| { schema: keyof Database },
	EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
		? keyof Database[PublicEnumNameOrOptions['schema']]['Enums']
		: never = never
> = PublicEnumNameOrOptions extends { schema: keyof Database }
	? Database[PublicEnumNameOrOptions['schema']]['Enums'][EnumName]
	: PublicEnumNameOrOptions extends keyof Database['public']['Enums']
		? Database['public']['Enums'][PublicEnumNameOrOptions]
		: never;
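For context on what this deleted file provided: the generated helpers resolved a table's Row/Insert shapes by name. The sketch below shows that usage with placeholder data; it is illustrative only, since the file is removed in this changeset.

```ts
import type { Tables, TablesInsert } from './database.types.js';

// Tables<'tasks'> extracts the Row shape; TablesInsert<'tasks'> the Insert shape.
type TaskRow = Tables<'tasks'>;

// Per the Insert definition above, only account_id, created_by, title and
// updated_by are required; everything else is optional or defaulted.
const newTask: TablesInsert<'tasks'> = {
	account_id: 'acc_123', // placeholder ids, not real data
	created_by: 'user_123',
	updated_by: 'user_123',
	title: 'Wire CLI to API storage'
};
```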
@@ -2,14 +2,6 @@
 * Core type definitions for Task Master
 */

/**
 * Storage type options
 * - 'file': Local file system storage
 * - 'api': Remote API storage (Hamster integration)
 * - 'auto': Automatically detect based on auth status
 */
export type StorageType = 'file' | 'api' | 'auto';

// ============================================================================
// Type Literals
// ============================================================================

@@ -104,15 +96,6 @@ export interface TaskCollection {
	metadata: TaskMetadata;
}

/**
 * Task tag for organizing tasks
 */
export interface TaskTag {
	name: string;
	tasks: string[]; // Task IDs belonging to this tag
	metadata: Record<string, any>;
}

// ============================================================================
// Utility Types
// ============================================================================
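Since the `TaskTag` interface is removed here, a small example of the shape it described (values are illustrative):

```ts
// Example value for the removed TaskTag interface.
const briefTag: TaskTag = {
	name: 'brief-123',
	tasks: ['task-1', 'task-2'], // task IDs belonging to this tag
	metadata: { briefName: 'Launch plan', organizationId: 'org-1' }
};
```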
@@ -7,7 +7,6 @@
		"declarationMap": true,
		"sourceMap": true,
		"outDir": "./dist",
		"baseUrl": ".",
		"rootDir": "./src",
		"strict": true,
		"noImplicitAny": true,

@@ -28,7 +27,21 @@
		"moduleDetection": "force",
		"types": ["node"],
		"resolveJsonModule": true,
		"isolatedModules": true
		"isolatedModules": true,
		"paths": {
			"@/*": ["./src/*"],
			"@/auth": ["./src/auth"],
			"@/config": ["./src/config"],
			"@/errors": ["./src/errors"],
			"@/interfaces": ["./src/interfaces"],
			"@/logger": ["./src/logger"],
			"@/parser": ["./src/parser"],
			"@/providers": ["./src/providers"],
			"@/services": ["./src/services"],
			"@/storage": ["./src/storage"],
			"@/types": ["./src/types"],
			"@/utils": ["./src/utils"]
		}
	},
	"include": ["src/**/*"],
	"exclude": ["node_modules", "dist", "tests", "**/*.test.ts", "**/*.spec.ts"]
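With the `paths` mapping added above, deep relative imports can be written against the `@/` aliases instead; the identifiers below are illustrative.

```ts
// Instead of '../../errors/task-master-error.js':
import { TaskMasterError } from '@/errors/task-master-error.js';
import type { StorageType } from '@/types/index.js';
```

Note that `tsc` only type-checks these aliases; the emitted JavaScript still contains them, so the bundler (tsup/esbuild in this repo) has to resolve them again at build time.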
@@ -198,13 +198,11 @@ jest.unstable_mockModule('fs', () => ({
	default: {
		existsSync: jest.fn(() => false),
		readFileSync: jest.fn(),
		writeFileSync: mockWriteFileSync,
		unlinkSync: jest.fn()
		writeFileSync: mockWriteFileSync
	},
	existsSync: jest.fn(() => false),
	readFileSync: jest.fn(),
	writeFileSync: mockWriteFileSync,
	unlinkSync: jest.fn()
	writeFileSync: mockWriteFileSync
}));

jest.unstable_mockModule(
@@ -1,20 +1,51 @@
import { defineConfig } from 'tsup';
import {
	executableConfig,
	mergeConfig,
	commonExternals
} from '@tm/build-config';
import { dotenvLoad } from 'dotenv-mono';

export default defineConfig(
	mergeConfig(executableConfig, {
		entry: {
			'task-master': 'bin/task-master.js',
			'mcp-server': 'mcp-server/server.js'
		},
		outDir: 'dist',
		publicDir: 'public',
		// Bundle our monorepo packages but keep node_modules external
		noExternal: [/@tm\/.*/],
		external: commonExternals
	})
);
// Load .env from root level (monorepo support)
dotenvLoad();

// Get all TM_PUBLIC_* env variables for build-time injection
const getBuildTimeEnvs = () => {
	const envs: Record<string, string> = {};
	for (const [key, value] of Object.entries(process.env)) {
		if (key.startsWith('TM_PUBLIC_')) {
			// Return the actual value, not JSON.stringify'd
			envs[key] = value || '';
		}
	}
	return envs;
};

export default defineConfig({
	entry: {
		'task-master': 'bin/task-master.js',
		'mcp-server': 'mcp-server/server.js'
	},
	format: ['esm'],
	target: 'node18',
	splitting: false,
	sourcemap: true,
	clean: true,
	bundle: true, // Bundle everything into one file
	outDir: 'dist',
	publicDir: 'public',
	// Handle TypeScript imports transparently
	loader: {
		'.js': 'jsx',
		'.ts': 'ts'
	},
	// Replace process.env.TM_PUBLIC_* with actual values at build time
	env: getBuildTimeEnvs(),
	esbuildOptions(options) {
		options.platform = 'node';
		// Allow importing TypeScript from JavaScript
		options.resolveExtensions = ['.ts', '.js', '.mjs', '.json'];
	},
	// Bundle our monorepo packages but keep node_modules external
	noExternal: [/@tm\/.*/],
	// Don't bundle any other dependencies (auto-external all node_modules)
	// This regex matches anything that doesn't start with . or /
	external: [/^[^./]/],
	// Add success message for debugging
	onSuccess: 'echo "✅ Build completed successfully"'
});
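The `env: getBuildTimeEnvs()` option means tsup/esbuild replaces `process.env.TM_PUBLIC_*` reads with string literals at build time. A sketch of the effect, using a hypothetical variable name:

```ts
// In source:
const apiBase = process.env.TM_PUBLIC_BASE_DOMAIN;

// After a build run with TM_PUBLIC_BASE_DOMAIN=https://hamster.example set,
// the bundle effectively contains:
const apiBase = 'https://hamster.example';
```

Only the `TM_PUBLIC_` prefix is injected, so secrets in other env variables stay out of the published bundle.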