Compare commits

..

2 Commits

Author          SHA1         Message                          Date
Ralph Khreish   c05643809e   chore: add github config back    2025-05-03 21:24:35 +02:00
Ralph Khreish   451db98b1d   chore: do a manual release       2025-05-03 20:58:20 +02:00
29 changed files with 116 additions and 193 deletions

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Resolve all issues related to MCP

View File

@@ -1,9 +0,0 @@
---
'task-master-ai': patch
---
Fix CLI --force flag for parse-prd command
Previously, the --force flag was not respected when running `parse-prd`, causing the command to prompt for confirmation or fail even when --force was provided. This patch ensures that the flag is correctly passed and handled, allowing users to overwrite existing tasks.json files as intended.
- Fixes #477
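
The `commands.js` hunks further down in this compare touch exactly this code path (the `useForce` variable and the `force:` key passed to `parsePRD`). As a point of reference, here is a minimal sketch of the behaviour the changeset describes, assuming Commander and a `parsePRD(inputFile, outputPath, numTasks, options)` signature like the one visible in those hunks; names and defaults are illustrative, not the project's exact code.

```js
// Sketch only: thread --force from the CLI straight through to the parser,
// using the { force } key that the parsePRD call in the commands.js hunks expects.
import { Command } from 'commander';

// Stand-in for the real parser; only the call shape matters here.
async function parsePRD(inputFile, outputPath, numTasks, { force = false, append = false } = {}) {
  console.log(`Parsing ${inputFile} -> ${outputPath} (force=${force}, append=${append})`);
}

const program = new Command();

program
  .command('parse-prd')
  .argument('[file]', 'Path to the PRD document', 'scripts/prd.txt')
  .option('-f, --force', 'Overwrite an existing tasks.json without prompting', false)
  .option('--append', 'Append generated tasks instead of overwriting', false)
  .action(async (file, options) => {
    // Respect the flag exactly as it was given on the command line...
    const useForce = options.force === true;
    const useAppend = options.append === true;

    // ...and pass it under the key the parser reads ({ force }, not { useForce }).
    await parsePRD(file, 'tasks/tasks.json', 10, { append: useAppend, force: useForce });
  });

await program.parseAsync(process.argv);
```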

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Task Master no longer tells you to update when you're already up to date

View File

@@ -1,12 +0,0 @@
{
"mode": "exit",
"tag": "rc",
"initialVersions": {
"task-master-ai": "0.13.2"
},
"changesets": [
"beige-doodles-type",
"red-oranges-attend",
"red-suns-wash"
]
}

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Fix ERR_MODULE_NOT_FOUND when trying to run MCP Server

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Add src directory to exports

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Fix for issue #409 LOG_LEVEL Pydantic validation error

View File

@@ -1,7 +0,0 @@
---
'task-master-ai': patch
---
Fix initial .env.example to work out of the box
- Closes #419

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Fix default fallback model and maxTokens in Taskmaster initialization

View File

@@ -1,5 +0,0 @@
---
'task-master-ai': patch
---
Fix bug when updating tasks on the MCP server (#412)

View File

@@ -1,11 +0,0 @@
---
'task-master-ai': patch
---
Fix duplicate output on CLI help screen
- Prevent the Task Master CLI from printing the help screen more than once when using `-h` or `--help`.
- Removed redundant manual event handlers and guards for help output; now only the Commander `.helpInformation` override is used for custom help.
- Simplified logic so that help is only shown once for both "no arguments" and help flag flows.
- Ensures a clean, branded help experience with no repeated content.
- Fixes #339
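
The `commands.js` hunks later in this compare show both approaches side by side: a `programInstance.helpInformation` override plus separate `--help`/`-h` event listeners. Below is a minimal sketch of the single-override approach this changeset describes, assuming Commander; `displayHelp` stands in for the project's branded help renderer and is not the real implementation.

```js
// Sketch only: render custom help exactly once by overriding helpInformation().
// Attaching extra '--help' event listeners on top of this override is what
// produces the duplicated output described above. displayHelp is a placeholder.
import { Command } from 'commander';

function displayHelp() {
  return 'Task Master\n\nUsage: task-master <command> [options]\n';
}

const program = new Command()
  .name('task-master')
  .helpOption('-h, --help', 'Display help')
  .addHelpCommand(false); // no separate "help" subcommand

// Commander funnels -h/--help (and outputHelp calls) through helpInformation(),
// so this is the only place the custom help needs to live.
program.helpInformation = () => displayHelp();

// With no arguments at all, show the same help once and exit.
if (process.argv.length <= 2) {
  console.log(program.helpInformation());
  process.exit(0);
}

program.parse(process.argv);
```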

View File

@@ -116,7 +116,7 @@ Taskmaster configuration is managed through two main mechanisms:
* For MCP/Cursor integration, configure these keys in the `env` section of `.cursor/mcp.json`.
* Available keys/variables: See `assets/env.example` or the Configuration section in the command reference (previously linked to `taskmaster.mdc`).
**Important:** Non-API key settings (like model selections, `MAX_TOKENS`, `TASKMASTER_LOG_LEVEL`) are **no longer configured via environment variables**. Use the `task-master models` command (or `--setup` for interactive configuration) or the `models` MCP tool.
**Important:** Non-API key settings (like model selections, `MAX_TOKENS`, `LOG_LEVEL`) are **no longer configured via environment variables**. Use the `task-master models` command (or `--setup` for interactive configuration) or the `models` MCP tool.
**If AI commands FAIL in MCP** verify that the API key for the selected provider is present in the `env` section of `.cursor/mcp.json`.
**If AI commands FAIL in CLI** verify that the API key for the selected provider is present in the `.env` file in the root of the project.
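
The hunk above only states where the keys must live, so a small sketch of the check it implies may help. It assumes the usual `mcpServers`/`env` layout of `.cursor/mcp.json`; the provider-to-key map and the `hasApiKey` helper are illustrative, not the project's code.

```js
// Sketch only: before an AI command runs, confirm the selected provider's key is
// present either in process.env (CLI, loaded from .env) or in the env block of
// .cursor/mcp.json (MCP). The mcp.json layout is assumed; names are illustrative.
import fs from 'fs';
import path from 'path';

const KEY_BY_PROVIDER = {
  anthropic: 'ANTHROPIC_API_KEY',
  openai: 'OPENAI_API_KEY',
  perplexity: 'PERPLEXITY_API_KEY',
  google: 'GOOGLE_API_KEY'
};

function hasApiKey(provider, projectRoot = process.cwd()) {
  const keyName = KEY_BY_PROVIDER[provider];
  if (!keyName) return false;

  // CLI flow: .env has already been loaded into process.env.
  if (process.env[keyName]) return true;

  // MCP flow: look in the env section of .cursor/mcp.json.
  try {
    const mcpPath = path.join(projectRoot, '.cursor', 'mcp.json');
    const mcp = JSON.parse(fs.readFileSync(mcpPath, 'utf8'));
    return Object.values(mcp.mcpServers ?? {}).some((server) =>
      Boolean(server.env?.[keyName])
    );
  } catch {
    return false; // missing or unreadable mcp.json counts as "no key"
  }
}

console.log(hasApiKey('anthropic'));
```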

View File

@@ -1,29 +1,9 @@
# API Keys (Required)
ANTHROPIC_API_KEY="your_anthropic_api_key_here" # Format: sk-ant-api03-...
PERPLEXITY_API_KEY="your_perplexity_api_key_here" # Format: pplx-...
OPENAI_API_KEY="your_openai_api_key_here" # Format: sk-...
GOOGLE_API_KEY="your_google_api_key_here" # Format: AIza...
MISTRAL_API_KEY="your_mistral_api_key_here" # Format: ...
OPENROUTER_API_KEY="your_openrouter_api_key_here" # Format: sk-or-...
XAI_API_KEY="your_xai_api_key_here" # Format: ...
AZURE_OPENAI_API_KEY="your_azure_key_here" # Format: ...
# API Base URLs (Optional)
ANTHROPIC_API_BASE_URL="optional_base_url_here" # Optional custom base URL for Anthropic API
# Model Configuration
MODEL="claude-3-7-sonnet-20250219" # Recommended models: claude-3-7-sonnet-20250219, claude-3-opus-20240229
PERPLEXITY_MODEL="sonar-pro" # Perplexity model for research-backed subtasks
MAX_TOKENS="64000" # Maximum tokens for model responses
TEMPERATURE="0.2" # Temperature for model responses (0.0-1.0)
# Logging Configuration
DEBUG="false" # Enable debug logging (true/false)
LOG_LEVEL="info" # Log level (debug, info, warn, error)
# Task Generation Settings
DEFAULT_SUBTASKS="5" # Default number of subtasks when expanding
DEFAULT_PRIORITY="medium" # Default priority for generated tasks (high, medium, low)
# Project Metadata (Optional)
PROJECT_NAME="Your Project Name" # Override default project name in tasks.json
# API Keys (Required for using in any role i.e. main/research/fallback -- see `task-master models`)
ANTHROPIC_API_KEY=YOUR_ANTHROPIC_KEY_HERE
PERPLEXITY_API_KEY=YOUR_PERPLEXITY_KEY_HERE
OPENAI_API_KEY=YOUR_OPENAI_KEY_HERE
GOOGLE_API_KEY=YOUR_GOOGLE_KEY_HERE
MISTRAL_API_KEY=YOUR_MISTRAL_KEY_HERE
OPENROUTER_API_KEY=YOUR_OPENROUTER_KEY_HERE
XAI_API_KEY=YOUR_XAI_KEY_HERE
AZURE_OPENAI_API_KEY=YOUR_AZURE_KEY_HERE

View File

@@ -2,6 +2,9 @@ name: Pre-Release (RC)
on:
workflow_dispatch: # Allows manual triggering from GitHub UI/API
push:
branches:
- 'next'
concurrency: pre-release-${{ github.ref }}
@@ -38,10 +41,12 @@ jobs:
npx changeset pre enter rc
- name: Version RC packages
run: npx changeset version
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
git config user.name "GitHub Actions"
git config user.email "github-actions@example.com"
npx changeset version
git add .
git commit -m "chore: rc version bump" || echo "No changes to commit"
- name: Create Release Candidate Pull Request or Publish Release Candidate to npm
uses: changesets/action@v1
@@ -50,13 +55,3 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Exit RC mode
run: npx changeset pre exit
- name: Commit & Push changes
uses: actions-js/push@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: ${{ github.ref }}
message: 'chore: rc version bump'

View File

@@ -1,5 +1,13 @@
# task-master-ai
## 0.13.2
### Patch Changes
- 01963af: Resolve all issues related to MCP
- 01963af: Fix ERR_MODULE_NOT_FOUND when trying to run MCP Server
- 01963af: Add src directory to exports
## 0.13.1
### Patch Changes

View File

@@ -30,7 +30,6 @@ MCP (Model Control Protocol) provides the easiest way to get started with Task M
"args": ["-y", "--package=task-master-ai", "task-master-ai"],
"env": {
"ANTHROPIC_API_KEY": "YOUR_ANTHROPIC_API_KEY_HERE",
"ANTHROPIC_API_BASE_URL": "YOUR_CUSTOM_BASE_URL_HERE (optional)",
"PERPLEXITY_API_KEY": "YOUR_PERPLEXITY_API_KEY_HERE",
"OPENAI_API_KEY": "YOUR_OPENAI_KEY_HERE",
"GOOGLE_API_KEY": "YOUR_GOOGLE_KEY_HERE",

View File

@@ -14,8 +14,8 @@
},
"fallback": {
"provider": "anthropic",
"modelId": "claude-3-5-sonnet-20240620",
"maxTokens": 8192,
"modelId": "claude-3.5-sonnet-20240620",
"maxTokens": 120000,
"temperature": 0.1
}
},

View File

@@ -198,7 +198,7 @@ alwaysApply: true
- **MAX_TOKENS** (Default: `"4000"`): Maximum tokens for responses (Example: `MAX_TOKENS=8000`)
- **TEMPERATURE** (Default: `"0.7"`): Temperature for model responses (Example: `TEMPERATURE=0.5`)
- **DEBUG** (Default: `"false"`): Enable debug logging (Example: `DEBUG=true`)
- **TASKMASTER_LOG_LEVEL** (Default: `"info"`): Console output level (Example: `TASKMASTER_LOG_LEVEL=debug`)
- **LOG_LEVEL** (Default: `"info"`): Console output level (Example: `LOG_LEVEL=debug`)
- **DEFAULT_SUBTASKS** (Default: `"3"`): Default subtask count (Example: `DEFAULT_SUBTASKS=5`)
- **DEFAULT_PRIORITY** (Default: `"medium"`): Default priority (Example: `DEFAULT_PRIORITY=high`)
- **PROJECT_NAME** (Default: `"MCP SaaS MVP"`): Project name in metadata (Example: `PROJECT_NAME=My Awesome Project`)

View File

@@ -1,8 +1,8 @@
# API Keys (Required to enable respective provider)
ANTHROPIC_API_KEY="your_anthropic_api_key_here" # Required: Format: sk-ant-api03-...
PERPLEXITY_API_KEY="your_perplexity_api_key_here" # Optional: Format: pplx-...
OPENAI_API_KEY="your_openai_api_key_here" # Optional, for OpenAI/OpenRouter models. Format: sk-proj-...
GOOGLE_API_KEY="your_google_api_key_here" # Optional, for Google Gemini models.
MISTRAL_API_KEY="your_mistral_key_here" # Optional, for Mistral AI models.
XAI_API_KEY="YOUR_XAI_KEY_HERE" # Optional, for xAI AI models.
AZURE_OPENAI_API_KEY="your_azure_key_here" # Optional, for Azure OpenAI models (requires endpoint in .taskmasterconfig).
ANTHROPIC_API_KEY=your_anthropic_api_key_here # Required: Format: sk-ant-api03-...
PERPLEXITY_API_KEY=your_perplexity_api_key_here # Optional: Format: pplx-...
OPENAI_API_KEY=your_openai_api_key_here # Optional, for OpenAI/OpenRouter models. Format: sk-proj-...
GOOGLE_API_KEY=your_google_api_key_here # Optional, for Google Gemini models.
MISTRAL_API_KEY=your_mistral_key_here # Optional, for Mistral AI models.
XAI_API_KEY=YOUR_XAI_KEY_HERE # Optional, for xAI AI models.
AZURE_OPENAI_API_KEY=your_azure_key_here # Optional, for Azure OpenAI models (requires endpoint in .taskmasterconfig).

View File

@@ -31,7 +31,7 @@ Task Master configuration is now managed through two primary methods:
- Create a `.env` file in your project root for CLI usage.
- See `assets/env.example` for required key names.
**Important:** Settings like `MODEL`, `MAX_TOKENS`, `TEMPERATURE`, `TASKMASTER_LOG_LEVEL`, etc., are **no longer set via `.env`**. Use `task-master models --setup` instead.
**Important:** Settings like `MODEL`, `MAX_TOKENS`, `TEMPERATURE`, `LOG_LEVEL`, etc., are **no longer set via `.env`**. Use `task-master models --setup` instead.
## How It Works
@@ -42,7 +42,7 @@ Task Master configuration is now managed through two primary methods:
- Tasks can have `subtasks` for more detailed implementation steps.
- Dependencies are displayed with status indicators (✅ for completed, ⏱️ for pending) to easily track progress.
2. **CLI Commands**
2. **CLI Commands**
You can run the commands via:
```bash
@@ -200,7 +200,7 @@ Notes:
## Logging
The script supports different logging levels controlled by the `TASKMASTER_LOG_LEVEL` environment variable:
The script supports different logging levels controlled by the `LOG_LEVEL` environment variable:
- `debug`: Detailed information, typically useful for troubleshooting
- `info`: Confirmation that things are working as expected (default)

View File

@@ -3784,7 +3784,6 @@ In this tutorial, you'll learn how to build a LLM-powered chatbot client that co
if (!ANTHROPIC_API_KEY) {
throw new Error("ANTHROPIC_API_KEY is not set");
}
const ANTHROPIC_API_BASE_URL = process.env.ANTHROPIC_API_BASE_URL;
class MCPClient {
private mcp: Client;
@@ -3795,7 +3794,6 @@ In this tutorial, you'll learn how to build a LLM-powered chatbot client that co
constructor() {
this.anthropic = new Anthropic({
apiKey: ANTHROPIC_API_KEY,
baseUrl: ANTHROPIC_API_BASE_URL,
});
this.mcp = new Client({ name: "mcp-client-cli", version: "1.0.0" });
}

View File

@@ -6,10 +6,6 @@
import path from 'path';
import { updateTasks } from '../../../../scripts/modules/task-manager.js';
import { createLogWrapper } from '../../tools/utils.js';
import {
enableSilentMode,
disableSilentMode
} from '../../../../scripts/modules/utils.js';
/**
* Direct function wrapper for updating tasks based on new context.

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "task-master-ai",
"version": "0.12",
"version": "0.13.2-rc.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "task-master-ai",
"version": "0.12",
"version": "0.13.2-rc.1",
"license": "MIT WITH Commons-Clause",
"dependencies": {
"@ai-sdk/anthropic": "^1.2.10",

View File

@@ -32,7 +32,7 @@ The script can be configured through environment variables in a `.env` file at t
- `PERPLEXITY_API_KEY`: Your Perplexity API key for research-backed subtask generation
- `PERPLEXITY_MODEL`: Specify which Perplexity model to use (default: "sonar-medium-online")
- `DEBUG`: Enable debug logging (default: false)
- `TASKMASTER_LOG_LEVEL`: Log level - debug, info, warn, error (default: info)
- `LOG_LEVEL`: Log level - debug, info, warn, error (default: info)
- `DEFAULT_SUBTASKS`: Default number of subtasks when expanding (default: 3)
- `DEFAULT_PRIORITY`: Default priority for generated tasks (default: medium)
- `PROJECT_NAME`: Override default project name in tasks.json
@@ -47,7 +47,7 @@ The script can be configured through environment variables in a `.env` file at t
- Tasks can have `subtasks` for more detailed implementation steps.
- Dependencies are displayed with status indicators (✅ for completed, ⏱️ for pending) to easily track progress.
2. **Script Commands**
2. **Script Commands**
You can run the script via:
```bash
@@ -225,7 +225,7 @@ To use the Perplexity integration:
## Logging
The script supports different logging levels controlled by the `TASKMASTER_LOG_LEVEL` environment variable:
The script supports different logging levels controlled by the `LOG_LEVEL` environment variable:
- `debug`: Detailed information, typically useful for troubleshooting
- `info`: Confirmation that things are working as expected (default)

View File

@@ -38,10 +38,10 @@ const LOG_LEVELS = {
success: 4
};
// Determine log level from environment variable or default to 'info'
const LOG_LEVEL = process.env.TASKMASTER_LOG_LEVEL
? LOG_LEVELS[process.env.TASKMASTER_LOG_LEVEL.toLowerCase()]
: LOG_LEVELS.info; // Default to info
// Get log level from environment or default to info
const LOG_LEVEL = process.env.LOG_LEVEL
? LOG_LEVELS[process.env.LOG_LEVEL.toLowerCase()]
: LOG_LEVELS.info;
// Create a color gradient for the banner
const coolGradient = gradient(['#00b4d8', '#0077b6', '#03045e']);

View File

@@ -73,7 +73,7 @@ import {
getApiKeyStatusReport
} from './task-manager/models.js';
import { findProjectRoot } from './utils.js';
import { getTaskMasterVersion } from '../../src/utils/getVersion.js';
/**
* Runs the interactive setup process for model configuration.
* @param {string|null} projectRoot - The resolved project root directory.
@@ -486,6 +486,11 @@ function registerCommands(programInstance) {
process.exit(1);
});
// Default help
programInstance.on('--help', function () {
displayHelp();
});
// parse-prd command
programInstance
.command('parse-prd')
@@ -510,7 +515,7 @@ function registerCommands(programInstance) {
const outputPath = options.output;
const force = options.force || false;
const append = options.append || false;
let useForce = force;
let useForce = false;
let useAppend = false;
// Helper function to check if tasks.json exists and confirm overwrite
@@ -604,7 +609,7 @@ function registerCommands(programInstance) {
spinner = ora('Parsing PRD and generating tasks...').start();
await parsePRD(inputFile, outputPath, numTasks, {
append: useAppend,
useForce
force: useForce
});
spinner.succeed('Tasks generated successfully!');
} catch (error) {
@@ -1273,6 +1278,10 @@ function registerCommands(programInstance) {
'--details <details>',
'Implementation details (for manual task creation)'
)
.option(
'--test-strategy <testStrategy>',
'Test strategy (for manual task creation)'
)
.option(
'--dependencies <dependencies>',
'Comma-separated list of task IDs this task depends on'
@@ -1654,7 +1663,6 @@ function registerCommands(programInstance) {
}
} catch (error) {
console.error(chalk.red(`Error: ${error.message}`));
showAddSubtaskHelp();
process.exit(1);
}
})
@@ -2358,7 +2366,14 @@ function setupCLI() {
return 'unknown'; // Default fallback if package.json fails
})
.helpOption('-h, --help', 'Display help')
.addHelpCommand(false); // Disable default help command
.addHelpCommand(false) // Disable default help command
.on('--help', () => {
displayHelp(); // Use your custom help display instead
})
.on('-h', () => {
displayHelp();
process.exit(0);
});
// Modify the help option to use your custom display
programInstance.helpInformation = () => {
@@ -2378,7 +2393,28 @@ function setupCLI() {
*/
async function checkForUpdate() {
// Get current version from package.json ONLY
const currentVersion = getTaskMasterVersion();
let currentVersion = 'unknown'; // Initialize with a default
try {
// Try to get the version from the installed package (if applicable) or current dir
let packageJsonPath = path.join(
process.cwd(),
'node_modules',
'task-master-ai',
'package.json'
);
// Fallback to current directory package.json if not found in node_modules
if (!fs.existsSync(packageJsonPath)) {
packageJsonPath = path.join(process.cwd(), 'package.json');
}
if (fs.existsSync(packageJsonPath)) {
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
currentVersion = packageJson.version;
}
} catch (error) {
// Silently fail and use default
log('debug', `Error reading current package version: ${error.message}`);
}
return new Promise((resolve) => {
// Get the latest version from npm registry

View File

@@ -16,10 +16,10 @@ import {
truncate,
isSilentMode
} from './utils.js';
import path from 'path';
import fs from 'fs';
import { findNextTask, analyzeTaskComplexity } from './task-manager.js';
import { getProjectName, getDefaultSubtasks } from './config-manager.js';
import { getTaskMasterVersion } from '../../src/utils/getVersion.js';
// Create a color gradient for the banner
const coolGradient = gradient(['#00b4d8', '#0077b6', '#03045e']);
@@ -46,7 +46,17 @@ function displayBanner() {
);
// Read version directly from package.json
const version = getTaskMasterVersion();
let version = 'unknown'; // Initialize with a default
try {
const packageJsonPath = path.join(process.cwd(), 'package.json');
if (fs.existsSync(packageJsonPath)) {
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
version = packageJson.version;
}
} catch (error) {
// Silently fall back to default version
log('warn', 'Could not read package.json for version info.');
}
console.log(
boxen(
@@ -799,7 +809,12 @@ async function displayNextTask(tasksPath) {
'padding-bottom': 0,
compact: true
},
chars: { mid: '', 'left-mid': '', 'mid-mid': '', 'right-mid': '' },
chars: {
mid: '',
'left-mid': '',
'mid-mid': '',
'right-mid': ''
},
colWidths: [15, Math.min(75, process.stdout.columns - 20 || 60)],
wordWrap: true
});
@@ -887,7 +902,12 @@ async function displayNextTask(tasksPath) {
'padding-bottom': 0,
compact: true
},
chars: { mid: '', 'left-mid': '', 'mid-mid': '', 'right-mid': '' },
chars: {
mid: '',
'left-mid': '',
'mid-mid': '',
'right-mid': ''
},
wordWrap: true
});

View File

@@ -1,35 +0,0 @@
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { log } from '../../scripts/modules/utils.js';
/**
* Reads the version from the nearest package.json relative to this file.
* Returns 'unknown' if not found or on error.
* @returns {string} The version string or 'unknown'.
*/
export function getTaskMasterVersion() {
let version = 'unknown';
try {
// Get the directory of the current module (getPackageVersion.js)
const currentModuleFilename = fileURLToPath(import.meta.url);
const currentModuleDirname = path.dirname(currentModuleFilename);
// Construct the path to package.json relative to this file (../../package.json)
const packageJsonPath = path.join(
currentModuleDirname,
'..',
'..',
'package.json'
);
if (fs.existsSync(packageJsonPath)) {
const packageJsonContent = fs.readFileSync(packageJsonPath, 'utf8');
const packageJson = JSON.parse(packageJsonContent);
version = packageJson.version;
}
} catch (error) {
// Silently fall back to default version
log('warn', 'Could not read own package.json for version info.', error);
}
return version;
}

View File

@@ -9,7 +9,7 @@ process.env.MODEL = 'sonar-pro';
process.env.MAX_TOKENS = '64000';
process.env.TEMPERATURE = '0.2';
process.env.DEBUG = 'false';
process.env.TASKMASTER_LOG_LEVEL = 'error'; // Set to error to reduce noise in tests
process.env.LOG_LEVEL = 'error'; // Set to error to reduce noise in tests
process.env.DEFAULT_SUBTASKS = '5';
process.env.DEFAULT_PRIORITY = 'medium';
process.env.PROJECT_NAME = 'Test Project';