feat: update tm models defaults (#1225)

Authored by Ralph Khreish on 2025-09-20 01:07:33 +02:00, committed by GitHub
parent 47ddb60231
commit a621ff05ea
11 changed files with 43 additions and 42 deletions

View File

@@ -1,5 +0,0 @@
----
-"task-master-ai": minor
----
-Testing one more pre-release iteration

View File

@@ -1,5 +0,0 @@
----
-"task-master-ai": minor
----
-Test out the RC

View File

@@ -1,5 +0,0 @@
----
-"@tm/cli": minor
----
-testing this stuff out to see how the release candidate works with monorepo

View File

@@ -2,7 +2,7 @@
 "task-master-ai": minor
 ---
-Add grok-cli as a provider. You can now use Grok models with Task Master by setting the `GROK_CLI_API_KEY` environment variable.
+Add grok-cli as a provider with full codebase context support. You can now use Grok models (grok-2, grok-3, grok-4, etc.) with Task Master for AI operations that have access to your entire codebase context, enabling more informed task generation and PRD parsing.
 ## Setup Instructions
@@ -20,11 +20,17 @@ Add grok-cli as a provider. You can now use Grok models with Task Master by sett
 task-master models --set-fallback grok-beta
 ```
-## Available Models
-- `grok-beta` - Latest Grok model
-- `grok-vision-beta` - Grok with vision capabilities
-The Grok CLI provider integrates with xAI's Grok models and can also use the local Grok CLI configuration file (`~/.grok/user-settings.json`) if available.
+## Key Features
+- **Full codebase context**: Grok models can analyze your entire project when generating tasks or parsing PRDs
+- **xAI model access**: Support for latest Grok models (grok-2, grok-3, grok-4, etc.)
+- **Code-aware task generation**: Create more accurate and contextual tasks based on your actual codebase
+- **Intelligent PRD parsing**: Parse requirements with understanding of your existing code structure
+## Available Models
+- `grok-beta` - Latest Grok model with codebase context
+- `grok-vision-beta` - Grok with vision capabilities and codebase context
+The Grok CLI provider integrates with xAI's Grok models via grok-cli and can also use the local Grok CLI configuration file (`~/.grok/user-settings.json`) if available.
 ## Credits
 Built using the [grok-cli](https://github.com/superagent-ai/grok-cli) by Superagent AI for seamless integration with xAI's Grok models.
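
For orientation, here is a hedged sketch of what a models configuration routing the main role through grok-cli could look like, assuming the provider slots into the same object shape used by the project defaults elsewhere in this commit. The main-slot values mirror the pre-change config in the next file diff and the `--set-fallback grok-beta` example above; the fallback token limit is purely illustrative.

```ts
// Illustrative only: a models object selecting the grok-cli provider for the
// main role. The shape (provider, modelId, maxTokens, temperature) and the
// main-slot values mirror the pre-change project config in this commit; the
// fallback token limit is an assumption. The GROK_CLI_API_KEY environment
// variable mentioned above is read by the provider at runtime, not stored here.
export const models = {
	main: {
		provider: 'grok-cli',
		modelId: 'grok-4-latest',
		maxTokens: 131072,
		temperature: 0.2
	},
	fallback: {
		provider: 'grok-cli',
		modelId: 'grok-beta', // matches the `task-master models --set-fallback grok-beta` example
		maxTokens: 131072, // assumed; check the provider's actual limit
		temperature: 0.2
	}
};
```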

View File

@@ -0,0 +1,8 @@
+---
+"task-master-ai": minor
+---
+Improve Task Master AI provider defaults:
+- move the main model from Anthropic Claude 3.7 Sonnet to Claude Sonnet 4
+- move the fallback model from Anthropic Claude 3.5 Sonnet to Claude 3.7 Sonnet

View File

@@ -1,9 +1,9 @@
 {
 	"models": {
 		"main": {
-			"provider": "grok-cli",
-			"modelId": "grok-4-latest",
-			"maxTokens": 131072,
+			"provider": "anthropic",
+			"modelId": "claude-sonnet-4-20250514",
+			"maxTokens": 64000,
 			"temperature": 0.2
 		},
 		"research": {
@@ -14,8 +14,8 @@
 		},
 		"fallback": {
 			"provider": "anthropic",
-			"modelId": "claude-sonnet-4-20250514",
-			"maxTokens": 64000,
+			"modelId": "claude-3-7-sonnet-20250219",
+			"maxTokens": 120000,
 			"temperature": 0.2
 		}
 	},

View File

@@ -5,6 +5,7 @@
 import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
 import { ConfigManager } from './config-manager.js';
+import { DEFAULT_CONFIG_VALUES } from '../interfaces/configuration.interface.js';
 import { ConfigLoader } from './services/config-loader.service.js';
 import { ConfigMerger } from './services/config-merger.service.js';
 import { RuntimeStateManager } from './services/runtime-state-manager.service.js';
@@ -69,8 +70,8 @@ describe('ConfigManager', () => {
 	({
 		loadState: vi.fn().mockResolvedValue({ activeTag: 'master' }),
 		saveState: vi.fn().mockResolvedValue(undefined),
-		getActiveTag: vi.fn().mockReturnValue('master'),
-		setActiveTag: vi.fn().mockResolvedValue(undefined),
+		getCurrentTag: vi.fn().mockReturnValue('master'),
+		setCurrentTag: vi.fn().mockResolvedValue(undefined),
 		getState: vi.fn().mockReturnValue({ activeTag: 'master' }),
 		updateMetadata: vi.fn().mockResolvedValue(undefined),
 		clearState: vi.fn().mockResolvedValue(undefined)
@@ -227,8 +228,8 @@ describe('ConfigManager', () => {
 		const models = manager.getModelConfig();
 		expect(models).toEqual({
-			main: 'claude-3-5-sonnet-20241022',
-			fallback: 'gpt-4o-mini'
+			main: DEFAULT_CONFIG_VALUES.MODELS.MAIN,
+			fallback: DEFAULT_CONFIG_VALUES.MODELS.FALLBACK
 		});
 	});
@@ -281,7 +282,7 @@ describe('ConfigManager', () => {
 		await manager.setActiveTag('feature-branch');
 		const stateManager = (manager as any).stateManager;
-		expect(stateManager.setActiveTag).toHaveBeenCalledWith('feature-branch');
+		expect(stateManager.setCurrentTag).toHaveBeenCalledWith('feature-branch');
 	});
 });
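
The spec changes above cover two things: the RuntimeStateManager test double now exposes `getCurrentTag`/`setCurrentTag` in place of `getActiveTag`/`setActiveTag`, and the default-model assertion reads from `DEFAULT_CONFIG_VALUES` instead of hard-coded model IDs (that pattern is sketched after the interface hunk below). Here is a minimal, self-contained vitest sketch of the rename; the `ConfigManager` wiring is reduced to a plain wrapper function, so treat that structure as an assumption rather than the real class.

```ts
import { describe, it, expect, vi } from 'vitest';

describe('state-manager tag rename (illustrative)', () => {
	it('delegates the public setActiveTag call to setCurrentTag', async () => {
		// Test double mirroring the renamed mock in the spec above.
		const stateManager = {
			getCurrentTag: vi.fn().mockReturnValue('master'),
			setCurrentTag: vi.fn().mockResolvedValue(undefined)
		};

		// Simplified stand-in for ConfigManager: the public API keeps the
		// setActiveTag name while the underlying state-manager method is
		// now called setCurrentTag.
		const setActiveTag = async (tag: string) => {
			await stateManager.setCurrentTag(tag);
		};

		await setActiveTag('feature-branch');
		expect(stateManager.setCurrentTag).toHaveBeenCalledWith('feature-branch');
	});
});
```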

View File

@@ -10,6 +10,7 @@ import type {
 	PartialConfiguration,
 	RuntimeStorageConfig
 } from '../interfaces/configuration.interface.js';
+import { DEFAULT_CONFIG_VALUES as DEFAULTS } from '../interfaces/configuration.interface.js';
 import { ConfigLoader } from './services/config-loader.service.js';
 import {
 	ConfigMerger,
@@ -167,8 +168,8 @@ export class ConfigManager {
 	getModelConfig() {
 		return (
 			this.config.models || {
-				main: 'claude-3-5-sonnet-20241022',
-				fallback: 'gpt-4o-mini'
+				main: DEFAULTS.MODELS.MAIN,
+				fallback: DEFAULTS.MODELS.FALLBACK
 			}
 		);
 	}

View File

@@ -399,8 +399,8 @@ export interface IConfigurationManager {
  */
 export const DEFAULT_CONFIG_VALUES = {
 	MODELS: {
-		MAIN: 'claude-3-5-sonnet-20241022',
-		FALLBACK: 'gpt-4o-mini'
+		MAIN: 'claude-sonnet-4-20250514',
+		FALLBACK: 'claude-3-7-sonnet-20250219'
 	},
 	TASKS: {
 		DEFAULT_PRIORITY: 'medium' as TaskPriority,
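
Taken together with the `getModelConfig()` change above, the pattern is: default model IDs live in one exported constant and callers fall back to it rather than repeating literal strings. Below is a minimal sketch of that pattern; the constant names and model IDs come from the diff, while `ConfigManagerSketch` is a simplified stand-in, not the real implementation.

```ts
// Minimal sketch of the centralised-defaults pattern this commit adopts.
// Constant names and model IDs come from the diff; the class below is a
// simplified stand-in for ConfigManager, not the real one.
export const DEFAULT_CONFIG_VALUES = {
	MODELS: {
		MAIN: 'claude-sonnet-4-20250514',
		FALLBACK: 'claude-3-7-sonnet-20250219'
	}
} as const;

interface ModelConfig {
	main: string;
	fallback: string;
}

export class ConfigManagerSketch {
	constructor(private config: { models?: ModelConfig } = {}) {}

	getModelConfig(): ModelConfig {
		// With no user-configured models, fall back to the shared constant so a
		// future default bump only touches DEFAULT_CONFIG_VALUES, not every call site.
		return (
			this.config.models ?? {
				main: DEFAULT_CONFIG_VALUES.MODELS.MAIN,
				fallback: DEFAULT_CONFIG_VALUES.MODELS.FALLBACK
			}
		);
	}
}
```

Centralising the IDs is also what lets the spec above assert against `DEFAULT_CONFIG_VALUES.MODELS.MAIN` and `FALLBACK`, so the next default bump is a constant change with no test churn.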

View File

@@ -27,21 +27,21 @@ const DEFAULTS = {
 	models: {
 		main: {
 			provider: 'anthropic',
-			modelId: 'claude-3-7-sonnet-20250219',
+			modelId: 'claude-sonnet-4-20250514',
 			maxTokens: 64000,
 			temperature: 0.2
 		},
 		research: {
 			provider: 'perplexity',
-			modelId: 'sonar-pro',
+			modelId: 'sonar',
 			maxTokens: 8700,
 			temperature: 0.1
 		},
 		fallback: {
 			// No default fallback provider/model initially
 			provider: 'anthropic',
-			modelId: 'claude-3-5-sonnet',
-			maxTokens: 8192, // Default parameters if fallback IS configured
+			modelId: 'claude-3-7-sonnet-20250219',
+			maxTokens: 120000, // Default parameters if fallback IS configured
 			temperature: 0.2
 		}
 	},

View File

@@ -119,20 +119,20 @@ const DEFAULT_CONFIG = {
 	models: {
 		main: {
 			provider: 'anthropic',
-			modelId: 'claude-3-7-sonnet-20250219',
+			modelId: 'claude-sonnet-4-20250514',
 			maxTokens: 64000,
 			temperature: 0.2
 		},
 		research: {
 			provider: 'perplexity',
-			modelId: 'sonar-pro',
+			modelId: 'sonar',
 			maxTokens: 8700,
 			temperature: 0.1
 		},
 		fallback: {
 			provider: 'anthropic',
-			modelId: 'claude-3-5-sonnet',
-			maxTokens: 8192,
+			modelId: 'claude-3-7-sonnet-20250219',
+			maxTokens: 120000,
 			temperature: 0.2
 		}
 	},