diff --git a/.changeset/tender-ads-joke.md b/.changeset/tender-ads-joke.md
new file mode 100644
index 00000000..df972ea3
--- /dev/null
+++ b/.changeset/tender-ads-joke.md
@@ -0,0 +1,5 @@
+---
+"task-master-ai": minor
+---
+
+Added Groq provider support
diff --git a/.cursor/mcp.json b/.cursor/mcp.json
index b7a955fd..358dbbc4 100644
--- a/.cursor/mcp.json
+++ b/.cursor/mcp.json
@@ -8,6 +8,7 @@
 				"PERPLEXITY_API_KEY": "PERPLEXITY_API_KEY_HERE",
 				"OPENAI_API_KEY": "OPENAI_API_KEY_HERE",
 				"GOOGLE_API_KEY": "GOOGLE_API_KEY_HERE",
+				"GROQ_API_KEY": "GROQ_API_KEY_HERE",
 				"XAI_API_KEY": "XAI_API_KEY_HERE",
 				"OPENROUTER_API_KEY": "OPENROUTER_API_KEY_HERE",
 				"MISTRAL_API_KEY": "MISTRAL_API_KEY_HERE",
diff --git a/.env.example b/.env.example
index 4cfbb8b1..54429bf5 100644
--- a/.env.example
+++ b/.env.example
@@ -4,6 +4,7 @@ PERPLEXITY_API_KEY=YOUR_PERPLEXITY_KEY_HERE
 OPENAI_API_KEY=YOUR_OPENAI_KEY_HERE
 GOOGLE_API_KEY=YOUR_GOOGLE_KEY_HERE
 MISTRAL_API_KEY=YOUR_MISTRAL_KEY_HERE
+GROQ_API_KEY=YOUR_GROQ_KEY_HERE
 OPENROUTER_API_KEY=YOUR_OPENROUTER_KEY_HERE
 XAI_API_KEY=YOUR_XAI_KEY_HERE
 AZURE_OPENAI_API_KEY=YOUR_AZURE_KEY_HERE
diff --git a/.taskmaster/config.json b/.taskmaster/config.json
index 74780437..423d16b4 100644
--- a/.taskmaster/config.json
+++ b/.taskmaster/config.json
@@ -1,15 +1,15 @@
 {
 	"models": {
 		"main": {
-			"provider": "vertex",
-			"modelId": "gemini-1.5-pro-002",
-			"maxTokens": 50000,
+			"provider": "groq",
+			"modelId": "llama-3.1-8b-instant",
+			"maxTokens": 131072,
 			"temperature": 0.2
 		},
 		"research": {
-			"provider": "perplexity",
-			"modelId": "sonar",
-			"maxTokens": 8700,
+			"provider": "groq",
+			"modelId": "llama-3.3-70b-versatile",
+			"maxTokens": 32768,
 			"temperature": 0.1
 		},
 		"fallback": {
@@ -22,15 +22,16 @@
 	"global": {
 		"logLevel": "info",
 		"debug": false,
+		"defaultNumTasks": 10,
 		"defaultSubtasks": 5,
 		"defaultPriority": "medium",
 		"projectName": "Taskmaster",
 		"ollamaBaseURL": "http://localhost:11434/api",
 		"bedrockBaseURL": "https://bedrock.us-east-1.amazonaws.com",
+		"responseLanguage": "English",
 		"userId": "1234567890",
 		"azureBaseURL": "https://your-endpoint.azure.com/",
-		"defaultTag": "master",
-		"responseLanguage": "English"
+		"defaultTag": "master"
 	},
 	"claudeCode": {}
 }
diff --git a/package-lock.json b/package-lock.json
index 879f1123..48a07399 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "task-master-ai",
-  "version": "0.18.0",
+  "version": "0.19.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "task-master-ai",
-      "version": "0.18.0",
+      "version": "0.19.0",
       "license": "MIT WITH Commons-Clause",
       "dependencies": {
         "@ai-sdk/amazon-bedrock": "^2.2.9",
@@ -14,6 +14,7 @@
         "@ai-sdk/azure": "^1.3.17",
         "@ai-sdk/google": "^1.2.13",
         "@ai-sdk/google-vertex": "^2.2.23",
+        "@ai-sdk/groq": "^1.2.9",
         "@ai-sdk/mistral": "^1.2.7",
         "@ai-sdk/openai": "^1.3.20",
         "@ai-sdk/perplexity": "^1.1.7",
@@ -162,6 +163,22 @@
         "zod": "^3.0.0"
       }
     },
+    "node_modules/@ai-sdk/groq": {
+      "version": "1.2.9",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/groq/-/groq-1.2.9.tgz",
+      "integrity": "sha512-7MoDaxm8yWtiRbD1LipYZG0kBl+Xe0sv/EeyxnHnGPZappXdlgtdOgTZVjjXkT3nWP30jjZi9A45zoVrBMb3Xg==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@ai-sdk/provider": "1.1.3",
+        "@ai-sdk/provider-utils": "2.2.8"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "zod": "^3.0.0"
+      }
+    },
     "node_modules/@ai-sdk/mistral": {
       "version": "1.2.8",
       "resolved": "https://registry.npmjs.org/@ai-sdk/mistral/-/mistral-1.2.8.tgz",
diff --git a/package.json b/package.json
index 146f8f5f..1c597be9 100644
--- a/package.json
+++ b/package.json
@@ -44,6 +44,7 @@
 		"@ai-sdk/azure": "^1.3.17",
 		"@ai-sdk/google": "^1.2.13",
 		"@ai-sdk/google-vertex": "^2.2.23",
+		"@ai-sdk/groq": "^1.2.9",
 		"@ai-sdk/mistral": "^1.2.7",
 		"@ai-sdk/openai": "^1.3.20",
 		"@ai-sdk/perplexity": "^1.1.7",
diff --git a/scripts/modules/supported-models.json b/scripts/modules/supported-models.json
index 23f31ddb..075c3733 100644
--- a/scripts/modules/supported-models.json
+++ b/scripts/modules/supported-models.json
@@ -692,6 +692,98 @@
 			"max_tokens": 32768
 		}
 	],
+	"groq": [
+		{
+			"id": "llama-3.3-70b-versatile",
+			"swe_score": 0.55,
+			"cost_per_1m_tokens": {
+				"input": 0.59,
+				"output": 0.79
+			},
+			"allowed_roles": ["main", "fallback", "research"],
+			"max_tokens": 32768
+		},
+		{
+			"id": "llama-3.1-8b-instant",
+			"swe_score": 0.32,
+			"cost_per_1m_tokens": {
+				"input": 0.05,
+				"output": 0.08
+			},
+			"allowed_roles": ["main", "fallback"],
+			"max_tokens": 131072
+		},
+		{
+			"id": "llama-4-scout",
+			"swe_score": 0.45,
+			"cost_per_1m_tokens": {
+				"input": 0.11,
+				"output": 0.34
+			},
+			"allowed_roles": ["main", "fallback", "research"],
+			"max_tokens": 32768
+		},
+		{
+			"id": "llama-4-maverick",
+			"swe_score": 0.52,
+			"cost_per_1m_tokens": {
+				"input": 0.5,
+				"output": 0.77
+			},
+			"allowed_roles": ["main", "fallback", "research"],
+			"max_tokens": 32768
+		},
+		{
+			"id": "mixtral-8x7b-32768",
+			"swe_score": 0.35,
+			"cost_per_1m_tokens": {
+				"input": 0.24,
+				"output": 0.24
+			},
+			"allowed_roles": ["main", "fallback"],
+			"max_tokens": 32768
+		},
+		{
+			"id": "qwen-qwq-32b-preview",
+			"swe_score": 0.4,
+			"cost_per_1m_tokens": {
+				"input": 0.18,
+				"output": 0.18
+			},
+			"allowed_roles": ["main", "fallback", "research"],
+			"max_tokens": 32768
+		},
+		{
+			"id": "deepseek-r1-distill-llama-70b",
+			"swe_score": 0.52,
+			"cost_per_1m_tokens": {
+				"input": 0.75,
+				"output": 0.99
+			},
+			"allowed_roles": ["main", "research"],
+			"max_tokens": 8192
+		},
+		{
+			"id": "gemma2-9b-it",
+			"swe_score": 0.3,
+			"cost_per_1m_tokens": {
+				"input": 0.2,
+				"output": 0.2
+			},
+			"allowed_roles": ["main", "fallback"],
+			"max_tokens": 8192
+		},
+		{
+			"id": "whisper-large-v3",
+			"swe_score": 0,
+			"cost_per_1m_tokens": {
+				"input": 0.11,
+				"output": 0
+			},
+			"allowed_roles": ["main"],
+			"max_tokens": 0
+		}
+	],
 	"claude-code": [
 		{
 			"id": "opus",
diff --git a/src/ai-providers/groq.js b/src/ai-providers/groq.js
new file mode 100644
index 00000000..f8eda87d
--- /dev/null
+++ b/src/ai-providers/groq.js
@@ -0,0 +1,41 @@
+/**
+ * src/ai-providers/groq.js
+ *
+ * Implementation for interacting with Groq models
+ * using the Vercel AI SDK.
+ */
+
+import { createGroq } from '@ai-sdk/groq';
+import { BaseAIProvider } from './base-provider.js';
+
+export class GroqProvider extends BaseAIProvider {
+	constructor() {
+		super();
+		this.name = 'Groq';
+	}
+
+	/**
+	 * Creates and returns a Groq client instance.
+	 * @param {object} params - Parameters for client initialization
+	 * @param {string} params.apiKey - Groq API key
+	 * @param {string} [params.baseURL] - Optional custom API endpoint
+	 * @returns {Function} Groq client function
+	 * @throws {Error} If API key is missing or initialization fails
+	 */
+	getClient(params) {
+		try {
+			const { apiKey, baseURL } = params;
+
+			if (!apiKey) {
+				throw new Error('Groq API key is required.');
+			}
+
+			return createGroq({
+				apiKey,
+				...(baseURL && { baseURL })
+			});
+		} catch (error) {
+			this.handleError('client initialization', error);
+		}
+	}
+}
diff --git a/src/ai-providers/index.js b/src/ai-providers/index.js
index c0807533..55af9fd7 100644
--- a/src/ai-providers/index.js
+++ b/src/ai-providers/index.js
@@ -8,6 +8,7 @@ export { PerplexityAIProvider } from './perplexity.js';
 export { GoogleAIProvider } from './google.js';
 export { OpenAIProvider } from './openai.js';
 export { XAIProvider } from './xai.js';
+export { GroqProvider } from './groq.js';
 export { OpenRouterAIProvider } from './openrouter.js';
 export { OllamaAIProvider } from './ollama.js';
 export { BedrockAIProvider } from './bedrock.js';
diff --git a/src/constants/providers.js b/src/constants/providers.js
index a683c6e8..2f88779a 100644
--- a/src/constants/providers.js
+++ b/src/constants/providers.js
@@ -10,6 +10,7 @@ export const VALIDATED_PROVIDERS = [
 	'google',
 	'perplexity',
 	'xai',
+	'groq',
 	'mistral'
 ];
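Below is a minimal usage sketch, not part of the diff above, showing how the new `GroqProvider` could be exercised through the Vercel AI SDK. It assumes `GROQ_API_KEY` is set in the environment and that the `ai` package's `generateText` helper is available, as it is for the other providers; the import path, model id, and prompt are illustrative only.

```js
// Hypothetical sketch: drive a one-off completion through the new GroqProvider.
// Assumes GROQ_API_KEY is exported in the environment.
import { generateText } from 'ai';
import { GroqProvider } from './src/ai-providers/index.js';

const provider = new GroqProvider();

// getClient() wraps createGroq(); calling the returned client with a model id
// yields a language model usable with generateText/streamText.
const groq = provider.getClient({ apiKey: process.env.GROQ_API_KEY });

const { text } = await generateText({
	model: groq('llama-3.1-8b-instant'),
	prompt: 'Say hello from Groq.'
});

console.log(text);
```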