feat: Add GROQ API key support and integrate GROQ provider (#930)

* feat: Add GROQ API key support and integrate GROQ provider

* feat: Add support for Groq provider
- Added a new changeset documenting the addition of Groq provider support.
- Ran npm run format

Authored by OTYAK on 2025-07-08 07:37:38 +01:00, committed by GitHub
parent 3334e409ae
commit 98d1c97436
10 changed files with 171 additions and 10 deletions

View File

@@ -0,0 +1,5 @@
---
"task-master-ai": minor
---
Added Groq provider support

View File

@@ -8,6 +8,7 @@
"PERPLEXITY_API_KEY": "PERPLEXITY_API_KEY_HERE",
"OPENAI_API_KEY": "OPENAI_API_KEY_HERE",
"GOOGLE_API_KEY": "GOOGLE_API_KEY_HERE",
"GROQ_API_KEY": "GROQ_API_KEY_HERE",
"XAI_API_KEY": "XAI_API_KEY_HERE",
"OPENROUTER_API_KEY": "OPENROUTER_API_KEY_HERE",
"MISTRAL_API_KEY": "MISTRAL_API_KEY_HERE",

View File

@@ -4,6 +4,7 @@ PERPLEXITY_API_KEY=YOUR_PERPLEXITY_KEY_HERE
OPENAI_API_KEY=YOUR_OPENAI_KEY_HERE
GOOGLE_API_KEY=YOUR_GOOGLE_KEY_HERE
MISTRAL_API_KEY=YOUR_MISTRAL_KEY_HERE
GROQ_API_KEY=YOUR_GROQ_KEY_HERE
OPENROUTER_API_KEY=YOUR_OPENROUTER_KEY_HERE
XAI_API_KEY=YOUR_XAI_KEY_HERE
AZURE_OPENAI_API_KEY=YOUR_AZURE_KEY_HERE
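
For context, a minimal sketch (not part of this commit) of how the new GROQ_API_KEY would typically be consumed once it is exported in the shell or loaded from this .env file:

import { createGroq } from '@ai-sdk/groq';

// Resolve the key configured above; the provider module added later in this
// diff performs the same createGroq() call, with an explicit apiKey check.
const groq = createGroq({ apiKey: process.env.GROQ_API_KEY });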

View File

@@ -1,15 +1,15 @@
{
"models": {
"main": {
"provider": "vertex",
"modelId": "gemini-1.5-pro-002",
"maxTokens": 50000,
"provider": "groq",
"modelId": "llama-3.1-8b-instant",
"maxTokens": 131072,
"temperature": 0.2
},
"research": {
"provider": "perplexity",
"modelId": "sonar",
"maxTokens": 8700,
"provider": "groq",
"modelId": "llama-3.3-70b-versatile",
"maxTokens": 32768,
"temperature": 0.1
},
"fallback": {
@@ -22,15 +22,16 @@
"global": {
"logLevel": "info",
"debug": false,
"defaultNumTasks": 10,
"defaultSubtasks": 5,
"defaultPriority": "medium",
"projectName": "Taskmaster",
"ollamaBaseURL": "http://localhost:11434/api",
"bedrockBaseURL": "https://bedrock.us-east-1.amazonaws.com",
"responseLanguage": "English",
"userId": "1234567890",
"azureBaseURL": "https://your-endpoint.azure.com/",
"defaultTag": "master",
"responseLanguage": "English"
"defaultTag": "master"
},
"claudeCode": {}
}

package-lock.json (generated): 21 changed lines
View File

@@ -1,12 +1,12 @@
{
"name": "task-master-ai",
"version": "0.18.0",
"version": "0.19.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "task-master-ai",
"version": "0.18.0",
"version": "0.19.0",
"license": "MIT WITH Commons-Clause",
"dependencies": {
"@ai-sdk/amazon-bedrock": "^2.2.9",
@@ -14,6 +14,7 @@
"@ai-sdk/azure": "^1.3.17",
"@ai-sdk/google": "^1.2.13",
"@ai-sdk/google-vertex": "^2.2.23",
"@ai-sdk/groq": "^1.2.9",
"@ai-sdk/mistral": "^1.2.7",
"@ai-sdk/openai": "^1.3.20",
"@ai-sdk/perplexity": "^1.1.7",
@@ -162,6 +163,22 @@
"zod": "^3.0.0"
}
},
"node_modules/@ai-sdk/groq": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/@ai-sdk/groq/-/groq-1.2.9.tgz",
"integrity": "sha512-7MoDaxm8yWtiRbD1LipYZG0kBl+Xe0sv/EeyxnHnGPZappXdlgtdOgTZVjjXkT3nWP30jjZi9A45zoVrBMb3Xg==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.1.3",
"@ai-sdk/provider-utils": "2.2.8"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.0.0"
}
},
"node_modules/@ai-sdk/mistral": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@ai-sdk/mistral/-/mistral-1.2.8.tgz",

View File

@@ -44,6 +44,7 @@
"@ai-sdk/azure": "^1.3.17",
"@ai-sdk/google": "^1.2.13",
"@ai-sdk/google-vertex": "^2.2.23",
"@ai-sdk/groq": "^1.2.9",
"@ai-sdk/mistral": "^1.2.7",
"@ai-sdk/openai": "^1.3.20",
"@ai-sdk/perplexity": "^1.1.7",

View File

@@ -692,6 +692,98 @@
"max_tokens": 32768
}
],
"groq": [
{
"id": "llama-3.3-70b-versatile",
"swe_score": 0.55,
"cost_per_1m_tokens": {
"input": 0.59,
"output": 0.79
},
"allowed_roles": ["main", "fallback", "research"],
"max_tokens": 32768
},
{
"id": "llama-3.1-8b-instant",
"swe_score": 0.32,
"cost_per_1m_tokens": {
"input": 0.05,
"output": 0.08
},
"allowed_roles": ["main", "fallback"],
"max_tokens": 131072
},
{
"id": "llama-4-scout",
"swe_score": 0.45,
"cost_per_1m_tokens": {
"input": 0.11,
"output": 0.34
},
"allowed_roles": ["main", "fallback", "research"],
"max_tokens": 32768
},
{
"id": "llama-4-maverick",
"swe_score": 0.52,
"cost_per_1m_tokens": {
"input": 0.5,
"output": 0.77
},
"allowed_roles": ["main", "fallback", "research"],
"max_tokens": 32768
},
{
"id": "mixtral-8x7b-32768",
"swe_score": 0.35,
"cost_per_1m_tokens": {
"input": 0.24,
"output": 0.24
},
"allowed_roles": ["main", "fallback"],
"max_tokens": 32768
},
{
"id": "qwen-qwq-32b-preview",
"swe_score": 0.4,
"cost_per_1m_tokens": {
"input": 0.18,
"output": 0.18
},
"allowed_roles": ["main", "fallback", "research"],
"max_tokens": 32768
},
{
"id": "deepseek-r1-distill-llama-70b",
"swe_score": 0.52,
"cost_per_1m_tokens": {
"input": 0.75,
"output": 0.99
},
"allowed_roles": ["main", "research"],
"max_tokens": 8192
},
{
"id": "gemma2-9b-it",
"swe_score": 0.3,
"cost_per_1m_tokens": {
"input": 0.2,
"output": 0.2
},
"allowed_roles": ["main", "fallback"],
"max_tokens": 8192
},
{
"id": "whisper-large-v3",
"swe_score": 0,
"cost_per_1m_tokens": {
"input": 0.11,
"output": 0
},
"allowed_roles": ["main"],
"max_tokens": 0
}
],
"claude-code": [
{
"id": "opus",

src/ai-providers/groq.js (new file): 41 lines
View File

@@ -0,0 +1,41 @@
/**
 * src/ai-providers/groq.js
 *
 * Implementation for interacting with Groq models
 * using the Vercel AI SDK.
 */
import { createGroq } from '@ai-sdk/groq';
import { BaseAIProvider } from './base-provider.js';

export class GroqProvider extends BaseAIProvider {
  constructor() {
    super();
    this.name = 'Groq';
  }

  /**
   * Creates and returns a Groq client instance.
   * @param {object} params - Parameters for client initialization
   * @param {string} params.apiKey - Groq API key
   * @param {string} [params.baseURL] - Optional custom API endpoint
   * @returns {Function} Groq client function
   * @throws {Error} If API key is missing or initialization fails
   */
  getClient(params) {
    try {
      const { apiKey, baseURL } = params;

      if (!apiKey) {
        throw new Error('Groq API key is required.');
      }

      return createGroq({
        apiKey,
        ...(baseURL && { baseURL })
      });
    } catch (error) {
      this.handleError('client initialization', error);
    }
  }
}
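
A hypothetical usage sketch (not part of this commit), assuming the Vercel AI SDK core package (ai) is available alongside @ai-sdk/groq and that GROQ_API_KEY is set as configured earlier in this diff:

import { generateText } from 'ai';
import { GroqProvider } from './groq.js';

// getClient() returns the createGroq() factory; calling it with a model ID
// from supported-models.json yields a model usable with generateText.
const client = new GroqProvider().getClient({
  apiKey: process.env.GROQ_API_KEY
});

const { text } = await generateText({
  model: client('llama-3.1-8b-instant'),
  prompt: 'Summarize the next pending task.'
});
console.log(text);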

View File

@@ -8,6 +8,7 @@ export { PerplexityAIProvider } from './perplexity.js';
export { GoogleAIProvider } from './google.js';
export { OpenAIProvider } from './openai.js';
export { XAIProvider } from './xai.js';
export { GroqProvider } from './groq.js';
export { OpenRouterAIProvider } from './openrouter.js';
export { OllamaAIProvider } from './ollama.js';
export { BedrockAIProvider } from './bedrock.js';

View File

@@ -10,6 +10,7 @@ export const VALIDATED_PROVIDERS = [
'google',
'perplexity',
'xai',
'groq',
'mistral'
];