feat: complete Groq provider integration and add Kimi K2 model (#978)
* feat: complete Groq provider integration and add Kimi K2 model

  - Add missing getRequiredApiKeyName() method to GroqProvider class
  - Register GroqProvider in ai-services-unified.js PROVIDERS object
  - Add Groq API key handling to config-manager.js (isApiKeySet and getMcpApiKeyStatus)
  - Add GROQ_API_KEY to env.example with format hint
  - Add moonshotai/kimi-k2-instruct model to Groq provider ($1/$3 per 1M tokens, 16k max output)
  - Fix import sorting for linting compliance
  - Add GroqProvider mock to ai-services-unified tests

  Fixes the missing implementation pieces that prevented the Groq provider from working.

* chore: improve changeset

---------

Co-authored-by: Ben Vargas <ben@example.com>
Co-authored-by: Ralph Khreish <35776126+Crunchyman-ralph@users.noreply.github.com>
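To show how these pieces fit together, here is a minimal, self-contained sketch (illustrative only; registry and resolveKeyFor are hypothetical names, not Task Master APIs) of how a registry entry plus getRequiredApiKeyName() combine to resolve a provider's API key:

// Illustrative only: a provider registry entry plus getRequiredApiKeyName()
// is enough to look up the right environment variable for a provider.
class GroqProvider {
	constructor() {
		this.name = 'Groq';
	}
	getRequiredApiKeyName() {
		return 'GROQ_API_KEY';
	}
}

const registry = { groq: new GroqProvider() }; // hypothetical stand-in for PROVIDERS

function resolveKeyFor(providerName, env = process.env) {
	const provider = registry[providerName];
	if (!provider) throw new Error(`Unknown provider: ${providerName}`);
	const keyName = provider.getRequiredApiKeyName(); // e.g. 'GROQ_API_KEY'
	return env[keyName] ?? null;
}

console.log(resolveKeyFor('groq')); // null unless GROQ_API_KEY is set

The real flow runs through config-manager.js and resolveEnvVariable, as the hunks below show.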
.changeset/groq-kimi-k2-support.md | 10 ++++++++++ (new file)
.changeset/groq-kimi-k2-support.md
@@ -0,0 +1,10 @@
+---
+"task-master-ai": minor
+---
+
+Complete Groq provider integration and add MoonshotAI Kimi K2 model support
+
+- Fixed Groq provider registration
+- Added Groq API key validation
+- Added GROQ_API_KEY to .env.example
+- Added moonshotai/kimi-k2-instruct model with $1/$3 per 1M token pricing and 16k max output
.env.example
@@ -5,6 +5,7 @@ OPENAI_API_KEY="your_openai_api_key_here" # Optional, for OpenAI/Ope
 GOOGLE_API_KEY="your_google_api_key_here" # Optional, for Google Gemini models.
 MISTRAL_API_KEY="your_mistral_key_here" # Optional, for Mistral AI models.
 XAI_API_KEY="YOUR_XAI_KEY_HERE" # Optional, for xAI AI models.
+GROQ_API_KEY="your_groq_api_key_here" # Optional, for Groq models. Format: gsk_...
 AZURE_OPENAI_API_KEY="your_azure_key_here" # Optional, for Azure OpenAI models (requires endpoint in .taskmaster/config.json).
 OLLAMA_API_KEY="your_ollama_api_key_here" # Optional: For remote Ollama servers that require authentication.
 GITHUB_API_KEY="your_github_api_key_here" # Optional: For GitHub import/export features. Format: ghp_... or github_pat_...
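The Format: gsk_... hint above lends itself to a quick startup sanity check. A hedged sketch (this helper is illustrative and not part of the repository):

// Illustrative helper (not part of the codebase): checks that GROQ_API_KEY
// is present and matches the gsk_... format hinted at in .env.example.
function looksLikeGroqKey(value) {
	return typeof value === 'string' && value.startsWith('gsk_');
}

const groqKey = process.env.GROQ_API_KEY;
if (!looksLikeGroqKey(groqKey)) {
	console.warn('GROQ_API_KEY is missing or does not look like a gsk_... key');
}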
ai-services-unified.js
@@ -8,47 +8,48 @@

// --- Core Dependencies ---
import {
	getMainProvider,
	getMainModelId,
	getResearchProvider,
	getResearchModelId,
	getFallbackProvider,
	MODEL_MAP,
	getAzureBaseURL,
	getBaseUrlForRole,
	getBedrockBaseURL,
	getDebugFlag,
	getFallbackModelId,
	getFallbackProvider,
	getMainModelId,
	getMainProvider,
	getOllamaBaseURL,
	getParametersForRole,
	getResearchModelId,
	getResearchProvider,
	getResponseLanguage,
	getUserId,
	MODEL_MAP,
	getDebugFlag,
	getBaseUrlForRole,
	isApiKeySet,
	getOllamaBaseURL,
	getAzureBaseURL,
	getBedrockBaseURL,
	getVertexProjectId,
	getVertexLocation,
	getVertexProjectId,
	isApiKeySet,
	providersWithoutApiKeys
} from './config-manager.js';
import {
	log,
	findProjectRoot,
	resolveEnvVariable,
	getCurrentTag
	getCurrentTag,
	log,
	resolveEnvVariable
} from './utils.js';

// Import provider classes
import {
	AnthropicAIProvider,
	PerplexityAIProvider,
	GoogleAIProvider,
	OpenAIProvider,
	XAIProvider,
	OpenRouterAIProvider,
	OllamaAIProvider,
	BedrockAIProvider,
	AzureProvider,
	VertexAIProvider,
	BedrockAIProvider,
	ClaudeCodeProvider,
	GeminiCliProvider
	GeminiCliProvider,
	GoogleAIProvider,
	GroqProvider,
	OllamaAIProvider,
	OpenAIProvider,
	OpenRouterAIProvider,
	PerplexityAIProvider,
	VertexAIProvider,
	XAIProvider
} from '../../src/ai-providers/index.js';

// Import the provider registry
@@ -61,6 +62,7 @@ const PROVIDERS = {
 	google: new GoogleAIProvider(),
 	openai: new OpenAIProvider(),
 	xai: new XAIProvider(),
+	groq: new GroqProvider(),
 	openrouter: new OpenRouterAIProvider(),
 	ollama: new OllamaAIProvider(),
 	bedrock: new BedrockAIProvider(),
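With groq registered, the PROVIDERS map stays the single lookup point for dispatch. A simplified sketch of that pattern (the generateText signature here is assumed for illustration; the real module also resolves the role, model, and API key before calling the provider):

// Simplified sketch of registry-based dispatch; provider objects are stubs.
const PROVIDERS = {
	groq: { generateText: async ({ prompt }) => `groq: ${prompt}` },
	openai: { generateText: async ({ prompt }) => `openai: ${prompt}` }
};

async function generateTextService(providerName, prompt) {
	const provider = PROVIDERS[providerName];
	if (!provider) throw new Error(`Provider "${providerName}" is not registered`);
	return provider.generateText({ prompt });
}

generateTextService('groq', 'hello').then(console.log);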
config-manager.js
@@ -1,21 +1,21 @@
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import chalk from 'chalk';
import { z } from 'zod';
import { fileURLToPath } from 'url';
import { log, findProjectRoot, resolveEnvVariable, isEmpty } from './utils.js';
import { AI_COMMAND_NAMES } from '../../src/constants/commands.js';
import {
	LEGACY_CONFIG_FILE,
	TASKMASTER_DIR
} from '../../src/constants/paths.js';
import { findConfigPath } from '../../src/utils/path-utils.js';
import {
	VALIDATED_PROVIDERS,
	ALL_PROVIDERS,
	CUSTOM_PROVIDERS,
	CUSTOM_PROVIDERS_ARRAY,
	ALL_PROVIDERS
	VALIDATED_PROVIDERS
} from '../../src/constants/providers.js';
import { AI_COMMAND_NAMES } from '../../src/constants/commands.js';
import { findConfigPath } from '../../src/utils/path-utils.js';
import { findProjectRoot, isEmpty, log, resolveEnvVariable } from './utils.js';

// Calculate __dirname in ESM
const __filename = fileURLToPath(import.meta.url);
@@ -641,6 +641,7 @@ function isApiKeySet(providerName, session = null, projectRoot = null) {
 		azure: 'AZURE_OPENAI_API_KEY',
 		openrouter: 'OPENROUTER_API_KEY',
 		xai: 'XAI_API_KEY',
+		groq: 'GROQ_API_KEY',
 		vertex: 'GOOGLE_API_KEY', // Vertex uses the same key as Google
 		'claude-code': 'CLAUDE_CODE_API_KEY', // Not actually used, but included for consistency
 		bedrock: 'AWS_ACCESS_KEY_ID' // Bedrock uses AWS credentials
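A rough sketch of how such a provider-to-env-var map can back an isApiKeySet check (simplified; the real function also takes session and projectRoot, as the hunk header shows, and resolves keys beyond process.env):

// Sketch only: maps provider names to env vars and treats missing or
// placeholder values (e.g. "your_groq_api_key_here") as not set.
const keyMap = {
	xai: 'XAI_API_KEY',
	groq: 'GROQ_API_KEY',
	vertex: 'GOOGLE_API_KEY'
};

function isApiKeySetSketch(providerName, env = process.env) {
	const envVar = keyMap[providerName];
	if (!envVar) return false;
	const value = env[envVar];
	return Boolean(value) && !/your_.*_here/i.test(value);
}

console.log(isApiKeySetSketch('groq')); // false until a real key is configured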
@@ -726,6 +727,10 @@ function getMcpApiKeyStatus(providerName, projectRoot = null) {
 			apiKeyToCheck = mcpEnv.XAI_API_KEY;
 			placeholderValue = 'YOUR_XAI_API_KEY_HERE';
 			break;
+		case 'groq':
+			apiKeyToCheck = mcpEnv.GROQ_API_KEY;
+			placeholderValue = 'YOUR_GROQ_API_KEY_HERE';
+			break;
 		case 'ollama':
 			return true; // No key needed
 		case 'claude-code':
@@ -295,6 +295,16 @@
 			}
 		],
 		"groq": [
+			{
+				"id": "moonshotai/kimi-k2-instruct",
+				"swe_score": 0.66,
+				"cost_per_1m_tokens": {
+					"input": 1.0,
+					"output": 3.0
+				},
+				"allowed_roles": ["main", "fallback"],
+				"max_tokens": 16384
+			},
 			{
 				"id": "llama-3.3-70b-versatile",
 				"swe_score": 0.55,
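The cost_per_1m_tokens entry makes cost estimation a simple proration. A small worked example (the token counts are made up for illustration):

// Cost estimate from the cost_per_1m_tokens entry above.
const kimiK2 = { cost_per_1m_tokens: { input: 1.0, output: 3.0 } };

function estimateCostUSD(model, inputTokens, outputTokens) {
	const { input, output } = model.cost_per_1m_tokens;
	return (inputTokens / 1_000_000) * input + (outputTokens / 1_000_000) * output;
}

// 200k input at $1/1M = $0.20; 50k output at $3/1M = $0.15; total $0.35
console.log(estimateCostUSD(kimiK2, 200_000, 50_000).toFixed(2)); // "0.35"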
@@ -14,6 +14,14 @@ export class GroqProvider extends BaseAIProvider {
 		this.name = 'Groq';
 	}
+
+	/**
+	 * Returns the environment variable name required for this provider's API key.
+	 * @returns {string} The environment variable name for the Groq API key
+	 */
+	getRequiredApiKeyName() {
+		return 'GROQ_API_KEY';
+	}
 
 	/**
 	 * Creates and returns a Groq client instance.
 	 * @param {object} params - Parameters for client initialization
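The getClient JSDoc above implies the provider hands back a configured client. A minimal sketch of what that method might look like, assuming the provider wraps the Vercel AI SDK's @ai-sdk/groq package (the actual implementation may differ):

// Sketch only; assumes the provider is built on @ai-sdk/groq.
// Error handling and baseURL support are simplified.
import { createGroq } from '@ai-sdk/groq';

class GroqProviderSketch {
	getRequiredApiKeyName() {
		return 'GROQ_API_KEY';
	}

	getClient({ apiKey, baseURL } = {}) {
		if (!apiKey) {
			throw new Error(`${this.getRequiredApiKeyName()} is required for Groq`);
		}
		// createGroq returns a provider function: call it with a model id
		// (e.g. 'moonshotai/kimi-k2-instruct') to get a language model.
		return createGroq({ apiKey, ...(baseURL && { baseURL }) });
	}
}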
@@ -177,6 +177,13 @@ jest.unstable_mockModule('../../src/ai-providers/index.js', () => ({
 		getRequiredApiKeyName: jest.fn(() => 'XAI_API_KEY'),
 		isRequiredApiKey: jest.fn(() => true)
 	})),
+	GroqProvider: jest.fn(() => ({
+		generateText: jest.fn(),
+		streamText: jest.fn(),
+		generateObject: jest.fn(),
+		getRequiredApiKeyName: jest.fn(() => 'GROQ_API_KEY'),
+		isRequiredApiKey: jest.fn(() => true)
+	})),
 	OpenRouterAIProvider: jest.fn(() => ({
 		generateText: jest.fn(),
 		streamText: jest.fn(),
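With this mock registered, a test can exercise the Groq wiring without real network calls. An illustrative sketch (the test name and assertion are hypothetical, and the suite's jest.unstable_mockModule setup above is assumed to run before the dynamic import):

// Illustrative test sketch; relies on the mocked module registered above.
describe('Groq provider registration', () => {
	it('reports GROQ_API_KEY as its required key', async () => {
		const { GroqProvider } = await import('../../src/ai-providers/index.js');
		const groq = GroqProvider(); // the mock factory returns a plain object
		expect(groq.getRequiredApiKeyName()).toBe('GROQ_API_KEY');
	});
});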