This commit introduces two key improvements:
1. **Google Provider Telemetry:**
   - Updated the Google provider to return token usage data with its responses (see the first sketch below).
   - This aligns the Google provider with the other providers for consistent AI usage telemetry.
2. **Robust AI Object Response Handling:**
   - Modified the object-response handling to accept a wider range of provider response shapes.
   - The add-task module now checks more than one location in the response for the AI-generated object, improving compatibility with different AI provider response structures (e.g., Gemini); see the second sketch below.
These changes enhance the reliability of AI interactions, particularly with the Google provider, and ensure accurate telemetry collection.
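
As a rough illustration of the telemetry change, the sketch below shows one way a caller might normalize token usage from a provider response. The helper name and the field names checked (`promptTokens`, `completionTokens`, and their snake_case variants) are assumptions for illustration, not the project's confirmed API.

```javascript
// Hypothetical sketch: normalize token usage from a provider response.
// Field names are assumptions for illustration, not Taskmaster's actual API.
function extractUsageTelemetry(result) {
  const usage = (result && result.usage) || {};
  return {
    inputTokens: usage.promptTokens ?? usage.input_tokens ?? 0,
    outputTokens: usage.completionTokens ?? usage.output_tokens ?? 0,
  };
}

// Example: works the same regardless of which naming convention the provider used.
console.log(extractUsageTelemetry({ usage: { promptTokens: 42, completionTokens: 7 } }));
```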
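
Similarly, the object-handling change can be pictured as checking more than one candidate field before giving up. The field names below (`object`, `mainResult`) are illustrative guesses at possible response shapes, not a documented provider contract.

```javascript
// Hypothetical sketch: locate the AI-generated object in a provider response
// that may nest it differently (e.g., Gemini vs. other providers).
// The candidate fields below are assumptions for illustration only.
function resolveGeneratedObject(response) {
  const candidate =
    response?.object ??             // common top-level shape
    response?.mainResult?.object ?? // alternative nesting
    response?.mainResult;           // last-resort fallback
  if (candidate == null) {
    throw new Error('AI response did not contain a generated object');
  }
  return candidate;
}
```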
Attached configuration file (plaintext, 32 lines):
```json
{
  "models": {
    "main": {
      "provider": "google",
      "modelId": "gemini-2.5-pro-exp-03-25",
      "maxTokens": 100000,
      "temperature": 0.2
    },
    "research": {
      "provider": "perplexity",
      "modelId": "sonar-pro",
      "maxTokens": 8700,
      "temperature": 0.1
    },
    "fallback": {
      "provider": "anthropic",
      "modelId": "claude-3-7-sonnet-20250219",
      "maxTokens": 120000,
      "temperature": 0.2
    }
  },
  "global": {
    "logLevel": "info",
    "debug": false,
    "defaultSubtasks": 5,
    "defaultPriority": "medium",
    "projectName": "Taskmaster",
    "ollamaBaseUrl": "http://localhost:11434/api",
    "userId": "1234567890",
    "azureOpenaiBaseUrl": "https://your-endpoint.openai.azure.com/"
  }
}
```