Compare commits

...

3 Commits

Author SHA1 Message Date

Ralph Khreish
2df4f13f65 chore: improve pre-release CI to be able to release more than one release candidate (#1036) 2025-07-23 18:28:17 +02:00

  * chore: improve pre-release CI to be able to release more than one release candidate
  * chore: implement requested changes from coderabbit
  * chore: apply requested changes

github-actions[bot]
a37017e5a5 docs: Auto-update and format models.md 2025-07-23 16:03:40 +00:00

Ralph Khreish
fb7d588137 feat: improve config-manager max tokens for openrouter and kimi-k2 model (#1035) 2025-07-23 18:03:26 +02:00
5 changed files with 36 additions and 15 deletions

View File

@@ -0,0 +1,10 @@
---
"task-master-ai": patch
---
Fix max_tokens limits for OpenRouter and Groq models
- Add special handling in config-manager.js for custom OpenRouter models to use a conservative default of 32,768 max_tokens
- Update qwen/qwen-turbo model max_tokens from 1,000,000 to 32,768 to match OpenRouter's actual limits
- Fix moonshotai/kimi-k2-instruct max_tokens to 16,384 to match Groq's actual limit (fixes #1028)
- This prevents "maximum context length exceeded" errors when using OpenRouter models not in our supported models list
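The clamp described in the first bullet is small enough to sketch on its own: for a custom OpenRouter model with no entry in the supported-models list, the effective max_tokens becomes the smaller of the role's configured default and the 32,768 ceiling. A minimal illustration of that rule (the constant and helper names here are made up for the sketch; the real logic lives in getParametersForRole, in the config-manager.js hunk further down):

// Illustrative only: mirrors the clamp added in config-manager.js for
// custom OpenRouter models that are absent from MODEL_MAP.
const OPENROUTER_FALLBACK_MAX_TOKENS = 32768; // conservative ceiling (name assumed)

function clampForCustomOpenRouter(roleMaxTokens) {
  // Never request more completion tokens than the conservative ceiling,
  // but leave smaller role defaults untouched.
  return Math.min(roleMaxTokens, OPENROUTER_FALLBACK_MAX_TOKENS);
}

console.log(clampForCustomOpenRouter(100000)); // 32768: clamped to the ceiling
console.log(clampForCustomOpenRouter(8192));   // 8192: already below the ceiling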

View File

@@ -16,7 +16,7 @@ jobs:
- uses: actions/setup-node@v4
with:
node-version: 20
cache: 'npm'
cache: "npm"
- name: Cache node_modules
uses: actions/cache@v4
@@ -32,10 +32,13 @@ jobs:
run: npm ci
timeout-minutes: 2
- name: Enter RC mode
- name: Enter RC mode (if not already in RC mode)
run: |
npx changeset pre exit || true
npx changeset pre enter rc
# ensure we're in the right pre-mode (tag "rc")
if [ ! -f .changeset/pre.json ] \
|| [ "$(jq -r '.tag' .changeset/pre.json 2>/dev/null || echo '')" != "rc" ]; then
npx changeset pre enter rc
fi
- name: Version RC packages
run: npx changeset version
@@ -51,12 +54,9 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Exit RC mode
run: npx changeset pre exit
- name: Commit & Push changes
uses: actions-js/push@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: ${{ github.ref }}
message: 'chore: rc version bump'
message: "chore: rc version bump"

View File

@@ -1,4 +1,4 @@
# Available Models as of July 22, 2025
# Available Models as of July 23, 2025
## Main Models

View File

@@ -584,10 +584,21 @@ function getParametersForRole(role, explicitRoot = null) {
);
}
} else {
log(
'debug',
`No model definitions found for provider ${providerName} in MODEL_MAP. Using role default maxTokens: ${roleMaxTokens}`
);
// Special handling for custom OpenRouter models
if (providerName === CUSTOM_PROVIDERS.OPENROUTER) {
// Use a conservative default for OpenRouter models not in our list
const openrouterDefault = 32768;
effectiveMaxTokens = Math.min(roleMaxTokens, openrouterDefault);
log(
'debug',
`Custom OpenRouter model ${modelId} detected. Using conservative max_tokens: ${effectiveMaxTokens}`
);
} else {
log(
'debug',
`No model definitions found for provider ${providerName} in MODEL_MAP. Using role default maxTokens: ${roleMaxTokens}`
);
}
}
} catch (lookupError) {
log(

View File

@@ -333,7 +333,7 @@
"output": 3.0
},
"allowed_roles": ["main", "fallback"],
"max_tokens": 131072,
"max_tokens": 16384,
"supported": true
},
{
@@ -701,7 +701,7 @@
"output": 0.2
},
"allowed_roles": ["main", "fallback"],
"max_tokens": 1000000,
"max_tokens": 32768,
"supported": true
},
{