From 5b3dd3f29b98099342c0f9616000824b662fb43b Mon Sep 17 00:00:00 2001
From: Ralph Khreish <35776126+Crunchyman-ralph@users.noreply.github.com>
Date: Sat, 19 Jul 2025 13:56:40 +0300
Subject: [PATCH] chore: adjust kimi k2 max tokens (#1014)

---
 package.json                          | 5 +----
 scripts/modules/supported-models.json | 2 +-
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/package.json b/package.json
index c10b4b02..9a004872 100644
--- a/package.json
+++ b/package.json
@@ -9,10 +9,7 @@
 		"task-master-mcp": "mcp-server/server.js",
 		"task-master-ai": "mcp-server/server.js"
 	},
-	"workspaces": [
-		"apps/*",
-		"."
-	],
+	"workspaces": ["apps/*", "."],
 	"scripts": {
 		"test": "node --experimental-vm-modules node_modules/.bin/jest",
 		"test:fails": "node --experimental-vm-modules node_modules/.bin/jest --onlyFailures",
diff --git a/scripts/modules/supported-models.json b/scripts/modules/supported-models.json
index a321e6ac..f968bc31 100644
--- a/scripts/modules/supported-models.json
+++ b/scripts/modules/supported-models.json
@@ -303,7 +303,7 @@
 				"output": 3.0
 			},
 			"allowed_roles": ["main", "fallback"],
-			"max_tokens": 16384
+			"max_tokens": 131072
 		},
 		{
 			"id": "llama-3.3-70b-versatile",