release v1.0.23 to support websearch

This commit is contained in:
jinhui.li
2025-07-21 10:46:53 +08:00
parent e362feaa82
commit 7165953b50
5 changed files with 21 additions and 19 deletions

View File

@@ -107,7 +107,8 @@ Here is a comprehensive example:
"default": "deepseek,deepseek-chat",
"background": "ollama,qwen2.5-coder:latest",
"think": "deepseek,deepseek-reasoner",
"longContext": "openrouter,google/gemini-2.5-pro-preview"
"longContext": "openrouter,google/gemini-2.5-pro-preview",
"webSearch": "gemini,gemini-2.5-flash"
}
}
```
@@ -218,6 +219,8 @@ The `Router` object defines which model to use for different scenarios:
- `background`: A model for background tasks. This can be a smaller, local model to save costs.
- `think`: A model for reasoning-heavy tasks, like Plan Mode.
- `longContext`: A model for handling long contexts (e.g., > 60K tokens).
- `webSearch`: Used for web search tasks; the model itself must support this feature. If you're using openrouter, you need to add the `:online` suffix after the model name.
You can also switch models dynamically in Claude Code with the `/model` command:
`/model provider_name,model_name`

View File

@@ -104,7 +104,8 @@ npm install -g @musistudio/claude-code-router
"default": "deepseek,deepseek-chat",
"background": "ollama,qwen2.5-coder:latest",
"think": "deepseek,deepseek-reasoner",
"longContext": "openrouter,google/gemini-2.5-pro-preview"
"longContext": "openrouter,google/gemini-2.5-pro-preview",
"webSearch": "gemini,gemini-2.5-flash"
}
}
```
@@ -215,6 +216,7 @@ Transformers 允许您修改请求和响应负载,以确保与不同提供商
- `background`: 用于后台任务的模型。这可以是一个较小的本地模型以节省成本。
- `think`: 用于推理密集型任务(如计划模式)的模型。
- `longContext`: 用于处理长上下文(例如,> 60K 令牌)的模型。
- `webSearch`: 用于处理网络搜索任务,需要模型本身支持该功能。如果使用 `openrouter`,需要在模型名后面加上 `:online` 后缀。
您还可以使用 `/model` 命令在 Claude Code 中动态切换模型:
`/model provider_name,model_name`

View File

@@ -1,6 +1,6 @@
{
"name": "@musistudio/claude-code-router",
"version": "1.0.22",
"version": "1.0.23",
"description": "Use Claude Code without an Anthropic account and route it to another LLM provider",
"bin": {
"ccr": "./dist/cli.js"
@@ -18,7 +18,7 @@
"author": "musistudio",
"license": "MIT",
"dependencies": {
"@musistudio/llms": "^1.0.9",
"@musistudio/llms": "^1.0.10",
"dotenv": "^16.4.7",
"tiktoken": "^1.0.21",
"uuid": "^11.1.0"

19
pnpm-lock.yaml generated
View File

@@ -9,8 +9,8 @@ importers:
.:
dependencies:
'@musistudio/llms':
specifier: ^1.0.9
version: 1.0.9(ws@8.18.3)(zod@3.25.67)
specifier: ^1.0.10
version: 1.0.10(ws@8.18.3)(zod@3.25.67)
dotenv:
specifier: ^16.4.7
version: 16.6.1
@@ -220,13 +220,8 @@ packages:
'@modelcontextprotocol/sdk':
optional: true
<<<<<<< HEAD
'@musistudio/llms@1.0.9':
resolution: {integrity: sha512-Z4H2yMcp3riDMQKAHTxqknanXgVSah/5HwDT5YtmsXaoznQpdzXLHlF4LBMMKNF6amq0R+MPfaPsjK8Lyddmxg==}
=======
'@musistudio/llms@1.0.8':
resolution: {integrity: sha512-C2GFoiw/DEo2faAQerRVOyWEupTJpoV+3z3rE9XEN31ySOcsaVPnKyWPmKKg9EDMBw70gQg5FZFg3jZxSCnWlA==}
>>>>>>> 2fc79dcf377ade7c4fc8883c94a6779fce830a5a
'@musistudio/llms@1.0.10':
resolution: {integrity: sha512-s3FUykkR/IykIHb5a/5GXfwB3MSf3DjGbJlmK9injoKhSVhA9SgbP8nG2cj3AlC1Ve5bFyLS5OR4R7wxWB4oqQ==}
'@nodelib/fs.scandir@2.1.5':
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
@@ -820,11 +815,7 @@ snapshots:
- supports-color
- utf-8-validate
<<<<<<< HEAD
'@musistudio/llms@1.0.9(ws@8.18.3)(zod@3.25.67)':
=======
'@musistudio/llms@1.0.8(ws@8.18.3)(zod@3.25.67)':
>>>>>>> 2fc79dcf377ade7c4fc8883c94a6779fce830a5a
'@musistudio/llms@1.0.10(ws@8.18.3)(zod@3.25.67)':
dependencies:
'@anthropic-ai/sdk': 0.54.0
'@fastify/cors': 11.0.1

View File

@@ -14,7 +14,10 @@ const getUseModel = (req: any, tokenCount: number, config: any) => {
return config.Router.longContext;
}
// If the model is claude-3-5-haiku, use the background model
if (req.body.model?.startsWith("claude-3-5-haiku") && config.Router.background) {
if (
req.body.model?.startsWith("claude-3-5-haiku") &&
config.Router.background
) {
log("Using background model for ", req.body.model);
return config.Router.background;
}
@@ -23,6 +26,9 @@ const getUseModel = (req: any, tokenCount: number, config: any) => {
log("Using think model for ", req.body.thinking);
return config.Router.think;
}
if (Array.isArray(req.body.tools) && req.body.tools.some(tool => tool.type?.startsWith('web_search')) && config.Router.webSearch) {
return config.Router.webSearch;
}
return config.Router!.default;
};