adjust /model command priority

jinhui.li
2025-06-17 12:36:59 +08:00
parent dd29cf895f
commit ae88d63c7c

@@ -6,6 +6,14 @@ import { log } from "../utils/log";
 const enc = get_encoding("cl100k_base");
 
 const getUseModel = (req: Request, tokenCount: number) => {
+  const [provider, model] = req.body.model.split(",");
+  if (provider && model) {
+    return {
+      provider,
+      model,
+    };
+  }
   // if tokenCount is greater than 32K, use the long context model
   if (tokenCount > 1000 * 32) {
     log("Using long context model due to token count:", tokenCount);
@@ -33,13 +41,6 @@ const getUseModel = (req: Request, tokenCount: number) => {
       model,
     };
   }
-  const [provider, model] = req.body.model.split(",");
-  if (provider && model) {
-    return {
-      provider,
-      model,
-    };
-  }
   return {
     provider: "default",
     model: req.config.OPENAI_MODEL,
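
Effect of the reorder: an explicit "provider,model" pair set by the /model command is now checked before the 32K long-context heuristic and the default fallback, instead of after them. A minimal sketch of how that pair is parsed, assuming an illustrative provider/model value (the names below are not taken from this repository's config):

// Hypothetical request body as the /model command might set it, e.g. "/model openrouter,gpt-4o".
// "openrouter" and "gpt-4o" are illustrative assumptions only.
const body = { model: "openrouter,gpt-4o" };

// Same parsing as in getUseModel: split on the comma into provider and model.
const [provider, model] = body.model.split(",");

if (provider && model) {
  // With this commit, this branch runs first, so an explicit selection
  // overrides the token-count based long-context routing.
  console.log({ provider, model }); // { provider: "openrouter", model: "gpt-4o" }
}

If the body's model field has no comma, the destructured model is undefined, the guard fails, and routing falls through to the token-count check and finally the "default" provider with req.config.OPENAI_MODEL.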