support proxy

jinhui.li
2025-03-03 15:30:31 +08:00
parent 5ef624a1ac
commit ac9acd73c3
4 changed files with 18 additions and 6 deletions

View File

@@ -10,7 +10,7 @@
 - [x] Normal Mode and Router Mode
-- [x] Using the qwen2.5-coder-3b-instruct model as the routing dispatcher (since it's currently free on Alibaba Cloud's official website)
+- [x] Using the qwen2.5-coder-3b model as the routing dispatcher (since it's currently free on Alibaba Cloud's official website)
 - [x] Using the qwen-max-0125 model as the tool invoker
@@ -18,7 +18,9 @@
 - [x] Using deepseek-r1 as the reasoning model
+- [x] Support proxy
-Thanks to the free qwen2.5-coder-3b-instruct model from Alibaba and deepseek's KV-Cache, we can significantly reduce the cost of using Claude Code. Make sure to set appropriate ignorePatterns for the project. See: https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview
+Thanks to the free qwen2.5-coder-3b model from Alibaba and deepseek's KV-Cache, we can significantly reduce the cost of using Claude Code. Make sure to set appropriate ignorePatterns for the project. See: https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview
 ## Usage

View File

@@ -4,6 +4,7 @@ import dotenv from "dotenv";
 import { existsSync } from "fs";
 import { writeFile } from "fs/promises";
 import { Router } from "./router.mjs";
+import { getOpenAICommonOptions } from "./utils.mjs";
 dotenv.config();
 const app = express();
@@ -11,7 +12,7 @@ const port = 3456;
 app.use(express.json({ limit: "500mb" }));
 let client;
-if (process.env.ENABLE_ROUTER) {
+if (process.env.ENABLE_ROUTER && process.env.ENABLE_ROUTER === "true") {
   const router = new Router();
   client = {
     call: (data) => {
@@ -22,6 +23,7 @@ if (process.env.ENABLE_ROUTER) {
   const openai = new OpenAI({
     apiKey: process.env.OPENAI_API_KEY,
     baseURL: process.env.OPENAI_BASE_URL,
+    ...getOpenAICommonOptions(),
   });
   client = {
     call: (data) => {
@@ -128,6 +130,7 @@ app.post("/v1/messages", async (req, res) => {
   let hasStartedTextBlock = false;
   for await (const chunk of completion) {
+    console.log(chunk);
     const delta = chunk.choices[0].delta;
     if (delta.tool_calls && delta.tool_calls.length > 0) {
       const toolCall = delta.tool_calls[0];
@@ -321,7 +324,7 @@ async function initializeClaudeConfig() {
 async function run() {
   await initializeClaudeConfig();
   app.listen(port, "127.0.0.1", () => {
     console.log(`Example app listening on port ${port}`);
   });
 }
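
The `getOpenAICommonOptions` helper imported from `./utils.mjs` above is not among the files shown in this commit. A minimal sketch of what it could look like, assuming it reads the proxy URL from `HTTPS_PROXY`/`https_proxy` (variable names not confirmed by this diff) and hands an agent built from the newly added `https-proxy-agent` dependency to the OpenAI SDK's `httpAgent` option:

```js
// utils.mjs (sketch, not part of this diff): shared options for every OpenAI client.
import { HttpsProxyAgent } from "https-proxy-agent";

export function getOpenAICommonOptions() {
  // Assumed variable names; adjust to whatever the project actually reads.
  const proxy = process.env.HTTPS_PROXY || process.env.https_proxy;
  if (!proxy) {
    // No proxy configured: the spread `...getOpenAICommonOptions()` adds nothing.
    return {};
  }
  // The openai v4 SDK routes requests through a Node agent passed as `httpAgent`.
  return { httpAgent: new HttpsProxyAgent(proxy) };
}
```

Returning an empty object when no proxy is set keeps the spread a no-op, so setups without a proxy behave exactly as before.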

View File

@@ -13,6 +13,7 @@
   "dependencies": {
     "dotenv": "^16.4.7",
     "express": "^4.21.2",
+    "https-proxy-agent": "^7.0.6",
     "openai": "^4.85.4"
   }
 }

View File

@@ -7,6 +7,7 @@ const useToolRouter = {
     const client = new OpenAI({
       apiKey: process.env.TOOL_AGENT_API_KEY,
       baseURL: process.env.TOOL_AGENT_BASE_URL,
+      ...getOpenAICommonOptions(),
     });
     return client.chat.completions.create({
       ...args,
@@ -14,7 +15,8 @@ const useToolRouter = {
         ...args.messages,
         {
           role: "system",
-          content: "You need to select the appropriate tool for the task based on the user's request. Review the requirements and choose the tool that fits the task best.",
+          content:
+            "You need to select the appropriate tool for the task based on the user's request. Review the requirements and choose the tool that fits the task best.",
         },
       ],
       model: process.env.TOOL_AGENT_MODEL,
@@ -29,6 +31,7 @@ const coderRouter = {
     const client = new OpenAI({
       apiKey: process.env.CODER_AGENT_API_KEY,
       baseURL: process.env.CODER_AGENT_BASE_URL,
+      ...getOpenAICommonOptions(),
     });
     delete args.tools;
     args.messages.forEach((item) => {
@@ -42,7 +45,8 @@ const coderRouter = {
         ...args.messages,
         {
           role: "system",
-          content: "You are a code writer who helps users write code based on their specific requirements. You create algorithms, implement functionality, and build structures according to the clear instructions provided by the user. Your focus is solely on writing code, ensuring that the task is completed accurately and efficiently.",
+          content:
+            "You are a code writer who helps users write code based on their specific requirements. You create algorithms, implement functionality, and build structures according to the clear instructions provided by the user. Your focus is solely on writing code, ensuring that the task is completed accurately and efficiently.",
         },
       ],
       model: process.env.CODER_AGENT_MODEL,
@@ -57,6 +61,7 @@ const thinkRouter = {
     const client = new OpenAI({
       apiKey: process.env.THINK_AGENT_API_KEY,
       baseURL: process.env.THINK_AGENT_BASE_URL,
+      ...getOpenAICommonOptions(),
     });
     const messages = JSON.parse(JSON.stringify(args.messages));
     messages.forEach((msg) => {
@@ -98,6 +103,7 @@ export class Router {
     this.client = new OpenAI({
       apiKey: process.env.ROUTER_AGENT_API_KEY,
       baseURL: process.env.ROUTER_AGENT_BASE_URL,
+      ...getOpenAICommonOptions(),
     });
   }
   async route(args) {
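
For reference, a hedged sketch of how the shared options are consumed: the `TOOL_AGENT_*` variables appear in the diff above, while `HTTPS_PROXY` and the contents of `utils.mjs` are the assumptions from the earlier sketch.

```js
// Usage sketch: a client built with the shared options tunnels through the proxy
// when e.g. HTTPS_PROXY=http://127.0.0.1:7890 is set (directly or via the .env
// file loaded by dotenv); without it, getOpenAICommonOptions() returns {}.
import OpenAI from "openai";
import { getOpenAICommonOptions } from "./utils.mjs";

const client = new OpenAI({
  apiKey: process.env.TOOL_AGENT_API_KEY,
  baseURL: process.env.TOOL_AGENT_BASE_URL,
  ...getOpenAICommonOptions(),
});

const completion = await client.chat.completions.create({
  model: process.env.TOOL_AGENT_MODEL,
  messages: [{ role: "user", content: "ping" }],
});
console.log(completion.choices[0].message.content);
```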