support deepseek-v3-20250324

Author: jinhui.li
Date: 2025-03-25 14:51:28 +08:00
Commit: 93fcd77544 (parent ba2da2f4ed)
17 changed files with 2050 additions and 485 deletions

src/utils/index.ts (new file, +57 lines)

@@ -0,0 +1,57 @@
import { HttpsProxyAgent } from "https-proxy-agent";
import OpenAI, { ClientOptions } from "openai";
import fs from "node:fs/promises";
import {
CONFIG_FILE,
DEFAULT_CONFIG,
HOME_DIR,
PROMPTS_DIR,
} from "../constants";
// Shared OpenAI client options; route requests through PROXY_URL when it is set
export function getOpenAICommonOptions(): ClientOptions {
const options: ClientOptions = {};
if (process.env.PROXY_URL) {
options.httpAgent = new HttpsProxyAgent(process.env.PROXY_URL);
}
return options;
}
const ensureDir = async (dir_path: string) => {
try {
await fs.access(dir_path);
} catch {
await fs.mkdir(dir_path, { recursive: true });
}
};
export const initDir = async () => {
await ensureDir(HOME_DIR);
await ensureDir(PROMPTS_DIR);
};
// Read the config file; fall back to DEFAULT_CONFIG (and persist it) if the file is missing or invalid
export const readConfigFile = async () => {
try {
const config = await fs.readFile(CONFIG_FILE, "utf-8");
return JSON.parse(config);
} catch {
await writeConfigFile(DEFAULT_CONFIG);
return DEFAULT_CONFIG;
}
};
export const writeConfigFile = async (config: any) => {
await fs.writeFile(CONFIG_FILE, JSON.stringify(config, null, 2));
};
// Load the config file and expose its entries as environment variables
export const initConfig = async () => {
const config = await readConfigFile();
Object.assign(process.env, config);
};
// Create an OpenAI client, layering the shared proxy options on top of caller options
export const createClient = (options: ClientOptions) => {
const client = new OpenAI({
...options,
...getOpenAICommonOptions(),
});
return client;
};
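
For context, here is a minimal sketch of how these helpers could be wired together at startup; the environment variable names passed to createClient (OPENAI_BASE_URL, OPENAI_API_KEY) are assumptions for illustration, not part of this commit:

import { initDir, initConfig, createClient } from "./utils";

const bootstrap = async () => {
  await initDir(); // make sure HOME_DIR and PROMPTS_DIR exist
  await initConfig(); // merge config file entries into process.env
  // createClient adds the proxy-aware common options on top of the caller's options
  return createClient({
    baseURL: process.env.OPENAI_BASE_URL,
    apiKey: process.env.OPENAI_API_KEY,
  });
};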

src/utils/log.ts (new file, +27 lines)

@@ -0,0 +1,27 @@
import fs from 'node:fs';
import path from 'node:path';
import { HOME_DIR } from '../constants';
const LOG_FILE = path.join(HOME_DIR, 'claude-code-router.log');
// Ensure log directory exists
if (!fs.existsSync(HOME_DIR)) {
fs.mkdirSync(HOME_DIR, { recursive: true });
}
export function log(...args: any[]) {
// Check if logging is enabled via environment variable
const isLogEnabled = process.env.LOG === 'true';
if (!isLogEnabled) {
return;
}
const timestamp = new Date().toISOString();
const logMessage = `[${timestamp}] ${args.map(arg =>
typeof arg === 'object' ? JSON.stringify(arg) : String(arg)
).join(' ')}\n`;
// Append to log file
fs.appendFileSync(LOG_FILE, logMessage, 'utf8');
}
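
A quick usage sketch: with LOG=true in the environment, each call appends one timestamped line to claude-code-router.log under HOME_DIR (the payload below is made up for illustration):

import { log } from "./utils/log";

process.env.LOG = "true"; // logging is a no-op unless this is set
log("request routed", { provider: "deepseek", stream: true }); // objects are JSON-stringified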

src/utils/stream.ts (new file, +268 lines)

@@ -0,0 +1,268 @@
import { Response } from "express";
import { OpenAI } from "openai";
interface ContentBlock {
type: string;
id?: string;
name?: string;
input?: any;
text?: string;
}
interface MessageEvent {
type: string;
message?: {
id: string;
type: string;
role: string;
content: any[];
model: string;
stop_reason: string | null;
stop_sequence: string | null;
usage: {
input_tokens: number;
output_tokens: number;
};
};
delta?: {
stop_reason?: string;
stop_sequence?: string | null;
content?: ContentBlock[];
type?: string;
text?: string;
partial_json?: string;
};
index?: number;
content_block?: ContentBlock;
usage?: {
input_tokens: number;
output_tokens: number;
};
}
export async function streamOpenAIResponse(
res: Response,
completion: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>,
model: string
) {
const messageId = "msg_" + Date.now();
let contentBlockIndex = 0;
let currentContentBlocks: ContentBlock[] = [];
// Send message_start event
const messageStart: MessageEvent = {
type: "message_start",
message: {
id: messageId,
type: "message",
role: "assistant",
content: [],
model,
stop_reason: null,
stop_sequence: null,
usage: { input_tokens: 1, output_tokens: 1 }, // placeholder counts; real usage is not tracked here
},
};
res.write(`event: message_start\ndata: ${JSON.stringify(messageStart)}\n\n`);
let isToolUse = false;
let toolUseJson = "";
let hasStartedTextBlock = false;
try {
for await (const chunk of completion) {
const delta = chunk.choices[0]?.delta;
// Skip chunks without choices (some OpenAI-compatible providers emit usage-only chunks)
if (!delta) continue;
if (delta.tool_calls && delta.tool_calls.length > 0) {
const toolCall = delta.tool_calls[0];
if (!isToolUse) {
// Close any open text block before starting a tool_use block at the next index
if (hasStartedTextBlock) {
const textBlockStop: MessageEvent = {
type: "content_block_stop",
index: contentBlockIndex,
};
res.write(
`event: content_block_stop\ndata: ${JSON.stringify(textBlockStop)}\n\n`
);
contentBlockIndex++;
hasStartedTextBlock = false;
}
// Start new tool call block
isToolUse = true;
const toolBlock: ContentBlock = {
type: "tool_use",
id: `toolu_${Date.now()}`,
name: toolCall.function?.name,
input: {},
};
const toolBlockStart: MessageEvent = {
type: "content_block_start",
index: contentBlockIndex,
content_block: toolBlock,
};
currentContentBlocks.push(toolBlock);
res.write(
`event: content_block_start\ndata: ${JSON.stringify(
toolBlockStart
)}\n\n`
);
toolUseJson = "";
}
// Stream tool call JSON
if (toolCall.function?.arguments) {
const jsonDelta: MessageEvent = {
type: "content_block_delta",
index: contentBlockIndex,
delta: {
type: "input_json_delta",
partial_json: toolCall.function?.arguments,
},
};
toolUseJson += toolCall.function.arguments;
try {
const parsedJson = JSON.parse(toolUseJson);
currentContentBlocks[contentBlockIndex].input = parsedJson;
} catch (e) {
// JSON not yet complete, continue accumulating
}
res.write(
`event: content_block_delta\ndata: ${JSON.stringify(jsonDelta)}\n\n`
);
}
} else if (delta.content) {
// Handle regular text content
if (isToolUse) {
// End previous tool call block
const contentBlockStop: MessageEvent = {
type: "content_block_stop",
index: contentBlockIndex,
};
res.write(
`event: content_block_stop\ndata: ${JSON.stringify(
contentBlockStop
)}\n\n`
);
contentBlockIndex++;
isToolUse = false;
}
// If text block not yet started, send content_block_start
if (!hasStartedTextBlock) {
const textBlock: ContentBlock = {
type: "text",
text: "",
};
const textBlockStart: MessageEvent = {
type: "content_block_start",
index: contentBlockIndex,
content_block: textBlock,
};
currentContentBlocks.push(textBlock);
res.write(
`event: content_block_start\ndata: ${JSON.stringify(
textBlockStart
)}\n\n`
);
hasStartedTextBlock = true;
}
// Send regular text content
const contentDelta: MessageEvent = {
type: "content_block_delta",
index: contentBlockIndex,
delta: {
type: "text_delta",
text: delta.content,
},
};
// Update content block text
if (currentContentBlocks[contentBlockIndex]) {
currentContentBlocks[contentBlockIndex].text += delta.content;
}
res.write(
`event: content_block_delta\ndata: ${JSON.stringify(
contentDelta
)}\n\n`
);
}
}
} catch (e: any) {
// On errors mid-stream, surface the error to the client as a text block instead of aborting the SSE response
// If text block not yet started, send content_block_start
if (!hasStartedTextBlock) {
const textBlock: ContentBlock = {
type: "text",
text: "",
};
const textBlockStart: MessageEvent = {
type: "content_block_start",
index: contentBlockIndex,
content_block: textBlock,
};
currentContentBlocks.push(textBlock);
res.write(
`event: content_block_start\ndata: ${JSON.stringify(
textBlockStart
)}\n\n`
);
hasStartedTextBlock = true;
}
// Send regular text content
const contentDelta: MessageEvent = {
type: "content_block_delta",
index: contentBlockIndex,
delta: {
type: "text_delta",
text: JSON.stringify(e),
},
};
// Update content block text
if (currentContentBlocks[contentBlockIndex]) {
currentContentBlocks[contentBlockIndex].text += JSON.stringify(e);
}
res.write(
`event: content_block_delta\ndata: ${JSON.stringify(contentDelta)}\n\n`
);
}
// Close last content block
const contentBlockStop: MessageEvent = {
type: "content_block_stop",
index: contentBlockIndex,
};
res.write(
`event: content_block_stop\ndata: ${JSON.stringify(contentBlockStop)}\n\n`
);
// Send message_delta event with appropriate stop_reason
const messageDelta: MessageEvent = {
type: "message_delta",
delta: {
stop_reason: isToolUse ? "tool_use" : "end_turn",
stop_sequence: null,
content: currentContentBlocks,
},
usage: { input_tokens: 100, output_tokens: 150 }, // placeholder counts
};
res.write(`event: message_delta\ndata: ${JSON.stringify(messageDelta)}\n\n`);
// Send message_stop event
const messageStop: MessageEvent = {
type: "message_stop",
};
res.write(`event: message_stop\ndata: ${JSON.stringify(messageStop)}\n\n`);
res.end();
}
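
To show how streamOpenAIResponse is meant to be consumed, here is a minimal sketch of an Express handler that forwards a streaming chat completion into the Anthropic-style SSE events produced above; the route path, model name, and request body shape are assumptions for illustration, not part of this commit:

import express from "express";
import { createClient } from "./utils";
import { streamOpenAIResponse } from "./utils/stream";

const app = express();
app.use(express.json());

app.post("/v1/messages", async (req, res) => {
  // SSE headers expected by Anthropic-style streaming clients
  res.setHeader("Content-Type", "text/event-stream");
  res.setHeader("Cache-Control", "no-cache");
  res.setHeader("Connection", "keep-alive");

  const client = createClient({
    baseURL: process.env.OPENAI_BASE_URL, // assumed env vars
    apiKey: process.env.OPENAI_API_KEY,
  });
  const completion = await client.chat.completions.create({
    model: "deepseek-chat", // hypothetical model name
    messages: req.body.messages,
    stream: true,
  });
  await streamOpenAIResponse(res, completion, "deepseek-chat");
});

app.listen(3000);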