add cli
This commit is contained in:
88
src/cli.ts
Normal file
88
src/cli.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
#!/usr/bin/env node
|
||||
import { run } from "./index";
|
||||
import { closeService } from "./utils/close";
|
||||
import { showStatus } from "./utils/status";
|
||||
import { executeCodeCommand } from "./utils/codeCommand";
|
||||
import { isServiceRunning } from "./utils/processCheck";
|
||||
import { version } from "../package.json";
|
||||
|
||||
// First positional CLI argument: the sub-command ("start", "stop", "status",
// "code", "-v"/"version", "-h"/"help"); undefined when invoked bare.
const command = process.argv[2];

// Usage text printed for help and for unrecognized commands.
// NOTE(review): the binary is called `claude-code` here but
// `claude-code-router` in src/utils/status.ts — confirm which name the
// package.json "bin" field actually maps and use it consistently.
const HELP_TEXT = `
Usage: claude-code [command]

Commands:
  start         Start service
  stop          Stop service
  status        Show service status
  code          Execute code command
  -v, version   Show version information
  -h, help      Show help information

Example:
  claude-code start
  claude-code code "Write a Hello World"
`;
|
||||
|
||||
async function waitForService(
|
||||
timeout = 10000,
|
||||
initialDelay = 1000
|
||||
): Promise<boolean> {
|
||||
// Wait for an initial period to let the service initialize
|
||||
await new Promise((resolve) => setTimeout(resolve, initialDelay));
|
||||
|
||||
const startTime = Date.now();
|
||||
while (Date.now() - startTime < timeout) {
|
||||
if (isServiceRunning()) {
|
||||
// Wait for an additional short period to ensure service is fully ready
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
return true;
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
switch (command) {
|
||||
case "start":
|
||||
await run({ daemon: true });
|
||||
break;
|
||||
case "stop":
|
||||
await closeService();
|
||||
break;
|
||||
case "status":
|
||||
showStatus();
|
||||
break;
|
||||
case "code":
|
||||
if (!isServiceRunning()) {
|
||||
console.log("Service not running, starting service...");
|
||||
await run({ daemon: true });
|
||||
// Wait for service to start, exit with error if timeout
|
||||
if (await waitForService()) {
|
||||
executeCodeCommand(process.argv.slice(3));
|
||||
} else {
|
||||
console.error(
|
||||
"Service startup timeout, please manually run claude-code start to start the service"
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
executeCodeCommand(process.argv.slice(3));
|
||||
}
|
||||
break;
|
||||
case "-v":
|
||||
case "version":
|
||||
console.log(`claude-code version: ${version}`);
|
||||
break;
|
||||
case "-h":
|
||||
case "help":
|
||||
console.log(HELP_TEXT);
|
||||
break;
|
||||
default:
|
||||
console.log(HELP_TEXT);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
@@ -7,9 +7,12 @@ export const CONFIG_FILE = `${HOME_DIR}/config.json`;
|
||||
|
||||
export const PLUGINS_DIR = `${HOME_DIR}/plugins`;
|
||||
|
||||
export const PID_FILE = path.join(HOME_DIR, '.claude-code-router.pid');
|
||||
|
||||
|
||||
export const DEFAULT_CONFIG = {
|
||||
log: false,
|
||||
OPENAI_API_KEY: "",
|
||||
OPENAI_BASE_URL: "https://openrouter.ai/api/v1",
|
||||
OPENAI_MODEL: "openai/o3-mini",
|
||||
OPENAI_BASE_URL: "",
|
||||
OPENAI_MODEL: "",
|
||||
};
|
||||
|
||||
36
src/index.ts
36
src/index.ts
@@ -6,6 +6,8 @@ import { formatRequest } from "./middlewares/formatRequest";
|
||||
import { rewriteBody } from "./middlewares/rewriteBody";
|
||||
import OpenAI from "openai";
|
||||
import { streamOpenAIResponse } from "./utils/stream";
|
||||
import { isServiceRunning, savePid } from "./utils/processCheck";
|
||||
import { fork } from "child_process";
|
||||
|
||||
async function initializeClaudeConfig() {
|
||||
const homeDir = process.env.HOME;
|
||||
@@ -20,18 +22,40 @@ async function initializeClaudeConfig() {
|
||||
autoUpdaterStatus: "enabled",
|
||||
userID,
|
||||
hasCompletedOnboarding: true,
|
||||
lastOnboardingVersion: "0.2.9",
|
||||
lastOnboardingVersion: "1.0.17",
|
||||
projects: {},
|
||||
};
|
||||
await writeFile(configPath, JSON.stringify(configContent, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
async function run() {
|
||||
interface RunOptions {
|
||||
port?: number;
|
||||
daemon?: boolean;
|
||||
}
|
||||
|
||||
async function run(options: RunOptions = {}) {
|
||||
const port = options.port || 3456;
|
||||
|
||||
// Check if service is already running
|
||||
if (isServiceRunning()) {
|
||||
console.log("✅ Service is already running in the background.");
|
||||
return;
|
||||
}
|
||||
|
||||
await initializeClaudeConfig();
|
||||
await initDir();
|
||||
await initConfig();
|
||||
const server = createServer(3456);
|
||||
|
||||
// Save the PID of the background process
|
||||
savePid(process.pid);
|
||||
|
||||
// Use port from environment variable if set (for background process)
|
||||
const servicePort = process.env.SERVICE_PORT
|
||||
? parseInt(process.env.SERVICE_PORT)
|
||||
: port;
|
||||
|
||||
const server = createServer(servicePort);
|
||||
server.useMiddleware(formatRequest);
|
||||
server.useMiddleware(rewriteBody);
|
||||
|
||||
@@ -46,11 +70,13 @@ async function run() {
|
||||
req.body.model = process.env.OPENAI_MODEL;
|
||||
}
|
||||
const completion: any = await openai.chat.completions.create(req.body);
|
||||
await streamOpenAIResponse(res, completion, req.body.model);
|
||||
await streamOpenAIResponse(res, completion, req.body.model, req.body);
|
||||
} catch (e) {
|
||||
console.error("Error in OpenAI API call:", e);
|
||||
}
|
||||
});
|
||||
server.start();
|
||||
console.log(`🚀 Claude Code Router is running on port ${servicePort}`);
|
||||
}
|
||||
run();
|
||||
|
||||
export { run };
|
||||
|
||||
@@ -3,6 +3,7 @@ import { ContentBlockParam } from "@anthropic-ai/sdk/resources";
|
||||
import { MessageCreateParamsBase } from "@anthropic-ai/sdk/resources/messages";
|
||||
import OpenAI from "openai";
|
||||
import { streamOpenAIResponse } from "../utils/stream";
|
||||
import { log } from "../utils/log";
|
||||
|
||||
export const formatRequest = async (
|
||||
req: Request,
|
||||
@@ -17,33 +18,138 @@ export const formatRequest = async (
|
||||
temperature,
|
||||
metadata,
|
||||
tools,
|
||||
stream,
|
||||
}: MessageCreateParamsBase = req.body;
|
||||
log("formatRequest: ", req.body);
|
||||
try {
|
||||
const openAIMessages: OpenAI.Chat.Completions.ChatCompletionMessageParam[] =
|
||||
messages.map((item) => {
|
||||
if (item.content instanceof Array) {
|
||||
return {
|
||||
role: item.role,
|
||||
content: item.content
|
||||
.map((it: ContentBlockParam) => {
|
||||
if (it.type === "text") {
|
||||
return typeof it.text === "string"
|
||||
? it.text
|
||||
: JSON.stringify(it);
|
||||
}
|
||||
return JSON.stringify(it);
|
||||
})
|
||||
.join(""),
|
||||
} as OpenAI.Chat.Completions.ChatCompletionMessageParam;
|
||||
}
|
||||
return {
|
||||
role: item.role,
|
||||
content:
|
||||
typeof item.content === "string"
|
||||
? item.content
|
||||
: JSON.stringify(item.content),
|
||||
};
|
||||
});
|
||||
// @ts-ignore
|
||||
const openAIMessages = Array.isArray(messages)
|
||||
? messages.flatMap((anthropicMessage) => {
|
||||
const openAiMessagesFromThisAnthropicMessage = [];
|
||||
|
||||
if (!Array.isArray(anthropicMessage.content)) {
|
||||
// Handle simple string content
|
||||
if (typeof anthropicMessage.content === "string") {
|
||||
openAiMessagesFromThisAnthropicMessage.push({
|
||||
role: anthropicMessage.role,
|
||||
content: anthropicMessage.content,
|
||||
});
|
||||
}
|
||||
// If content is not string and not array (e.g. null/undefined), it will result in an empty array, effectively skipping this message.
|
||||
return openAiMessagesFromThisAnthropicMessage;
|
||||
}
|
||||
|
||||
// Handle array content
|
||||
if (anthropicMessage.role === "assistant") {
|
||||
const assistantMessage = {
|
||||
role: "assistant",
|
||||
content: null, // Will be populated if text parts exist
|
||||
};
|
||||
let textContent = "";
|
||||
// @ts-ignore
|
||||
const toolCalls = []; // Corrected type here
|
||||
|
||||
anthropicMessage.content.forEach((contentPart) => {
|
||||
if (contentPart.type === "text") {
|
||||
textContent +=
|
||||
(typeof contentPart.text === "string"
|
||||
? contentPart.text
|
||||
: JSON.stringify(contentPart.text)) + "\\n";
|
||||
} else if (contentPart.type === "tool_use") {
|
||||
toolCalls.push({
|
||||
id: contentPart.id,
|
||||
type: "function",
|
||||
function: {
|
||||
name: contentPart.name,
|
||||
arguments: JSON.stringify(contentPart.input),
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const trimmedTextContent = textContent.trim();
|
||||
if (trimmedTextContent.length > 0) {
|
||||
// @ts-ignore
|
||||
assistantMessage.content = trimmedTextContent;
|
||||
}
|
||||
if (toolCalls.length > 0) {
|
||||
// @ts-ignore
|
||||
assistantMessage.tool_calls = toolCalls;
|
||||
}
|
||||
// @ts-ignore
|
||||
if (
|
||||
assistantMessage.content ||
|
||||
// @ts-ignore
|
||||
(assistantMessage.tool_calls &&
|
||||
// @ts-ignore
|
||||
assistantMessage.tool_calls.length > 0)
|
||||
) {
|
||||
openAiMessagesFromThisAnthropicMessage.push(assistantMessage);
|
||||
}
|
||||
} else if (anthropicMessage.role === "user") {
|
||||
// For user messages, text parts are combined into one message.
|
||||
// Tool results are transformed into subsequent, separate 'tool' role messages.
|
||||
let userTextMessageContent = "";
|
||||
// @ts-ignore
|
||||
const subsequentToolMessages = [];
|
||||
|
||||
anthropicMessage.content.forEach((contentPart) => {
|
||||
if (contentPart.type === "text") {
|
||||
userTextMessageContent +=
|
||||
(typeof contentPart.text === "string"
|
||||
? contentPart.text
|
||||
: JSON.stringify(contentPart.text)) + "\\n";
|
||||
} else if (contentPart.type === "tool_result") {
|
||||
// Each tool_result becomes a separate 'tool' message
|
||||
subsequentToolMessages.push({
|
||||
role: "tool",
|
||||
tool_call_id: contentPart.tool_use_id,
|
||||
content:
|
||||
typeof contentPart.content === "string"
|
||||
? contentPart.content
|
||||
: JSON.stringify(contentPart.content),
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const trimmedUserText = userTextMessageContent.trim();
|
||||
if (trimmedUserText.length > 0) {
|
||||
openAiMessagesFromThisAnthropicMessage.push({
|
||||
role: "user",
|
||||
content: trimmedUserText,
|
||||
});
|
||||
}
|
||||
// @ts-ignore
|
||||
openAiMessagesFromThisAnthropicMessage.push(
|
||||
// @ts-ignore
|
||||
...subsequentToolMessages
|
||||
);
|
||||
} else {
|
||||
// Fallback for other roles (e.g. system, or custom roles if they were to appear here with array content)
|
||||
// This will combine all text parts into a single message for that role.
|
||||
let combinedContent = "";
|
||||
anthropicMessage.content.forEach((contentPart) => {
|
||||
if (contentPart.type === "text") {
|
||||
combinedContent +=
|
||||
(typeof contentPart.text === "string"
|
||||
? contentPart.text
|
||||
: JSON.stringify(contentPart.text)) + "\\n";
|
||||
} else {
|
||||
// For non-text parts in other roles, stringify them or handle as appropriate
|
||||
combinedContent += JSON.stringify(contentPart) + "\\n";
|
||||
}
|
||||
});
|
||||
const trimmedCombinedContent = combinedContent.trim();
|
||||
if (trimmedCombinedContent.length > 0) {
|
||||
openAiMessagesFromThisAnthropicMessage.push({
|
||||
role: anthropicMessage.role, // Cast needed as role could be other than 'user'/'assistant'
|
||||
content: trimmedCombinedContent,
|
||||
});
|
||||
}
|
||||
}
|
||||
return openAiMessagesFromThisAnthropicMessage;
|
||||
})
|
||||
: [];
|
||||
const systemMessages: OpenAI.Chat.Completions.ChatCompletionMessageParam[] =
|
||||
Array.isArray(system)
|
||||
? system.map((item) => ({
|
||||
@@ -51,11 +157,11 @@ export const formatRequest = async (
|
||||
content: item.text,
|
||||
}))
|
||||
: [{ role: "system", content: system }];
|
||||
const data: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
|
||||
const data: any = {
|
||||
model,
|
||||
messages: [...systemMessages, ...openAIMessages],
|
||||
temperature,
|
||||
stream: true,
|
||||
stream,
|
||||
};
|
||||
if (tools) {
|
||||
data.tools = tools
|
||||
@@ -69,7 +175,9 @@ export const formatRequest = async (
|
||||
},
|
||||
}));
|
||||
}
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
if (stream) {
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
}
|
||||
res.setHeader("Cache-Control", "no-cache");
|
||||
res.setHeader("Connection", "keep-alive");
|
||||
req.body = data;
|
||||
@@ -95,7 +203,7 @@ export const formatRequest = async (
|
||||
};
|
||||
},
|
||||
};
|
||||
await streamOpenAIResponse(res, errorCompletion, model);
|
||||
await streamOpenAIResponse(res, errorCompletion, model, req.body);
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
23
src/utils/close.ts
Normal file
23
src/utils/close.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { existsSync, readFileSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
import { PID_FILE } from '../constants';
import { isServiceRunning, cleanupPidFile } from './processCheck';
||||
|
||||
export async function closeService() {
|
||||
const PID_FILE = join(homedir(), '.claude-code-router.pid');
|
||||
|
||||
if (!isServiceRunning()) {
|
||||
console.log("No service is currently running.");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const pid = parseInt(readFileSync(PID_FILE, 'utf-8'));
|
||||
process.kill(pid);
|
||||
cleanupPidFile();
|
||||
console.log("Service has been successfully stopped.");
|
||||
} catch (e) {
|
||||
console.log("Failed to stop the service. It may have already been stopped.");
|
||||
cleanupPidFile();
|
||||
}
|
||||
}
|
||||
31
src/utils/codeCommand.ts
Normal file
31
src/utils/codeCommand.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { spawn } from 'child_process';
|
||||
import { isServiceRunning } from './processCheck';
|
||||
|
||||
export async function executeCodeCommand(args: string[] = []) {
|
||||
// Service check is now handled in cli.ts
|
||||
|
||||
// Set environment variables
|
||||
const env = {
|
||||
...process.env,
|
||||
DISABLE_PROMPT_CACHING: '1',
|
||||
ANTHROPIC_BASE_URL: 'http://127.0.0.1:3456',
|
||||
API_TIMEOUT_MS: '600000'
|
||||
};
|
||||
|
||||
// Execute claude command
|
||||
const claudeProcess = spawn('claude', args, {
|
||||
env,
|
||||
stdio: 'inherit',
|
||||
shell: true
|
||||
});
|
||||
|
||||
claudeProcess.on('error', (error) => {
|
||||
console.error('Failed to start claude command:', error.message);
|
||||
console.log('Make sure Claude Code is installed: npm install -g @anthropic-ai/claude-code');
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
claudeProcess.on('close', (code) => {
|
||||
process.exit(code || 0);
|
||||
});
|
||||
}
|
||||
@@ -57,28 +57,21 @@ export const readConfigFile = async () => {
|
||||
const config = await fs.readFile(CONFIG_FILE, "utf-8");
|
||||
return JSON.parse(config);
|
||||
} catch {
|
||||
const useRouter = await confirm(
|
||||
"No config file found. Enable router mode? (Y/n)"
|
||||
);
|
||||
if (!useRouter) {
|
||||
const apiKey = await question("Enter OPENAI_API_KEY: ");
|
||||
const baseUrl = await question("Enter OPENAI_BASE_URL: ");
|
||||
const model = await question("Enter OPENAI_MODEL: ");
|
||||
const config = Object.assign({}, DEFAULT_CONFIG, {
|
||||
OPENAI_API_KEY: apiKey,
|
||||
OPENAI_BASE_URL: baseUrl,
|
||||
OPENAI_MODEL: model,
|
||||
});
|
||||
await writeConfigFile(config);
|
||||
return config;
|
||||
} else {
|
||||
const router = await question("Enter OPENAI_API_KEY: ");
|
||||
return DEFAULT_CONFIG;
|
||||
}
|
||||
const apiKey = await question("Enter OPENAI_API_KEY: ");
|
||||
const baseUrl = await question("Enter OPENAI_BASE_URL: ");
|
||||
const model = await question("Enter OPENAI_MODEL: ");
|
||||
const config = Object.assign({}, DEFAULT_CONFIG, {
|
||||
OPENAI_API_KEY: apiKey,
|
||||
OPENAI_BASE_URL: baseUrl,
|
||||
OPENAI_MODEL: model,
|
||||
});
|
||||
await writeConfigFile(config);
|
||||
return config;
|
||||
}
|
||||
};
|
||||
|
||||
export const writeConfigFile = async (config: any) => {
|
||||
await ensureDir(HOME_DIR);
|
||||
await fs.writeFile(CONFIG_FILE, JSON.stringify(config, null, 2));
|
||||
};
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { HOME_DIR } from '../constants';
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { HOME_DIR } from "../constants";
|
||||
|
||||
const LOG_FILE = path.join(HOME_DIR, 'claude-code-router.log');
|
||||
const LOG_FILE = path.join(HOME_DIR, "claude-code-router.log");
|
||||
|
||||
// Ensure log directory exists
|
||||
if (!fs.existsSync(HOME_DIR)) {
|
||||
@@ -11,17 +11,23 @@ if (!fs.existsSync(HOME_DIR)) {
|
||||
|
||||
export function log(...args: any[]) {
|
||||
// Check if logging is enabled via environment variable
|
||||
const isLogEnabled = process.env.LOG === 'true';
|
||||
|
||||
const isLogEnabled = process.env.LOG === "true";
|
||||
|
||||
if (!isLogEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const timestamp = new Date().toISOString();
|
||||
const logMessage = `[${timestamp}] ${args.map(arg =>
|
||||
typeof arg === 'object' ? JSON.stringify(arg) : String(arg)
|
||||
).join(' ')}\n`;
|
||||
const logMessage = `[${timestamp}] ${
|
||||
Array.isArray(args)
|
||||
? args
|
||||
.map((arg) =>
|
||||
typeof arg === "object" ? JSON.stringify(arg) : String(arg)
|
||||
)
|
||||
.join(" ")
|
||||
: ""
|
||||
}\n`;
|
||||
|
||||
// Append to log file
|
||||
fs.appendFileSync(LOG_FILE, logMessage, 'utf8');
|
||||
fs.appendFileSync(LOG_FILE, logMessage, "utf8");
|
||||
}
|
||||
|
||||
60
src/utils/processCheck.ts
Normal file
60
src/utils/processCheck.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { PID_FILE } from '../constants';
|
||||
|
||||
|
||||
export function isServiceRunning(): boolean {
|
||||
if (!existsSync(PID_FILE)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const pid = parseInt(readFileSync(PID_FILE, 'utf-8'));
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch (e) {
|
||||
// Process not running, clean up pid file
|
||||
cleanupPidFile();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Record the service's process id so later invocations (status/stop/code)
// can find it via PID_FILE.
export function savePid(pid: number) {
  writeFileSync(PID_FILE, pid.toString());
}
|
||||
|
||||
export function cleanupPidFile() {
|
||||
if (existsSync(PID_FILE)) {
|
||||
try {
|
||||
const fs = require('fs');
|
||||
fs.unlinkSync(PID_FILE);
|
||||
} catch (e) {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function getServicePid(): number | null {
|
||||
if (!existsSync(PID_FILE)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const pid = parseInt(readFileSync(PID_FILE, 'utf-8'));
|
||||
return isNaN(pid) ? null : pid;
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function getServiceInfo() {
|
||||
const pid = getServicePid();
|
||||
const running = isServiceRunning();
|
||||
|
||||
return {
|
||||
running,
|
||||
pid,
|
||||
port: 3456,
|
||||
endpoint: 'http://127.0.0.1:3456',
|
||||
pidFile: PID_FILE
|
||||
};
|
||||
}
|
||||
27
src/utils/status.ts
Normal file
27
src/utils/status.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { getServiceInfo } from './processCheck';
|
||||
|
||||
export function showStatus() {
|
||||
const info = getServiceInfo();
|
||||
|
||||
console.log('\n📊 Claude Code Router Status');
|
||||
console.log('═'.repeat(40));
|
||||
|
||||
if (info.running) {
|
||||
console.log('✅ Status: Running');
|
||||
console.log(`🆔 Process ID: ${info.pid}`);
|
||||
console.log(`🌐 Port: ${info.port}`);
|
||||
console.log(`📡 API Endpoint: ${info.endpoint}`);
|
||||
console.log(`📄 PID File: ${info.pidFile}`);
|
||||
console.log('');
|
||||
console.log('🚀 Ready to use! Run the following commands:');
|
||||
console.log(' claude-code-router code # Start coding with Claude');
|
||||
console.log(' claude-code-router close # Stop the service');
|
||||
} else {
|
||||
console.log('❌ Status: Not Running');
|
||||
console.log('');
|
||||
console.log('💡 To start the service:');
|
||||
console.log(' claude-code-router start');
|
||||
}
|
||||
|
||||
console.log('');
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Response } from "express";
|
||||
import { OpenAI } from "openai";
|
||||
import { log } from "./log";
|
||||
|
||||
interface ContentBlock {
|
||||
type: string;
|
||||
@@ -42,10 +43,40 @@ interface MessageEvent {
|
||||
|
||||
export async function streamOpenAIResponse(
|
||||
res: Response,
|
||||
completion: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>,
|
||||
model: string
|
||||
completion: any,
|
||||
model: string,
|
||||
body: any
|
||||
) {
|
||||
const write = (data: string) => {
|
||||
log("response: ", data);
|
||||
res.write(data);
|
||||
};
|
||||
const messageId = "msg_" + Date.now();
|
||||
if (!body.stream) {
|
||||
res.json({
|
||||
id: messageId,
|
||||
type: "message",
|
||||
role: "assistant",
|
||||
// @ts-ignore
|
||||
content: completion.choices[0].message.content || completion.choices[0].message.tool_calls?.map((item) => {
|
||||
return {
|
||||
type: 'tool_use',
|
||||
id: item.id,
|
||||
name: item.function?.name,
|
||||
input: item.function?.arguments ? JSON.parse(item.function.arguments) : {},
|
||||
};
|
||||
}) || '',
|
||||
stop_reason: completion.choices[0].finish_reason === 'tool_calls' ? "tool_use" : "end_turn",
|
||||
stop_sequence: null,
|
||||
usage: {
|
||||
input_tokens: 100,
|
||||
output_tokens: 50,
|
||||
},
|
||||
});
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
let contentBlockIndex = 0;
|
||||
let currentContentBlocks: ContentBlock[] = [];
|
||||
|
||||
@@ -63,7 +94,7 @@ export async function streamOpenAIResponse(
|
||||
usage: { input_tokens: 1, output_tokens: 1 },
|
||||
},
|
||||
};
|
||||
res.write(`event: message_start\ndata: ${JSON.stringify(messageStart)}\n\n`);
|
||||
write(`event: message_start\ndata: ${JSON.stringify(messageStart)}\n\n`);
|
||||
|
||||
let isToolUse = false;
|
||||
let toolUseJson = "";
|
||||
@@ -71,6 +102,7 @@ export async function streamOpenAIResponse(
|
||||
|
||||
try {
|
||||
for await (const chunk of completion) {
|
||||
log("Processing chunk:", chunk);
|
||||
const delta = chunk.choices[0].delta;
|
||||
|
||||
if (delta.tool_calls && delta.tool_calls.length > 0) {
|
||||
@@ -94,7 +126,7 @@ export async function streamOpenAIResponse(
|
||||
|
||||
currentContentBlocks.push(toolBlock);
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_start\ndata: ${JSON.stringify(
|
||||
toolBlockStart
|
||||
)}\n\n`
|
||||
@@ -119,23 +151,25 @@ export async function streamOpenAIResponse(
|
||||
const parsedJson = JSON.parse(toolUseJson);
|
||||
currentContentBlocks[contentBlockIndex].input = parsedJson;
|
||||
} catch (e) {
|
||||
log(e);
|
||||
// JSON not yet complete, continue accumulating
|
||||
}
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_delta\ndata: ${JSON.stringify(jsonDelta)}\n\n`
|
||||
);
|
||||
}
|
||||
} else if (delta.content) {
|
||||
// Handle regular text content
|
||||
if (isToolUse) {
|
||||
log("Tool call ended here:", delta);
|
||||
// End previous tool call block
|
||||
const contentBlockStop: MessageEvent = {
|
||||
type: "content_block_stop",
|
||||
index: contentBlockIndex,
|
||||
};
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_stop\ndata: ${JSON.stringify(
|
||||
contentBlockStop
|
||||
)}\n\n`
|
||||
@@ -161,7 +195,7 @@ export async function streamOpenAIResponse(
|
||||
|
||||
currentContentBlocks.push(textBlock);
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_start\ndata: ${JSON.stringify(
|
||||
textBlockStart
|
||||
)}\n\n`
|
||||
@@ -184,7 +218,7 @@ export async function streamOpenAIResponse(
|
||||
currentContentBlocks[contentBlockIndex].text += delta.content;
|
||||
}
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_delta\ndata: ${JSON.stringify(
|
||||
contentDelta
|
||||
)}\n\n`
|
||||
@@ -207,7 +241,7 @@ export async function streamOpenAIResponse(
|
||||
|
||||
currentContentBlocks.push(textBlock);
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_start\ndata: ${JSON.stringify(
|
||||
textBlockStart
|
||||
)}\n\n`
|
||||
@@ -230,7 +264,7 @@ export async function streamOpenAIResponse(
|
||||
currentContentBlocks[contentBlockIndex].text += JSON.stringify(e);
|
||||
}
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_delta\ndata: ${JSON.stringify(contentDelta)}\n\n`
|
||||
);
|
||||
}
|
||||
@@ -241,7 +275,7 @@ export async function streamOpenAIResponse(
|
||||
index: contentBlockIndex,
|
||||
};
|
||||
|
||||
res.write(
|
||||
write(
|
||||
`event: content_block_stop\ndata: ${JSON.stringify(contentBlockStop)}\n\n`
|
||||
);
|
||||
|
||||
@@ -255,14 +289,17 @@ export async function streamOpenAIResponse(
|
||||
},
|
||||
usage: { input_tokens: 100, output_tokens: 150 },
|
||||
};
|
||||
if (!isToolUse) {
|
||||
log("body: ", body, "messageDelta: ", messageDelta);
|
||||
}
|
||||
|
||||
res.write(`event: message_delta\ndata: ${JSON.stringify(messageDelta)}\n\n`);
|
||||
write(`event: message_delta\ndata: ${JSON.stringify(messageDelta)}\n\n`);
|
||||
|
||||
// Send message_stop event
|
||||
const messageStop: MessageEvent = {
|
||||
type: "message_stop",
|
||||
};
|
||||
|
||||
res.write(`event: message_stop\ndata: ${JSON.stringify(messageStop)}\n\n`);
|
||||
write(`event: message_stop\ndata: ${JSON.stringify(messageStop)}\n\n`);
|
||||
res.end();
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user