This commit is contained in:
jinhui.li
2025-06-10 12:55:25 +08:00
parent 089654871c
commit 2cc91ada5c
19 changed files with 521 additions and 1150 deletions

23
src/utils/close.ts Normal file
View File

@@ -0,0 +1,23 @@
import { isServiceRunning, cleanupPidFile } from './processCheck';
import { existsSync, readFileSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
/**
 * Stops the running router service by sending SIGTERM to the PID recorded
 * in the PID file, then removes the PID file.
 * If no service is running (per isServiceRunning), prints a notice and
 * returns without touching anything.
 */
export async function closeService() {
  // NOTE(review): this duplicates the PID-file path; processCheck.ts imports
  // PID_FILE from '../constants' — confirm both resolve to the same path.
  const PID_FILE = join(homedir(), '.claude-code-router.pid');

  if (!isServiceRunning()) {
    console.log("No service is currently running.");
    return;
  }

  try {
    // Explicit radix: never rely on parseInt's default base detection.
    const pid = parseInt(readFileSync(PID_FILE, 'utf-8'), 10);
    process.kill(pid); // default signal SIGTERM
    cleanupPidFile();
    console.log("Service has been successfully stopped.");
  } catch (e) {
    // kill() throws when the process is gone (or the PID was corrupt);
    // either way the stale PID file should be removed.
    console.log("Failed to stop the service. It may have already been stopped.");
    cleanupPidFile();
  }
}

31
src/utils/codeCommand.ts Normal file
View File

@@ -0,0 +1,31 @@
import { spawn } from 'child_process';
import { isServiceRunning } from './processCheck';
/**
 * Launches the `claude` CLI with the given arguments, routed through the
 * local router by overriding its Anthropic endpoint via environment
 * variables. Exits this process with the child's exit code.
 * (Service liveness is checked by the caller in cli.ts.)
 */
export async function executeCodeCommand(args: string[] = []) {
  // Child environment: inherit everything, then point the CLI at the proxy.
  const childEnv = {
    ...process.env,
    DISABLE_PROMPT_CACHING: '1',
    ANTHROPIC_BASE_URL: 'http://127.0.0.1:3456',
    API_TIMEOUT_MS: '600000',
  };

  // NOTE(review): shell: true routes args through a shell — confirm callers
  // never forward untrusted input here.
  const child = spawn('claude', args, {
    env: childEnv,
    stdio: 'inherit',
    shell: true,
  });

  child.on('error', (error) => {
    // Most common cause: the claude binary is not on PATH.
    console.error('Failed to start claude command:', error.message);
    console.log('Make sure Claude Code is installed: npm install -g @anthropic-ai/claude-code');
    process.exit(1);
  });

  child.on('close', (code) => {
    // Mirror the child's exit status (null → 0).
    process.exit(code || 0);
  });
}

View File

@@ -57,28 +57,21 @@ export const readConfigFile = async () => {
const config = await fs.readFile(CONFIG_FILE, "utf-8");
return JSON.parse(config);
} catch {
const useRouter = await confirm(
"No config file found. Enable router mode? (Y/n)"
);
if (!useRouter) {
const apiKey = await question("Enter OPENAI_API_KEY: ");
const baseUrl = await question("Enter OPENAI_BASE_URL: ");
const model = await question("Enter OPENAI_MODEL: ");
const config = Object.assign({}, DEFAULT_CONFIG, {
OPENAI_API_KEY: apiKey,
OPENAI_BASE_URL: baseUrl,
OPENAI_MODEL: model,
});
await writeConfigFile(config);
return config;
} else {
const router = await question("Enter OPENAI_API_KEY: ");
return DEFAULT_CONFIG;
}
const apiKey = await question("Enter OPENAI_API_KEY: ");
const baseUrl = await question("Enter OPENAI_BASE_URL: ");
const model = await question("Enter OPENAI_MODEL: ");
const config = Object.assign({}, DEFAULT_CONFIG, {
OPENAI_API_KEY: apiKey,
OPENAI_BASE_URL: baseUrl,
OPENAI_MODEL: model,
});
await writeConfigFile(config);
return config;
}
};
// Persist the given config object to CONFIG_FILE as pretty-printed JSON,
// creating the config directory first if it does not exist.
export const writeConfigFile = async (config: any) => {
  await ensureDir(HOME_DIR);
  const serialized = JSON.stringify(config, null, 2);
  await fs.writeFile(CONFIG_FILE, serialized);
};

View File

@@ -1,8 +1,8 @@
import fs from 'node:fs';
import path from 'node:path';
import { HOME_DIR } from '../constants';
import fs from "node:fs";
import path from "node:path";
import { HOME_DIR } from "../constants";
const LOG_FILE = path.join(HOME_DIR, 'claude-code-router.log');
const LOG_FILE = path.join(HOME_DIR, "claude-code-router.log");
// Ensure log directory exists
if (!fs.existsSync(HOME_DIR)) {
@@ -11,17 +11,23 @@ if (!fs.existsSync(HOME_DIR)) {
export function log(...args: any[]) {
// Check if logging is enabled via environment variable
const isLogEnabled = process.env.LOG === 'true';
const isLogEnabled = process.env.LOG === "true";
if (!isLogEnabled) {
return;
}
const timestamp = new Date().toISOString();
const logMessage = `[${timestamp}] ${args.map(arg =>
typeof arg === 'object' ? JSON.stringify(arg) : String(arg)
).join(' ')}\n`;
const logMessage = `[${timestamp}] ${
Array.isArray(args)
? args
.map((arg) =>
typeof arg === "object" ? JSON.stringify(arg) : String(arg)
)
.join(" ")
: ""
}\n`;
// Append to log file
fs.appendFileSync(LOG_FILE, logMessage, 'utf8');
fs.appendFileSync(LOG_FILE, logMessage, "utf8");
}

60
src/utils/processCheck.ts Normal file
View File

@@ -0,0 +1,60 @@
import { existsSync, readFileSync, unlinkSync, writeFileSync } from 'fs';
import { PID_FILE } from '../constants';
/**
 * Returns true when the PID recorded in PID_FILE belongs to a live process.
 * A missing file, an unreadable/corrupt PID, or a dead process all count as
 * "not running"; in the failure case the stale PID file is removed as a
 * side effect.
 */
export function isServiceRunning(): boolean {
  if (!existsSync(PID_FILE)) {
    return false;
  }

  try {
    // Explicit radix: never rely on parseInt's default base detection.
    const pid = parseInt(readFileSync(PID_FILE, 'utf-8'), 10);
    // Signal 0 performs an existence/permission check without killing.
    process.kill(pid, 0);
    return true;
  } catch (e) {
    // Process not running (or PID invalid) — clean up the stale pid file.
    cleanupPidFile();
    return false;
  }
}
// Record the given process id in the PID file, overwriting any previous value.
export function savePid(pid: number) {
  writeFileSync(PID_FILE, String(pid));
}
/**
 * Best-effort removal of the PID file. Errors (e.g. a race where another
 * process already deleted it) are deliberately swallowed.
 */
export function cleanupPidFile() {
  if (existsSync(PID_FILE)) {
    try {
      // Use the ES-module fs import instead of an inline require('fs'),
      // which mixed CommonJS into this ESM file.
      unlinkSync(PID_FILE);
    } catch (e) {
      // Ignore cleanup errors
    }
  }
}
/**
 * Reads the saved service PID from PID_FILE.
 * Returns null when the file is missing, unreadable, or does not contain
 * a parseable number.
 */
export function getServicePid(): number | null {
  if (!existsSync(PID_FILE)) {
    return null;
  }

  try {
    // Explicit radix; Number.isNaN avoids the coercing global isNaN.
    const pid = parseInt(readFileSync(PID_FILE, 'utf-8'), 10);
    return Number.isNaN(pid) ? null : pid;
  } catch (e) {
    return null;
  }
}
/**
 * Returns a snapshot of the router service state: liveness, PID, port,
 * HTTP endpoint, and the PID file path.
 *
 * @param port Port the service is expected to listen on. Defaults to 3456
 *             (the value previously hard-coded here); the endpoint URL is
 *             derived from it so the two can no longer drift apart.
 */
export function getServiceInfo(port: number = 3456) {
  const pid = getServicePid();
  const running = isServiceRunning();

  return {
    running,
    pid,
    port,
    endpoint: `http://127.0.0.1:${port}`,
    pidFile: PID_FILE,
  };
}

27
src/utils/status.ts Normal file
View File

@@ -0,0 +1,27 @@
import { getServiceInfo } from './processCheck';
// Print a human-readable status report for the router service to stdout.
export function showStatus() {
  const info = getServiceInfo();

  console.log('\n📊 Claude Code Router Status');
  console.log('═'.repeat(40));

  if (!info.running) {
    // Not running: tell the user how to start it.
    console.log('❌ Status: Not Running');
    console.log('');
    console.log('💡 To start the service:');
    console.log('   claude-code-router start');
  } else {
    // Running: show process details and the next commands to try.
    console.log('✅ Status: Running');
    console.log(`🆔 Process ID: ${info.pid}`);
    console.log(`🌐 Port: ${info.port}`);
    console.log(`📡 API Endpoint: ${info.endpoint}`);
    console.log(`📄 PID File: ${info.pidFile}`);
    console.log('');
    console.log('🚀 Ready to use! Run the following commands:');
    console.log('   claude-code-router code    # Start coding with Claude');
    console.log('   claude-code-router close   # Stop the service');
  }

  console.log('');
}

View File

@@ -1,5 +1,6 @@
import { Response } from "express";
import { OpenAI } from "openai";
import { log } from "./log";
interface ContentBlock {
type: string;
@@ -42,10 +43,40 @@ interface MessageEvent {
export async function streamOpenAIResponse(
res: Response,
completion: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>,
model: string
completion: any,
model: string,
body: any
) {
const write = (data: string) => {
log("response: ", data);
res.write(data);
};
const messageId = "msg_" + Date.now();
if (!body.stream) {
res.json({
id: messageId,
type: "message",
role: "assistant",
// @ts-ignore
content: completion.choices[0].message.content || completion.choices[0].message.tool_calls?.map((item) => {
return {
type: 'tool_use',
id: item.id,
name: item.function?.name,
input: item.function?.arguments ? JSON.parse(item.function.arguments) : {},
};
}) || '',
stop_reason: completion.choices[0].finish_reason === 'tool_calls' ? "tool_use" : "end_turn",
stop_sequence: null,
usage: {
input_tokens: 100,
output_tokens: 50,
},
});
res.end();
return;
}
let contentBlockIndex = 0;
let currentContentBlocks: ContentBlock[] = [];
@@ -63,7 +94,7 @@ export async function streamOpenAIResponse(
usage: { input_tokens: 1, output_tokens: 1 },
},
};
res.write(`event: message_start\ndata: ${JSON.stringify(messageStart)}\n\n`);
write(`event: message_start\ndata: ${JSON.stringify(messageStart)}\n\n`);
let isToolUse = false;
let toolUseJson = "";
@@ -71,6 +102,7 @@ export async function streamOpenAIResponse(
try {
for await (const chunk of completion) {
log("Processing chunk:", chunk);
const delta = chunk.choices[0].delta;
if (delta.tool_calls && delta.tool_calls.length > 0) {
@@ -94,7 +126,7 @@ export async function streamOpenAIResponse(
currentContentBlocks.push(toolBlock);
res.write(
write(
`event: content_block_start\ndata: ${JSON.stringify(
toolBlockStart
)}\n\n`
@@ -119,23 +151,25 @@ export async function streamOpenAIResponse(
const parsedJson = JSON.parse(toolUseJson);
currentContentBlocks[contentBlockIndex].input = parsedJson;
} catch (e) {
log(e);
// JSON not yet complete, continue accumulating
}
res.write(
write(
`event: content_block_delta\ndata: ${JSON.stringify(jsonDelta)}\n\n`
);
}
} else if (delta.content) {
// Handle regular text content
if (isToolUse) {
log("Tool call ended here:", delta);
// End previous tool call block
const contentBlockStop: MessageEvent = {
type: "content_block_stop",
index: contentBlockIndex,
};
res.write(
write(
`event: content_block_stop\ndata: ${JSON.stringify(
contentBlockStop
)}\n\n`
@@ -161,7 +195,7 @@ export async function streamOpenAIResponse(
currentContentBlocks.push(textBlock);
res.write(
write(
`event: content_block_start\ndata: ${JSON.stringify(
textBlockStart
)}\n\n`
@@ -184,7 +218,7 @@ export async function streamOpenAIResponse(
currentContentBlocks[contentBlockIndex].text += delta.content;
}
res.write(
write(
`event: content_block_delta\ndata: ${JSON.stringify(
contentDelta
)}\n\n`
@@ -207,7 +241,7 @@ export async function streamOpenAIResponse(
currentContentBlocks.push(textBlock);
res.write(
write(
`event: content_block_start\ndata: ${JSON.stringify(
textBlockStart
)}\n\n`
@@ -230,7 +264,7 @@ export async function streamOpenAIResponse(
currentContentBlocks[contentBlockIndex].text += JSON.stringify(e);
}
res.write(
write(
`event: content_block_delta\ndata: ${JSON.stringify(contentDelta)}\n\n`
);
}
@@ -241,7 +275,7 @@ export async function streamOpenAIResponse(
index: contentBlockIndex,
};
res.write(
write(
`event: content_block_stop\ndata: ${JSON.stringify(contentBlockStop)}\n\n`
);
@@ -255,14 +289,17 @@ export async function streamOpenAIResponse(
},
usage: { input_tokens: 100, output_tokens: 150 },
};
if (!isToolUse) {
log("body: ", body, "messageDelta: ", messageDelta);
}
res.write(`event: message_delta\ndata: ${JSON.stringify(messageDelta)}\n\n`);
write(`event: message_delta\ndata: ${JSON.stringify(messageDelta)}\n\n`);
// Send message_stop event
const messageStop: MessageEvent = {
type: "message_stop",
};
res.write(`event: message_stop\ndata: ${JSON.stringify(messageStop)}\n\n`);
write(`event: message_stop\ndata: ${JSON.stringify(messageStop)}\n\n`);
res.end();
}