Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-02 08:33:36 +00:00)
feat: remove codex support
@@ -64,78 +64,6 @@ export function createModelsRoutes(): Router {
        supportsVision: true,
        supportsTools: true,
      },
      {
        id: "gpt-4o",
        name: "GPT-4o",
        provider: "openai",
        contextWindow: 128000,
        maxOutputTokens: 16384,
        supportsVision: true,
        supportsTools: true,
      },
      {
        id: "gpt-4o-mini",
        name: "GPT-4o Mini",
        provider: "openai",
        contextWindow: 128000,
        maxOutputTokens: 16384,
        supportsVision: true,
        supportsTools: true,
      },
      {
        id: "o1",
        name: "o1",
        provider: "openai",
        contextWindow: 200000,
        maxOutputTokens: 100000,
        supportsVision: true,
        supportsTools: false,
      },
      {
        id: "gpt-5.2",
        name: "GPT-5.2 (Codex)",
        provider: "openai-codex",
        contextWindow: 256000,
        maxOutputTokens: 32768,
        supportsVision: true,
        supportsTools: true,
      },
      {
        id: "gpt-5.1-codex-max",
        name: "GPT-5.1 Codex Max",
        provider: "openai-codex",
        contextWindow: 256000,
        maxOutputTokens: 32768,
        supportsVision: true,
        supportsTools: true,
      },
      {
        id: "gpt-5.1-codex",
        name: "GPT-5.1 Codex",
        provider: "openai-codex",
        contextWindow: 256000,
        maxOutputTokens: 32768,
        supportsVision: true,
        supportsTools: true,
      },
      {
        id: "gpt-5.1-codex-mini",
        name: "GPT-5.1 Codex Mini",
        provider: "openai-codex",
        contextWindow: 256000,
        maxOutputTokens: 16384,
        supportsVision: false,
        supportsTools: true,
      },
      {
        id: "gpt-5.1",
        name: "GPT-5.1",
        provider: "openai-codex",
        contextWindow: 256000,
        maxOutputTokens: 32768,
        supportsVision: true,
        supportsTools: true,
      },
    ];

    res.json({ success: true, models });
@@ -156,17 +84,6 @@ export function createModelsRoutes(): Router {
        available: statuses.claude?.installed || false,
        hasApiKey: !!process.env.ANTHROPIC_API_KEY || !!process.env.CLAUDE_CODE_OAUTH_TOKEN,
      },
      openai: {
        available: !!process.env.OPENAI_API_KEY,
        hasApiKey: !!process.env.OPENAI_API_KEY,
      },
      "openai-codex": {
        available: statuses.codex?.installed || false,
        hasApiKey: !!process.env.OPENAI_API_KEY,
        cliInstalled: statuses.codex?.installed,
        cliVersion: statuses.codex?.version,
        cliPath: statuses.codex?.path,
      },
      google: {
        available: !!process.env.GOOGLE_API_KEY,
        hasApiKey: !!process.env.GOOGLE_API_KEY,
@@ -230,84 +230,6 @@ export function createSetupRoutes(): Router {
    }
  });

  // Get Codex CLI status
  router.get("/codex-status", async (_req: Request, res: Response) => {
    try {
      let installed = false;
      let version = "";
      let cliPath = "";
      let method = "none";

      // Try to find Codex CLI
      try {
        const { stdout } = await execAsync("which codex || where codex 2>/dev/null");
        cliPath = stdout.trim();
        installed = true;
        method = "path";

        try {
          const { stdout: versionOut } = await execAsync("codex --version");
          version = versionOut.trim();
        } catch {
          version = "unknown";
        }
      } catch {
        // Not found
      }

      // Check for OpenAI/Codex authentication
      // Simplified: only check via CLI command, no file parsing
      let auth = {
        authenticated: false,
        method: "none" as string,
        hasEnvKey: !!process.env.OPENAI_API_KEY,
        hasStoredApiKey: !!apiKeys.openai,
      };

      // Try to verify authentication using codex CLI command if CLI is installed
      if (installed && cliPath) {
        try {
          const { stdout: statusOutput } = await execAsync(`"${cliPath}" login status 2>&1`, {
            timeout: 5000,
          });

          // Check if the output indicates logged in status
          if (statusOutput && (statusOutput.includes('Logged in') || statusOutput.includes('Authenticated'))) {
            auth.authenticated = true;
            auth.method = "cli_verified"; // CLI verified via login status command
          }
        } catch (error) {
          // CLI check failed - user needs to login manually
          console.log("[Setup] Codex login status check failed:", error);
        }
      }

      // Environment variable override
      if (process.env.OPENAI_API_KEY) {
        auth.authenticated = true;
        auth.method = "env"; // OPENAI_API_KEY environment variable
      }

      // In-memory stored API key (from settings UI)
      if (!auth.authenticated && apiKeys.openai) {
        auth.authenticated = true;
        auth.method = "api_key"; // Manually stored API key
      }

      res.json({
        success: true,
        status: installed ? "installed" : "not_installed",
        method,
        version,
        path: cliPath,
        auth,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Install Claude CLI
  router.post("/install-claude", async (_req: Request, res: Response) => {
    try {
@@ -324,20 +246,6 @@ export function createSetupRoutes(): Router {
    }
  });

  // Install Codex CLI
  router.post("/install-codex", async (_req: Request, res: Response) => {
    try {
      res.json({
        success: false,
        error:
          "CLI installation requires terminal access. Please install manually using: npm install -g @openai/codex",
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Auth Claude
  router.post("/auth-claude", async (_req: Request, res: Response) => {
    try {
@@ -353,28 +261,6 @@ export function createSetupRoutes(): Router {
    }
  });

  // Auth Codex
  router.post("/auth-codex", async (req: Request, res: Response) => {
    try {
      const { apiKey } = req.body as { apiKey?: string };

      if (apiKey) {
        apiKeys.openai = apiKey;
        process.env.OPENAI_API_KEY = apiKey;
        res.json({ success: true });
      } else {
        res.json({
          success: true,
          requiresManualAuth: true,
          command: "codex auth login",
        });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Store API key
  router.post("/store-api-key", async (req: Request, res: Response) => {
    try {
@@ -401,9 +287,6 @@ export function createSetupRoutes(): Router {
        process.env.ANTHROPIC_API_KEY = apiKey;
        await persistApiKeyToEnv("ANTHROPIC_API_KEY", apiKey);
        console.log("[Setup] Stored API key as ANTHROPIC_API_KEY");
      } else if (provider === "openai") {
        process.env.OPENAI_API_KEY = apiKey;
        await persistApiKeyToEnv("OPENAI_API_KEY", apiKey);
      } else if (provider === "google") {
        process.env.GOOGLE_API_KEY = apiKey;
        await persistApiKeyToEnv("GOOGLE_API_KEY", apiKey);
@@ -422,7 +305,6 @@ export function createSetupRoutes(): Router {
      res.json({
        success: true,
        hasAnthropicKey: !!apiKeys.anthropic || !!process.env.ANTHROPIC_API_KEY,
        hasOpenAIKey: !!apiKeys.openai || !!process.env.OPENAI_API_KEY,
        hasGoogleKey: !!apiKeys.google || !!process.env.GOOGLE_API_KEY,
      });
    } catch (error) {
@@ -431,34 +313,6 @@ export function createSetupRoutes(): Router {
    }
  });

  // Configure Codex MCP
  router.post("/configure-codex-mcp", async (req: Request, res: Response) => {
    try {
      const { projectPath } = req.body as { projectPath: string };

      if (!projectPath) {
        res.status(400).json({ success: false, error: "projectPath required" });
        return;
      }

      // Create .codex directory and config
      const codexDir = path.join(projectPath, ".codex");
      await fs.mkdir(codexDir, { recursive: true });

      const configPath = path.join(codexDir, "config.toml");
      const config = `# Codex configuration
[mcp]
enabled = true
`;
      await fs.writeFile(configPath, config);

      res.json({ success: true, configPath });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Get platform info
  router.get("/platform", async (_req: Request, res: Response) => {
    try {
@@ -478,29 +332,5 @@ enabled = true
    }
  });

  // Test OpenAI connection
  router.post("/test-openai", async (req: Request, res: Response) => {
    try {
      const { apiKey } = req.body as { apiKey?: string };
      const key = apiKey || apiKeys.openai || process.env.OPENAI_API_KEY;

      if (!key) {
        res.json({ success: false, error: "No OpenAI API key provided" });
        return;
      }

      // Simple test - just verify the key format
      if (!key.startsWith("sk-")) {
        res.json({ success: false, error: "Invalid OpenAI API key format" });
        return;
      }

      res.json({ success: true, message: "API key format is valid" });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}