feat: add GPT-5.2 model support and refresh profiles functionality

- Introduced the GPT-5.2 model with advanced coding capabilities across various components.
- Added a new button in ProfilesView to refresh default profiles, enhancing user experience.
- Updated CodexSetupStep to clarify authentication requirements and added commands for verifying login status.
- Enhanced utility functions to recognize the new GPT-5.2 model in the application.
Author: Kacper
Date: 2025-12-13 01:36:15 +01:00
Parent: 9cf5fff0ad
Commit: 55603cb5c7
8 changed files with 170 additions and 51 deletions

View File

@@ -90,6 +90,15 @@ export function createModelsRoutes(): Router {
supportsVision: true,
supportsTools: false,
},
{
id: "gpt-5.2",
name: "GPT-5.2 (Codex)",
provider: "openai",
contextWindow: 256000,
maxOutputTokens: 32768,
supportsVision: true,
supportsTools: true,
},
];
res.json({ success: true, models });
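
For a quick sanity check of the new entry, a client can request the models list and look up the GPT-5.2 record. The sketch below mirrors the field names and response shape used above; the /api/models mount path and the use of global fetch (Node 18+) are assumptions, since the route's mount point is not shown in this hunk.

// Minimal sketch: fetch the models list and pick out the GPT-5.2 entry.
// Assumes Node 18+ (global fetch) and an /api/models mount path.
interface ModelInfo {
  id: string;
  name: string;
  provider: string;
  contextWindow: number;
  maxOutputTokens: number;
  supportsVision: boolean;
  supportsTools: boolean;
}

async function findGpt52(baseUrl: string): Promise<ModelInfo | undefined> {
  const res = await fetch(`${baseUrl}/api/models`);
  const body = (await res.json()) as { success: boolean; models: ModelInfo[] };
  return body.models.find((m) => m.id === "gpt-5.2");
}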

View File

@@ -249,56 +249,41 @@ export function createSetupRoutes(): Router {
const { stdout: versionOut } = await execAsync("codex --version");
version = versionOut.trim();
} catch {
// Version command might not be available
version = "unknown";
}
} catch {
// Not found
}
// Check for OpenAI/Codex authentication
// Simplified: only check via CLI command, no file parsing
let auth = {
authenticated: false,
method: "none" as string,
hasAuthFile: false,
hasEnvKey: !!process.env.OPENAI_API_KEY,
hasStoredApiKey: !!apiKeys.openai,
hasEnvApiKey: !!process.env.OPENAI_API_KEY,
// Additional fields for subscription/account detection
hasSubscription: false,
cliLoggedIn: false,
};
// Check for OpenAI CLI auth file (~/.codex/auth.json or similar)
const codexAuthPaths = [
path.join(os.homedir(), ".codex", "auth.json"),
path.join(os.homedir(), ".openai", "credentials"),
path.join(os.homedir(), ".config", "openai", "credentials.json"),
];
for (const authPath of codexAuthPaths) {
// Try to verify authentication using codex CLI command if CLI is installed
if (installed && cliPath) {
try {
const authContent = await fs.readFile(authPath, "utf-8");
const authData = JSON.parse(authContent);
auth.hasAuthFile = true;
const { stdout: statusOutput } = await execAsync(`"${cliPath}" login status 2>&1`, {
timeout: 5000,
});
// Check for subscription/tokens
if (authData.subscription || authData.plan || authData.account_type) {
auth.hasSubscription = true;
// Check if the output indicates logged in status
if (statusOutput && (statusOutput.includes('Logged in') || statusOutput.includes('Authenticated'))) {
auth.authenticated = true;
auth.method = "subscription"; // Codex subscription (Plus/Team)
} else if (authData.access_token || authData.api_key) {
auth.cliLoggedIn = true;
auth.authenticated = true;
auth.method = "cli_verified"; // CLI logged in with account
auth.method = "cli_verified"; // CLI verified via login status command
}
break;
} catch {
// Auth file not found at this path
} catch (error) {
// CLI check failed - user needs to login manually
console.log("[Setup] Codex login status check failed:", error);
}
}
// Environment variable has highest priority
if (auth.hasEnvApiKey) {
// Environment variable override
if (process.env.OPENAI_API_KEY) {
auth.authenticated = true;
auth.method = "env"; // OPENAI_API_KEY environment variable
}
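
The net effect of this hunk is a CLI-first check with an environment-variable override: ask the Codex CLI for its login status, and let OPENAI_API_KEY take priority regardless. A condensed sketch of that flow follows; the helper name is illustrative, and the 'Logged in'/'Authenticated' output matching is taken from the hunk above (exact wording may vary by Codex CLI version).

import { exec } from "node:child_process";
import { promisify } from "node:util";

const execAsync = promisify(exec);

// Sketch: verify Codex auth via the CLI, then let OPENAI_API_KEY take priority.
async function checkCodexAuth(installed: boolean, cliPath: string | null) {
  const auth = { authenticated: false, method: "none" as string };

  if (installed && cliPath) {
    try {
      const { stdout } = await execAsync(`"${cliPath}" login status 2>&1`, { timeout: 5000 });
      if (stdout.includes("Logged in") || stdout.includes("Authenticated")) {
        auth.authenticated = true;
        auth.method = "cli_verified"; // CLI confirmed a logged-in account
      }
    } catch (error) {
      console.log("[Setup] Codex login status check failed:", error);
    }
  }

  if (process.env.OPENAI_API_KEY) {
    auth.authenticated = true;
    auth.method = "env"; // environment variable overrides the CLI result
  }

  return auth;
}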

View File

@@ -18,6 +18,13 @@ import type { EventEmitter, EventType } from "../lib/events.js";
const execAsync = promisify(exec);
// Model name mappings for Claude (matching electron version)
const MODEL_MAP: Record<string, string> = {
haiku: "claude-haiku-4-5",
sonnet: "claude-sonnet-4-20250514",
opus: "claude-opus-4-5-20251101",
};
interface Feature {
id: string;
title: string;
@@ -25,6 +32,37 @@ interface Feature {
status: string;
priority?: number;
spec?: string;
model?: string; // Model to use for this feature
}
/**
* Get model string from feature's model property
* Supports model keys like "opus", "sonnet", "haiku" or full model strings
* Also supports OpenAI/Codex models like "gpt-5.2", "gpt-5.1-codex", etc.
*/
function getModelString(feature: Feature): string {
const modelKey = feature.model || "opus"; // Default to opus
// Check if it's an OpenAI/Codex model ("gpt-..." or an O-series id such as "o1"/"o3")
// Match O-series with a trailing digit so Claude keys like "opus" don't hit this branch
if (modelKey.startsWith("gpt-") || /^o\d/.test(modelKey)) {
console.log(`[AutoMode] Using OpenAI/Codex model from feature ${feature.id}: ${modelKey} (passing through)`);
return modelKey;
}
// If it's already a full Claude model string (contains "claude-"), use it directly
if (modelKey.includes("claude-")) {
console.log(`[AutoMode] Using Claude model from feature ${feature.id}: ${modelKey} (full model string)`);
return modelKey;
}
// Otherwise, look it up in the Claude model map
const modelString = MODEL_MAP[modelKey] || MODEL_MAP.opus;
if (modelString !== MODEL_MAP.opus || modelKey === "opus") {
console.log(`[AutoMode] Resolved Claude model for feature ${feature.id}: "${modelKey}" -> "${modelString}"`);
} else {
console.warn(`[AutoMode] Unknown model key "${modelKey}" for feature ${feature.id}, defaulting to "${modelString}"`);
}
return modelString;
}
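
As a usage sketch, the resolution behaves as follows for the model shapes the comment above lists. The feature literals are hypothetical and include only the fields visible in this hunk; the resolved Claude IDs come from MODEL_MAP.

const examples: Feature[] = [
  { id: "f1", title: "demo", status: "pending", model: "sonnet" },                   // -> "claude-sonnet-4-20250514"
  { id: "f2", title: "demo", status: "pending", model: "claude-opus-4-5-20251101" }, // -> passed through unchanged
  { id: "f3", title: "demo", status: "pending", model: "gpt-5.2" },                  // -> "gpt-5.2" (OpenAI/Codex pass-through)
  { id: "f4", title: "demo", status: "pending" },                                    // -> "claude-opus-4-5-20251101" (default)
];
for (const f of examples) {
  console.log(f.id, "->", getModelString(f));
}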
interface RunningFeature {
@@ -199,8 +237,12 @@ export class AutoModeService {
// Build the prompt
const prompt = this.buildFeaturePrompt(feature);
// Run the agent
await this.runAgent(workDir, featureId, prompt, abortController);
// Get model from feature
const model = getModelString(feature);
console.log(`[AutoMode] Executing feature ${featureId} with model: ${model}`);
// Run the agent with the feature's model
await this.runAgent(workDir, featureId, prompt, abortController, undefined, model);
// Mark as waiting_approval for user review
await this.updateFeatureStatus(projectPath, featureId, "waiting_approval");
@@ -330,7 +372,12 @@ export class AutoModeService {
});
try {
await this.runAgent(workDir, featureId, prompt, abortController, imagePaths);
// Load feature to get its model
const feature = await this.loadFeature(projectPath, featureId);
const model = feature ? getModelString(feature) : MODEL_MAP.opus;
console.log(`[AutoMode] Follow-up for feature ${featureId} using model: ${model}`);
await this.runAgent(workDir, featureId, prompt, abortController, imagePaths, model);
this.emitAutoModeEvent("auto_mode_feature_complete", {
featureId,
@@ -709,10 +756,23 @@ When done, summarize what you implemented and any notes for the developer.`;
featureId: string,
prompt: string,
abortController: AbortController,
imagePaths?: string[]
imagePaths?: string[],
model?: string
): Promise<void> {
const finalModel = model || MODEL_MAP.opus;
console.log(`[AutoMode] runAgent called for feature ${featureId} with model: ${finalModel}`);
// Check if this is an OpenAI/Codex model ("gpt-..." or O-series) - the Claude Agent SDK doesn't support these
if (finalModel.startsWith("gpt-") || /^o\d/.test(finalModel)) {
const errorMessage = `OpenAI/Codex models (like "${finalModel}") are not yet supported in server mode. ` +
`Please use a Claude model (opus, sonnet, or haiku) instead. ` +
`OpenAI/Codex models are only supported in the Electron app.`;
console.error(`[AutoMode] ${errorMessage}`);
throw new Error(errorMessage);
}
const options: Options = {
model: "claude-opus-4-5-20251101",
model: finalModel,
maxTurns: 50,
cwd: workDir,
allowedTools: [