Merge branch 'v0.9.0rc' into opencode-support

webdevcody
2026-01-08 15:40:06 -05:00
39 changed files with 1475 additions and 509 deletions

View File

@@ -21,7 +21,7 @@ import {
} from '@automaker/types';
// Pattern definitions for Codex/OpenAI models
const CODEX_MODEL_PREFIXES = ['gpt-'];
const CODEX_MODEL_PREFIXES = ['codex-', 'gpt-'];
const OPENAI_O_SERIES_PATTERN = /^o\d/;
const OPENAI_O_SERIES_ALLOWED_MODELS = new Set<string>();
@@ -62,6 +62,12 @@ export function resolveModelString(
return modelKey;
}
// Codex model with explicit prefix (e.g., "codex-gpt-5.1-codex-max") - pass through unchanged
if (modelKey.startsWith(PROVIDER_PREFIXES.codex)) {
console.log(`[ModelResolver] Using Codex model: ${modelKey}`);
return modelKey;
}
// Full Claude model string - pass through unchanged
if (modelKey.includes('claude-')) {
console.log(`[ModelResolver] Using full Claude model string: ${modelKey}`);
@@ -75,8 +81,7 @@ export function resolveModelString(
return resolved;
}
// OpenAI/Codex models - check BEFORE bare Cursor models since they overlap
// (Cursor supports gpt models, but bare "gpt-*" should route to Codex)
// OpenAI/Codex models - check for codex- or gpt- prefix
if (
CODEX_MODEL_PREFIXES.some((prefix) => modelKey.startsWith(prefix)) ||
(OPENAI_O_SERIES_PATTERN.test(modelKey) && OPENAI_O_SERIES_ALLOWED_MODELS.has(modelKey))
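To make the new routing order concrete, here is a minimal standalone sketch of the precedence the updated resolver appears to follow: explicit codex- prefixes pass through, full claude- strings pass through, and bare gpt-* keys fall through to the Codex check before any bare Cursor IDs. This is a simplified reconstruction from the hunk above, not the real resolveModelString; the function name and return labels are illustrative only.

// Hypothetical sketch of the precedence shown above (not the actual module code)
function classifyModelKey(modelKey: string): 'codex' | 'claude' | 'other' {
  // Explicit Codex prefix (e.g. "codex-gpt-5.1-codex-max") routes to Codex, passed through unchanged
  if (modelKey.startsWith('codex-')) return 'codex';
  // Full Claude model strings pass through unchanged
  if (modelKey.includes('claude-')) return 'claude';
  // Bare "gpt-*" keys are checked for Codex BEFORE bare Cursor IDs, since the two providers overlap
  if (['codex-', 'gpt-'].some((prefix) => modelKey.startsWith(prefix))) return 'codex';
  return 'other';
}

classifyModelKey('codex-gpt-5.2'); // 'codex'
classifyModelKey('gpt-5.1');       // 'codex' (bare gpt-* routes to Codex, not Cursor)
classifyModelKey('composer-1');    // 'other' (handled by the remaining Cursor/alias checks)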

View File

@@ -2,14 +2,15 @@
* Codex CLI Model IDs
* Based on OpenAI Codex CLI official models
* Reference: https://developers.openai.com/codex/models/
*
* IMPORTANT: All Codex models use 'codex-' prefix to distinguish from Cursor CLI models
*/
export type CodexModelId =
| 'gpt-5.2-codex' // Most advanced agentic coding model for complex software engineering
| 'gpt-5-codex' // Purpose-built for Codex CLI with versatile tool use
| 'gpt-5-codex-mini' // Faster workflows optimized for low-latency code Q&A and editing
| 'codex-1' // Version of o3 optimized for software engineering
| 'codex-mini-latest' // Version of o4-mini for Codex, optimized for faster workflows
| 'gpt-5'; // GPT-5 base flagship model
| 'codex-gpt-5.2-codex'
| 'codex-gpt-5.1-codex-max'
| 'codex-gpt-5.1-codex-mini'
| 'codex-gpt-5.2'
| 'codex-gpt-5.1';
/**
* Codex model metadata
@@ -25,47 +26,41 @@ export interface CodexModelConfig {
/**
* Complete model map for Codex CLI
* All keys use 'codex-' prefix to distinguish from Cursor CLI models
*/
export const CODEX_MODEL_CONFIG_MAP: Record<CodexModelId, CodexModelConfig> = {
'gpt-5.2-codex': {
id: 'gpt-5.2-codex',
'codex-gpt-5.2-codex': {
id: 'codex-gpt-5.2-codex',
label: 'GPT-5.2-Codex',
description: 'Most advanced agentic coding model for complex software engineering',
hasThinking: true,
supportsVision: true, // GPT-5 supports vision
supportsVision: true,
},
'gpt-5-codex': {
id: 'gpt-5-codex',
label: 'GPT-5-Codex',
description: 'Purpose-built for Codex CLI with versatile tool use',
'codex-gpt-5.1-codex-max': {
id: 'codex-gpt-5.1-codex-max',
label: 'GPT-5.1-Codex-Max',
description: 'Optimized for long-horizon, agentic coding tasks in Codex',
hasThinking: true,
supportsVision: true,
},
'gpt-5-codex-mini': {
id: 'gpt-5-codex-mini',
label: 'GPT-5-Codex-Mini',
description: 'Faster workflows optimized for low-latency code Q&A and editing',
'codex-gpt-5.1-codex-mini': {
id: 'codex-gpt-5.1-codex-mini',
label: 'GPT-5.1-Codex-Mini',
description: 'Smaller, more cost-effective version for faster workflows',
hasThinking: false,
supportsVision: true,
},
'codex-1': {
id: 'codex-1',
label: 'Codex-1',
description: 'Version of o3 optimized for software engineering',
'codex-gpt-5.2': {
id: 'codex-gpt-5.2',
label: 'GPT-5.2 (Codex)',
description: 'Best general agentic model for tasks across industries and domains via Codex',
hasThinking: true,
supportsVision: true,
},
'codex-mini-latest': {
id: 'codex-mini-latest',
label: 'Codex-Mini-Latest',
description: 'Version of o4-mini for Codex, optimized for faster workflows',
hasThinking: false,
supportsVision: true,
},
'gpt-5': {
id: 'gpt-5',
label: 'GPT-5',
description: 'GPT-5 base flagship model',
'codex-gpt-5.1': {
id: 'codex-gpt-5.1',
label: 'GPT-5.1 (Codex)',
description: 'Great for coding and agentic tasks across domains via Codex',
hasThinking: true,
supportsVision: true,
},

View File

@@ -1,6 +1,8 @@
/**
* Cursor CLI Model IDs
* Reference: https://cursor.com/docs
*
* IMPORTANT: GPT models use 'cursor-' prefix to distinguish from Codex CLI models
*/
export type CursorModelId =
| 'auto' // Auto-select best model
@@ -12,14 +14,14 @@ export type CursorModelId =
| 'opus-4.1' // Claude Opus 4.1
| 'gemini-3-pro' // Gemini 3 Pro
| 'gemini-3-flash' // Gemini 3 Flash
| 'gpt-5.2' // GPT-5.2
| 'gpt-5.1' // GPT-5.1
| 'gpt-5.2-high' // GPT-5.2 High
| 'gpt-5.1-high' // GPT-5.1 High
| 'gpt-5.1-codex' // GPT-5.1 Codex
| 'gpt-5.1-codex-high' // GPT-5.1 Codex High
| 'gpt-5.1-codex-max' // GPT-5.1 Codex Max
| 'gpt-5.1-codex-max-high' // GPT-5.1 Codex Max High
| 'cursor-gpt-5.2' // GPT-5.2 via Cursor
| 'cursor-gpt-5.1' // GPT-5.1 via Cursor
| 'cursor-gpt-5.2-high' // GPT-5.2 High via Cursor
| 'cursor-gpt-5.1-high' // GPT-5.1 High via Cursor
| 'cursor-gpt-5.1-codex' // GPT-5.1 Codex via Cursor
| 'cursor-gpt-5.1-codex-high' // GPT-5.1 Codex High via Cursor
| 'cursor-gpt-5.1-codex-max' // GPT-5.1 Codex Max via Cursor
| 'cursor-gpt-5.1-codex-max-high' // GPT-5.1 Codex Max High via Cursor
| 'grok'; // Grok
/**
@@ -101,57 +103,57 @@ export const CURSOR_MODEL_MAP: Record<CursorModelId, CursorModelConfig> = {
hasThinking: false,
supportsVision: false,
},
'gpt-5.2': {
id: 'gpt-5.2',
'cursor-gpt-5.2': {
id: 'cursor-gpt-5.2',
label: 'GPT-5.2',
description: 'OpenAI GPT-5.2 via Cursor',
hasThinking: false,
supportsVision: false,
},
'gpt-5.1': {
id: 'gpt-5.1',
'cursor-gpt-5.1': {
id: 'cursor-gpt-5.1',
label: 'GPT-5.1',
description: 'OpenAI GPT-5.1 via Cursor',
hasThinking: false,
supportsVision: false,
},
'gpt-5.2-high': {
id: 'gpt-5.2-high',
'cursor-gpt-5.2-high': {
id: 'cursor-gpt-5.2-high',
label: 'GPT-5.2 High',
description: 'OpenAI GPT-5.2 with high compute',
hasThinking: false,
supportsVision: false,
},
'gpt-5.1-high': {
id: 'gpt-5.1-high',
'cursor-gpt-5.1-high': {
id: 'cursor-gpt-5.1-high',
label: 'GPT-5.1 High',
description: 'OpenAI GPT-5.1 with high compute',
hasThinking: false,
supportsVision: false,
},
'gpt-5.1-codex': {
id: 'gpt-5.1-codex',
'cursor-gpt-5.1-codex': {
id: 'cursor-gpt-5.1-codex',
label: 'GPT-5.1 Codex',
description: 'OpenAI GPT-5.1 Codex for code generation',
hasThinking: false,
supportsVision: false,
},
'gpt-5.1-codex-high': {
id: 'gpt-5.1-codex-high',
'cursor-gpt-5.1-codex-high': {
id: 'cursor-gpt-5.1-codex-high',
label: 'GPT-5.1 Codex High',
description: 'OpenAI GPT-5.1 Codex with high compute',
hasThinking: false,
supportsVision: false,
},
'gpt-5.1-codex-max': {
id: 'gpt-5.1-codex-max',
'cursor-gpt-5.1-codex-max': {
id: 'cursor-gpt-5.1-codex-max',
label: 'GPT-5.1 Codex Max',
description: 'OpenAI GPT-5.1 Codex Max capacity',
hasThinking: false,
supportsVision: false,
},
'gpt-5.1-codex-max-high': {
id: 'gpt-5.1-codex-max-high',
'cursor-gpt-5.1-codex-max-high': {
id: 'cursor-gpt-5.1-codex-max-high',
label: 'GPT-5.1 Codex Max High',
description: 'OpenAI GPT-5.1 Codex Max with high compute',
hasThinking: false,
@@ -224,14 +226,14 @@ export interface GroupedModel {
export const CURSOR_MODEL_GROUPS: GroupedModel[] = [
// GPT-5.2 group (compute levels)
{
baseId: 'gpt-5.2-group',
baseId: 'cursor-gpt-5.2-group',
label: 'GPT-5.2',
description: 'OpenAI GPT-5.2 via Cursor',
variantType: 'compute',
variants: [
{ id: 'gpt-5.2', label: 'Standard', description: 'Default compute level' },
{ id: 'cursor-gpt-5.2', label: 'Standard', description: 'Default compute level' },
{
id: 'gpt-5.2-high',
id: 'cursor-gpt-5.2-high',
label: 'High',
description: 'High compute level',
badge: 'More tokens',
@@ -240,14 +242,14 @@ export const CURSOR_MODEL_GROUPS: GroupedModel[] = [
},
// GPT-5.1 group (compute levels)
{
baseId: 'gpt-5.1-group',
baseId: 'cursor-gpt-5.1-group',
label: 'GPT-5.1',
description: 'OpenAI GPT-5.1 via Cursor',
variantType: 'compute',
variants: [
{ id: 'gpt-5.1', label: 'Standard', description: 'Default compute level' },
{ id: 'cursor-gpt-5.1', label: 'Standard', description: 'Default compute level' },
{
id: 'gpt-5.1-high',
id: 'cursor-gpt-5.1-high',
label: 'High',
description: 'High compute level',
badge: 'More tokens',
@@ -256,16 +258,26 @@ export const CURSOR_MODEL_GROUPS: GroupedModel[] = [
},
// GPT-5.1 Codex group (capacity + compute matrix)
{
baseId: 'gpt-5.1-codex-group',
baseId: 'cursor-gpt-5.1-codex-group',
label: 'GPT-5.1 Codex',
description: 'OpenAI GPT-5.1 Codex for code generation',
variantType: 'capacity',
variants: [
{ id: 'gpt-5.1-codex', label: 'Standard', description: 'Default capacity' },
{ id: 'gpt-5.1-codex-high', label: 'High', description: 'High compute', badge: 'Compute' },
{ id: 'gpt-5.1-codex-max', label: 'Max', description: 'Maximum capacity', badge: 'Capacity' },
{ id: 'cursor-gpt-5.1-codex', label: 'Standard', description: 'Default capacity' },
{
id: 'gpt-5.1-codex-max-high',
id: 'cursor-gpt-5.1-codex-high',
label: 'High',
description: 'High compute',
badge: 'Compute',
},
{
id: 'cursor-gpt-5.1-codex-max',
label: 'Max',
description: 'Maximum capacity',
badge: 'Capacity',
},
{
id: 'cursor-gpt-5.1-codex-max-high',
label: 'Max High',
description: 'Max capacity + high compute',
badge: 'Premium',
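For context, the grouped entries above appear to drive a model picker: each GroupedModel exposes one family with compute or capacity variants underneath. Below is a minimal sketch of how a UI might flatten a group into the concrete, now cursor-prefixed model IDs; the GroupedModelSketch shape mirrors only the fields visible in this hunk, and the helper name is hypothetical.

// Hypothetical sketch: field shapes follow the hunk above, helper is illustrative only
interface GroupedVariantSketch { id: string; label: string; description: string; badge?: string; }
interface GroupedModelSketch {
  baseId: string;
  label: string;
  description: string;
  variantType: string; // 'compute' or 'capacity' in the groups shown above
  variants: GroupedVariantSketch[];
}

function variantIds(group: GroupedModelSketch): string[] {
  return group.variants.map((v) => v.id);
}

// For the GPT-5.1 Codex group this would yield the four prefixed IDs:
// ['cursor-gpt-5.1-codex', 'cursor-gpt-5.1-codex-high',
//  'cursor-gpt-5.1-codex-max', 'cursor-gpt-5.1-codex-max-high']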

View File

@@ -3,6 +3,7 @@
*/
import type { PlanningMode, ThinkingLevel } from './settings.js';
import type { ReasoningEffort } from './provider.js';
/**
* A single entry in the description history
@@ -49,6 +50,7 @@ export interface Feature {
branchName?: string; // Name of the feature branch (undefined = use current worktree)
skipTests?: boolean;
thinkingLevel?: ThinkingLevel;
reasoningEffort?: ReasoningEffort;
planningMode?: PlanningMode;
requirePlanApproval?: boolean;
planSpec?: {

View File

@@ -74,47 +74,39 @@ export const CODEX_MODELS: (ModelOption & { hasReasoning?: boolean })[] = [
{
id: CODEX_MODEL_MAP.gpt52Codex,
label: 'GPT-5.2-Codex',
description: 'Most advanced agentic coding model (default for ChatGPT users).',
description: 'Most advanced agentic coding model for complex software engineering.',
badge: 'Premium',
provider: 'codex',
hasReasoning: true,
},
{
id: CODEX_MODEL_MAP.gpt5Codex,
label: 'GPT-5-Codex',
description: 'Purpose-built for Codex CLI (default for CLI users).',
badge: 'Balanced',
id: CODEX_MODEL_MAP.gpt51CodexMax,
label: 'GPT-5.1-Codex-Max',
description: 'Optimized for long-horizon, agentic coding tasks in Codex.',
badge: 'Premium',
provider: 'codex',
hasReasoning: true,
},
{
id: CODEX_MODEL_MAP.gpt5CodexMini,
label: 'GPT-5-Codex-Mini',
description: 'Faster workflows for code Q&A and editing.',
id: CODEX_MODEL_MAP.gpt51CodexMini,
label: 'GPT-5.1-Codex-Mini',
description: 'Smaller, more cost-effective version for faster workflows.',
badge: 'Speed',
provider: 'codex',
hasReasoning: false,
},
{
id: CODEX_MODEL_MAP.codex1,
label: 'Codex-1',
description: 'o3-based model optimized for software engineering.',
badge: 'Premium',
id: CODEX_MODEL_MAP.gpt52,
label: 'GPT-5.2',
description: 'Best general agentic model for tasks across industries and domains.',
badge: 'Balanced',
provider: 'codex',
hasReasoning: true,
},
{
id: CODEX_MODEL_MAP.codexMiniLatest,
label: 'Codex-Mini-Latest',
description: 'o4-mini-based model for faster workflows.',
badge: 'Balanced',
provider: 'codex',
hasReasoning: false,
},
{
id: CODEX_MODEL_MAP.gpt5,
label: 'GPT-5',
description: 'GPT-5 base flagship model.',
id: CODEX_MODEL_MAP.gpt51,
label: 'GPT-5.1',
description: 'Great for coding and agentic tasks across domains.',
badge: 'Balanced',
provider: 'codex',
hasReasoning: true,
@@ -203,11 +195,10 @@ export function getModelDisplayName(model: ModelAlias | string): string {
sonnet: 'Claude Sonnet',
opus: 'Claude Opus',
[CODEX_MODEL_MAP.gpt52Codex]: 'GPT-5.2-Codex',
[CODEX_MODEL_MAP.gpt5Codex]: 'GPT-5-Codex',
[CODEX_MODEL_MAP.gpt5CodexMini]: 'GPT-5-Codex-Mini',
[CODEX_MODEL_MAP.codex1]: 'Codex-1',
[CODEX_MODEL_MAP.codexMiniLatest]: 'Codex-Mini-Latest',
[CODEX_MODEL_MAP.gpt5]: 'GPT-5',
[CODEX_MODEL_MAP.gpt51CodexMax]: 'GPT-5.1-Codex-Max',
[CODEX_MODEL_MAP.gpt51CodexMini]: 'GPT-5.1-Codex-Mini',
[CODEX_MODEL_MAP.gpt52]: 'GPT-5.2',
[CODEX_MODEL_MAP.gpt51]: 'GPT-5.1',
};
return displayNames[model] || model;
}

View File

@@ -11,23 +11,23 @@ export const CLAUDE_MODEL_MAP: Record<string, string> = {
* Codex/OpenAI model identifiers
* Based on OpenAI Codex CLI official models
* See: https://developers.openai.com/codex/models/
*
* IMPORTANT: All Codex models use 'codex-' prefix to distinguish from Cursor CLI models
*/
export const CODEX_MODEL_MAP = {
// Codex-specific models
// Recommended Codex-specific models
/** Most advanced agentic coding model for complex software engineering (default for ChatGPT users) */
gpt52Codex: 'gpt-5.2-codex',
/** Purpose-built for Codex CLI with versatile tool use (default for CLI users) */
gpt5Codex: 'gpt-5-codex',
/** Faster workflows optimized for low-latency code Q&A and editing */
gpt5CodexMini: 'gpt-5-codex-mini',
/** Version of o3 optimized for software engineering */
codex1: 'codex-1',
/** Version of o4-mini for Codex, optimized for faster workflows */
codexMiniLatest: 'codex-mini-latest',
gpt52Codex: 'codex-gpt-5.2-codex',
/** Optimized for long-horizon, agentic coding tasks in Codex */
gpt51CodexMax: 'codex-gpt-5.1-codex-max',
/** Smaller, more cost-effective version for faster workflows */
gpt51CodexMini: 'codex-gpt-5.1-codex-mini',
// Base GPT-5 model (also available in Codex)
/** GPT-5 base flagship model */
gpt5: 'gpt-5',
// General-purpose GPT models (also available in Codex)
/** Best general agentic model for tasks across industries and domains */
gpt52: 'codex-gpt-5.2',
/** Great for coding and agentic tasks across domains */
gpt51: 'codex-gpt-5.1',
} as const;
export const CODEX_MODEL_IDS = Object.values(CODEX_MODEL_MAP);
@@ -38,9 +38,9 @@ export const CODEX_MODEL_IDS = Object.values(CODEX_MODEL_MAP);
*/
export const REASONING_CAPABLE_MODELS = new Set([
CODEX_MODEL_MAP.gpt52Codex,
CODEX_MODEL_MAP.gpt5Codex,
CODEX_MODEL_MAP.gpt5,
CODEX_MODEL_MAP.codex1, // o3-based model
CODEX_MODEL_MAP.gpt51CodexMax,
CODEX_MODEL_MAP.gpt52,
CODEX_MODEL_MAP.gpt51,
]);
/**

View File

@@ -7,8 +7,8 @@
*/
import type { ModelProvider } from './settings.js';
import { CURSOR_MODEL_MAP, type CursorModelId } from './cursor-models.js';
import { CLAUDE_MODEL_MAP, CODEX_MODEL_MAP, type CodexModelId } from './model.js';
import { CURSOR_MODEL_MAP } from './cursor-models.js';
import { CLAUDE_MODEL_MAP, CODEX_MODEL_MAP } from './model.js';
import { OPENCODE_MODEL_CONFIG_MAP } from './opencode-models.js';
/** Provider prefix constants */
@@ -16,14 +16,13 @@ export const PROVIDER_PREFIXES = {
cursor: 'cursor-',
codex: 'codex-',
opencode: 'opencode-',
// Add new provider prefixes here
} as const;
/**
* Check if a model string represents a Cursor model
*
* @param model - Model string to check (e.g., "cursor-composer-1" or "composer-1")
* @returns true if the model is a Cursor model
* @returns true if the model is a Cursor model (excluding Codex-specific models)
*/
export function isCursorModel(model: string | undefined | null): boolean {
if (!model || typeof model !== 'string') return false;
@@ -33,8 +32,13 @@ export function isCursorModel(model: string | undefined | null): boolean {
return true;
}
// Check if it's a bare Cursor model ID
return model in CURSOR_MODEL_MAP;
// Check if it's a bare Cursor model ID (excluding Codex-specific models)
// Codex-specific models should always route to Codex provider, not Cursor
if (model in CURSOR_MODEL_MAP) {
return true;
}
return false;
}
/**
@@ -69,7 +73,7 @@ export function isCodexModel(model: string | undefined | null): boolean {
return true;
}
// Check if it's a gpt- model
// Check if it's a gpt- model (bare gpt models go to Codex, not Cursor)
if (model.startsWith('gpt-')) {
return true;
}
@@ -80,8 +84,7 @@ export function isCodexModel(model: string | undefined | null): boolean {
}
// Check if it's in the CODEX_MODEL_MAP
const modelValues = Object.values(CODEX_MODEL_MAP);
return modelValues.includes(model as CodexModelId);
return model in CODEX_MODEL_MAP;
}
/**
@@ -223,9 +226,8 @@ export function normalizeModelString(model: string | undefined | null): string {
}
// For Codex, bare gpt-* and o-series models are valid canonical forms
// Only add prefix if it's in CODEX_MODEL_MAP but doesn't have gpt-/o prefix
const codexModelValues = Object.values(CODEX_MODEL_MAP);
if (codexModelValues.includes(model as CodexModelId)) {
// Check if it's in the CODEX_MODEL_MAP
if (model in CODEX_MODEL_MAP) {
// If it already starts with gpt- or o, it's canonical
if (model.startsWith('gpt-') || /^o\d/.test(model)) {
return model;
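In short, the prefix scheme is what lets isCursorModel and isCodexModel disambiguate overlapping GPT models. A deliberately simplified sketch of the routing outcomes implied by the hunks above (the real functions also consult CURSOR_MODEL_MAP and CODEX_MODEL_MAP; these one-liners are illustrative only):

// Hypothetical simplifications, not the real implementations
const isCursorLike = (m: string) => m.startsWith('cursor-');
const isCodexLike = (m: string) => m.startsWith('codex-') || m.startsWith('gpt-');

isCursorLike('cursor-gpt-5.2'); // true  -> Cursor CLI
isCodexLike('codex-gpt-5.2');   // true  -> Codex CLI
isCodexLike('gpt-5.1');         // true  -> bare gpt-* goes to Codex, not Cursor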

View File

@@ -13,6 +13,7 @@ import type { OpencodeModelId } from './opencode-models.js';
import { getAllOpencodeModelIds, DEFAULT_OPENCODE_MODEL } from './opencode-models.js';
import type { PromptCustomization } from './prompts.js';
import type { CodexSandboxMode, CodexApprovalPolicy } from './codex.js';
import type { ReasoningEffort } from './provider.js';
// Re-export ModelAlias for convenience
export type { ModelAlias };
@@ -110,14 +111,18 @@ const DEFAULT_CODEX_ADDITIONAL_DIRS: string[] = [];
/**
* PhaseModelEntry - Configuration for a single phase model
*
* Encapsulates both the model selection and optional thinking level
* for Claude models. Cursor models handle thinking internally.
* Encapsulates the model selection and optional reasoning/thinking capabilities:
* - Claude models: Use thinkingLevel for extended thinking
* - Codex models: Use reasoningEffort for reasoning intensity
* - Cursor models: Handle thinking internally
*/
export interface PhaseModelEntry {
/** The model to use (Claude alias or Cursor model ID) */
model: ModelAlias | CursorModelId;
/** The model to use (Claude alias, Cursor model ID, or Codex model ID) */
model: ModelAlias | CursorModelId | CodexModelId;
/** Extended thinking level (only applies to Claude models, defaults to 'none') */
thinkingLevel?: ThinkingLevel;
/** Reasoning effort level (only applies to Codex models, defaults to 'none') */
reasoningEffort?: ReasoningEffort;
}
/**
@@ -282,7 +287,7 @@ export function profileHasThinking(profile: AIProfile): boolean {
if (profile.provider === 'codex') {
// Codex models handle thinking internally (o-series models)
const model = profile.codexModel || 'gpt-5.2';
const model = profile.codexModel || 'codex-gpt-5.2';
return model.startsWith('o');
}
@@ -303,7 +308,7 @@ export function getProfileModelString(profile: AIProfile): string {
}
if (profile.provider === 'codex') {
return `codex:${profile.codexModel || 'gpt-5.2'}`;
return `codex:${profile.codexModel || 'codex-gpt-5.2'}`;
}
if (profile.provider === 'opencode') {
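To illustrate how the extended PhaseModelEntry is meant to be used: thinkingLevel applies to Claude aliases, reasoningEffort to Codex model IDs, and Cursor models need neither. The literal 'high' and 'medium' values below are placeholders, since the ThinkingLevel and ReasoningEffort unions are defined outside this diff.

// Illustrative PhaseModelEntry values only; the literal levels are assumed placeholders
const claudePhase = { model: 'opus', thinkingLevel: 'high' };                        // Claude alias -> thinkingLevel
const codexPhase = { model: 'codex-gpt-5.1-codex-max', reasoningEffort: 'medium' };  // Codex ID -> reasoningEffort
const cursorPhase = { model: 'cursor-gpt-5.2' };                                     // Cursor handles thinking internally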