Compare commits

..

4 Commits

Author SHA1 Message Date
Kacper
220c8e4ddf feat: add 'dev-server:url-detected' event type to EventType
- Introduced a new event type 'dev-server:url-detected' to enhance event handling for the development server.
- This addition allows for better tracking and response to URL detection during server operations.

These changes improve the event system's capability to manage server-related events effectively.
2026-02-05 23:19:31 +01:00
Kacper
f97453484f feat: enhance adaptive thinking model support and update UI components
- Added `isAdaptiveThinkingModel` utility to improve model identification logic in the AddFeatureDialog.
- Updated the ThinkingLevelSelector to conditionally display information based on available thinking levels.
- Enhanced model name formatting in agent-context-parser to include 'GPT-5.3 Codex' for better clarity.

These changes improve the user experience by refining model handling and UI feedback related to adaptive thinking capabilities.
2026-02-05 23:05:19 +01:00
Kacper
835ffe3185 feat: update Claude model to Opus 4.6 and enhance adaptive thinking support
- Changed model identifier from `claude-opus-4-5-20251101` to `claude-opus-4-6` across various files, including documentation and code references.
- Updated the SDK to support adaptive thinking for Opus 4.6, allowing the model to determine its own reasoning depth.
- Enhanced the thinking level options to include 'adaptive' and adjusted related components to reflect this change.
- Updated tests to ensure compatibility with the new model and its features.

These changes improve the model's capabilities and user experience by leveraging adaptive reasoning.
2026-02-05 22:43:22 +01:00
Kacper
3b361cb0b9 chore: update Codex SDK to version 0.98.0 and add GPT-5.3-Codex model
- Upgraded @openai/codex-sdk from version 0.77.0 to 0.98.0 in package-lock.json and package.json.
- Introduced new model 'GPT-5.3-Codex' with enhanced capabilities in codex-models.ts and related files.
- Updated descriptions for existing models to reflect their latest features and improvements.
- Adjusted Codex model configuration and display to include the new model and its attributes.

These changes enhance the Codex model offerings and ensure compatibility with the latest SDK version.
2026-02-05 22:17:55 +01:00
30 changed files with 268 additions and 104 deletions

View File

@@ -161,7 +161,7 @@ Use `resolveModelString()` from `@automaker/model-resolver` to convert model ali
- `haiku` → `claude-haiku-4-5` - `haiku` → `claude-haiku-4-5`
- `sonnet` → `claude-sonnet-4-20250514` - `sonnet` → `claude-sonnet-4-20250514`
- `opus` → `claude-opus-4-5-20251101` - `opus` → `claude-opus-4-6`
## Environment Variables ## Environment Variables

View File

@@ -24,7 +24,7 @@
"test:unit": "vitest run tests/unit" "test:unit": "vitest run tests/unit"
}, },
"dependencies": { "dependencies": {
"@anthropic-ai/claude-agent-sdk": "0.1.76", "@anthropic-ai/claude-agent-sdk": "0.2.32",
"@automaker/dependency-resolver": "1.0.0", "@automaker/dependency-resolver": "1.0.0",
"@automaker/git-utils": "1.0.0", "@automaker/git-utils": "1.0.0",
"@automaker/model-resolver": "1.0.0", "@automaker/model-resolver": "1.0.0",
@@ -34,7 +34,7 @@
"@automaker/utils": "1.0.0", "@automaker/utils": "1.0.0",
"@github/copilot-sdk": "^0.1.16", "@github/copilot-sdk": "^0.1.16",
"@modelcontextprotocol/sdk": "1.25.2", "@modelcontextprotocol/sdk": "1.25.2",
"@openai/codex-sdk": "^0.77.0", "@openai/codex-sdk": "^0.98.0",
"cookie-parser": "1.4.7", "cookie-parser": "1.4.7",
"cors": "2.8.5", "cors": "2.8.5",
"dotenv": "17.2.3", "dotenv": "17.2.3",

View File

@@ -253,11 +253,27 @@ function buildMcpOptions(config: CreateSdkOptionsConfig): McpOptions {
/** /**
* Build thinking options for SDK configuration. * Build thinking options for SDK configuration.
* Converts ThinkingLevel to maxThinkingTokens for the Claude SDK. * Converts ThinkingLevel to maxThinkingTokens for the Claude SDK.
* For adaptive thinking (Opus 4.6), omits maxThinkingTokens to let the model
* decide its own reasoning depth.
* *
* @param thinkingLevel - The thinking level to convert * @param thinkingLevel - The thinking level to convert
* @returns Object with maxThinkingTokens if thinking is enabled * @returns Object with maxThinkingTokens if thinking is enabled with a budget
*/ */
function buildThinkingOptions(thinkingLevel?: ThinkingLevel): Partial<Options> { function buildThinkingOptions(thinkingLevel?: ThinkingLevel): Partial<Options> {
if (!thinkingLevel || thinkingLevel === 'none') {
return {};
}
// Adaptive thinking (Opus 4.6): don't set maxThinkingTokens
// The model will use adaptive thinking by default
if (thinkingLevel === 'adaptive') {
logger.debug(
`buildThinkingOptions: thinkingLevel="adaptive" -> no maxThinkingTokens (model decides)`
);
return {};
}
// Manual budget-based thinking for Haiku/Sonnet
const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel); const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel);
logger.debug( logger.debug(
`buildThinkingOptions: thinkingLevel="${thinkingLevel}" -> maxThinkingTokens=${maxThinkingTokens}` `buildThinkingOptions: thinkingLevel="${thinkingLevel}" -> maxThinkingTokens=${maxThinkingTokens}`

View File

@@ -219,8 +219,11 @@ export class ClaudeProvider extends BaseProvider {
// claudeCompatibleProvider takes precedence over claudeApiProfile // claudeCompatibleProvider takes precedence over claudeApiProfile
const providerConfig = claudeCompatibleProvider || claudeApiProfile; const providerConfig = claudeCompatibleProvider || claudeApiProfile;
// Convert thinking level to token budget // Build thinking configuration
const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel); // Adaptive thinking (Opus 4.6): don't set maxThinkingTokens, model uses adaptive by default
// Manual thinking (Haiku/Sonnet): use budget_tokens
const maxThinkingTokens =
thinkingLevel === 'adaptive' ? undefined : getThinkingTokenBudget(thinkingLevel);
// Build Claude SDK options // Build Claude SDK options
const sdkOptions: Options = { const sdkOptions: Options = {
@@ -349,13 +352,13 @@ export class ClaudeProvider extends BaseProvider {
getAvailableModels(): ModelDefinition[] { getAvailableModels(): ModelDefinition[] {
const models = [ const models = [
{ {
id: 'claude-opus-4-5-20251101', id: 'claude-opus-4-6',
name: 'Claude Opus 4.5', name: 'Claude Opus 4.6',
modelString: 'claude-opus-4-5-20251101', modelString: 'claude-opus-4-6',
provider: 'anthropic', provider: 'anthropic',
description: 'Most capable Claude model', description: 'Most capable Claude model with adaptive thinking',
contextWindow: 200000, contextWindow: 200000,
maxOutputTokens: 16000, maxOutputTokens: 128000,
supportsVision: true, supportsVision: true,
supportsTools: true, supportsTools: true,
tier: 'premium' as const, tier: 'premium' as const,

View File

@@ -19,12 +19,11 @@ const MAX_OUTPUT_16K = 16000;
export const CODEX_MODELS: ModelDefinition[] = [ export const CODEX_MODELS: ModelDefinition[] = [
// ========== Recommended Codex Models ========== // ========== Recommended Codex Models ==========
{ {
id: CODEX_MODEL_MAP.gpt52Codex, id: CODEX_MODEL_MAP.gpt53Codex,
name: 'GPT-5.2-Codex', name: 'GPT-5.3-Codex',
modelString: CODEX_MODEL_MAP.gpt52Codex, modelString: CODEX_MODEL_MAP.gpt53Codex,
provider: 'openai', provider: 'openai',
description: description: 'Latest frontier agentic coding model.',
'Most advanced agentic coding model for complex software engineering (default for ChatGPT users).',
contextWindow: CONTEXT_WINDOW_256K, contextWindow: CONTEXT_WINDOW_256K,
maxOutputTokens: MAX_OUTPUT_32K, maxOutputTokens: MAX_OUTPUT_32K,
supportsVision: true, supportsVision: true,
@@ -33,12 +32,25 @@ export const CODEX_MODELS: ModelDefinition[] = [
default: true, default: true,
hasReasoning: true, hasReasoning: true,
}, },
{
id: CODEX_MODEL_MAP.gpt52Codex,
name: 'GPT-5.2-Codex',
modelString: CODEX_MODEL_MAP.gpt52Codex,
provider: 'openai',
description: 'Frontier agentic coding model.',
contextWindow: CONTEXT_WINDOW_256K,
maxOutputTokens: MAX_OUTPUT_32K,
supportsVision: true,
supportsTools: true,
tier: 'premium' as const,
hasReasoning: true,
},
{ {
id: CODEX_MODEL_MAP.gpt51CodexMax, id: CODEX_MODEL_MAP.gpt51CodexMax,
name: 'GPT-5.1-Codex-Max', name: 'GPT-5.1-Codex-Max',
modelString: CODEX_MODEL_MAP.gpt51CodexMax, modelString: CODEX_MODEL_MAP.gpt51CodexMax,
provider: 'openai', provider: 'openai',
description: 'Optimized for long-horizon, agentic coding tasks in Codex.', description: 'Codex-optimized flagship for deep and fast reasoning.',
contextWindow: CONTEXT_WINDOW_256K, contextWindow: CONTEXT_WINDOW_256K,
maxOutputTokens: MAX_OUTPUT_32K, maxOutputTokens: MAX_OUTPUT_32K,
supportsVision: true, supportsVision: true,
@@ -51,7 +63,7 @@ export const CODEX_MODELS: ModelDefinition[] = [
name: 'GPT-5.1-Codex-Mini', name: 'GPT-5.1-Codex-Mini',
modelString: CODEX_MODEL_MAP.gpt51CodexMini, modelString: CODEX_MODEL_MAP.gpt51CodexMini,
provider: 'openai', provider: 'openai',
description: 'Smaller, more cost-effective version for faster workflows.', description: 'Optimized for codex. Cheaper, faster, but less capable.',
contextWindow: CONTEXT_WINDOW_128K, contextWindow: CONTEXT_WINDOW_128K,
maxOutputTokens: MAX_OUTPUT_16K, maxOutputTokens: MAX_OUTPUT_16K,
supportsVision: true, supportsVision: true,
@@ -66,7 +78,7 @@ export const CODEX_MODELS: ModelDefinition[] = [
name: 'GPT-5.2', name: 'GPT-5.2',
modelString: CODEX_MODEL_MAP.gpt52, modelString: CODEX_MODEL_MAP.gpt52,
provider: 'openai', provider: 'openai',
description: 'Best general agentic model for tasks across industries and domains.', description: 'Latest frontier model with improvements across knowledge, reasoning and coding.',
contextWindow: CONTEXT_WINDOW_256K, contextWindow: CONTEXT_WINDOW_256K,
maxOutputTokens: MAX_OUTPUT_32K, maxOutputTokens: MAX_OUTPUT_32K,
supportsVision: true, supportsVision: true,

View File

@@ -103,7 +103,7 @@ export class ProviderFactory {
/** /**
* Get the appropriate provider for a given model ID * Get the appropriate provider for a given model ID
* *
* @param modelId Model identifier (e.g., "claude-opus-4-5-20251101", "cursor-gpt-4o", "cursor-auto") * @param modelId Model identifier (e.g., "claude-opus-4-6", "cursor-gpt-4o", "cursor-auto")
* @param options Optional settings * @param options Optional settings
* @param options.throwOnDisconnected Throw error if provider is disconnected (default: true) * @param options.throwOnDisconnected Throw error if provider is disconnected (default: true)
* @returns Provider instance for the model * @returns Provider instance for the model

View File

@@ -35,7 +35,7 @@ describe('model-resolver.ts', () => {
it("should resolve 'opus' alias to full model string", () => { it("should resolve 'opus' alias to full model string", () => {
const result = resolveModelString('opus'); const result = resolveModelString('opus');
expect(result).toBe('claude-opus-4-5-20251101'); expect(result).toBe('claude-opus-4-6');
expect(consoleSpy.log).toHaveBeenCalledWith( expect(consoleSpy.log).toHaveBeenCalledWith(
expect.stringContaining('Migrated legacy ID: "opus" -> "claude-opus"') expect.stringContaining('Migrated legacy ID: "opus" -> "claude-opus"')
); );
@@ -117,7 +117,7 @@ describe('model-resolver.ts', () => {
describe('getEffectiveModel', () => { describe('getEffectiveModel', () => {
it('should prioritize explicit model over session and default', () => { it('should prioritize explicit model over session and default', () => {
const result = getEffectiveModel('opus', 'haiku', 'gpt-5.2'); const result = getEffectiveModel('opus', 'haiku', 'gpt-5.2');
expect(result).toBe('claude-opus-4-5-20251101'); expect(result).toBe('claude-opus-4-6');
}); });
it('should use session model when explicit is not provided', () => { it('should use session model when explicit is not provided', () => {

View File

@@ -491,5 +491,29 @@ describe('sdk-options.ts', () => {
expect(options.maxThinkingTokens).toBeUndefined(); expect(options.maxThinkingTokens).toBeUndefined();
}); });
}); });
describe('adaptive thinking for Opus 4.6', () => {
it('should not set maxThinkingTokens for adaptive thinking (model decides)', async () => {
const { createAutoModeOptions } = await import('@/lib/sdk-options.js');
const options = createAutoModeOptions({
cwd: '/test/path',
thinkingLevel: 'adaptive',
});
expect(options.maxThinkingTokens).toBeUndefined();
});
it('should not include maxThinkingTokens when thinkingLevel is "none"', async () => {
const { createAutoModeOptions } = await import('@/lib/sdk-options.js');
const options = createAutoModeOptions({
cwd: '/test/path',
thinkingLevel: 'none',
});
expect(options.maxThinkingTokens).toBeUndefined();
});
});
}); });
}); });

View File

@@ -39,7 +39,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Hello', prompt: 'Hello',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -59,7 +59,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test prompt', prompt: 'Test prompt',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test/dir', cwd: '/test/dir',
systemPrompt: 'You are helpful', systemPrompt: 'You are helpful',
maxTurns: 10, maxTurns: 10,
@@ -71,7 +71,7 @@ describe('claude-provider.ts', () => {
expect(sdk.query).toHaveBeenCalledWith({ expect(sdk.query).toHaveBeenCalledWith({
prompt: 'Test prompt', prompt: 'Test prompt',
options: expect.objectContaining({ options: expect.objectContaining({
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
systemPrompt: 'You are helpful', systemPrompt: 'You are helpful',
maxTurns: 10, maxTurns: 10,
cwd: '/test/dir', cwd: '/test/dir',
@@ -91,7 +91,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -116,7 +116,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
abortController, abortController,
}); });
@@ -145,7 +145,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Current message', prompt: 'Current message',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
conversationHistory, conversationHistory,
sdkSessionId: 'test-session-id', sdkSessionId: 'test-session-id',
@@ -176,7 +176,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: arrayPrompt as any, prompt: arrayPrompt as any,
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -196,7 +196,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -222,7 +222,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -286,7 +286,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -313,7 +313,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -341,7 +341,7 @@ describe('claude-provider.ts', () => {
const generator = provider.executeQuery({ const generator = provider.executeQuery({
prompt: 'Test', prompt: 'Test',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/test', cwd: '/test',
}); });
@@ -366,12 +366,12 @@ describe('claude-provider.ts', () => {
expect(models).toHaveLength(4); expect(models).toHaveLength(4);
}); });
it('should include Claude Opus 4.5', () => { it('should include Claude Opus 4.6', () => {
const models = provider.getAvailableModels(); const models = provider.getAvailableModels();
const opus = models.find((m) => m.id === 'claude-opus-4-5-20251101'); const opus = models.find((m) => m.id === 'claude-opus-4-6');
expect(opus).toBeDefined(); expect(opus).toBeDefined();
expect(opus?.name).toBe('Claude Opus 4.5'); expect(opus?.name).toBe('Claude Opus 4.6');
expect(opus?.provider).toBe('anthropic'); expect(opus?.provider).toBe('anthropic');
}); });
@@ -400,7 +400,7 @@ describe('claude-provider.ts', () => {
it('should mark Opus as default', () => { it('should mark Opus as default', () => {
const models = provider.getAvailableModels(); const models = provider.getAvailableModels();
const opus = models.find((m) => m.id === 'claude-opus-4-5-20251101'); const opus = models.find((m) => m.id === 'claude-opus-4-6');
expect(opus?.default).toBe(true); expect(opus?.default).toBe(true);
}); });

View File

@@ -54,8 +54,8 @@ describe('provider-factory.ts', () => {
describe('getProviderForModel', () => { describe('getProviderForModel', () => {
describe('Claude models (claude-* prefix)', () => { describe('Claude models (claude-* prefix)', () => {
it('should return ClaudeProvider for claude-opus-4-5-20251101', () => { it('should return ClaudeProvider for claude-opus-4-6', () => {
const provider = ProviderFactory.getProviderForModel('claude-opus-4-5-20251101'); const provider = ProviderFactory.getProviderForModel('claude-opus-4-6');
expect(provider).toBeInstanceOf(ClaudeProvider); expect(provider).toBeInstanceOf(ClaudeProvider);
}); });
@@ -70,7 +70,7 @@ describe('provider-factory.ts', () => {
}); });
it('should be case-insensitive for claude models', () => { it('should be case-insensitive for claude models', () => {
const provider = ProviderFactory.getProviderForModel('CLAUDE-OPUS-4-5-20251101'); const provider = ProviderFactory.getProviderForModel('CLAUDE-OPUS-4-6');
expect(provider).toBeInstanceOf(ClaudeProvider); expect(provider).toBeInstanceOf(ClaudeProvider);
}); });
}); });

View File

@@ -199,7 +199,7 @@ The agent is configured with:
```javascript ```javascript
{ {
model: "claude-opus-4-5-20251101", model: "claude-opus-4-6",
maxTurns: 20, maxTurns: 20,
cwd: workingDirectory, cwd: workingDirectory,
allowedTools: [ allowedTools: [

View File

@@ -28,7 +28,7 @@ import { cn } from '@/lib/utils';
import { modelSupportsThinking } from '@/lib/utils'; import { modelSupportsThinking } from '@/lib/utils';
import { useAppStore, ThinkingLevel, FeatureImage, PlanningMode, Feature } from '@/store/app-store'; import { useAppStore, ThinkingLevel, FeatureImage, PlanningMode, Feature } from '@/store/app-store';
import type { ReasoningEffort, PhaseModelEntry, AgentModel } from '@automaker/types'; import type { ReasoningEffort, PhaseModelEntry, AgentModel } from '@automaker/types';
import { supportsReasoningEffort } from '@automaker/types'; import { supportsReasoningEffort, isAdaptiveThinkingModel } from '@automaker/types';
import { import {
PrioritySelector, PrioritySelector,
WorkModeSelector, WorkModeSelector,
@@ -264,7 +264,20 @@ export function AddFeatureDialog({
}, [planningMode]); }, [planningMode]);
const handleModelChange = (entry: PhaseModelEntry) => { const handleModelChange = (entry: PhaseModelEntry) => {
setModelEntry(entry); // Normalize thinking level when switching between adaptive and non-adaptive models
const isNewModelAdaptive =
typeof entry.model === 'string' && isAdaptiveThinkingModel(entry.model);
const currentLevel = entry.thinkingLevel || 'none';
if (isNewModelAdaptive && currentLevel !== 'none' && currentLevel !== 'adaptive') {
// Switching TO Opus 4.6 with a manual level -> auto-switch to 'adaptive'
setModelEntry({ ...entry, thinkingLevel: 'adaptive' });
} else if (!isNewModelAdaptive && currentLevel === 'adaptive') {
// Switching FROM Opus 4.6 with adaptive -> auto-switch to 'high'
setModelEntry({ ...entry, thinkingLevel: 'high' });
} else {
setModelEntry(entry);
}
}; };
const buildFeatureData = (): FeatureData | null => { const buildFeatureData = (): FeatureData | null => {

View File

@@ -167,7 +167,14 @@ export const ALL_MODELS: ModelOption[] = [
...COPILOT_MODELS, ...COPILOT_MODELS,
]; ];
export const THINKING_LEVELS: ThinkingLevel[] = ['none', 'low', 'medium', 'high', 'ultrathink']; export const THINKING_LEVELS: ThinkingLevel[] = [
'none',
'low',
'medium',
'high',
'ultrathink',
'adaptive',
];
export const THINKING_LEVEL_LABELS: Record<ThinkingLevel, string> = { export const THINKING_LEVEL_LABELS: Record<ThinkingLevel, string> = {
none: 'None', none: 'None',
@@ -175,6 +182,7 @@ export const THINKING_LEVEL_LABELS: Record<ThinkingLevel, string> = {
medium: 'Med', medium: 'Med',
high: 'High', high: 'High',
ultrathink: 'Ultra', ultrathink: 'Ultra',
adaptive: 'Adaptive',
}; };
/** /**

View File

@@ -2,19 +2,25 @@ import { Label } from '@/components/ui/label';
import { Brain } from 'lucide-react'; import { Brain } from 'lucide-react';
import { cn } from '@/lib/utils'; import { cn } from '@/lib/utils';
import { ThinkingLevel } from '@/store/app-store'; import { ThinkingLevel } from '@/store/app-store';
import { THINKING_LEVELS, THINKING_LEVEL_LABELS } from './model-constants'; import { THINKING_LEVEL_LABELS } from './model-constants';
import { getThinkingLevelsForModel } from '@automaker/types';
interface ThinkingLevelSelectorProps { interface ThinkingLevelSelectorProps {
selectedLevel: ThinkingLevel; selectedLevel: ThinkingLevel;
onLevelSelect: (level: ThinkingLevel) => void; onLevelSelect: (level: ThinkingLevel) => void;
testIdPrefix?: string; testIdPrefix?: string;
/** Optional model ID to filter available thinking levels (e.g., Opus 4.6 only shows None/Adaptive) */
model?: string;
} }
export function ThinkingLevelSelector({ export function ThinkingLevelSelector({
selectedLevel, selectedLevel,
onLevelSelect, onLevelSelect,
testIdPrefix = 'thinking-level', testIdPrefix = 'thinking-level',
model,
}: ThinkingLevelSelectorProps) { }: ThinkingLevelSelectorProps) {
const levels = model ? getThinkingLevelsForModel(model) : getThinkingLevelsForModel('');
return ( return (
<div className="space-y-2 pt-2 border-t border-border"> <div className="space-y-2 pt-2 border-t border-border">
<Label className="flex items-center gap-2 text-sm"> <Label className="flex items-center gap-2 text-sm">
@@ -22,7 +28,7 @@ export function ThinkingLevelSelector({
Thinking Level Thinking Level
</Label> </Label>
<div className="flex gap-2 flex-wrap"> <div className="flex gap-2 flex-wrap">
{THINKING_LEVELS.map((level) => ( {levels.map((level) => (
<button <button
key={level} key={level}
type="button" type="button"
@@ -40,7 +46,9 @@ export function ThinkingLevelSelector({
))} ))}
</div> </div>
<p className="text-xs text-muted-foreground"> <p className="text-xs text-muted-foreground">
Higher levels give more time to reason through complex problems. {levels.includes('adaptive')
? 'Adaptive thinking lets the model decide how much reasoning to use.'
: 'Higher levels give more time to reason through complex problems.'}
</p> </p>
</div> </div>
); );

View File

@@ -21,6 +21,7 @@ import {
isGroupSelected, isGroupSelected,
getSelectedVariant, getSelectedVariant,
codexModelHasThinking, codexModelHasThinking,
getThinkingLevelsForModel,
} from '@automaker/types'; } from '@automaker/types';
import { import {
CLAUDE_MODELS, CLAUDE_MODELS,
@@ -28,7 +29,6 @@ import {
OPENCODE_MODELS, OPENCODE_MODELS,
GEMINI_MODELS, GEMINI_MODELS,
COPILOT_MODELS, COPILOT_MODELS,
THINKING_LEVELS,
THINKING_LEVEL_LABELS, THINKING_LEVEL_LABELS,
REASONING_EFFORT_LEVELS, REASONING_EFFORT_LEVELS,
REASONING_EFFORT_LABELS, REASONING_EFFORT_LABELS,
@@ -1296,7 +1296,9 @@ export function PhaseModelSelector({
<div className="px-2 py-1 text-xs font-medium text-muted-foreground"> <div className="px-2 py-1 text-xs font-medium text-muted-foreground">
Thinking Level Thinking Level
</div> </div>
{THINKING_LEVELS.map((level) => ( {getThinkingLevelsForModel(
model.mapsToClaudeModel === 'opus' ? 'claude-opus' : ''
).map((level) => (
<button <button
key={level} key={level}
onClick={() => { onClick={() => {
@@ -1322,6 +1324,7 @@ export function PhaseModelSelector({
{level === 'medium' && 'Moderate reasoning (10k tokens)'} {level === 'medium' && 'Moderate reasoning (10k tokens)'}
{level === 'high' && 'Deep reasoning (16k tokens)'} {level === 'high' && 'Deep reasoning (16k tokens)'}
{level === 'ultrathink' && 'Maximum reasoning (32k tokens)'} {level === 'ultrathink' && 'Maximum reasoning (32k tokens)'}
{level === 'adaptive' && 'Model decides reasoning depth'}
</span> </span>
</div> </div>
{isSelected && currentThinking === level && ( {isSelected && currentThinking === level && (
@@ -1402,7 +1405,9 @@ export function PhaseModelSelector({
<div className="px-2 py-1.5 text-xs font-medium text-muted-foreground border-b border-border/50 mb-1"> <div className="px-2 py-1.5 text-xs font-medium text-muted-foreground border-b border-border/50 mb-1">
Thinking Level Thinking Level
</div> </div>
{THINKING_LEVELS.map((level) => ( {getThinkingLevelsForModel(
model.mapsToClaudeModel === 'opus' ? 'claude-opus' : ''
).map((level) => (
<button <button
key={level} key={level}
onClick={() => { onClick={() => {
@@ -1428,6 +1433,7 @@ export function PhaseModelSelector({
{level === 'medium' && 'Moderate reasoning (10k tokens)'} {level === 'medium' && 'Moderate reasoning (10k tokens)'}
{level === 'high' && 'Deep reasoning (16k tokens)'} {level === 'high' && 'Deep reasoning (16k tokens)'}
{level === 'ultrathink' && 'Maximum reasoning (32k tokens)'} {level === 'ultrathink' && 'Maximum reasoning (32k tokens)'}
{level === 'adaptive' && 'Model decides reasoning depth'}
</span> </span>
</div> </div>
{isSelected && currentThinking === level && ( {isSelected && currentThinking === level && (
@@ -1564,7 +1570,7 @@ export function PhaseModelSelector({
<div className="px-2 py-1 text-xs font-medium text-muted-foreground"> <div className="px-2 py-1 text-xs font-medium text-muted-foreground">
Thinking Level Thinking Level
</div> </div>
{THINKING_LEVELS.map((level) => ( {getThinkingLevelsForModel(model.id).map((level) => (
<button <button
key={level} key={level}
onClick={() => { onClick={() => {
@@ -1589,6 +1595,7 @@ export function PhaseModelSelector({
{level === 'medium' && 'Moderate reasoning (10k tokens)'} {level === 'medium' && 'Moderate reasoning (10k tokens)'}
{level === 'high' && 'Deep reasoning (16k tokens)'} {level === 'high' && 'Deep reasoning (16k tokens)'}
{level === 'ultrathink' && 'Maximum reasoning (32k tokens)'} {level === 'ultrathink' && 'Maximum reasoning (32k tokens)'}
{level === 'adaptive' && 'Model decides reasoning depth'}
</span> </span>
</div> </div>
{isSelected && currentThinking === level && ( {isSelected && currentThinking === level && (
@@ -1685,7 +1692,7 @@ export function PhaseModelSelector({
<div className="px-2 py-1.5 text-xs font-medium text-muted-foreground border-b border-border/50 mb-1"> <div className="px-2 py-1.5 text-xs font-medium text-muted-foreground border-b border-border/50 mb-1">
Thinking Level Thinking Level
</div> </div>
{THINKING_LEVELS.map((level) => ( {getThinkingLevelsForModel(model.id).map((level) => (
<button <button
key={level} key={level}
onClick={() => { onClick={() => {
@@ -1710,6 +1717,7 @@ export function PhaseModelSelector({
{level === 'medium' && 'Moderate reasoning (10k tokens)'} {level === 'medium' && 'Moderate reasoning (10k tokens)'}
{level === 'high' && 'Deep reasoning (16k tokens)'} {level === 'high' && 'Deep reasoning (16k tokens)'}
{level === 'ultrathink' && 'Maximum reasoning (32k tokens)'} {level === 'ultrathink' && 'Maximum reasoning (32k tokens)'}
{level === 'adaptive' && 'Model decides reasoning depth'}
</span> </span>
</div> </div>
{isSelected && currentThinking === level && ( {isSelected && currentThinking === level && (

View File

@@ -27,25 +27,30 @@ interface CodexModelInfo {
} }
const CODEX_MODEL_INFO: Record<CodexModelId, CodexModelInfo> = { const CODEX_MODEL_INFO: Record<CodexModelId, CodexModelInfo> = {
'codex-gpt-5.3-codex': {
id: 'codex-gpt-5.3-codex',
label: 'GPT-5.3-Codex',
description: 'Latest frontier agentic coding model',
},
'codex-gpt-5.2-codex': { 'codex-gpt-5.2-codex': {
id: 'codex-gpt-5.2-codex', id: 'codex-gpt-5.2-codex',
label: 'GPT-5.2-Codex', label: 'GPT-5.2-Codex',
description: 'Most advanced agentic coding model for complex software engineering', description: 'Frontier agentic coding model',
}, },
'codex-gpt-5.1-codex-max': { 'codex-gpt-5.1-codex-max': {
id: 'codex-gpt-5.1-codex-max', id: 'codex-gpt-5.1-codex-max',
label: 'GPT-5.1-Codex-Max', label: 'GPT-5.1-Codex-Max',
description: 'Optimized for long-horizon, agentic coding tasks in Codex', description: 'Codex-optimized flagship for deep and fast reasoning',
}, },
'codex-gpt-5.1-codex-mini': { 'codex-gpt-5.1-codex-mini': {
id: 'codex-gpt-5.1-codex-mini', id: 'codex-gpt-5.1-codex-mini',
label: 'GPT-5.1-Codex-Mini', label: 'GPT-5.1-Codex-Mini',
description: 'Smaller, more cost-effective version for faster workflows', description: 'Optimized for codex. Cheaper, faster, but less capable',
}, },
'codex-gpt-5.2': { 'codex-gpt-5.2': {
id: 'codex-gpt-5.2', id: 'codex-gpt-5.2',
label: 'GPT-5.2', label: 'GPT-5.2',
description: 'Best general agentic model for tasks across industries and domains', description: 'Latest frontier model with improvements across knowledge, reasoning and coding',
}, },
'codex-gpt-5.1': { 'codex-gpt-5.1': {
id: 'codex-gpt-5.1', id: 'codex-gpt-5.1',
@@ -160,6 +165,7 @@ export function CodexModelConfiguration({
function supportsReasoningEffort(modelId: string): boolean { function supportsReasoningEffort(modelId: string): boolean {
const reasoningModels = [ const reasoningModels = [
'codex-gpt-5.3-codex',
'codex-gpt-5.2-codex', 'codex-gpt-5.2-codex',
'codex-gpt-5.1-codex-max', 'codex-gpt-5.1-codex-max',
'codex-gpt-5.2', 'codex-gpt-5.2',

View File

@@ -27,18 +27,20 @@ export interface AgentTaskInfo {
/** /**
* Default model used by the feature executor * Default model used by the feature executor
*/ */
export const DEFAULT_MODEL = 'claude-opus-4-5-20251101'; export const DEFAULT_MODEL = 'claude-opus-4-6';
/** /**
* Formats a model name for display * Formats a model name for display
*/ */
export function formatModelName(model: string): string { export function formatModelName(model: string): string {
// Claude models // Claude models
if (model.includes('opus-4-6')) return 'Opus 4.6';
if (model.includes('opus')) return 'Opus 4.5'; if (model.includes('opus')) return 'Opus 4.5';
if (model.includes('sonnet')) return 'Sonnet 4.5'; if (model.includes('sonnet')) return 'Sonnet 4.5';
if (model.includes('haiku')) return 'Haiku 4.5'; if (model.includes('haiku')) return 'Haiku 4.5';
// Codex/GPT models - specific formatting // Codex/GPT models - specific formatting
if (model === 'codex-gpt-5.3-codex') return 'GPT-5.3 Codex';
if (model === 'codex-gpt-5.2-codex') return 'GPT-5.2 Codex'; if (model === 'codex-gpt-5.2-codex') return 'GPT-5.2 Codex';
if (model === 'codex-gpt-5.2') return 'GPT-5.2'; if (model === 'codex-gpt-5.2') return 'GPT-5.2';
if (model === 'codex-gpt-5.1-codex-max') return 'GPT-5.1 Max'; if (model === 'codex-gpt-5.1-codex-max') return 'GPT-5.1 Max';

View File

@@ -142,7 +142,7 @@ const modelId = resolveModelString('sonnet'); // → 'claude-sonnet-4-20250514'
- `haiku``claude-haiku-4-5` (fast, simple tasks) - `haiku``claude-haiku-4-5` (fast, simple tasks)
- `sonnet``claude-sonnet-4-20250514` (balanced, recommended) - `sonnet``claude-sonnet-4-20250514` (balanced, recommended)
- `opus``claude-opus-4-5-20251101` (maximum capability) - `opus``claude-opus-4-6` (maximum capability)
### @automaker/dependency-resolver ### @automaker/dependency-resolver

View File

@@ -175,7 +175,7 @@ Uses `@anthropic-ai/claude-agent-sdk` for direct SDK integration.
Routes models that: Routes models that:
- Start with `"claude-"` (e.g., `"claude-opus-4-5-20251101"`) - Start with `"claude-"` (e.g., `"claude-opus-4-6"`)
- Are Claude aliases: `"opus"`, `"sonnet"`, `"haiku"` - Are Claude aliases: `"opus"`, `"sonnet"`, `"haiku"`
#### Authentication #### Authentication
@@ -191,7 +191,7 @@ const provider = new ClaudeProvider();
const stream = provider.executeQuery({ const stream = provider.executeQuery({
prompt: 'What is 2+2?', prompt: 'What is 2+2?',
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
cwd: '/project/path', cwd: '/project/path',
systemPrompt: 'You are a helpful assistant.', systemPrompt: 'You are a helpful assistant.',
maxTurns: 20, maxTurns: 20,
@@ -701,7 +701,7 @@ Test provider interaction with services:
```typescript ```typescript
describe('Provider Integration', () => { describe('Provider Integration', () => {
it('should work with AgentService', async () => { it('should work with AgentService', async () => {
const provider = ProviderFactory.getProviderForModel('claude-opus-4-5-20251101'); const provider = ProviderFactory.getProviderForModel('claude-opus-4-6');
// Test full workflow // Test full workflow
}); });

View File

@@ -213,7 +213,7 @@ Model alias mapping for Claude models.
export const CLAUDE_MODEL_MAP: Record<string, string> = { export const CLAUDE_MODEL_MAP: Record<string, string> = {
haiku: 'claude-haiku-4-5', haiku: 'claude-haiku-4-5',
sonnet: 'claude-sonnet-4-20250514', sonnet: 'claude-sonnet-4-20250514',
opus: 'claude-opus-4-5-20251101', opus: 'claude-opus-4-6',
} as const; } as const;
``` ```
@@ -223,7 +223,7 @@ Default models per provider.
```typescript ```typescript
export const DEFAULT_MODELS = { export const DEFAULT_MODELS = {
claude: 'claude-opus-4-5-20251101', claude: 'claude-opus-4-6',
openai: 'gpt-5.2', openai: 'gpt-5.2',
} as const; } as const;
``` ```
@@ -248,8 +248,8 @@ Resolve a model key/alias to a full model string.
import { resolveModelString, DEFAULT_MODELS } from '../lib/model-resolver.js'; import { resolveModelString, DEFAULT_MODELS } from '../lib/model-resolver.js';
resolveModelString('opus'); resolveModelString('opus');
// Returns: "claude-opus-4-5-20251101" // Returns: "claude-opus-4-6"
// Logs: "[ModelResolver] Resolved model alias: "opus" -> "claude-opus-4-5-20251101"" // Logs: "[ModelResolver] Resolved model alias: "opus" -> "claude-opus-4-6""
resolveModelString('gpt-5.2'); resolveModelString('gpt-5.2');
// Returns: "gpt-5.2" // Returns: "gpt-5.2"
@@ -260,8 +260,8 @@ resolveModelString('claude-sonnet-4-20250514');
// Logs: "[ModelResolver] Using full Claude model string: claude-sonnet-4-20250514" // Logs: "[ModelResolver] Using full Claude model string: claude-sonnet-4-20250514"
resolveModelString('invalid-model'); resolveModelString('invalid-model');
// Returns: "claude-opus-4-5-20251101" // Returns: "claude-opus-4-6"
// Logs: "[ModelResolver] Unknown model key "invalid-model", using default: "claude-opus-4-5-20251101"" // Logs: "[ModelResolver] Unknown model key "invalid-model", using default: "claude-opus-4-6""
``` ```
--- ---

View File

@@ -30,15 +30,15 @@ const model2 = resolveModelString('haiku');
// Returns: 'claude-haiku-4-5' // Returns: 'claude-haiku-4-5'
const model3 = resolveModelString('opus'); const model3 = resolveModelString('opus');
// Returns: 'claude-opus-4-5-20251101' // Returns: 'claude-opus-4-6'
// Use with custom default // Use with custom default
const model4 = resolveModelString(undefined, 'claude-sonnet-4-20250514'); const model4 = resolveModelString(undefined, 'claude-sonnet-4-20250514');
// Returns: 'claude-sonnet-4-20250514' (default) // Returns: 'claude-sonnet-4-20250514' (default)
// Direct model ID passthrough // Direct model ID passthrough
const model5 = resolveModelString('claude-opus-4-5-20251101'); const model5 = resolveModelString('claude-opus-4-6');
// Returns: 'claude-opus-4-5-20251101' (unchanged) // Returns: 'claude-opus-4-6' (unchanged)
``` ```
### Get Effective Model ### Get Effective Model
@@ -72,7 +72,7 @@ console.log(DEFAULT_MODELS.chat); // 'claude-sonnet-4-20250514'
// Model alias mappings // Model alias mappings
console.log(CLAUDE_MODEL_MAP.haiku); // 'claude-haiku-4-5' console.log(CLAUDE_MODEL_MAP.haiku); // 'claude-haiku-4-5'
console.log(CLAUDE_MODEL_MAP.sonnet); // 'claude-sonnet-4-20250514' console.log(CLAUDE_MODEL_MAP.sonnet); // 'claude-sonnet-4-20250514'
console.log(CLAUDE_MODEL_MAP.opus); // 'claude-opus-4-5-20251101' console.log(CLAUDE_MODEL_MAP.opus); // 'claude-opus-4-6'
``` ```
## Usage Example ## Usage Example
@@ -103,7 +103,7 @@ const feature: Feature = {
}; };
prepareFeatureExecution(feature); prepareFeatureExecution(feature);
// Output: Executing feature with model: claude-opus-4-5-20251101 // Output: Executing feature with model: claude-opus-4-6
``` ```
## Supported Models ## Supported Models
@@ -112,7 +112,7 @@ prepareFeatureExecution(feature);
- `haiku``claude-haiku-4-5` - `haiku``claude-haiku-4-5`
- `sonnet``claude-sonnet-4-20250514` - `sonnet``claude-sonnet-4-20250514`
- `opus``claude-opus-4-5-20251101` - `opus``claude-opus-4-6`
### Model Selection Guide ### Model Selection Guide

View File

@@ -484,12 +484,12 @@ describe('model-resolver', () => {
it('should handle full Claude model string in entry', () => { it('should handle full Claude model string in entry', () => {
const entry: PhaseModelEntry = { const entry: PhaseModelEntry = {
model: 'claude-opus-4-5-20251101', model: 'claude-opus-4-6',
thinkingLevel: 'high', thinkingLevel: 'high',
}; };
const result = resolvePhaseModel(entry); const result = resolvePhaseModel(entry);
expect(result.model).toBe('claude-opus-4-5-20251101'); expect(result.model).toBe('claude-opus-4-6');
expect(result.thinkingLevel).toBe('high'); expect(result.thinkingLevel).toBe('high');
}); });
}); });

View File

@@ -54,13 +54,19 @@ export function getClaudeCliPaths(): string[] {
if (isWindows) { if (isWindows) {
const appData = process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming'); const appData = process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming');
return [ const nvmSymlink = process.env.NVM_SYMLINK;
const paths = [
path.join(os.homedir(), '.local', 'bin', 'claude.exe'), path.join(os.homedir(), '.local', 'bin', 'claude.exe'),
path.join(appData, 'npm', 'claude.cmd'), path.join(appData, 'npm', 'claude.cmd'),
path.join(appData, 'npm', 'claude'), path.join(appData, 'npm', 'claude'),
path.join(appData, '.npm-global', 'bin', 'claude.cmd'), path.join(appData, '.npm-global', 'bin', 'claude.cmd'),
path.join(appData, '.npm-global', 'bin', 'claude'), path.join(appData, '.npm-global', 'bin', 'claude'),
]; ];
// nvm4w (NVM for Windows) symlink path
if (nvmSymlink) {
paths.push(path.join(nvmSymlink, 'claude.cmd'), path.join(nvmSymlink, 'claude'));
}
return paths;
} }
return [ return [
@@ -130,7 +136,8 @@ export function getCodexCliPaths(): string[] {
if (isWindows) { if (isWindows) {
const appData = process.env.APPDATA || path.join(homeDir, 'AppData', 'Roaming'); const appData = process.env.APPDATA || path.join(homeDir, 'AppData', 'Roaming');
const localAppData = process.env.LOCALAPPDATA || path.join(homeDir, 'AppData', 'Local'); const localAppData = process.env.LOCALAPPDATA || path.join(homeDir, 'AppData', 'Local');
return [ const nvmSymlink = process.env.NVM_SYMLINK;
const paths = [
path.join(homeDir, '.local', 'bin', 'codex.exe'), path.join(homeDir, '.local', 'bin', 'codex.exe'),
path.join(appData, 'npm', 'codex.cmd'), path.join(appData, 'npm', 'codex.cmd'),
path.join(appData, 'npm', 'codex'), path.join(appData, 'npm', 'codex'),
@@ -142,6 +149,11 @@ export function getCodexCliPaths(): string[] {
path.join(localAppData, 'pnpm', 'codex.cmd'), path.join(localAppData, 'pnpm', 'codex.cmd'),
path.join(localAppData, 'pnpm', 'codex'), path.join(localAppData, 'pnpm', 'codex'),
]; ];
// nvm4w (NVM for Windows) symlink path
if (nvmSymlink) {
paths.push(path.join(nvmSymlink, 'codex.cmd'), path.join(nvmSymlink, 'codex'));
}
return paths;
} }
// Include NVM bin paths for codex installed via npm global under NVM // Include NVM bin paths for codex installed via npm global under NVM
@@ -1126,7 +1138,8 @@ export function getOpenCodeCliPaths(): string[] {
if (isWindows) { if (isWindows) {
const appData = process.env.APPDATA || path.join(homeDir, 'AppData', 'Roaming'); const appData = process.env.APPDATA || path.join(homeDir, 'AppData', 'Roaming');
const localAppData = process.env.LOCALAPPDATA || path.join(homeDir, 'AppData', 'Local'); const localAppData = process.env.LOCALAPPDATA || path.join(homeDir, 'AppData', 'Local');
return [ const nvmSymlink = process.env.NVM_SYMLINK;
const paths = [
// OpenCode's default installation directory // OpenCode's default installation directory
path.join(homeDir, '.opencode', 'bin', 'opencode.exe'), path.join(homeDir, '.opencode', 'bin', 'opencode.exe'),
path.join(homeDir, '.local', 'bin', 'opencode.exe'), path.join(homeDir, '.local', 'bin', 'opencode.exe'),
@@ -1143,6 +1156,11 @@ export function getOpenCodeCliPaths(): string[] {
path.join(homeDir, 'go', 'bin', 'opencode.exe'), path.join(homeDir, 'go', 'bin', 'opencode.exe'),
path.join(process.env.GOPATH || path.join(homeDir, 'go'), 'bin', 'opencode.exe'), path.join(process.env.GOPATH || path.join(homeDir, 'go'), 'bin', 'opencode.exe'),
]; ];
// nvm4w (NVM for Windows) symlink path
if (nvmSymlink) {
paths.push(path.join(nvmSymlink, 'opencode.cmd'), path.join(nvmSymlink, 'opencode'));
}
return paths;
} }
// Include NVM bin paths for opencode installed via npm global under NVM // Include NVM bin paths for opencode installed via npm global under NVM

View File

@@ -6,6 +6,7 @@
* IMPORTANT: All Codex models use 'codex-' prefix to distinguish from Cursor CLI models * IMPORTANT: All Codex models use 'codex-' prefix to distinguish from Cursor CLI models
*/ */
export type CodexModelId = export type CodexModelId =
| 'codex-gpt-5.3-codex'
| 'codex-gpt-5.2-codex' | 'codex-gpt-5.2-codex'
| 'codex-gpt-5.1-codex-max' | 'codex-gpt-5.1-codex-max'
| 'codex-gpt-5.1-codex-mini' | 'codex-gpt-5.1-codex-mini'
@@ -29,31 +30,38 @@ export interface CodexModelConfig {
* All keys use 'codex-' prefix to distinguish from Cursor CLI models * All keys use 'codex-' prefix to distinguish from Cursor CLI models
*/ */
export const CODEX_MODEL_CONFIG_MAP: Record<CodexModelId, CodexModelConfig> = { export const CODEX_MODEL_CONFIG_MAP: Record<CodexModelId, CodexModelConfig> = {
'codex-gpt-5.3-codex': {
id: 'codex-gpt-5.3-codex',
label: 'GPT-5.3-Codex',
description: 'Latest frontier agentic coding model',
hasThinking: true,
supportsVision: true,
},
'codex-gpt-5.2-codex': { 'codex-gpt-5.2-codex': {
id: 'codex-gpt-5.2-codex', id: 'codex-gpt-5.2-codex',
label: 'GPT-5.2-Codex', label: 'GPT-5.2-Codex',
description: 'Most advanced agentic coding model for complex software engineering', description: 'Frontier agentic coding model',
hasThinking: true, hasThinking: true,
supportsVision: true, supportsVision: true,
}, },
'codex-gpt-5.1-codex-max': { 'codex-gpt-5.1-codex-max': {
id: 'codex-gpt-5.1-codex-max', id: 'codex-gpt-5.1-codex-max',
label: 'GPT-5.1-Codex-Max', label: 'GPT-5.1-Codex-Max',
description: 'Optimized for long-horizon, agentic coding tasks in Codex', description: 'Codex-optimized flagship for deep and fast reasoning',
hasThinking: true, hasThinking: true,
supportsVision: true, supportsVision: true,
}, },
'codex-gpt-5.1-codex-mini': { 'codex-gpt-5.1-codex-mini': {
id: 'codex-gpt-5.1-codex-mini', id: 'codex-gpt-5.1-codex-mini',
label: 'GPT-5.1-Codex-Mini', label: 'GPT-5.1-Codex-Mini',
description: 'Smaller, more cost-effective version for faster workflows', description: 'Optimized for codex. Cheaper, faster, but less capable',
hasThinking: false, hasThinking: false,
supportsVision: true, supportsVision: true,
}, },
'codex-gpt-5.2': { 'codex-gpt-5.2': {
id: 'codex-gpt-5.2', id: 'codex-gpt-5.2',
label: 'GPT-5.2 (Codex)', label: 'GPT-5.2 (Codex)',
description: 'Best general agentic model for tasks across industries and domains via Codex', description: 'Latest frontier model with improvements across knowledge, reasoning and coding',
hasThinking: true, hasThinking: true,
supportsVision: true, supportsVision: true,
}, },

View File

@@ -46,6 +46,7 @@ export type EventType =
| 'dev-server:started' | 'dev-server:started'
| 'dev-server:output' | 'dev-server:output'
| 'dev-server:stopped' | 'dev-server:stopped'
| 'dev-server:url-detected'
| 'test-runner:started' | 'test-runner:started'
| 'test-runner:progress' | 'test-runner:progress'
| 'test-runner:output' | 'test-runner:output'

View File

@@ -196,6 +196,8 @@ export {
PROJECT_SETTINGS_VERSION, PROJECT_SETTINGS_VERSION,
THINKING_TOKEN_BUDGET, THINKING_TOKEN_BUDGET,
getThinkingTokenBudget, getThinkingTokenBudget,
isAdaptiveThinkingModel,
getThinkingLevelsForModel,
// Event hook constants // Event hook constants
EVENT_HOOK_TRIGGER_LABELS, EVENT_HOOK_TRIGGER_LABELS,
// Claude-compatible provider templates (new) // Claude-compatible provider templates (new)

View File

@@ -72,10 +72,18 @@ export const CLAUDE_MODELS: ModelOption[] = [
* Official models from https://developers.openai.com/codex/models/ * Official models from https://developers.openai.com/codex/models/
*/ */
export const CODEX_MODELS: (ModelOption & { hasReasoning?: boolean })[] = [ export const CODEX_MODELS: (ModelOption & { hasReasoning?: boolean })[] = [
{
id: CODEX_MODEL_MAP.gpt53Codex,
label: 'GPT-5.3-Codex',
description: 'Latest frontier agentic coding model.',
badge: 'Premium',
provider: 'codex',
hasReasoning: true,
},
{ {
id: CODEX_MODEL_MAP.gpt52Codex, id: CODEX_MODEL_MAP.gpt52Codex,
label: 'GPT-5.2-Codex', label: 'GPT-5.2-Codex',
description: 'Most advanced agentic coding model for complex software engineering.', description: 'Frontier agentic coding model.',
badge: 'Premium', badge: 'Premium',
provider: 'codex', provider: 'codex',
hasReasoning: true, hasReasoning: true,
@@ -83,7 +91,7 @@ export const CODEX_MODELS: (ModelOption & { hasReasoning?: boolean })[] = [
{ {
id: CODEX_MODEL_MAP.gpt51CodexMax, id: CODEX_MODEL_MAP.gpt51CodexMax,
label: 'GPT-5.1-Codex-Max', label: 'GPT-5.1-Codex-Max',
description: 'Optimized for long-horizon, agentic coding tasks in Codex.', description: 'Codex-optimized flagship for deep and fast reasoning.',
badge: 'Premium', badge: 'Premium',
provider: 'codex', provider: 'codex',
hasReasoning: true, hasReasoning: true,
@@ -91,7 +99,7 @@ export const CODEX_MODELS: (ModelOption & { hasReasoning?: boolean })[] = [
{ {
id: CODEX_MODEL_MAP.gpt51CodexMini, id: CODEX_MODEL_MAP.gpt51CodexMini,
label: 'GPT-5.1-Codex-Mini', label: 'GPT-5.1-Codex-Mini',
description: 'Smaller, more cost-effective version for faster workflows.', description: 'Optimized for codex. Cheaper, faster, but less capable.',
badge: 'Speed', badge: 'Speed',
provider: 'codex', provider: 'codex',
hasReasoning: false, hasReasoning: false,
@@ -99,7 +107,7 @@ export const CODEX_MODELS: (ModelOption & { hasReasoning?: boolean })[] = [
{ {
id: CODEX_MODEL_MAP.gpt52, id: CODEX_MODEL_MAP.gpt52,
label: 'GPT-5.2', label: 'GPT-5.2',
description: 'Best general agentic model for tasks across industries and domains.', description: 'Latest frontier model with improvements across knowledge, reasoning and coding.',
badge: 'Balanced', badge: 'Balanced',
provider: 'codex', provider: 'codex',
hasReasoning: true, hasReasoning: true,
@@ -141,6 +149,7 @@ export const THINKING_LEVELS: ThinkingLevelOption[] = [
{ id: 'medium', label: 'Medium' }, { id: 'medium', label: 'Medium' },
{ id: 'high', label: 'High' }, { id: 'high', label: 'High' },
{ id: 'ultrathink', label: 'Ultrathink' }, { id: 'ultrathink', label: 'Ultrathink' },
{ id: 'adaptive', label: 'Adaptive' },
]; ];
/** /**
@@ -154,6 +163,7 @@ export const THINKING_LEVEL_LABELS: Record<ThinkingLevel, string> = {
medium: 'Med', medium: 'Med',
high: 'High', high: 'High',
ultrathink: 'Ultra', ultrathink: 'Ultra',
adaptive: 'Adaptive',
}; };
/** /**
@@ -211,6 +221,7 @@ export function getModelDisplayName(model: ModelAlias | string): string {
haiku: 'Claude Haiku', haiku: 'Claude Haiku',
sonnet: 'Claude Sonnet', sonnet: 'Claude Sonnet',
opus: 'Claude Opus', opus: 'Claude Opus',
[CODEX_MODEL_MAP.gpt53Codex]: 'GPT-5.3-Codex',
[CODEX_MODEL_MAP.gpt52Codex]: 'GPT-5.2-Codex', [CODEX_MODEL_MAP.gpt52Codex]: 'GPT-5.2-Codex',
[CODEX_MODEL_MAP.gpt51CodexMax]: 'GPT-5.1-Codex-Max', [CODEX_MODEL_MAP.gpt51CodexMax]: 'GPT-5.1-Codex-Max',
[CODEX_MODEL_MAP.gpt51CodexMini]: 'GPT-5.1-Codex-Mini', [CODEX_MODEL_MAP.gpt51CodexMini]: 'GPT-5.1-Codex-Mini',

View File

@@ -18,7 +18,7 @@ export type ClaudeCanonicalId = 'claude-haiku' | 'claude-sonnet' | 'claude-opus'
export const CLAUDE_CANONICAL_MAP: Record<ClaudeCanonicalId, string> = { export const CLAUDE_CANONICAL_MAP: Record<ClaudeCanonicalId, string> = {
'claude-haiku': 'claude-haiku-4-5-20251001', 'claude-haiku': 'claude-haiku-4-5-20251001',
'claude-sonnet': 'claude-sonnet-4-5-20250929', 'claude-sonnet': 'claude-sonnet-4-5-20250929',
'claude-opus': 'claude-opus-4-5-20251101', 'claude-opus': 'claude-opus-4-6',
} as const; } as const;
/** /**
@@ -29,7 +29,7 @@ export const CLAUDE_CANONICAL_MAP: Record<ClaudeCanonicalId, string> = {
export const CLAUDE_MODEL_MAP: Record<string, string> = { export const CLAUDE_MODEL_MAP: Record<string, string> = {
haiku: 'claude-haiku-4-5-20251001', haiku: 'claude-haiku-4-5-20251001',
sonnet: 'claude-sonnet-4-5-20250929', sonnet: 'claude-sonnet-4-5-20250929',
opus: 'claude-opus-4-5-20251101', opus: 'claude-opus-4-6',
} as const; } as const;
/** /**
@@ -50,15 +50,17 @@ export const LEGACY_CLAUDE_ALIAS_MAP: Record<string, ClaudeCanonicalId> = {
*/ */
export const CODEX_MODEL_MAP = { export const CODEX_MODEL_MAP = {
// Recommended Codex-specific models // Recommended Codex-specific models
/** Most advanced agentic coding model for complex software engineering (default for ChatGPT users) */ /** Latest frontier agentic coding model */
gpt53Codex: 'codex-gpt-5.3-codex',
/** Frontier agentic coding model */
gpt52Codex: 'codex-gpt-5.2-codex', gpt52Codex: 'codex-gpt-5.2-codex',
/** Optimized for long-horizon, agentic coding tasks in Codex */ /** Codex-optimized flagship for deep and fast reasoning */
gpt51CodexMax: 'codex-gpt-5.1-codex-max', gpt51CodexMax: 'codex-gpt-5.1-codex-max',
/** Smaller, more cost-effective version for faster workflows */ /** Optimized for codex. Cheaper, faster, but less capable */
gpt51CodexMini: 'codex-gpt-5.1-codex-mini', gpt51CodexMini: 'codex-gpt-5.1-codex-mini',
// General-purpose GPT models (also available in Codex) // General-purpose GPT models (also available in Codex)
/** Best general agentic model for tasks across industries and domains */ /** Latest frontier model with improvements across knowledge, reasoning and coding */
gpt52: 'codex-gpt-5.2', gpt52: 'codex-gpt-5.2',
/** Great for coding and agentic tasks across domains */ /** Great for coding and agentic tasks across domains */
gpt51: 'codex-gpt-5.1', gpt51: 'codex-gpt-5.1',
@@ -71,6 +73,7 @@ export const CODEX_MODEL_IDS = Object.values(CODEX_MODEL_MAP);
* These models can use reasoning.effort parameter * These models can use reasoning.effort parameter
*/ */
export const REASONING_CAPABLE_MODELS = new Set([ export const REASONING_CAPABLE_MODELS = new Set([
CODEX_MODEL_MAP.gpt53Codex,
CODEX_MODEL_MAP.gpt52Codex, CODEX_MODEL_MAP.gpt52Codex,
CODEX_MODEL_MAP.gpt51CodexMax, CODEX_MODEL_MAP.gpt51CodexMax,
CODEX_MODEL_MAP.gpt52, CODEX_MODEL_MAP.gpt52,
@@ -96,7 +99,7 @@ export function getAllCodexModelIds(): CodexModelId[] {
* Uses canonical prefixed IDs for consistent routing. * Uses canonical prefixed IDs for consistent routing.
*/ */
export const DEFAULT_MODELS = { export const DEFAULT_MODELS = {
claude: 'claude-opus-4-5-20251101', claude: 'claude-opus-4-6',
cursor: 'cursor-auto', // Cursor's recommended default (with prefix) cursor: 'cursor-auto', // Cursor's recommended default (with prefix)
codex: CODEX_MODEL_MAP.gpt52Codex, // GPT-5.2-Codex is the most advanced agentic coding model codex: CODEX_MODEL_MAP.gpt52Codex, // GPT-5.2-Codex is the most advanced agentic coding model
} as const; } as const;

View File

@@ -213,7 +213,7 @@ export type PlanningMode = 'skip' | 'lite' | 'spec' | 'full';
export type ServerLogLevel = 'error' | 'warn' | 'info' | 'debug'; export type ServerLogLevel = 'error' | 'warn' | 'info' | 'debug';
/** ThinkingLevel - Extended thinking levels for Claude models (reasoning intensity) */ /** ThinkingLevel - Extended thinking levels for Claude models (reasoning intensity) */
export type ThinkingLevel = 'none' | 'low' | 'medium' | 'high' | 'ultrathink'; export type ThinkingLevel = 'none' | 'low' | 'medium' | 'high' | 'ultrathink' | 'adaptive';
/** /**
* SidebarStyle - Sidebar layout style options * SidebarStyle - Sidebar layout style options
@@ -237,6 +237,7 @@ export const THINKING_TOKEN_BUDGET: Record<ThinkingLevel, number | undefined> =
medium: 10000, // Light reasoning medium: 10000, // Light reasoning
high: 16000, // Complex tasks (recommended starting point) high: 16000, // Complex tasks (recommended starting point)
ultrathink: 32000, // Maximum safe (above this risks timeouts) ultrathink: 32000, // Maximum safe (above this risks timeouts)
adaptive: undefined, // Adaptive thinking (Opus 4.6) - SDK handles token allocation
}; };
/** /**
@@ -247,6 +248,26 @@ export function getThinkingTokenBudget(level: ThinkingLevel | undefined): number
return THINKING_TOKEN_BUDGET[level]; return THINKING_TOKEN_BUDGET[level];
} }
/**
* Check if a model uses adaptive thinking (Opus 4.6+)
* Adaptive thinking models let the SDK decide token allocation automatically.
*/
export function isAdaptiveThinkingModel(model: string): boolean {
return model.includes('opus-4-6') || model === 'claude-opus';
}
/**
* Get the available thinking levels for a given model.
* - Opus 4.6: Only 'none' and 'adaptive' (SDK handles token allocation)
* - Others: Full range of manual thinking levels
*/
export function getThinkingLevelsForModel(model: string): ThinkingLevel[] {
if (isAdaptiveThinkingModel(model)) {
return ['none', 'adaptive'];
}
return ['none', 'low', 'medium', 'high', 'ultrathink'];
}
/** ModelProvider - AI model provider for credentials and API key management */ /** ModelProvider - AI model provider for credentials and API key management */
export type ModelProvider = 'claude' | 'cursor' | 'codex' | 'opencode' | 'gemini' | 'copilot'; export type ModelProvider = 'claude' | 'cursor' | 'codex' | 'opencode' | 'gemini' | 'copilot';

18
package-lock.json generated
View File

@@ -35,7 +35,7 @@
"version": "0.13.0", "version": "0.13.0",
"license": "SEE LICENSE IN LICENSE", "license": "SEE LICENSE IN LICENSE",
"dependencies": { "dependencies": {
"@anthropic-ai/claude-agent-sdk": "0.1.76", "@anthropic-ai/claude-agent-sdk": "0.2.32",
"@automaker/dependency-resolver": "1.0.0", "@automaker/dependency-resolver": "1.0.0",
"@automaker/git-utils": "1.0.0", "@automaker/git-utils": "1.0.0",
"@automaker/model-resolver": "1.0.0", "@automaker/model-resolver": "1.0.0",
@@ -45,7 +45,7 @@
"@automaker/utils": "1.0.0", "@automaker/utils": "1.0.0",
"@github/copilot-sdk": "^0.1.16", "@github/copilot-sdk": "^0.1.16",
"@modelcontextprotocol/sdk": "1.25.2", "@modelcontextprotocol/sdk": "1.25.2",
"@openai/codex-sdk": "^0.77.0", "@openai/codex-sdk": "^0.98.0",
"cookie-parser": "1.4.7", "cookie-parser": "1.4.7",
"cors": "2.8.5", "cors": "2.8.5",
"dotenv": "17.2.3", "dotenv": "17.2.3",
@@ -657,9 +657,9 @@
} }
}, },
"node_modules/@anthropic-ai/claude-agent-sdk": { "node_modules/@anthropic-ai/claude-agent-sdk": {
"version": "0.1.76", "version": "0.2.32",
"resolved": "https://registry.npmjs.org/@anthropic-ai/claude-agent-sdk/-/claude-agent-sdk-0.1.76.tgz", "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-agent-sdk/-/claude-agent-sdk-0.2.32.tgz",
"integrity": "sha512-s7RvpXoFaLXLG7A1cJBAPD8ilwOhhc/12fb5mJXRuD561o4FmPtQ+WRfuy9akMmrFRfLsKv8Ornw3ClGAPL2fw==", "integrity": "sha512-8AtsSx/M9jxd0ihS08eqa7VireTEuwQy0i1+6ZJX93LECT6Svlf47dPJiAm7JB+BhVMmwTfQeS6x1akIcCfvbQ==",
"license": "SEE LICENSE IN README.md", "license": "SEE LICENSE IN README.md",
"engines": { "engines": {
"node": ">=18.0.0" "node": ">=18.0.0"
@@ -675,7 +675,7 @@
"@img/sharp-win32-x64": "^0.33.5" "@img/sharp-win32-x64": "^0.33.5"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.24.1 || ^4.0.0" "zod": "^4.0.0"
} }
}, },
"node_modules/@automaker/dependency-resolver": { "node_modules/@automaker/dependency-resolver": {
@@ -3949,9 +3949,9 @@
} }
}, },
"node_modules/@openai/codex-sdk": { "node_modules/@openai/codex-sdk": {
"version": "0.77.0", "version": "0.98.0",
"resolved": "https://registry.npmjs.org/@openai/codex-sdk/-/codex-sdk-0.77.0.tgz", "resolved": "https://registry.npmjs.org/@openai/codex-sdk/-/codex-sdk-0.98.0.tgz",
"integrity": "sha512-bvJQ4dASnZ7jgfxmseViQwdRupHxs0TwHSZFeYB0gpdOAXnWwDWdGJRCMyphLSHwExRp27JNOk7EBFVmZRBanQ==", "integrity": "sha512-TbPgrBpuSNMJyOXys0HNsh6UoP5VIHu1fVh2KDdACi5XyB0vuPtzBZC+qOsxHz7WXEQPFlomPLyxS6JnE5Okmg==",
"license": "Apache-2.0", "license": "Apache-2.0",
"engines": { "engines": {
"node": ">=18" "node": ">=18"