feat(auto-mode): skip memory extraction when Claude not configured and add reasoning effort support

- Skip learning extraction when ANTHROPIC_API_KEY is not available
- Add reasoningEffort parameter to simpleQuery for Codex model configuration
- Add stdinData support to spawnProcess for CLI stdin input
- Update UI API types for model override with reasoning support

Author: DhanushSantosh
Date: 2026-01-14 00:50:33 +05:30
Parent: 253ab94646
Commit: fa3ead0e8d
6 changed files with 85 additions and 19 deletions

View File

@@ -15,7 +15,13 @@
  */
 import { ProviderFactory } from './provider-factory.js';
-import type { ProviderMessage, ContentBlock, ThinkingLevel } from '@automaker/types';
+import type {
+  ProviderMessage,
+  ContentBlock,
+  ThinkingLevel,
+  ReasoningEffort,
+} from '@automaker/types';
+import { stripProviderPrefix } from '@automaker/types';
 
 /**
  * Options for simple query execution
@@ -42,6 +48,8 @@ export interface SimpleQueryOptions {
   };
   /** Thinking level for Claude models */
   thinkingLevel?: ThinkingLevel;
+  /** Reasoning effort for Codex/OpenAI models */
+  reasoningEffort?: ReasoningEffort;
   /** If true, runs in read-only mode (no file writes) */
   readOnly?: boolean;
   /** Setting sources for CLAUDE.md loading */
@@ -97,6 +105,7 @@ const DEFAULT_MODEL = 'claude-sonnet-4-20250514';
 export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQueryResult> {
   const model = options.model || DEFAULT_MODEL;
   const provider = ProviderFactory.getProviderForModel(model);
+  const bareModel = stripProviderPrefix(model);
 
   let responseText = '';
   let structuredOutput: Record<string, unknown> | undefined;
@@ -104,7 +113,8 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
   // Build provider options
   const providerOptions = {
     prompt: options.prompt,
-    model: model,
+    model: bareModel,
+    originalModel: model,
     cwd: options.cwd,
     systemPrompt: options.systemPrompt,
     maxTurns: options.maxTurns ?? 1,
@@ -112,6 +122,7 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
     abortController: options.abortController,
     outputFormat: options.outputFormat,
     thinkingLevel: options.thinkingLevel,
+    reasoningEffort: options.reasoningEffort,
     readOnly: options.readOnly,
     settingSources: options.settingSources,
   };
@@ -176,6 +187,7 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
 export async function streamingQuery(options: StreamingQueryOptions): Promise<SimpleQueryResult> {
   const model = options.model || DEFAULT_MODEL;
   const provider = ProviderFactory.getProviderForModel(model);
+  const bareModel = stripProviderPrefix(model);
 
   let responseText = '';
   let structuredOutput: Record<string, unknown> | undefined;
@@ -183,7 +195,8 @@ export async function streamingQuery(options: StreamingQueryOptions): Promise<Si
   // Build provider options
   const providerOptions = {
     prompt: options.prompt,
-    model: model,
+    model: bareModel,
+    originalModel: model,
     cwd: options.cwd,
     systemPrompt: options.systemPrompt,
     maxTurns: options.maxTurns ?? 250,
@@ -191,6 +204,7 @@ export async function streamingQuery(options: StreamingQueryOptions): Promise<Si
     abortController: options.abortController,
     outputFormat: options.outputFormat,
     thinkingLevel: options.thinkingLevel,
+    reasoningEffort: options.reasoningEffort,
     readOnly: options.readOnly,
     settingSources: options.settingSources,
   };
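
Usage note: a minimal sketch of calling simpleQuery with the new reasoningEffort option. The model id, effort value, and result handling below are illustrative assumptions, not taken from this diff.

import { simpleQuery } from './simple-query.js';

async function summarizeChanges() {
  const result = await simpleQuery({
    prompt: 'Summarize the recent changes.',
    model: 'openai/gpt-5-codex', // hypothetical id; the provider prefix is stripped via stripProviderPrefix()
    cwd: '/path/to/project',
    maxTurns: 1,
    reasoningEffort: 'high', // assumed ReasoningEffort value; forwarded to providerOptions for Codex/OpenAI models
  });
  console.log(result); // SimpleQueryResult; exact fields are not shown in this diff
}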

View File

@@ -21,7 +21,7 @@ import type {
   ThinkingLevel,
   PlanningMode,
 } from '@automaker/types';
-import { DEFAULT_PHASE_MODELS, stripProviderPrefix } from '@automaker/types';
+import { DEFAULT_PHASE_MODELS, isClaudeModel, stripProviderPrefix } from '@automaker/types';
 import {
   buildPromptWithImages,
   classifyError,
@@ -3586,10 +3586,29 @@ If nothing notable: {"learnings": []}`;
     const phaseModelEntry =
       settings?.phaseModels?.memoryExtractionModel || DEFAULT_PHASE_MODELS.memoryExtractionModel;
     const { model } = resolvePhaseModel(phaseModelEntry);
+    const hasClaudeKey = Boolean(process.env.ANTHROPIC_API_KEY);
+    let resolvedModel = model;
+    if (isClaudeModel(model) && !hasClaudeKey) {
+      const fallbackModel = feature.model
+        ? resolveModelString(feature.model, DEFAULT_MODELS.claude)
+        : null;
+      if (fallbackModel && !isClaudeModel(fallbackModel)) {
+        console.log(
+          `[AutoMode] Claude not configured for memory extraction; using feature model "${fallbackModel}".`
+        );
+        resolvedModel = fallbackModel;
+      } else {
+        console.log(
+          '[AutoMode] Claude not configured for memory extraction; skipping learning extraction.'
+        );
+        return;
+      }
+    }
+
     const result = await simpleQuery({
       prompt: userPrompt,
-      model,
+      model: resolvedModel,
       cwd: projectPath,
       maxTurns: 1,
       allowedTools: [],
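
For context, the fallback decision above can be read as the following standalone sketch (the helper name is hypothetical; the real change inlines this logic):

// Hypothetical helper mirroring the inlined guard above.
function pickMemoryExtractionModel(
  phaseModel: string,
  featureModel: string | null,
  hasClaudeKey: boolean
): string | null {
  if (!isClaudeModel(phaseModel) || hasClaudeKey) return phaseModel; // Claude usable, or not needed
  if (featureModel && !isClaudeModel(featureModel)) return featureModel; // reuse the feature's non-Claude model
  return null; // caller logs and skips learning extraction entirely
}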

View File

@@ -1,6 +1,6 @@
 import { useState, useCallback, useMemo } from 'react';
 import { useAppStore } from '@/store/app-store';
-import type { ModelAlias, CursorModelId, PhaseModelKey, PhaseModelEntry } from '@automaker/types';
+import type { ModelId, PhaseModelKey, PhaseModelEntry } from '@automaker/types';
 import { DEFAULT_PHASE_MODELS } from '@automaker/types';
 
 export interface UseModelOverrideOptions {
@@ -14,7 +14,7 @@ export interface UseModelOverrideResult {
   /** The effective model entry (override or global default) */
   effectiveModelEntry: PhaseModelEntry;
   /** The effective model string (for backward compatibility with APIs that only accept strings) */
-  effectiveModel: ModelAlias | CursorModelId;
+  effectiveModel: ModelId;
   /** Whether the model is currently overridden */
   isOverridden: boolean;
   /** Set a model override */
@@ -32,7 +32,7 @@ export interface UseModelOverrideResult {
  */
 function normalizeEntry(entry: PhaseModelEntry | string): PhaseModelEntry {
   if (typeof entry === 'string') {
-    return { model: entry as ModelAlias | CursorModelId };
+    return { model: entry as ModelId };
   }
   return entry;
 }
@@ -40,9 +40,9 @@ function normalizeEntry(entry: PhaseModelEntry | string): PhaseModelEntry {
 /**
  * Extract model string from PhaseModelEntry or string
  */
-function extractModel(entry: PhaseModelEntry | string): ModelAlias | CursorModelId {
+function extractModel(entry: PhaseModelEntry | string): ModelId {
   if (typeof entry === 'string') {
-    return entry as ModelAlias | CursorModelId;
+    return entry as ModelId;
   }
   return entry.model;
 }
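
Behavior is unchanged aside from the widened ModelId type; roughly (model ids below are placeholders):

normalizeEntry('gpt-5-codex');       // → { model: 'gpt-5-codex' }
normalizeEntry({ model: 'sonnet' }); // → returned as-is
extractModel({ model: 'sonnet' });   // → 'sonnet'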

View File

@@ -10,7 +10,9 @@ import type {
   IssueValidationResponse,
   IssueValidationEvent,
   StoredValidation,
-  AgentModel,
+  ModelId,
+  ThinkingLevel,
+  ReasoningEffort,
   GitHubComment,
   IssueCommentsResult,
   Idea,
@@ -314,7 +316,9 @@ export interface GitHubAPI {
   validateIssue: (
     projectPath: string,
     issue: IssueValidationInput,
-    model?: AgentModel
+    model?: ModelId,
+    thinkingLevel?: ThinkingLevel,
+    reasoningEffort?: ReasoningEffort
   ) => Promise<{ success: boolean; message?: string; issueNumber?: number; error?: string }>;
   /** Check validation status for an issue or all issues */
   getValidationStatus: (
@@ -1294,6 +1298,7 @@ interface SetupAPI {
     success: boolean;
     hasAnthropicKey: boolean;
     hasGoogleKey: boolean;
+    hasOpenaiKey: boolean;
   }>;
   deleteApiKey: (
     provider: string
@@ -1377,6 +1382,7 @@ function createMockSetupAPI(): SetupAPI {
       success: true,
      hasAnthropicKey: false,
       hasGoogleKey: false,
+      hasOpenaiKey: false,
     };
   },
@@ -3008,8 +3014,20 @@ function createMockGitHubAPI(): GitHubAPI {
       mergedPRs: [],
     };
   },
-  validateIssue: async (projectPath: string, issue: IssueValidationInput, model?: AgentModel) => {
-    console.log('[Mock] Starting async validation:', { projectPath, issue, model });
+  validateIssue: async (
+    projectPath: string,
+    issue: IssueValidationInput,
+    model?: ModelId,
+    thinkingLevel?: ThinkingLevel,
+    reasoningEffort?: ReasoningEffort
+  ) => {
+    console.log('[Mock] Starting async validation:', {
+      projectPath,
+      issue,
+      model,
+      thinkingLevel,
+      reasoningEffort,
+    });
     // Simulate async validation in background
     setTimeout(() => {
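
A hedged sketch of the widened signature from the caller's side; the issue fields, model id, and effort value are placeholders, and the casts exist only because the concrete types are not shown in this diff:

declare const github: GitHubAPI;

await github.validateIssue(
  '/path/to/project',
  { issueNumber: 42, title: 'Crash on startup' } as IssueValidationInput, // placeholder shape
  'gpt-5-codex' as ModelId,      // placeholder model id
  undefined,                     // thinkingLevel: only meaningful for Claude models
  'medium' as ReasoningEffort    // placeholder effort value
);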

View File

@@ -36,6 +36,7 @@ import type {
 import type { Message, SessionListItem } from '@/types/electron';
 import type { Feature, ClaudeUsageResponse, CodexUsageResponse } from '@/store/app-store';
 import type { WorktreeAPI, GitAPI, ModelDefinition, ProviderStatus } from '@/types/electron';
+import type { ModelId, ThinkingLevel, ReasoningEffort } from '@automaker/types';
 import { getGlobalFileBrowser } from '@/contexts/file-browser-context';
 
 const logger = createLogger('HttpClient');
@@ -1173,6 +1174,7 @@ export class HttpApiClient implements ElectronAPI {
       success: boolean;
       hasAnthropicKey: boolean;
       hasGoogleKey: boolean;
+      hasOpenaiKey: boolean;
     }> => this.get('/api/setup/api-keys'),
 
   getPlatform: (): Promise<{
@@ -1838,9 +1840,17 @@ export class HttpApiClient implements ElectronAPI {
   validateIssue: (
     projectPath: string,
     issue: IssueValidationInput,
-    model?: string,
-    thinkingLevel?: string
-  ) => this.post('/api/github/validate-issue', { projectPath, ...issue, model, thinkingLevel }),
+    model?: ModelId,
+    thinkingLevel?: ThinkingLevel,
+    reasoningEffort?: ReasoningEffort
+  ) =>
+    this.post('/api/github/validate-issue', {
+      projectPath,
+      ...issue,
+      model,
+      thinkingLevel,
+      reasoningEffort,
+    }),
 
   getValidationStatus: (projectPath: string, issueNumber?: number) =>
     this.post('/api/github/validation-status', { projectPath, issueNumber }),
   stopValidation: (projectPath: string, issueNumber: number) =>
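
The request body now carries the extra field alongside the existing ones; roughly (values illustrative, field set depends on IssueValidationInput):

// POST /api/github/validate-issue
// {
//   "projectPath": "/path/to/project",
//   ...IssueValidationInput fields,
//   "model": "gpt-5-codex",
//   "reasoningEffort": "medium"
// }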

View File

@@ -190,7 +190,7 @@ export async function* spawnJSONLProcess(options: SubprocessOptions): AsyncGener
  * Spawns a subprocess and collects all output
  */
 export async function spawnProcess(options: SubprocessOptions): Promise<SubprocessResult> {
-  const { command, args, cwd, env, abortController } = options;
+  const { command, args, cwd, env, abortController, stdinData } = options;
 
   const processEnv = {
     ...process.env,
@@ -204,10 +204,15 @@ export async function spawnProcess(options: SubprocessOptions): Promise<Subproce
   const childProcess = spawn(command, args, {
     cwd,
     env: processEnv,
-    stdio: ['ignore', 'pipe', 'pipe'],
+    stdio: [stdinData ? 'pipe' : 'ignore', 'pipe', 'pipe'],
     shell: needsShell,
   });
 
+  if (stdinData && childProcess.stdin) {
+    childProcess.stdin.write(stdinData);
+    childProcess.stdin.end();
+  }
+
   let stdout = '';
   let stderr = '';
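
A minimal sketch of the new stdin path, using `cat` so the example does not assume any particular CLI's flags; SubprocessOptions/SubprocessResult fields beyond those visible in this diff are assumptions:

const result = await spawnProcess({
  command: 'cat',                        // echoes stdin back, exercising the 'pipe' branch
  args: [],
  cwd: process.cwd(),
  stdinData: 'prompt delivered via stdin\n',
});
console.log(result.stdout);              // "prompt delivered via stdin"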