mirror of https://github.com/AutoMaker-Org/automaker.git
synced 2026-02-01 08:13:37 +00:00

Merge branch 'v0.11.0rc' into feat/dev-server-log-panel

Resolved conflict in worktree-panel.tsx by combining imports:
- DevServerLogsPanel from this branch
- WorktreeMobileDropdown, WorktreeActionsDropdown, BranchSwitchDropdown from v0.11.0rc

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
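A minimal sketch of what the merged import block in worktree-panel.tsx might look like; only the component names come from the conflict description above, and the module paths are assumptions since that file's diff is not shown here:

// From feat/dev-server-log-panel (path assumed):
import { DevServerLogsPanel } from './dev-server-logs-panel';
// From v0.11.0rc (paths assumed):
import { WorktreeMobileDropdown } from './worktree-mobile-dropdown';
import { WorktreeActionsDropdown } from './worktree-actions-dropdown';
import { BranchSwitchDropdown } from './branch-switch-dropdown';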
@@ -222,7 +222,7 @@ app.use('/api/sessions', createSessionsRoutes(agentService));
 app.use('/api/features', createFeaturesRoutes(featureLoader));
 app.use('/api/auto-mode', createAutoModeRoutes(autoModeService));
 app.use('/api/enhance-prompt', createEnhancePromptRoutes(settingsService));
-app.use('/api/worktree', createWorktreeRoutes(events));
+app.use('/api/worktree', createWorktreeRoutes(events, settingsService));
 app.use('/api/git', createGitRoutes());
 app.use('/api/suggestions', createSuggestionsRoutes(events, settingsService));
 app.use('/api/models', createModelsRoutes());

@@ -45,6 +45,7 @@ import {
   getCodexTodoToolName,
 } from './codex-tool-mapping.js';
 import { SettingsService } from '../services/settings-service.js';
+import { createTempEnvOverride } from '../lib/auth-utils.js';
 import { checkSandboxCompatibility } from '../lib/sdk-options.js';
 import { CODEX_MODELS } from './codex-models.js';

@@ -142,6 +143,7 @@ type CodexExecutionMode = typeof CODEX_EXECUTION_MODE_CLI | typeof CODEX_EXECUTI
 type CodexExecutionPlan = {
   mode: CodexExecutionMode;
   cliPath: string | null;
+  openAiApiKey?: string | null;
 };
 
 const ALLOWED_ENV_VARS = [
@@ -166,6 +168,22 @@ function buildEnv(): Record<string, string> {
   return env;
 }
 
+async function resolveOpenAiApiKey(): Promise<string | null> {
+  const envKey = process.env[OPENAI_API_KEY_ENV];
+  if (envKey) {
+    return envKey;
+  }
+
+  try {
+    const settingsService = new SettingsService(getCodexSettingsDir());
+    const credentials = await settingsService.getCredentials();
+    const storedKey = credentials.apiKeys.openai?.trim();
+    return storedKey ? storedKey : null;
+  } catch {
+    return null;
+  }
+}
+
 function hasMcpServersConfigured(options: ExecuteOptions): boolean {
   return Boolean(options.mcpServers && Object.keys(options.mcpServers).length > 0);
 }
@@ -181,18 +199,21 @@ function isSdkEligible(options: ExecuteOptions): boolean {
 async function resolveCodexExecutionPlan(options: ExecuteOptions): Promise<CodexExecutionPlan> {
   const cliPath = await findCodexCliPath();
   const authIndicators = await getCodexAuthIndicators();
-  const hasApiKey = Boolean(process.env[OPENAI_API_KEY_ENV]);
+  const openAiApiKey = await resolveOpenAiApiKey();
+  const hasApiKey = Boolean(openAiApiKey);
   const cliAuthenticated = authIndicators.hasOAuthToken || authIndicators.hasApiKey || hasApiKey;
   const sdkEligible = isSdkEligible(options);
   const cliAvailable = Boolean(cliPath);
 
+  if (hasApiKey) {
+    return {
+      mode: CODEX_EXECUTION_MODE_SDK,
+      cliPath,
+      openAiApiKey,
+    };
+  }
+
   if (sdkEligible) {
-    if (hasApiKey) {
-      return {
-        mode: CODEX_EXECUTION_MODE_SDK,
-        cliPath,
-      };
-    }
     if (!cliAvailable) {
       throw new Error(ERROR_CODEX_SDK_AUTH_REQUIRED);
     }
@@ -209,6 +230,7 @@ async function resolveCodexExecutionPlan(options: ExecuteOptions): Promise<Codex
   return {
     mode: CODEX_EXECUTION_MODE_CLI,
     cliPath,
+    openAiApiKey,
   };
 }

@@ -701,7 +723,14 @@ export class CodexProvider extends BaseProvider {
 
     const executionPlan = await resolveCodexExecutionPlan(options);
     if (executionPlan.mode === CODEX_EXECUTION_MODE_SDK) {
-      yield* executeCodexSdkQuery(options, combinedSystemPrompt);
+      const cleanupEnv = executionPlan.openAiApiKey
+        ? createTempEnvOverride({ [OPENAI_API_KEY_ENV]: executionPlan.openAiApiKey })
+        : null;
+      try {
+        yield* executeCodexSdkQuery(options, combinedSystemPrompt);
+      } finally {
+        cleanupEnv?.();
+      }
       return;
     }

@@ -780,11 +809,16 @@ export class CodexProvider extends BaseProvider {
       '-', // Read prompt from stdin to avoid shell escaping issues
     ];
 
+    const envOverrides = buildEnv();
+    if (executionPlan.openAiApiKey && !envOverrides[OPENAI_API_KEY_ENV]) {
+      envOverrides[OPENAI_API_KEY_ENV] = executionPlan.openAiApiKey;
+    }
+
     const stream = spawnJSONLProcess({
       command: commandPath,
       args,
       cwd: options.cwd,
-      env: buildEnv(),
+      env: envOverrides,
       abortController: options.abortController,
       timeout: DEFAULT_TIMEOUT_MS,
       stdinData: promptText, // Pass prompt via stdin
@@ -971,7 +1005,7 @@ export class CodexProvider extends BaseProvider {
 
   async detectInstallation(): Promise<InstallationStatus> {
     const cliPath = await findCodexCliPath();
-    const hasApiKey = !!process.env[OPENAI_API_KEY_ENV];
+    const hasApiKey = Boolean(await resolveOpenAiApiKey());
     const authIndicators = await getCodexAuthIndicators();
     const installed = !!cliPath;
 
@@ -1013,7 +1047,7 @@ export class CodexProvider extends BaseProvider {
   */
   async checkAuth(): Promise<CodexAuthStatus> {
     const cliPath = await findCodexCliPath();
-    const hasApiKey = !!process.env[OPENAI_API_KEY_ENV];
+    const hasApiKey = Boolean(await resolveOpenAiApiKey());
     const authIndicators = await getCodexAuthIndicators();
 
     // Check for API key in environment

@@ -15,7 +15,13 @@
  */
 
 import { ProviderFactory } from './provider-factory.js';
-import type { ProviderMessage, ContentBlock, ThinkingLevel } from '@automaker/types';
+import type {
+  ProviderMessage,
+  ContentBlock,
+  ThinkingLevel,
+  ReasoningEffort,
+} from '@automaker/types';
+import { stripProviderPrefix } from '@automaker/types';
 
 /**
  * Options for simple query execution
@@ -42,6 +48,8 @@ export interface SimpleQueryOptions {
   };
   /** Thinking level for Claude models */
   thinkingLevel?: ThinkingLevel;
+  /** Reasoning effort for Codex/OpenAI models */
+  reasoningEffort?: ReasoningEffort;
   /** If true, runs in read-only mode (no file writes) */
   readOnly?: boolean;
   /** Setting sources for CLAUDE.md loading */
@@ -97,6 +105,7 @@ const DEFAULT_MODEL = 'claude-sonnet-4-20250514';
 export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQueryResult> {
   const model = options.model || DEFAULT_MODEL;
   const provider = ProviderFactory.getProviderForModel(model);
+  const bareModel = stripProviderPrefix(model);
 
   let responseText = '';
   let structuredOutput: Record<string, unknown> | undefined;
@@ -104,7 +113,8 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
   // Build provider options
   const providerOptions = {
     prompt: options.prompt,
-    model: model,
+    model: bareModel,
+    originalModel: model,
     cwd: options.cwd,
     systemPrompt: options.systemPrompt,
     maxTurns: options.maxTurns ?? 1,
@@ -112,6 +122,7 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
     abortController: options.abortController,
     outputFormat: options.outputFormat,
     thinkingLevel: options.thinkingLevel,
+    reasoningEffort: options.reasoningEffort,
     readOnly: options.readOnly,
     settingSources: options.settingSources,
   };
@@ -176,6 +187,7 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
 export async function streamingQuery(options: StreamingQueryOptions): Promise<SimpleQueryResult> {
   const model = options.model || DEFAULT_MODEL;
   const provider = ProviderFactory.getProviderForModel(model);
+  const bareModel = stripProviderPrefix(model);
 
   let responseText = '';
   let structuredOutput: Record<string, unknown> | undefined;
@@ -183,7 +195,8 @@ export async function streamingQuery(options: StreamingQueryOptions): Promise<Si
   // Build provider options
   const providerOptions = {
     prompt: options.prompt,
-    model: model,
+    model: bareModel,
+    originalModel: model,
     cwd: options.cwd,
     systemPrompt: options.systemPrompt,
     maxTurns: options.maxTurns ?? 250,
@@ -191,6 +204,7 @@ export async function streamingQuery(options: StreamingQueryOptions): Promise<Si
     abortController: options.abortController,
     outputFormat: options.outputFormat,
     thinkingLevel: options.thinkingLevel,
+    reasoningEffort: options.reasoningEffort,
     readOnly: options.readOnly,
     settingSources: options.settingSources,
   };

@@ -5,6 +5,43 @@
|
||||
import type { Request, Response } from 'express';
|
||||
import { execAsync, execEnv, getErrorMessage, logError } from './common.js';
|
||||
|
||||
const GIT_REMOTE_ORIGIN_COMMAND = 'git remote get-url origin';
|
||||
const GH_REPO_VIEW_COMMAND = 'gh repo view --json name,owner';
|
||||
const GITHUB_REPO_URL_PREFIX = 'https://github.com/';
|
||||
const GITHUB_HTTPS_REMOTE_REGEX = /https:\/\/github\.com\/([^/]+)\/([^/.]+)/;
|
||||
const GITHUB_SSH_REMOTE_REGEX = /git@github\.com:([^/]+)\/([^/.]+)/;
|
||||
|
||||
interface GhRepoViewResponse {
|
||||
name?: string;
|
||||
owner?: {
|
||||
login?: string;
|
||||
};
|
||||
}
|
||||
|
||||
async function resolveRepoFromGh(projectPath: string): Promise<{
|
||||
owner: string;
|
||||
repo: string;
|
||||
} | null> {
|
||||
try {
|
||||
const { stdout } = await execAsync(GH_REPO_VIEW_COMMAND, {
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
});
|
||||
|
||||
const data = JSON.parse(stdout) as GhRepoViewResponse;
|
||||
const owner = typeof data.owner?.login === 'string' ? data.owner.login : null;
|
||||
const repo = typeof data.name === 'string' ? data.name : null;
|
||||
|
||||
if (!owner || !repo) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { owner, repo };
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export interface GitHubRemoteStatus {
|
||||
hasGitHubRemote: boolean;
|
||||
remoteUrl: string | null;
|
||||
@@ -21,19 +58,38 @@ export async function checkGitHubRemote(projectPath: string): Promise<GitHubRemo
|
||||
};
|
||||
|
||||
try {
|
||||
// Get the remote URL (origin by default)
|
||||
const { stdout } = await execAsync('git remote get-url origin', {
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
});
|
||||
let remoteUrl = '';
|
||||
try {
|
||||
// Get the remote URL (origin by default)
|
||||
const { stdout } = await execAsync(GIT_REMOTE_ORIGIN_COMMAND, {
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
});
|
||||
remoteUrl = stdout.trim();
|
||||
status.remoteUrl = remoteUrl || null;
|
||||
} catch {
|
||||
// Ignore missing origin remote
|
||||
}
|
||||
|
||||
const remoteUrl = stdout.trim();
|
||||
status.remoteUrl = remoteUrl;
|
||||
const ghRepo = await resolveRepoFromGh(projectPath);
|
||||
if (ghRepo) {
|
||||
status.hasGitHubRemote = true;
|
||||
status.owner = ghRepo.owner;
|
||||
status.repo = ghRepo.repo;
|
||||
if (!status.remoteUrl) {
|
||||
status.remoteUrl = `${GITHUB_REPO_URL_PREFIX}${ghRepo.owner}/${ghRepo.repo}`;
|
||||
}
|
||||
return status;
|
||||
}
|
||||
|
||||
// Check if it's a GitHub URL
|
||||
// Formats: https://github.com/owner/repo.git, git@github.com:owner/repo.git
|
||||
const httpsMatch = remoteUrl.match(/https:\/\/github\.com\/([^/]+)\/([^/.]+)/);
|
||||
const sshMatch = remoteUrl.match(/git@github\.com:([^/]+)\/([^/.]+)/);
|
||||
if (!remoteUrl) {
|
||||
return status;
|
||||
}
|
||||
|
||||
const httpsMatch = remoteUrl.match(GITHUB_HTTPS_REMOTE_REGEX);
|
||||
const sshMatch = remoteUrl.match(GITHUB_SSH_REMOTE_REGEX);
|
||||
|
||||
const match = httpsMatch || sshMatch;
|
||||
if (match) {
|
||||
|
||||
@@ -25,19 +25,24 @@ interface GraphQLComment {
|
||||
updatedAt: string;
|
||||
}
|
||||
|
||||
interface GraphQLCommentConnection {
|
||||
totalCount: number;
|
||||
pageInfo: {
|
||||
hasNextPage: boolean;
|
||||
endCursor: string | null;
|
||||
};
|
||||
nodes: GraphQLComment[];
|
||||
}
|
||||
|
||||
interface GraphQLIssueOrPullRequest {
|
||||
__typename: 'Issue' | 'PullRequest';
|
||||
comments: GraphQLCommentConnection;
|
||||
}
|
||||
|
||||
interface GraphQLResponse {
|
||||
data?: {
|
||||
repository?: {
|
||||
issue?: {
|
||||
comments: {
|
||||
totalCount: number;
|
||||
pageInfo: {
|
||||
hasNextPage: boolean;
|
||||
endCursor: string | null;
|
||||
};
|
||||
nodes: GraphQLComment[];
|
||||
};
|
||||
};
|
||||
issueOrPullRequest?: GraphQLIssueOrPullRequest | null;
|
||||
};
|
||||
};
|
||||
errors?: Array<{ message: string }>;
|
||||
@@ -45,6 +50,7 @@ interface GraphQLResponse {
|
||||
|
||||
/** Timeout for GitHub API requests in milliseconds */
|
||||
const GITHUB_API_TIMEOUT_MS = 30000;
|
||||
const COMMENTS_PAGE_SIZE = 50;
|
||||
|
||||
/**
|
||||
* Validate cursor format (GraphQL cursors are typically base64 strings)
|
||||
@@ -54,7 +60,7 @@ function isValidCursor(cursor: string): boolean {
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch comments for a specific issue using GitHub GraphQL API
|
||||
* Fetch comments for a specific issue or pull request using GitHub GraphQL API
|
||||
*/
|
||||
async function fetchIssueComments(
|
||||
projectPath: string,
|
||||
@@ -70,24 +76,52 @@ async function fetchIssueComments(
|
||||
|
||||
// Use GraphQL variables instead of string interpolation for safety
|
||||
const query = `
|
||||
query GetIssueComments($owner: String!, $repo: String!, $issueNumber: Int!, $cursor: String) {
|
||||
query GetIssueComments(
|
||||
$owner: String!
|
||||
$repo: String!
|
||||
$issueNumber: Int!
|
||||
$cursor: String
|
||||
$pageSize: Int!
|
||||
) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
issue(number: $issueNumber) {
|
||||
comments(first: 50, after: $cursor) {
|
||||
totalCount
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
author {
|
||||
login
|
||||
avatarUrl
|
||||
issueOrPullRequest(number: $issueNumber) {
|
||||
__typename
|
||||
... on Issue {
|
||||
comments(first: $pageSize, after: $cursor) {
|
||||
totalCount
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
author {
|
||||
login
|
||||
avatarUrl
|
||||
}
|
||||
body
|
||||
createdAt
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
... on PullRequest {
|
||||
comments(first: $pageSize, after: $cursor) {
|
||||
totalCount
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
nodes {
|
||||
id
|
||||
author {
|
||||
login
|
||||
avatarUrl
|
||||
}
|
||||
body
|
||||
createdAt
|
||||
updatedAt
|
||||
}
|
||||
body
|
||||
createdAt
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -99,6 +133,7 @@ async function fetchIssueComments(
|
||||
repo,
|
||||
issueNumber,
|
||||
cursor: cursor || null,
|
||||
pageSize: COMMENTS_PAGE_SIZE,
|
||||
};
|
||||
|
||||
const requestBody = JSON.stringify({ query, variables });
|
||||
@@ -140,10 +175,10 @@ async function fetchIssueComments(
|
||||
throw new Error(response.errors[0].message);
|
||||
}
|
||||
|
||||
const commentsData = response.data?.repository?.issue?.comments;
|
||||
const commentsData = response.data?.repository?.issueOrPullRequest?.comments;
|
||||
|
||||
if (!commentsData) {
|
||||
throw new Error('Issue not found or no comments data available');
|
||||
throw new Error('Issue or pull request not found or no comments data available');
|
||||
}
|
||||
|
||||
const comments: GitHubComment[] = commentsData.nodes.map((node) => ({
|
||||
|
||||
@@ -9,6 +9,17 @@ import { checkGitHubRemote } from './check-github-remote.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('ListIssues');
|
||||
const OPEN_ISSUES_LIMIT = 100;
|
||||
const CLOSED_ISSUES_LIMIT = 50;
|
||||
const ISSUE_LIST_FIELDS = 'number,title,state,author,createdAt,labels,url,body,assignees';
|
||||
const ISSUE_STATE_OPEN = 'open';
|
||||
const ISSUE_STATE_CLOSED = 'closed';
|
||||
const GH_ISSUE_LIST_COMMAND = 'gh issue list';
|
||||
const GH_STATE_FLAG = '--state';
|
||||
const GH_JSON_FLAG = '--json';
|
||||
const GH_LIMIT_FLAG = '--limit';
|
||||
const LINKED_PRS_BATCH_SIZE = 20;
|
||||
const LINKED_PRS_TIMELINE_ITEMS = 10;
|
||||
|
||||
export interface GitHubLabel {
|
||||
name: string;
|
||||
@@ -69,34 +80,68 @@ async function fetchLinkedPRs(
|
||||
|
||||
// Build GraphQL query for batch fetching linked PRs
|
||||
// We fetch up to 20 issues at a time to avoid query limits
|
||||
const batchSize = 20;
|
||||
for (let i = 0; i < issueNumbers.length; i += batchSize) {
|
||||
const batch = issueNumbers.slice(i, i + batchSize);
|
||||
for (let i = 0; i < issueNumbers.length; i += LINKED_PRS_BATCH_SIZE) {
|
||||
const batch = issueNumbers.slice(i, i + LINKED_PRS_BATCH_SIZE);
|
||||
|
||||
const issueQueries = batch
|
||||
.map(
|
||||
(num, idx) => `
|
||||
issue${idx}: issue(number: ${num}) {
|
||||
number
|
||||
timelineItems(first: 10, itemTypes: [CROSS_REFERENCED_EVENT, CONNECTED_EVENT]) {
|
||||
nodes {
|
||||
... on CrossReferencedEvent {
|
||||
source {
|
||||
... on PullRequest {
|
||||
number
|
||||
title
|
||||
state
|
||||
url
|
||||
issue${idx}: issueOrPullRequest(number: ${num}) {
|
||||
... on Issue {
|
||||
number
|
||||
timelineItems(
|
||||
first: ${LINKED_PRS_TIMELINE_ITEMS}
|
||||
itemTypes: [CROSS_REFERENCED_EVENT, CONNECTED_EVENT]
|
||||
) {
|
||||
nodes {
|
||||
... on CrossReferencedEvent {
|
||||
source {
|
||||
... on PullRequest {
|
||||
number
|
||||
title
|
||||
state
|
||||
url
|
||||
}
|
||||
}
|
||||
}
|
||||
... on ConnectedEvent {
|
||||
subject {
|
||||
... on PullRequest {
|
||||
number
|
||||
title
|
||||
state
|
||||
url
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
... on ConnectedEvent {
|
||||
subject {
|
||||
... on PullRequest {
|
||||
number
|
||||
title
|
||||
state
|
||||
url
|
||||
}
|
||||
}
|
||||
... on PullRequest {
|
||||
number
|
||||
timelineItems(
|
||||
first: ${LINKED_PRS_TIMELINE_ITEMS}
|
||||
itemTypes: [CROSS_REFERENCED_EVENT, CONNECTED_EVENT]
|
||||
) {
|
||||
nodes {
|
||||
... on CrossReferencedEvent {
|
||||
source {
|
||||
... on PullRequest {
|
||||
number
|
||||
title
|
||||
state
|
||||
url
|
||||
}
|
||||
}
|
||||
}
|
||||
... on ConnectedEvent {
|
||||
subject {
|
||||
... on PullRequest {
|
||||
number
|
||||
title
|
||||
state
|
||||
url
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -213,16 +258,35 @@ export function createListIssuesHandler() {
|
||||
}
|
||||
|
||||
// Fetch open and closed issues in parallel (now including assignees)
|
||||
const repoQualifier =
|
||||
remoteStatus.owner && remoteStatus.repo ? `${remoteStatus.owner}/${remoteStatus.repo}` : '';
|
||||
const repoFlag = repoQualifier ? `-R ${repoQualifier}` : '';
|
||||
const [openResult, closedResult] = await Promise.all([
|
||||
execAsync(
|
||||
'gh issue list --state open --json number,title,state,author,createdAt,labels,url,body,assignees --limit 100',
|
||||
[
|
||||
GH_ISSUE_LIST_COMMAND,
|
||||
repoFlag,
|
||||
`${GH_STATE_FLAG} ${ISSUE_STATE_OPEN}`,
|
||||
`${GH_JSON_FLAG} ${ISSUE_LIST_FIELDS}`,
|
||||
`${GH_LIMIT_FLAG} ${OPEN_ISSUES_LIMIT}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' '),
|
||||
{
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
}
|
||||
),
|
||||
execAsync(
|
||||
'gh issue list --state closed --json number,title,state,author,createdAt,labels,url,body,assignees --limit 50',
|
||||
[
|
||||
GH_ISSUE_LIST_COMMAND,
|
||||
repoFlag,
|
||||
`${GH_STATE_FLAG} ${ISSUE_STATE_CLOSED}`,
|
||||
`${GH_JSON_FLAG} ${ISSUE_LIST_FIELDS}`,
|
||||
`${GH_LIMIT_FLAG} ${CLOSED_ISSUES_LIMIT}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' '),
|
||||
{
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
|
||||
@@ -6,6 +6,17 @@ import type { Request, Response } from 'express';
|
||||
import { execAsync, execEnv, getErrorMessage, logError } from './common.js';
|
||||
import { checkGitHubRemote } from './check-github-remote.js';
|
||||
|
||||
const OPEN_PRS_LIMIT = 100;
|
||||
const MERGED_PRS_LIMIT = 50;
|
||||
const PR_LIST_FIELDS =
|
||||
'number,title,state,author,createdAt,labels,url,isDraft,headRefName,reviewDecision,mergeable,body';
|
||||
const PR_STATE_OPEN = 'open';
|
||||
const PR_STATE_MERGED = 'merged';
|
||||
const GH_PR_LIST_COMMAND = 'gh pr list';
|
||||
const GH_STATE_FLAG = '--state';
|
||||
const GH_JSON_FLAG = '--json';
|
||||
const GH_LIMIT_FLAG = '--limit';
|
||||
|
||||
export interface GitHubLabel {
|
||||
name: string;
|
||||
color: string;
|
||||
@@ -57,16 +68,36 @@ export function createListPRsHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
const repoQualifier =
|
||||
remoteStatus.owner && remoteStatus.repo ? `${remoteStatus.owner}/${remoteStatus.repo}` : '';
|
||||
const repoFlag = repoQualifier ? `-R ${repoQualifier}` : '';
|
||||
|
||||
const [openResult, mergedResult] = await Promise.all([
|
||||
execAsync(
|
||||
'gh pr list --state open --json number,title,state,author,createdAt,labels,url,isDraft,headRefName,reviewDecision,mergeable,body --limit 100',
|
||||
[
|
||||
GH_PR_LIST_COMMAND,
|
||||
repoFlag,
|
||||
`${GH_STATE_FLAG} ${PR_STATE_OPEN}`,
|
||||
`${GH_JSON_FLAG} ${PR_LIST_FIELDS}`,
|
||||
`${GH_LIMIT_FLAG} ${OPEN_PRS_LIMIT}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' '),
|
||||
{
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
}
|
||||
),
|
||||
execAsync(
|
||||
'gh pr list --state merged --json number,title,state,author,createdAt,labels,url,isDraft,headRefName,reviewDecision,mergeable,body --limit 50',
|
||||
[
|
||||
GH_PR_LIST_COMMAND,
|
||||
repoFlag,
|
||||
`${GH_STATE_FLAG} ${PR_STATE_MERGED}`,
|
||||
`${GH_JSON_FLAG} ${PR_LIST_FIELDS}`,
|
||||
`${GH_LIMIT_FLAG} ${MERGED_PRS_LIMIT}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' '),
|
||||
{
|
||||
cwd: projectPath,
|
||||
env: execEnv,
|
||||
|
||||
@@ -3,7 +3,7 @@
  *
  * Scans the codebase to determine if an issue is valid, invalid, or needs clarification.
  * Runs asynchronously and emits events for progress and completion.
- * Supports both Claude models and Cursor models.
+ * Supports Claude, Codex, Cursor, and OpenCode models.
  */
 
 import type { Request, Response } from 'express';
@@ -11,13 +11,19 @@ import type { EventEmitter } from '../../../lib/events.js';
|
||||
import type {
|
||||
IssueValidationResult,
|
||||
IssueValidationEvent,
|
||||
ModelAlias,
|
||||
CursorModelId,
|
||||
ModelId,
|
||||
GitHubComment,
|
||||
LinkedPRInfo,
|
||||
ThinkingLevel,
|
||||
ReasoningEffort,
|
||||
} from '@automaker/types';
|
||||
import {
|
||||
DEFAULT_PHASE_MODELS,
|
||||
isClaudeModel,
|
||||
isCodexModel,
|
||||
isCursorModel,
|
||||
isOpencodeModel,
|
||||
} from '@automaker/types';
|
||||
import { isCursorModel, DEFAULT_PHASE_MODELS } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { extractJson } from '../../../lib/json-extractor.js';
|
||||
import { writeValidation } from '../../../lib/validation-storage.js';
|
||||
@@ -39,9 +45,6 @@ import {
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../../lib/settings-helpers.js';
|
||||
|
||||
/** Valid Claude model values for validation */
|
||||
const VALID_CLAUDE_MODELS: readonly ModelAlias[] = ['opus', 'sonnet', 'haiku'] as const;
|
||||
|
||||
/**
|
||||
* Request body for issue validation
|
||||
*/
|
||||
@@ -51,10 +54,12 @@ interface ValidateIssueRequestBody {
|
||||
issueTitle: string;
|
||||
issueBody: string;
|
||||
issueLabels?: string[];
|
||||
/** Model to use for validation (opus, sonnet, haiku, or cursor model IDs) */
|
||||
model?: ModelAlias | CursorModelId;
|
||||
/** Thinking level for Claude models (ignored for Cursor models) */
|
||||
/** Model to use for validation (Claude alias or provider model ID) */
|
||||
model?: ModelId;
|
||||
/** Thinking level for Claude models (ignored for non-Claude models) */
|
||||
thinkingLevel?: ThinkingLevel;
|
||||
/** Reasoning effort for Codex models (ignored for non-Codex models) */
|
||||
reasoningEffort?: ReasoningEffort;
|
||||
/** Comments to include in validation analysis */
|
||||
comments?: GitHubComment[];
|
||||
/** Linked pull requests for this issue */
|
||||
@@ -66,7 +71,7 @@ interface ValidateIssueRequestBody {
|
||||
*
|
||||
* Emits events for start, progress, complete, and error.
|
||||
* Stores result on completion.
|
||||
* Supports both Claude models (with structured output) and Cursor models (with JSON parsing).
|
||||
* Supports Claude/Codex models (structured output) and Cursor/OpenCode models (JSON parsing).
|
||||
*/
|
||||
async function runValidation(
|
||||
projectPath: string,
|
||||
@@ -74,13 +79,14 @@ async function runValidation(
|
||||
issueTitle: string,
|
||||
issueBody: string,
|
||||
issueLabels: string[] | undefined,
|
||||
model: ModelAlias | CursorModelId,
|
||||
model: ModelId,
|
||||
events: EventEmitter,
|
||||
abortController: AbortController,
|
||||
settingsService?: SettingsService,
|
||||
comments?: ValidationComment[],
|
||||
linkedPRs?: ValidationLinkedPR[],
|
||||
thinkingLevel?: ThinkingLevel
|
||||
thinkingLevel?: ThinkingLevel,
|
||||
reasoningEffort?: ReasoningEffort
|
||||
): Promise<void> {
|
||||
// Emit start event
|
||||
const startEvent: IssueValidationEvent = {
|
||||
@@ -111,8 +117,8 @@ async function runValidation(
|
||||
|
||||
let responseText = '';
|
||||
|
||||
// Determine if we should use structured output (Claude supports it, Cursor doesn't)
|
||||
const useStructuredOutput = !isCursorModel(model);
|
||||
// Determine if we should use structured output (Claude/Codex support it, Cursor/OpenCode don't)
|
||||
const useStructuredOutput = isClaudeModel(model) || isCodexModel(model);
|
||||
|
||||
// Build the final prompt - for Cursor, include system prompt and JSON schema instructions
|
||||
let finalPrompt = basePrompt;
|
||||
@@ -138,14 +144,20 @@ ${basePrompt}`;
|
||||
'[ValidateIssue]'
|
||||
);
|
||||
|
||||
// Use thinkingLevel from request if provided, otherwise fall back to settings
|
||||
// Use request overrides if provided, otherwise fall back to settings
|
||||
let effectiveThinkingLevel: ThinkingLevel | undefined = thinkingLevel;
|
||||
if (!effectiveThinkingLevel) {
|
||||
let effectiveReasoningEffort: ReasoningEffort | undefined = reasoningEffort;
|
||||
if (!effectiveThinkingLevel || !effectiveReasoningEffort) {
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.validationModel || DEFAULT_PHASE_MODELS.validationModel;
|
||||
const resolved = resolvePhaseModel(phaseModelEntry);
|
||||
effectiveThinkingLevel = resolved.thinkingLevel;
|
||||
if (!effectiveThinkingLevel) {
|
||||
effectiveThinkingLevel = resolved.thinkingLevel;
|
||||
}
|
||||
if (!effectiveReasoningEffort && typeof phaseModelEntry !== 'string') {
|
||||
effectiveReasoningEffort = phaseModelEntry.reasoningEffort;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Using model: ${model}`);
|
||||
@@ -158,6 +170,7 @@ ${basePrompt}`;
|
||||
systemPrompt: useStructuredOutput ? ISSUE_VALIDATION_SYSTEM_PROMPT : undefined,
|
||||
abortController,
|
||||
thinkingLevel: effectiveThinkingLevel,
|
||||
reasoningEffort: effectiveReasoningEffort,
|
||||
readOnly: true, // Issue validation only reads code, doesn't write
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
outputFormat: useStructuredOutput
|
||||
@@ -262,6 +275,7 @@ export function createValidateIssueHandler(
|
||||
issueLabels,
|
||||
model = 'opus',
|
||||
thinkingLevel,
|
||||
reasoningEffort,
|
||||
comments: rawComments,
|
||||
linkedPRs: rawLinkedPRs,
|
||||
} = req.body as ValidateIssueRequestBody;
|
||||
@@ -309,14 +323,17 @@ export function createValidateIssueHandler(
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate model parameter at runtime - accept Claude models or Cursor models
|
||||
const isValidClaudeModel = VALID_CLAUDE_MODELS.includes(model as ModelAlias);
|
||||
const isValidCursorModel = isCursorModel(model);
|
||||
// Validate model parameter at runtime - accept any supported provider model
|
||||
const isValidModel =
|
||||
isClaudeModel(model) ||
|
||||
isCursorModel(model) ||
|
||||
isCodexModel(model) ||
|
||||
isOpencodeModel(model);
|
||||
|
||||
if (!isValidClaudeModel && !isValidCursorModel) {
|
||||
if (!isValidModel) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Invalid model. Must be one of: ${VALID_CLAUDE_MODELS.join(', ')}, or a Cursor model ID`,
|
||||
error: 'Invalid model. Must be a Claude, Cursor, Codex, or OpenCode model ID (or alias).',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -347,7 +364,8 @@ export function createValidateIssueHandler(
|
||||
settingsService,
|
||||
validationComments,
|
||||
validationLinkedPRs,
|
||||
thinkingLevel
|
||||
thinkingLevel,
|
||||
reasoningEffort
|
||||
)
|
||||
.catch(() => {
|
||||
// Error is already handled inside runValidation (event emitted)
|
||||
|
||||
@@ -5,7 +5,7 @@
  * Each provider shows: `{ configured: boolean, masked: string }`
  * Masked shows first 4 and last 4 characters for verification.
  *
- * Response: `{ "success": true, "credentials": { anthropic } }`
+ * Response: `{ "success": true, "credentials": { anthropic, google, openai } }`
  */
 
 import type { Request, Response } from 'express';

@@ -1,7 +1,7 @@
 /**
  * PUT /api/settings/credentials - Update API credentials
  *
- * Updates API keys for Anthropic. Partial updates supported.
+ * Updates API keys for supported providers. Partial updates supported.
  * Returns masked credentials for verification without exposing full keys.
  *
  * Request body: `Partial<Credentials>` (usually just apiKeys)

@@ -11,6 +11,7 @@ export function createApiKeysHandler() {
|
||||
res.json({
|
||||
success: true,
|
||||
hasAnthropicKey: !!getApiKey('anthropic') || !!process.env.ANTHROPIC_API_KEY,
|
||||
hasGoogleKey: !!getApiKey('google'),
|
||||
hasOpenaiKey: !!getApiKey('openai') || !!process.env.OPENAI_API_KEY,
|
||||
});
|
||||
} catch (error) {
|
||||
|
||||
@@ -21,22 +21,25 @@ export function createStoreApiKeyHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
setApiKey(provider, apiKey);
|
||||
|
||||
// Also set as environment variable and persist to .env
|
||||
if (provider === 'anthropic' || provider === 'anthropic_oauth_token') {
|
||||
// Both API key and OAuth token use ANTHROPIC_API_KEY
|
||||
process.env.ANTHROPIC_API_KEY = apiKey;
|
||||
await persistApiKeyToEnv('ANTHROPIC_API_KEY', apiKey);
|
||||
logger.info('[Setup] Stored API key as ANTHROPIC_API_KEY');
|
||||
} else {
|
||||
const providerEnvMap: Record<string, string> = {
|
||||
anthropic: 'ANTHROPIC_API_KEY',
|
||||
anthropic_oauth_token: 'ANTHROPIC_API_KEY',
|
||||
openai: 'OPENAI_API_KEY',
|
||||
};
|
||||
const envKey = providerEnvMap[provider];
|
||||
if (!envKey) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Unsupported provider: ${provider}. Only anthropic is supported.`,
|
||||
error: `Unsupported provider: ${provider}. Only anthropic and openai are supported.`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
setApiKey(provider, apiKey);
|
||||
process.env[envKey] = apiKey;
|
||||
await persistApiKeyToEnv(envKey, apiKey);
|
||||
logger.info(`[Setup] Stored API key as ${envKey}`);
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, 'Store API key failed');
|
||||
|
||||
@@ -17,6 +17,7 @@ import { createDeleteHandler } from './routes/delete.js';
|
||||
import { createCreatePRHandler } from './routes/create-pr.js';
|
||||
import { createPRInfoHandler } from './routes/pr-info.js';
|
||||
import { createCommitHandler } from './routes/commit.js';
|
||||
import { createGenerateCommitMessageHandler } from './routes/generate-commit-message.js';
|
||||
import { createPushHandler } from './routes/push.js';
|
||||
import { createPullHandler } from './routes/pull.js';
|
||||
import { createCheckoutBranchHandler } from './routes/checkout-branch.js';
|
||||
@@ -40,8 +41,12 @@ import {
|
||||
createDeleteInitScriptHandler,
|
||||
createRunInitScriptHandler,
|
||||
} from './routes/init-script.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
|
||||
export function createWorktreeRoutes(events: EventEmitter): Router {
|
||||
export function createWorktreeRoutes(
|
||||
events: EventEmitter,
|
||||
settingsService?: SettingsService
|
||||
): Router {
|
||||
const router = Router();
|
||||
|
||||
router.post('/info', validatePathParams('projectPath'), createInfoHandler());
|
||||
@@ -65,6 +70,12 @@ export function createWorktreeRoutes(events: EventEmitter): Router {
|
||||
requireGitRepoOnly,
|
||||
createCommitHandler()
|
||||
);
|
||||
router.post(
|
||||
'/generate-commit-message',
|
||||
validatePathParams('worktreePath'),
|
||||
requireGitRepoOnly,
|
||||
createGenerateCommitMessageHandler(settingsService)
|
||||
);
|
||||
router.post(
|
||||
'/push',
|
||||
validatePathParams('worktreePath'),
|
||||
|
||||
@@ -0,0 +1,275 @@
|
||||
/**
|
||||
* POST /worktree/generate-commit-message endpoint - Generate an AI commit message from git diff
|
||||
*
|
||||
* Uses the configured model (via phaseModels.commitMessageModel) to generate a concise,
|
||||
* conventional commit message from git changes. Defaults to Claude Haiku for speed.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { existsSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { query } from '@anthropic-ai/claude-agent-sdk';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { DEFAULT_PHASE_MODELS, isCursorModel, stripProviderPrefix } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { mergeCommitMessagePrompts } from '@automaker/prompts';
|
||||
import { ProviderFactory } from '../../../providers/provider-factory.js';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const logger = createLogger('GenerateCommitMessage');
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
/** Timeout for AI provider calls in milliseconds (30 seconds) */
|
||||
const AI_TIMEOUT_MS = 30_000;
|
||||
|
||||
/**
|
||||
* Wraps an async generator with a timeout.
|
||||
* If the generator takes longer than the timeout, it throws an error.
|
||||
*/
|
||||
async function* withTimeout<T>(
|
||||
generator: AsyncIterable<T>,
|
||||
timeoutMs: number
|
||||
): AsyncGenerator<T, void, unknown> {
|
||||
const timeoutPromise = new Promise<never>((_, reject) => {
|
||||
setTimeout(() => reject(new Error(`AI provider timed out after ${timeoutMs}ms`)), timeoutMs);
|
||||
});
|
||||
|
||||
const iterator = generator[Symbol.asyncIterator]();
|
||||
let done = false;
|
||||
|
||||
while (!done) {
|
||||
const result = await Promise.race([iterator.next(), timeoutPromise]);
|
||||
if (result.done) {
|
||||
done = true;
|
||||
} else {
|
||||
yield result.value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the effective system prompt for commit message generation.
|
||||
* Uses custom prompt from settings if enabled, otherwise falls back to default.
|
||||
*/
|
||||
async function getSystemPrompt(settingsService?: SettingsService): Promise<string> {
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const prompts = mergeCommitMessagePrompts(settings?.promptCustomization?.commitMessage);
|
||||
return prompts.systemPrompt;
|
||||
}
|
||||
|
||||
interface GenerateCommitMessageRequestBody {
|
||||
worktreePath: string;
|
||||
}
|
||||
|
||||
interface GenerateCommitMessageSuccessResponse {
|
||||
success: true;
|
||||
message: string;
|
||||
}
|
||||
|
||||
interface GenerateCommitMessageErrorResponse {
|
||||
success: false;
|
||||
error: string;
|
||||
}
|
||||
|
||||
async function extractTextFromStream(
|
||||
stream: AsyncIterable<{
|
||||
type: string;
|
||||
subtype?: string;
|
||||
result?: string;
|
||||
message?: {
|
||||
content?: Array<{ type: string; text?: string }>;
|
||||
};
|
||||
}>
|
||||
): Promise<string> {
|
||||
let responseText = '';
|
||||
|
||||
for await (const msg of stream) {
|
||||
if (msg.type === 'assistant' && msg.message?.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === 'text' && block.text) {
|
||||
responseText += block.text;
|
||||
}
|
||||
}
|
||||
} else if (msg.type === 'result' && msg.subtype === 'success') {
|
||||
responseText = msg.result || responseText;
|
||||
}
|
||||
}
|
||||
|
||||
return responseText;
|
||||
}
|
||||
|
||||
export function createGenerateCommitMessageHandler(
|
||||
settingsService?: SettingsService
|
||||
): (req: Request, res: Response) => Promise<void> {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath } = req.body as GenerateCommitMessageRequestBody;
|
||||
|
||||
if (!worktreePath || typeof worktreePath !== 'string') {
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: 'worktreePath is required and must be a string',
|
||||
};
|
||||
res.status(400).json(response);
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate that the directory exists
|
||||
if (!existsSync(worktreePath)) {
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: 'worktreePath does not exist',
|
||||
};
|
||||
res.status(400).json(response);
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate that it's a git repository (check for .git folder or file for worktrees)
|
||||
const gitPath = join(worktreePath, '.git');
|
||||
if (!existsSync(gitPath)) {
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: 'worktreePath is not a git repository',
|
||||
};
|
||||
res.status(400).json(response);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Generating commit message for worktree: ${worktreePath}`);
|
||||
|
||||
// Get git diff of staged and unstaged changes
|
||||
let diff = '';
|
||||
try {
|
||||
// First try to get staged changes
|
||||
const { stdout: stagedDiff } = await execAsync('git diff --cached', {
|
||||
cwd: worktreePath,
|
||||
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
||||
});
|
||||
|
||||
// If no staged changes, get unstaged changes
|
||||
if (!stagedDiff.trim()) {
|
||||
const { stdout: unstagedDiff } = await execAsync('git diff', {
|
||||
cwd: worktreePath,
|
||||
maxBuffer: 1024 * 1024 * 5, // 5MB buffer
|
||||
});
|
||||
diff = unstagedDiff;
|
||||
} else {
|
||||
diff = stagedDiff;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to get git diff:', error);
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: 'Failed to get git changes',
|
||||
};
|
||||
res.status(500).json(response);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!diff.trim()) {
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: 'No changes to commit',
|
||||
};
|
||||
res.status(400).json(response);
|
||||
return;
|
||||
}
|
||||
|
||||
// Truncate diff if too long (keep first 10000 characters to avoid token limits)
|
||||
const truncatedDiff =
|
||||
diff.length > 10000 ? diff.substring(0, 10000) + '\n\n[... diff truncated ...]' : diff;
|
||||
|
||||
const userPrompt = `Generate a commit message for these changes:\n\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
|
||||
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.commitMessageModel || DEFAULT_PHASE_MODELS.commitMessageModel;
|
||||
const { model } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info(`Using model for commit message: ${model}`);
|
||||
|
||||
// Get the effective system prompt (custom or default)
|
||||
const systemPrompt = await getSystemPrompt(settingsService);
|
||||
|
||||
let message: string;
|
||||
|
||||
// Route to appropriate provider based on model type
|
||||
if (isCursorModel(model)) {
|
||||
// Use Cursor provider for Cursor models
|
||||
logger.info(`Using Cursor provider for model: ${model}`);
|
||||
|
||||
const provider = ProviderFactory.getProviderForModel(model);
|
||||
const bareModel = stripProviderPrefix(model);
|
||||
|
||||
const cursorPrompt = `${systemPrompt}\n\n${userPrompt}`;
|
||||
|
||||
let responseText = '';
|
||||
const cursorStream = provider.executeQuery({
|
||||
prompt: cursorPrompt,
|
||||
model: bareModel,
|
||||
cwd: worktreePath,
|
||||
maxTurns: 1,
|
||||
allowedTools: [],
|
||||
readOnly: true,
|
||||
});
|
||||
|
||||
// Wrap with timeout to prevent indefinite hangs
|
||||
for await (const msg of withTimeout(cursorStream, AI_TIMEOUT_MS)) {
|
||||
if (msg.type === 'assistant' && msg.message?.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === 'text' && block.text) {
|
||||
responseText += block.text;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
message = responseText.trim();
|
||||
} else {
|
||||
// Use Claude SDK for Claude models
|
||||
const stream = query({
|
||||
prompt: userPrompt,
|
||||
options: {
|
||||
model,
|
||||
systemPrompt,
|
||||
maxTurns: 1,
|
||||
allowedTools: [],
|
||||
permissionMode: 'default',
|
||||
},
|
||||
});
|
||||
|
||||
// Wrap with timeout to prevent indefinite hangs
|
||||
message = await extractTextFromStream(withTimeout(stream, AI_TIMEOUT_MS));
|
||||
}
|
||||
|
||||
if (!message || message.trim().length === 0) {
|
||||
logger.warn('Received empty response from model');
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: 'Failed to generate commit message - empty response',
|
||||
};
|
||||
res.status(500).json(response);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Generated commit message: ${message.trim().substring(0, 100)}...`);
|
||||
|
||||
const response: GenerateCommitMessageSuccessResponse = {
|
||||
success: true,
|
||||
message: message.trim(),
|
||||
};
|
||||
res.json(response);
|
||||
} catch (error) {
|
||||
logError(error, 'Generate commit message failed');
|
||||
const response: GenerateCommitMessageErrorResponse = {
|
||||
success: false,
|
||||
error: getErrorMessage(error),
|
||||
};
|
||||
res.status(500).json(response);
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -21,7 +21,7 @@ import type {
|
||||
ThinkingLevel,
|
||||
PlanningMode,
|
||||
} from '@automaker/types';
|
||||
import { DEFAULT_PHASE_MODELS, stripProviderPrefix } from '@automaker/types';
|
||||
import { DEFAULT_PHASE_MODELS, isClaudeModel, stripProviderPrefix } from '@automaker/types';
|
||||
import {
|
||||
buildPromptWithImages,
|
||||
classifyError,
|
||||
@@ -3586,10 +3586,29 @@ If nothing notable: {"learnings": []}`;
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.memoryExtractionModel || DEFAULT_PHASE_MODELS.memoryExtractionModel;
|
||||
const { model } = resolvePhaseModel(phaseModelEntry);
|
||||
const hasClaudeKey = Boolean(process.env.ANTHROPIC_API_KEY);
|
||||
let resolvedModel = model;
|
||||
|
||||
if (isClaudeModel(model) && !hasClaudeKey) {
|
||||
const fallbackModel = feature.model
|
||||
? resolveModelString(feature.model, DEFAULT_MODELS.claude)
|
||||
: null;
|
||||
if (fallbackModel && !isClaudeModel(fallbackModel)) {
|
||||
console.log(
|
||||
`[AutoMode] Claude not configured for memory extraction; using feature model "${fallbackModel}".`
|
||||
);
|
||||
resolvedModel = fallbackModel;
|
||||
} else {
|
||||
console.log(
|
||||
'[AutoMode] Claude not configured for memory extraction; skipping learning extraction.'
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const result = await simpleQuery({
|
||||
prompt: userPrompt,
|
||||
model,
|
||||
model: resolvedModel,
|
||||
cwd: projectPath,
|
||||
maxTurns: 1,
|
||||
allowedTools: [],
|
||||
|
||||
@@ -161,11 +161,15 @@ export class ClaudeUsageService {
|
||||
|
||||
const workingDirectory = this.isWindows
|
||||
? process.env.USERPROFILE || os.homedir() || 'C:\\'
|
||||
: process.env.HOME || os.homedir() || '/tmp';
|
||||
: os.tmpdir();
|
||||
|
||||
// Use platform-appropriate shell and command
|
||||
const shell = this.isWindows ? 'cmd.exe' : '/bin/sh';
|
||||
const args = this.isWindows ? ['/c', 'claude', '/usage'] : ['-c', 'claude /usage'];
|
||||
// Use --add-dir to whitelist the current directory and bypass the trust prompt
|
||||
// We don't pass /usage here, we'll type it into the REPL
|
||||
const args = this.isWindows
|
||||
? ['/c', 'claude', '--add-dir', workingDirectory]
|
||||
: ['-c', `claude --add-dir "${workingDirectory}"`];
|
||||
|
||||
let ptyProcess: any = null;
|
||||
|
||||
@@ -181,8 +185,6 @@ export class ClaudeUsageService {
|
||||
} as Record<string, string>,
|
||||
});
|
||||
} catch (spawnError) {
|
||||
// pty.spawn() can throw synchronously if the native module fails to load
|
||||
// or if PTY is not available in the current environment (e.g., containers without /dev/pts)
|
||||
const errorMessage = spawnError instanceof Error ? spawnError.message : String(spawnError);
|
||||
logger.error('[executeClaudeUsageCommandPty] Failed to spawn PTY:', errorMessage);
|
||||
|
||||
@@ -205,16 +207,52 @@ export class ClaudeUsageService {
|
||||
if (output.includes('Current session')) {
|
||||
resolve(output);
|
||||
} else {
|
||||
reject(new Error('Command timed out'));
|
||||
reject(
|
||||
new Error(
|
||||
'The Claude CLI took too long to respond. This can happen if the CLI is waiting for a trust prompt or is otherwise busy.'
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}, this.timeout);
|
||||
}, 45000); // 45 second timeout
|
||||
|
||||
let hasSentCommand = false;
|
||||
let hasApprovedTrust = false;
|
||||
|
||||
ptyProcess.onData((data: string) => {
|
||||
output += data;
|
||||
|
||||
// Check if we've seen the usage data (look for "Current session")
|
||||
if (!hasSeenUsageData && output.includes('Current session')) {
|
||||
// Strip ANSI codes for easier matching
|
||||
// eslint-disable-next-line no-control-regex
|
||||
const cleanOutput = output.replace(/\x1B\[[0-9;]*[A-Za-z]/g, '');
|
||||
|
||||
// Check for specific authentication/permission errors
|
||||
if (
|
||||
cleanOutput.includes('OAuth token does not meet scope requirement') ||
|
||||
cleanOutput.includes('permission_error') ||
|
||||
cleanOutput.includes('token_expired') ||
|
||||
cleanOutput.includes('authentication_error')
|
||||
) {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
if (ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.kill();
|
||||
}
|
||||
reject(
|
||||
new Error(
|
||||
"Claude CLI authentication issue. Please run 'claude logout' and then 'claude login' in your terminal to refresh permissions."
|
||||
)
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if we've seen the usage data (look for "Current session" or the TUI Usage header)
|
||||
if (
|
||||
!hasSeenUsageData &&
|
||||
(cleanOutput.includes('Current session') ||
|
||||
(cleanOutput.includes('Usage') && cleanOutput.includes('% left')))
|
||||
) {
|
||||
hasSeenUsageData = true;
|
||||
// Wait for full output, then send escape to exit
|
||||
setTimeout(() => {
|
||||
@@ -228,16 +266,54 @@ export class ClaudeUsageService {
|
||||
}
|
||||
}, 2000);
|
||||
}
|
||||
}, 2000);
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
// Handle Trust Dialog: "Do you want to work in this folder?"
|
||||
// Since we are running in os.tmpdir(), it is safe to approve.
|
||||
if (!hasApprovedTrust && cleanOutput.includes('Do you want to work in this folder?')) {
|
||||
hasApprovedTrust = true;
|
||||
// Wait a tiny bit to ensure prompt is ready, then send Enter
|
||||
setTimeout(() => {
|
||||
if (!settled && ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.write('\r');
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
// Detect REPL prompt and send /usage command
|
||||
if (
|
||||
!hasSentCommand &&
|
||||
(cleanOutput.includes('❯') || cleanOutput.includes('? for shortcuts'))
|
||||
) {
|
||||
hasSentCommand = true;
|
||||
// Wait for REPL to fully settle
|
||||
setTimeout(() => {
|
||||
if (!settled && ptyProcess && !ptyProcess.killed) {
|
||||
// Send command with carriage return
|
||||
ptyProcess.write('/usage\r');
|
||||
|
||||
// Send another enter after 1 second to confirm selection if autocomplete menu appeared
|
||||
setTimeout(() => {
|
||||
if (!settled && ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.write('\r');
|
||||
}
|
||||
}, 1200);
|
||||
}
|
||||
}, 1500);
|
||||
}
|
||||
|
||||
// Fallback: if we see "Esc to cancel" but haven't seen usage data yet
|
||||
if (!hasSeenUsageData && output.includes('Esc to cancel')) {
|
||||
if (
|
||||
!hasSeenUsageData &&
|
||||
cleanOutput.includes('Esc to cancel') &&
|
||||
!cleanOutput.includes('Do you want to work in this folder?')
|
||||
) {
|
||||
setTimeout(() => {
|
||||
if (!settled && ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.write('\x1b'); // Send escape key
|
||||
}
|
||||
}, 3000);
|
||||
}, 5000);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -246,8 +322,11 @@ export class ClaudeUsageService {
|
||||
if (settled) return;
|
||||
settled = true;
|
||||
|
||||
// Check for authentication errors in output
|
||||
if (output.includes('token_expired') || output.includes('authentication_error')) {
|
||||
if (
|
||||
output.includes('token_expired') ||
|
||||
output.includes('authentication_error') ||
|
||||
output.includes('permission_error')
|
||||
) {
|
||||
reject(new Error("Authentication required - please run 'claude login'"));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -431,6 +431,8 @@ export class SettingsService {
|
||||
*/
|
||||
async getMaskedCredentials(): Promise<{
|
||||
anthropic: { configured: boolean; masked: string };
|
||||
google: { configured: boolean; masked: string };
|
||||
openai: { configured: boolean; masked: string };
|
||||
}> {
|
||||
const credentials = await this.getCredentials();
|
||||
|
||||
@@ -444,6 +446,14 @@ export class SettingsService {
|
||||
configured: !!credentials.apiKeys.anthropic,
|
||||
masked: maskKey(credentials.apiKeys.anthropic),
|
||||
},
|
||||
google: {
|
||||
configured: !!credentials.apiKeys.google,
|
||||
masked: maskKey(credentials.apiKeys.google),
|
||||
},
|
||||
openai: {
|
||||
configured: !!credentials.apiKeys.openai,
|
||||
masked: maskKey(credentials.apiKeys.openai),
|
||||
},
|
||||
};
|
||||
}