fix: Remove unused vars and improve type safety. Improve task recovery

This commit is contained in:
gsxdsm
2026-02-17 13:18:40 -08:00
parent 8bb10632b1
commit de021f96bf
68 changed files with 1028 additions and 534 deletions

View File

@@ -303,7 +303,7 @@ app.use(
callback(null, origin);
return;
}
} catch (err) {
} catch {
// Ignore URL parsing errors
}
@@ -376,7 +376,7 @@ eventHookService.initialize(events, settingsService, eventHistoryService, featur
let globalSettings: Awaited<ReturnType<typeof settingsService.getGlobalSettings>> | null = null;
try {
globalSettings = await settingsService.getGlobalSettings();
} catch (err) {
} catch {
logger.warn('Failed to load global settings, using defaults');
}
@@ -394,7 +394,7 @@ eventHookService.initialize(events, settingsService, eventHistoryService, featur
const enableRequestLog = globalSettings.enableRequestLogging ?? true;
setRequestLoggingEnabled(enableRequestLog);
logger.info(`HTTP request logging: ${enableRequestLog ? 'enabled' : 'disabled'}`);
} catch (err) {
} catch {
logger.warn('Failed to apply logging settings, using defaults');
}
}
@@ -421,6 +421,22 @@ eventHookService.initialize(events, settingsService, eventHistoryService, featur
} else {
logger.info('[STARTUP] Feature state reconciliation complete - no stale states found');
}
// Resume interrupted features in the background after reconciliation.
// This uses the saved execution state to identify features that were running
// before the restart (their statuses have been reset to ready/backlog by
// reconciliation above). Running in background so it doesn't block startup.
if (totalReconciled > 0) {
for (const project of globalSettings.projects) {
autoModeService.resumeInterruptedFeatures(project.path).catch((err) => {
logger.warn(
`[STARTUP] Failed to resume interrupted features for ${project.path}:`,
err
);
});
}
logger.info('[STARTUP] Initiated background resume of interrupted features');
}
}
} catch (err) {
logger.warn('[STARTUP] Failed to reconcile feature states:', err);
@@ -581,7 +597,7 @@ wss.on('connection', (ws: WebSocket) => {
logger.info('Sending event to client:', {
type,
messageLength: message.length,
sessionId: (payload as any)?.sessionId,
sessionId: (payload as Record<string, unknown>)?.sessionId,
});
ws.send(message);
} else {

View File

@@ -8,9 +8,6 @@ import { spawn, execSync } from 'child_process';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { createLogger } from '@automaker/utils';
const logger = createLogger('CliDetection');
export interface CliInfo {
name: string;
@@ -86,7 +83,7 @@ export async function detectCli(
options: CliDetectionOptions = {}
): Promise<CliDetectionResult> {
const config = CLI_CONFIGS[provider];
const { timeout = 5000, includeWsl = false, wslDistribution } = options;
const { timeout = 5000 } = options;
const issues: string[] = [];
const cliInfo: CliInfo = {

View File

@@ -40,7 +40,7 @@ export interface ErrorClassification {
suggestedAction?: string;
retryable: boolean;
provider?: string;
context?: Record<string, any>;
context?: Record<string, unknown>;
}
export interface ErrorPattern {
@@ -180,7 +180,7 @@ const ERROR_PATTERNS: ErrorPattern[] = [
export function classifyError(
error: unknown,
provider?: string,
context?: Record<string, any>
context?: Record<string, unknown>
): ErrorClassification {
const errorText = getErrorText(error);
@@ -281,18 +281,19 @@ function getErrorText(error: unknown): string {
if (typeof error === 'object' && error !== null) {
// Handle structured error objects
const errorObj = error as any;
const errorObj = error as Record<string, unknown>;
if (errorObj.message) {
if (typeof errorObj.message === 'string') {
return errorObj.message;
}
if (errorObj.error?.message) {
return errorObj.error.message;
const nestedError = errorObj.error;
if (typeof nestedError === 'object' && nestedError !== null && 'message' in nestedError) {
return String((nestedError as Record<string, unknown>).message);
}
if (errorObj.error) {
return typeof errorObj.error === 'string' ? errorObj.error : JSON.stringify(errorObj.error);
if (nestedError) {
return typeof nestedError === 'string' ? nestedError : JSON.stringify(nestedError);
}
return JSON.stringify(error);
@@ -307,7 +308,7 @@ function getErrorText(error: unknown): string {
export function createErrorResponse(
error: unknown,
provider?: string,
context?: Record<string, any>
context?: Record<string, unknown>
): {
success: false;
error: string;
@@ -335,7 +336,7 @@ export function logError(
error: unknown,
provider?: string,
operation?: string,
additionalContext?: Record<string, any>
additionalContext?: Record<string, unknown>
): void {
const classification = classifyError(error, provider, {
operation,

View File

@@ -12,11 +12,18 @@ export interface PermissionCheckResult {
reason?: string;
}
/** Minimal shape of a Cursor tool call used for permission checking */
interface CursorToolCall {
shellToolCall?: { args?: { command: string } };
readToolCall?: { args?: { path: string } };
writeToolCall?: { args?: { path: string } };
}
/**
* Check if a tool call is allowed based on permissions
*/
export function checkToolCallPermission(
toolCall: any,
toolCall: CursorToolCall,
permissions: CursorCliConfigFile | null
): PermissionCheckResult {
if (!permissions || !permissions.permissions) {
@@ -152,7 +159,11 @@ function matchesRule(toolName: string, rule: string): boolean {
/**
* Log permission violations
*/
export function logPermissionViolation(toolCall: any, reason: string, sessionId?: string): void {
export function logPermissionViolation(
toolCall: CursorToolCall,
reason: string,
sessionId?: string
): void {
const sessionIdStr = sessionId ? ` [${sessionId}]` : '';
if (toolCall.shellToolCall?.args?.command) {

View File

@@ -78,7 +78,7 @@ export async function readWorktreeMetadata(
const metadataPath = getWorktreeMetadataPath(projectPath, branch);
const content = (await secureFs.readFile(metadataPath, 'utf-8')) as string;
return JSON.parse(content) as WorktreeMetadata;
} catch (error) {
} catch (_error) {
// File doesn't exist or can't be read
return null;
}

View File

@@ -5,7 +5,7 @@
* with the provider architecture.
*/
import { query, type Options } from '@anthropic-ai/claude-agent-sdk';
import { query, type Options, type SDKUserMessage } from '@anthropic-ai/claude-agent-sdk';
import { BaseProvider } from './base-provider.js';
import { classifyError, getUserFriendlyErrorMessage, createLogger } from '@automaker/utils';
@@ -32,31 +32,6 @@ import type {
ModelDefinition,
} from './types.js';
// Explicit allowlist of environment variables to pass to the SDK.
// Only these vars are passed - nothing else from process.env leaks through.
const ALLOWED_ENV_VARS = [
// Authentication
'ANTHROPIC_API_KEY',
'ANTHROPIC_AUTH_TOKEN',
// Endpoint configuration
'ANTHROPIC_BASE_URL',
'API_TIMEOUT_MS',
// Model mappings
'ANTHROPIC_DEFAULT_HAIKU_MODEL',
'ANTHROPIC_DEFAULT_SONNET_MODEL',
'ANTHROPIC_DEFAULT_OPUS_MODEL',
// Traffic control
'CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC',
// System vars (always from process.env)
'PATH',
'HOME',
'SHELL',
'TERM',
'USER',
'LANG',
'LC_ALL',
];
// System vars are always passed from process.env regardless of profile
const SYSTEM_ENV_VARS = ['PATH', 'HOME', 'SHELL', 'TERM', 'USER', 'LANG', 'LC_ALL'];
@@ -258,7 +233,7 @@ export class ClaudeProvider extends BaseProvider {
};
// Build prompt payload
let promptPayload: string | AsyncIterable<any>;
let promptPayload: string | AsyncIterable<SDKUserMessage>;
if (Array.isArray(prompt)) {
// Multi-part prompt (with images)
@@ -317,12 +292,16 @@ export class ClaudeProvider extends BaseProvider {
? `${userMessage}\n\nTip: If you're running multiple features in auto-mode, consider reducing concurrency (maxConcurrency setting) to avoid hitting rate limits.`
: userMessage;
const enhancedError = new Error(message);
(enhancedError as any).originalError = error;
(enhancedError as any).type = errorInfo.type;
const enhancedError = new Error(message) as Error & {
originalError: unknown;
type: string;
retryAfter?: number;
};
enhancedError.originalError = error;
enhancedError.type = errorInfo.type;
if (errorInfo.isRateLimit) {
(enhancedError as any).retryAfter = errorInfo.retryAfter;
enhancedError.retryAfter = errorInfo.retryAfter;
}
throw enhancedError;

View File

@@ -30,7 +30,6 @@ import type {
ModelDefinition,
} from './types.js';
import {
CODEX_MODEL_MAP,
supportsReasoningEffort,
validateBareModelId,
calculateReasoningTimeout,
@@ -56,15 +55,9 @@ const CODEX_EXEC_SUBCOMMAND = 'exec';
const CODEX_JSON_FLAG = '--json';
const CODEX_MODEL_FLAG = '--model';
const CODEX_VERSION_FLAG = '--version';
const CODEX_SANDBOX_FLAG = '--sandbox';
const CODEX_APPROVAL_FLAG = '--ask-for-approval';
const CODEX_SEARCH_FLAG = '--search';
const CODEX_OUTPUT_SCHEMA_FLAG = '--output-schema';
const CODEX_CONFIG_FLAG = '--config';
const CODEX_IMAGE_FLAG = '--image';
const CODEX_ADD_DIR_FLAG = '--add-dir';
const CODEX_SKIP_GIT_REPO_CHECK_FLAG = '--skip-git-repo-check';
const CODEX_RESUME_FLAG = 'resume';
const CODEX_REASONING_EFFORT_KEY = 'reasoning_effort';
const CODEX_YOLO_FLAG = '--dangerously-bypass-approvals-and-sandbox';
const OPENAI_API_KEY_ENV = 'OPENAI_API_KEY';
@@ -106,9 +99,6 @@ const TEXT_ENCODING = 'utf-8';
*/
const CODEX_CLI_TIMEOUT_MS = DEFAULT_TIMEOUT_MS;
const CODEX_FEATURE_GENERATION_BASE_TIMEOUT_MS = 300000; // 5 minutes for feature generation
const CONTEXT_WINDOW_256K = 256000;
const MAX_OUTPUT_32K = 32000;
const MAX_OUTPUT_16K = 16000;
const SYSTEM_PROMPT_SEPARATOR = '\n\n';
const CODEX_INSTRUCTIONS_DIR = '.codex';
const CODEX_INSTRUCTIONS_SECTION = 'Codex Project Instructions';
@@ -758,17 +748,14 @@ export class CodexProvider extends BaseProvider {
options.cwd,
codexSettings.sandboxMode !== 'danger-full-access'
);
const resolvedSandboxMode = sandboxCheck.enabled
? codexSettings.sandboxMode
: 'danger-full-access';
if (!sandboxCheck.enabled && sandboxCheck.message) {
console.warn(`[CodexProvider] ${sandboxCheck.message}`);
}
const searchEnabled =
codexSettings.enableWebSearch || resolveSearchEnabled(resolvedAllowedTools, restrictTools);
const outputSchemaPath = await writeOutputSchemaFile(options.cwd, options.outputFormat);
await writeOutputSchemaFile(options.cwd, options.outputFormat);
const imageBlocks = codexSettings.enableImages ? extractImageBlocks(options.prompt) : [];
const imagePaths = await writeImageFiles(options.cwd, imageBlocks);
await writeImageFiles(options.cwd, imageBlocks);
const approvalPolicy =
hasMcpServers && options.mcpAutoApproveTools !== undefined
? options.mcpAutoApproveTools
@@ -801,7 +788,7 @@ export class CodexProvider extends BaseProvider {
overrides.push({ key: 'features.web_search_request', value: true });
}
const configOverrides = buildConfigOverrides(overrides);
buildConfigOverrides(overrides);
const preExecArgs: string[] = [];
// Add additional directories with write access
@@ -1033,7 +1020,7 @@ export class CodexProvider extends BaseProvider {
async detectInstallation(): Promise<InstallationStatus> {
const cliPath = await findCodexCliPath();
const hasApiKey = Boolean(await resolveOpenAiApiKey());
const authIndicators = await getCodexAuthIndicators();
await getCodexAuthIndicators();
const installed = !!cliPath;
let version = '';
@@ -1045,7 +1032,7 @@ export class CodexProvider extends BaseProvider {
cwd: process.cwd(),
});
version = result.stdout.trim();
} catch (error) {
} catch {
version = '';
}
}

View File

@@ -85,10 +85,6 @@ interface SdkToolExecutionEndEvent extends SdkEvent {
};
}
interface SdkSessionIdleEvent extends SdkEvent {
type: 'session.idle';
}
interface SdkSessionErrorEvent extends SdkEvent {
type: 'session.error';
data: {

View File

@@ -69,6 +69,7 @@ interface CursorToolHandler<TArgs = unknown, TResult = unknown> {
* Registry of Cursor tool handlers
* Each handler knows how to normalize its specific tool call type
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- handler registry stores heterogeneous tool type parameters
const CURSOR_TOOL_HANDLERS: Record<string, CursorToolHandler<any, any>> = {
readToolCall: {
name: 'Read',
@@ -878,7 +879,7 @@ export class CursorProvider extends CliProvider {
logger.debug(`CursorProvider.executeQuery called with model: "${options.model}"`);
// Get effective permissions for this project
const effectivePermissions = await getEffectivePermissions(options.cwd || process.cwd());
await getEffectivePermissions(options.cwd || process.cwd());
// Debug: log raw events when AUTOMAKER_DEBUG_RAW_OUTPUT is enabled
const debugRawEvents =

View File

@@ -20,7 +20,6 @@ import type {
ProviderMessage,
InstallationStatus,
ModelDefinition,
ContentBlock,
} from './types.js';
import { validateBareModelId } from '@automaker/types';
import { GEMINI_MODEL_MAP, type GeminiAuthStatus } from '@automaker/types';

View File

@@ -16,8 +16,6 @@
import { ProviderFactory } from './provider-factory.js';
import type {
ProviderMessage,
ContentBlock,
ThinkingLevel,
ReasoningEffort,
ClaudeApiProfile,

View File

@@ -6,7 +6,7 @@ import type { Request, Response } from 'express';
import { AgentService } from '../../../services/agent-service.js';
import { createLogger } from '@automaker/utils';
import { getErrorMessage, logError } from '../common.js';
const logger = createLogger('Agent');
const _logger = createLogger('Agent');
export function createStartHandler(agentService: AgentService) {
return async (req: Request, res: Response): Promise<void> => {

View File

@@ -128,7 +128,7 @@ export function logAuthStatus(context: string): void {
*/
export function logError(error: unknown, context: string): void {
logger.error(`${context}:`);
logger.error('Error name:', (error as any)?.name);
logger.error('Error name:', (error as Error)?.name);
logger.error('Error message:', (error as Error)?.message);
logger.error('Error stack:', (error as Error)?.stack);
logger.error('Full error object:', JSON.stringify(error, Object.getOwnPropertyNames(error), 2));

View File

@@ -30,7 +30,7 @@ const DEFAULT_MAX_FEATURES = 50;
* Timeout for Codex models when generating features (5 minutes).
* Codex models are slower and need more time to generate 50+ features.
*/
const CODEX_FEATURE_GENERATION_TIMEOUT_MS = 300000; // 5 minutes
const _CODEX_FEATURE_GENERATION_TIMEOUT_MS = 300000; // 5 minutes
/**
* Type for extracted features JSON response

View File

@@ -29,7 +29,6 @@ import {
updateTechnologyStack,
updateRoadmapPhaseStatus,
type ImplementedFeature,
type RoadmapPhase,
} from '../../lib/xml-extractor.js';
import { getNotificationService } from '../../services/notification-service.js';

View File

@@ -6,7 +6,7 @@
*/
import type { EventEmitter } from '../../lib/events.js';
import type { Feature, BacklogPlanResult, BacklogChange, DependencyUpdate } from '@automaker/types';
import type { Feature, BacklogPlanResult } from '@automaker/types';
import {
DEFAULT_PHASE_MODELS,
isCursorModel,

View File

@@ -3,7 +3,7 @@
*/
import type { Request, Response } from 'express';
import type { BacklogPlanResult, BacklogChange, Feature } from '@automaker/types';
import type { BacklogPlanResult } from '@automaker/types';
import { FeatureLoader } from '../../../services/feature-loader.js';
import { clearBacklogPlan, getErrorMessage, logError, logger } from '../common.js';

View File

@@ -36,7 +36,7 @@ interface ExportRequest {
};
}
export function createExportHandler(featureLoader: FeatureLoader) {
export function createExportHandler(_featureLoader: FeatureLoader) {
const exportService = getFeatureExportService();
return async (req: Request, res: Response): Promise<void> => {

View File

@@ -34,7 +34,7 @@ export function createGenerateTitleHandler(
): (req: Request, res: Response) => Promise<void> {
return async (req: Request, res: Response): Promise<void> => {
try {
const { description, projectPath } = req.body as GenerateTitleRequestBody;
const { description } = req.body as GenerateTitleRequestBody;
if (!description || typeof description !== 'string') {
const response: GenerateTitleErrorResponse = {

View File

@@ -33,7 +33,7 @@ interface ConflictInfo {
hasConflict: boolean;
}
export function createImportHandler(featureLoader: FeatureLoader) {
export function createImportHandler(_featureLoader: FeatureLoader) {
const exportService = getFeatureExportService();
return async (req: Request, res: Response): Promise<void> => {

View File

@@ -35,9 +35,9 @@ export function createMkdirHandler() {
error: 'Path exists and is not a directory',
});
return;
} catch (statError: any) {
} catch (statError: unknown) {
// ENOENT means path doesn't exist - we should create it
if (statError.code !== 'ENOENT') {
if ((statError as NodeJS.ErrnoException).code !== 'ENOENT') {
// Some other error (could be ELOOP in parent path)
throw statError;
}
@@ -47,7 +47,7 @@ export function createMkdirHandler() {
await secureFs.mkdir(resolvedPath, { recursive: true });
res.json({ success: true });
} catch (error: any) {
} catch (error: unknown) {
// Path not allowed - return 403 Forbidden
if (error instanceof PathNotAllowedError) {
res.status(403).json({ success: false, error: getErrorMessage(error) });
@@ -55,7 +55,7 @@ export function createMkdirHandler() {
}
// Handle ELOOP specifically
if (error.code === 'ELOOP') {
if ((error as NodeJS.ErrnoException).code === 'ELOOP') {
logError(error, 'Create directory failed - symlink loop detected');
res.status(400).json({
success: false,

View File

@@ -10,7 +10,11 @@ import { getErrorMessage, logError } from '../common.js';
export function createResolveDirectoryHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { directoryName, sampleFiles, fileCount } = req.body as {
const {
directoryName,
sampleFiles,
fileCount: _fileCount,
} = req.body as {
directoryName: string;
sampleFiles?: string[];
fileCount?: number;

View File

@@ -11,10 +11,9 @@ import { getBoardDir } from '@automaker/platform';
export function createSaveBoardBackgroundHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { data, filename, mimeType, projectPath } = req.body as {
const { data, filename, projectPath } = req.body as {
data: string;
filename: string;
mimeType: string;
projectPath: string;
};

View File

@@ -12,10 +12,9 @@ import { sanitizeFilename } from '@automaker/utils';
export function createSaveImageHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { data, filename, mimeType, projectPath } = req.body as {
const { data, filename, projectPath } = req.body as {
data: string;
filename: string;
mimeType: string;
projectPath: string;
};

View File

@@ -5,7 +5,7 @@
import type { Request, Response } from 'express';
import * as secureFs from '../../../lib/secure-fs.js';
import path from 'path';
import { isPathAllowed, PathNotAllowedError, getAllowedRootDirectory } from '@automaker/platform';
import { isPathAllowed, getAllowedRootDirectory } from '@automaker/platform';
import { getErrorMessage, logError } from '../common.js';
export function createValidatePathHandler() {

View File

@@ -37,9 +37,12 @@ export function createGeminiRoutes(): Router {
const provider = new GeminiProvider();
const status = await provider.detectInstallation();
const authMethod =
(status as any).authMethod ||
(status.authenticated ? (status.hasApiKey ? 'api_key' : 'cli_login') : 'none');
// Derive authMethod from typed InstallationStatus fields
const authMethod = status.authenticated
? status.hasApiKey
? 'api_key'
: 'cli_login'
: 'none';
res.json({
success: true,
@@ -48,7 +51,7 @@ export function createGeminiRoutes(): Router {
path: status.path || null,
authenticated: status.authenticated || false,
authMethod,
hasCredentialsFile: (status as any).hasCredentialsFile || false,
hasCredentialsFile: false,
});
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';

View File

@@ -6,7 +6,6 @@ import type { Request, Response } from 'express';
import type { EventEmitter } from '../../../lib/events.js';
import type { IssueValidationEvent } from '@automaker/types';
import {
isValidationRunning,
getValidationStatus,
getRunningValidations,
abortValidation,
@@ -15,7 +14,6 @@ import {
logger,
} from './validation-common.js';
import {
readValidation,
getAllValidations,
getValidationWithFreshness,
deleteValidation,

View File

@@ -12,7 +12,7 @@ export function createProvidersHandler() {
// Get installation status from all providers
const statuses = await ProviderFactory.checkAllProviders();
const providers: Record<string, any> = {
const providers: Record<string, Record<string, unknown>> = {
anthropic: {
available: statuses.claude?.installed || false,
hasApiKey: !!process.env.ANTHROPIC_API_KEY,

View File

@@ -46,16 +46,14 @@ export function createUpdateGlobalHandler(settingsService: SettingsService) {
}
// Minimal debug logging to help diagnose accidental wipes.
const projectsLen = Array.isArray((updates as any).projects)
? (updates as any).projects.length
: undefined;
const trashedLen = Array.isArray((updates as any).trashedProjects)
? (updates as any).trashedProjects.length
const projectsLen = Array.isArray(updates.projects) ? updates.projects.length : undefined;
const trashedLen = Array.isArray(updates.trashedProjects)
? updates.trashedProjects.length
: undefined;
logger.info(
`[SERVER_SETTINGS_UPDATE] Request received: projects=${projectsLen ?? 'n/a'}, trashedProjects=${trashedLen ?? 'n/a'}, theme=${
(updates as any).theme ?? 'n/a'
}, localStorageMigrated=${(updates as any).localStorageMigrated ?? 'n/a'}`
updates.theme ?? 'n/a'
}, localStorageMigrated=${updates.localStorageMigrated ?? 'n/a'}`
);
// Get old settings to detect theme changes

View File

@@ -4,13 +4,9 @@
import type { Request, Response } from 'express';
import { getErrorMessage, logError } from '../common.js';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as fs from 'fs';
import * as path from 'path';
const execAsync = promisify(exec);
export function createAuthClaudeHandler() {
return async (_req: Request, res: Response): Promise<void> => {
try {

View File

@@ -4,13 +4,9 @@
import type { Request, Response } from 'express';
import { logError, getErrorMessage } from '../common.js';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as fs from 'fs';
import * as path from 'path';
const execAsync = promisify(exec);
export function createAuthOpencodeHandler() {
return async (_req: Request, res: Response): Promise<void> => {
try {

View File

@@ -10,9 +10,6 @@ import type { Request, Response } from 'express';
import { CopilotProvider } from '../../../providers/copilot-provider.js';
import { getErrorMessage, logError } from '../common.js';
import type { ModelDefinition } from '@automaker/types';
import { createLogger } from '@automaker/utils';
const logger = createLogger('CopilotModelsRoute');
// Singleton provider instance for caching
let providerInstance: CopilotProvider | null = null;

View File

@@ -14,9 +14,6 @@ import {
} from '../../../providers/opencode-provider.js';
import { getErrorMessage, logError } from '../common.js';
import type { ModelDefinition } from '@automaker/types';
import { createLogger } from '@automaker/utils';
const logger = createLogger('OpenCodeModelsRoute');
// Singleton provider instance for caching
let providerInstance: OpencodeProvider | null = null;

View File

@@ -151,7 +151,7 @@ export function createVerifyClaudeAuthHandler() {
AuthSessionManager.createSession(sessionId, authMethod || 'api_key', apiKey, 'anthropic');
// Create temporary environment override for SDK call
const cleanupEnv = createTempEnvOverride(authEnv);
const _cleanupEnv = createTempEnvOverride(authEnv);
// Run a minimal query to verify authentication
const stream = query({
@@ -194,8 +194,10 @@ export function createVerifyClaudeAuthHandler() {
}
// Check specifically for assistant messages with text content
if (msg.type === 'assistant' && (msg as any).message?.content) {
const content = (msg as any).message.content;
const msgRecord = msg as Record<string, unknown>;
const msgMessage = msgRecord.message as Record<string, unknown> | undefined;
if (msg.type === 'assistant' && msgMessage?.content) {
const content = msgMessage.content;
if (Array.isArray(content)) {
for (const block of content) {
if (block.type === 'text' && block.text) {

View File

@@ -5,7 +5,6 @@
import { randomBytes } from 'crypto';
import { createLogger } from '@automaker/utils';
import type { Request, Response, NextFunction } from 'express';
import { getTerminalService } from '../../services/terminal-service.js';
const logger = createLogger('Terminal');

View File

@@ -9,7 +9,6 @@ import {
generateToken,
addToken,
getTokenExpiryMs,
getErrorMessage,
} from '../common.js';
export function createAuthHandler() {

View File

@@ -31,8 +31,8 @@ export async function getTrackedBranches(projectPath: string): Promise<TrackedBr
const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
const data: BranchTrackingData = JSON.parse(content);
return data.branches || [];
} catch (error: any) {
if (error.code === 'ENOENT') {
} catch (error: unknown) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return [];
}
logger.warn('Failed to read tracked branches:', error);

View File

@@ -117,7 +117,7 @@ export function createCreatePRHandler() {
cwd: worktreePath,
env: execEnv,
});
} catch (error: unknown) {
} catch {
// If push fails, try with --set-upstream
try {
await execAsync(`git push --set-upstream origin ${branchName}`, {
@@ -195,7 +195,7 @@ export function createCreatePRHandler() {
}
}
}
} catch (error) {
} catch {
// Couldn't parse remotes - will try fallback
}
@@ -216,7 +216,7 @@ export function createCreatePRHandler() {
originOwner = owner;
repoUrl = `https://github.com/${owner}/${repo}`;
}
} catch (error) {
} catch {
// Failed to get repo URL from config
}
}

View File

@@ -51,7 +51,7 @@ export function createDeleteHandler() {
// Remove the worktree (using array arguments to prevent injection)
try {
await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
} catch (error) {
} catch {
// Try with prune if remove fails
await execGitCommand(['worktree', 'prune'], projectPath);
}

View File

@@ -64,31 +64,8 @@ export function createZaiRoutes(
router.post('/configure', async (req: Request, res: Response) => {
try {
const { apiToken, apiHost } = req.body;
if (apiToken !== undefined) {
// Set in-memory token
usageService.setApiToken(apiToken || '');
// Persist to credentials (deep merge happens in updateCredentials)
try {
await settingsService.updateCredentials({
apiKeys: { zai: apiToken || '' },
} as Parameters<typeof settingsService.updateCredentials>[0]);
logger.info('[configure] Saved z.ai API key to credentials');
} catch (persistError) {
logger.error('[configure] Failed to persist z.ai API key:', persistError);
}
}
if (apiHost) {
usageService.setApiHost(apiHost);
}
res.json({
success: true,
message: 'z.ai configuration updated',
isAvailable: usageService.isAvailable(),
});
const result = await usageService.configure({ apiToken, apiHost }, settingsService);
res.json(result);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
logger.error('Error configuring z.ai:', error);
@@ -100,50 +77,8 @@ export function createZaiRoutes(
router.post('/verify', async (req: Request, res: Response) => {
try {
const { apiKey } = req.body;
if (!apiKey || typeof apiKey !== 'string' || apiKey.trim().length === 0) {
res.json({
success: false,
authenticated: false,
error: 'Please provide an API key to test.',
});
return;
}
// Test the key by making a request to z.ai API
const quotaUrl =
process.env.Z_AI_QUOTA_URL ||
`${process.env.Z_AI_API_HOST ? `https://${process.env.Z_AI_API_HOST}` : 'https://api.z.ai'}/api/monitor/usage/quota/limit`;
logger.info(`[verify] Testing API key against: ${quotaUrl}`);
const response = await fetch(quotaUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey.trim()}`,
Accept: 'application/json',
},
});
if (response.ok) {
res.json({
success: true,
authenticated: true,
message: 'Connection successful! z.ai API responded.',
});
} else if (response.status === 401 || response.status === 403) {
res.json({
success: false,
authenticated: false,
error: 'Invalid API key. Please check your key and try again.',
});
} else {
res.json({
success: false,
authenticated: false,
error: `API request failed: ${response.status} ${response.statusText}`,
});
}
const result = await usageService.verifyApiKey(apiKey);
res.json(result);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
logger.error('Error verifying z.ai API key:', error);

View File

@@ -444,17 +444,11 @@ export class AgentExecutor {
callbacks: AgentExecutorCallbacks
): Promise<{ responseText: string; tasksCompleted: number }> {
const {
workDir,
featureId,
projectPath,
abortController,
branchName = null,
planningMode = 'skip',
provider,
effectiveBareModel,
credentials,
claudeCompatibleProvider,
mcpServers,
sdkOptions,
} = options;
let responseText = initialResponseText,

View File

@@ -15,11 +15,9 @@ import {
loadContextFiles,
createLogger,
classifyError,
getUserFriendlyErrorMessage,
} from '@automaker/utils';
import { ProviderFactory } from '../providers/provider-factory.js';
import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js';
import { PathNotAllowedError } from '@automaker/platform';
import type { SettingsService } from './settings-service.js';
import {
getAutoLoadClaudeMdSetting,

View File

@@ -158,10 +158,7 @@ export class AutoLoopCoordinator {
const projectState = this.autoLoopsByProject.get(worktreeKey);
if (!projectState) return;
const { projectPath, branchName } = projectState.config;
let iterationCount = 0;
while (projectState.isRunning && !projectState.abortController.signal.aborted) {
iterationCount++;
try {
const runningCount = await this.getRunningCountForWorktree(projectPath, branchName);
if (runningCount >= projectState.config.maxConcurrency) {

View File

@@ -10,7 +10,6 @@
*/
import path from 'path';
import type { Feature } from '@automaker/types';
import { createLogger } from '@automaker/utils';
import type { EventEmitter } from '../../lib/events.js';
import { TypedEventBus } from '../typed-event-bus.js';

View File

@@ -295,7 +295,6 @@ export class ClaudeUsageService {
}
// Don't fail if we have data - return it instead
// Check cleaned output since raw output has ANSI codes between words
// eslint-disable-next-line no-control-regex
const cleanedForCheck = output
.replace(/\x1B\[(\d+)C/g, (_m: string, n: string) => ' '.repeat(parseInt(n, 10)))
.replace(/\x1B\[[0-9;?]*[A-Za-z@]/g, '');
@@ -332,7 +331,6 @@ export class ClaudeUsageService {
// Convert cursor forward (ESC[nC) to spaces first to preserve word boundaries,
// then strip remaining ANSI sequences. Without this, the Claude CLI TUI output
// like "Current week (all models)" becomes "Currentweek(allmodels)".
// eslint-disable-next-line no-control-regex
const cleanOutput = output
.replace(/\x1B\[(\d+)C/g, (_match: string, n: string) => ' '.repeat(parseInt(n, 10)))
.replace(/\x1B\[[0-9;?]*[A-Za-z@]/g, '');
@@ -492,7 +490,6 @@ export class ClaudeUsageService {
// First, convert cursor movement sequences to whitespace to preserve word boundaries.
// The Claude CLI TUI uses ESC[nC (cursor forward) instead of actual spaces between words.
// Without this, "Current week (all models)" becomes "Currentweek(allmodels)" after stripping.
// eslint-disable-next-line no-control-regex
let clean = text
// Cursor forward (CSI n C): replace with n spaces to preserve word separation
.replace(/\x1B\[(\d+)C/g, (_match, n) => ' '.repeat(parseInt(n, 10)))

View File

@@ -246,7 +246,7 @@ class DevServerService {
// No process found on port, which is fine
}
}
} catch (error) {
} catch {
// Ignore errors - port might not have any process
logger.debug(`No process to kill on port ${port}`);
}

View File

@@ -13,12 +13,7 @@
import { createLogger } from '@automaker/utils';
import * as secureFs from '../lib/secure-fs.js';
import {
getEventHistoryDir,
getEventHistoryIndexPath,
getEventPath,
ensureEventHistoryDir,
} from '@automaker/platform';
import { getEventHistoryIndexPath, getEventPath, ensureEventHistoryDir } from '@automaker/platform';
import type {
StoredEvent,
StoredEventIndex,

View File

@@ -20,7 +20,6 @@ import type { TypedEventBus } from './typed-event-bus.js';
import type { ConcurrencyManager, RunningFeature } from './concurrency-manager.js';
import type { WorktreeResolver } from './worktree-resolver.js';
import type { SettingsService } from './settings-service.js';
import type { PipelineContext } from './pipeline-orchestrator.js';
import { pipelineService } from './pipeline-service.js';
// Re-export callback types from execution-types.ts for backward compatibility

View File

@@ -205,7 +205,6 @@ export class FeatureExportService {
importData: FeatureImport
): Promise<FeatureImportResult> {
const warnings: string[] = [];
const errors: string[] = [];
try {
// Extract feature from data (handle both raw Feature and wrapped FeatureExport)

View File

@@ -195,9 +195,10 @@ export class FeatureLoader {
}
// Read all feature directories
// secureFs.readdir returns Dirent[] but typed as generic; cast to access isDirectory()
const entries = (await secureFs.readdir(featuresDir, {
withFileTypes: true,
})) as any[];
})) as import('fs').Dirent[];
const featureDirs = entries.filter((entry) => entry.isDirectory());
// Load all features concurrently with automatic recovery from backups

View File

@@ -13,7 +13,7 @@ import { createLogger } from '@automaker/utils';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { execSync } from 'child_process';
import { execFileSync } from 'child_process';
const logger = createLogger('GeminiUsage');
@@ -26,6 +26,12 @@ const CODE_ASSIST_URL = 'https://cloudcode-pa.googleapis.com/v1internal:loadCode
// Google OAuth endpoints for token refresh
const GOOGLE_TOKEN_URL = 'https://oauth2.googleapis.com/token';
/** Default timeout for fetch requests in milliseconds */
const FETCH_TIMEOUT_MS = 10_000;
/** TTL for cached credentials in milliseconds (5 minutes) */
const CREDENTIALS_CACHE_TTL_MS = 5 * 60 * 1000;
export interface GeminiQuotaBucket {
/** Model ID this quota applies to */
modelId: string;
@@ -114,8 +120,11 @@ interface QuotaResponse {
*/
export class GeminiUsageService {
private cachedCredentials: OAuthCredentials | null = null;
private cachedCredentialsAt: number | null = null;
private cachedClientCredentials: OAuthClientCredentials | null = null;
private credentialsPath: string;
/** The actual path from which credentials were loaded (for write-back) */
private loadedCredentialsPath: string | null = null;
constructor() {
// Default credentials path for Gemini CLI
@@ -176,6 +185,7 @@ export class GeminiUsageService {
'Content-Type': 'application/json',
},
body: JSON.stringify({}),
signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
});
if (codeAssistResponse.ok) {
@@ -199,6 +209,7 @@ export class GeminiUsageService {
'Content-Type': 'application/json',
},
body: JSON.stringify(projectId ? { project: projectId } : {}),
signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
});
if (!response.ok) {
@@ -338,19 +349,46 @@ export class GeminiUsageService {
}
/**
* Load OAuth credentials from file
* Load OAuth credentials from file.
* Implements TTL-based cache invalidation and file mtime checks.
*/
private async loadCredentials(): Promise<OAuthCredentials | null> {
if (this.cachedCredentials) {
return this.cachedCredentials;
// Check if cached credentials are still valid
if (this.cachedCredentials && this.cachedCredentialsAt) {
const now = Date.now();
const cacheAge = now - this.cachedCredentialsAt;
if (cacheAge < CREDENTIALS_CACHE_TTL_MS) {
// Cache is within TTL - also check file mtime
const sourcePath = this.loadedCredentialsPath || this.credentialsPath;
try {
const stat = fs.statSync(sourcePath);
if (stat.mtimeMs <= this.cachedCredentialsAt) {
// File hasn't been modified since we cached - use cache
return this.cachedCredentials;
}
// File has been modified, fall through to re-read
logger.debug('[loadCredentials] File modified since cache, re-reading');
} catch {
// File doesn't exist or can't stat - use cache
return this.cachedCredentials;
}
} else {
// Cache TTL expired, discard
logger.debug('[loadCredentials] Cache TTL expired, re-reading');
}
// Invalidate cached credentials
this.cachedCredentials = null;
this.cachedCredentialsAt = null;
}
// Check multiple possible paths
const possiblePaths = [
// Build unique possible paths (deduplicate)
const rawPaths = [
this.credentialsPath,
path.join(os.homedir(), '.gemini', 'oauth_creds.json'),
path.join(os.homedir(), '.config', 'gemini', 'oauth_creds.json'),
];
const possiblePaths = [...new Set(rawPaths)];
for (const credPath of possiblePaths) {
try {
@@ -361,6 +399,8 @@ export class GeminiUsageService {
// Handle different credential formats
if (creds.access_token || creds.refresh_token) {
this.cachedCredentials = creds;
this.cachedCredentialsAt = Date.now();
this.loadedCredentialsPath = credPath;
logger.info('[loadCredentials] Loaded from:', credPath);
return creds;
}
@@ -372,6 +412,8 @@ export class GeminiUsageService {
client_id: clientCreds.client_id,
client_secret: clientCreds.client_secret,
};
this.cachedCredentialsAt = Date.now();
this.loadedCredentialsPath = credPath;
return this.cachedCredentials;
}
}
@@ -387,14 +429,21 @@ export class GeminiUsageService {
* Find the Gemini CLI binary path
*/
private findGeminiBinaryPath(): string | null {
// Try 'which' on Unix-like systems, 'where' on Windows
const whichCmd = process.platform === 'win32' ? 'where' : 'which';
try {
// Try 'which' on Unix-like systems
const whichResult = execSync('which gemini 2>/dev/null', { encoding: 'utf8' }).trim();
if (whichResult && fs.existsSync(whichResult)) {
return whichResult;
const whichResult = execFileSync(whichCmd, ['gemini'], {
encoding: 'utf8',
timeout: 5000,
stdio: ['pipe', 'pipe', 'pipe'],
}).trim();
// 'where' on Windows may return multiple lines; take the first
const firstLine = whichResult.split('\n')[0]?.trim();
if (firstLine && fs.existsSync(firstLine)) {
return firstLine;
}
} catch {
// Ignore errors from 'which'
// Ignore errors from 'which'/'where'
}
// Check common installation paths
@@ -554,27 +603,33 @@ export class GeminiUsageService {
}
}
// Try finding oauth2.js by searching in node_modules
try {
const searchResult = execSync(
`find ${baseDir}/.. -name "oauth2.js" -path "*gemini*" -path "*code_assist*" 2>/dev/null | head -1`,
{ encoding: 'utf8', timeout: 5000 }
).trim();
// Try finding oauth2.js by searching in node_modules (POSIX only)
if (process.platform !== 'win32') {
try {
const searchBase = path.resolve(baseDir, '..');
const searchResult = execFileSync(
'find',
[searchBase, '-name', 'oauth2.js', '-path', '*gemini*', '-path', '*code_assist*'],
{ encoding: 'utf8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] }
)
.trim()
.split('\n')[0]; // Take first result
if (searchResult && fs.existsSync(searchResult)) {
logger.debug('[extractOAuthClientCredentials] Found via search:', searchResult);
const content = fs.readFileSync(searchResult, 'utf8');
const creds = this.parseOAuthCredentialsFromSource(content);
if (creds) {
this.cachedClientCredentials = creds;
logger.info(
'[extractOAuthClientCredentials] Extracted credentials from CLI (via search)'
);
return creds;
if (searchResult && fs.existsSync(searchResult)) {
logger.debug('[extractOAuthClientCredentials] Found via search:', searchResult);
const content = fs.readFileSync(searchResult, 'utf8');
const creds = this.parseOAuthCredentialsFromSource(content);
if (creds) {
this.cachedClientCredentials = creds;
logger.info(
'[extractOAuthClientCredentials] Extracted credentials from CLI (via search)'
);
return creds;
}
}
} catch {
// Ignore search errors
}
} catch {
// Ignore search errors
}
logger.warn('[extractOAuthClientCredentials] Could not extract credentials from CLI');
@@ -669,6 +724,7 @@ export class GeminiUsageService {
refresh_token: creds.refresh_token,
grant_type: 'refresh_token',
}),
signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
});
if (response.ok) {
@@ -685,13 +741,12 @@ export class GeminiUsageService {
access_token: newAccessToken,
expiry_date: Date.now() + expiresIn * 1000,
};
this.cachedCredentialsAt = Date.now();
// Save back to file
// Save back to the file the credentials were loaded from
const writePath = this.loadedCredentialsPath || this.credentialsPath;
try {
fs.writeFileSync(
this.credentialsPath,
JSON.stringify(this.cachedCredentials, null, 2)
);
fs.writeFileSync(writePath, JSON.stringify(this.cachedCredentials, null, 2));
} catch (e) {
logger.debug('[getValidAccessToken] Could not save refreshed token:', e);
}
@@ -743,6 +798,7 @@ export class GeminiUsageService {
*/
clearCache(): void {
  // Drop all memoized credential state so the next lookup re-reads from disk.
  this.cachedClientCredentials = null;
  this.cachedCredentialsAt = null;
  this.cachedCredentials = null;
}
}

View File

@@ -27,7 +27,6 @@ import type {
} from '@automaker/types';
import { DEFAULT_IDEATION_CONTEXT_SOURCES } from '@automaker/types';
import {
getIdeationDir,
getIdeasDir,
getIdeaDir,
getIdeaPath,
@@ -407,7 +406,9 @@ export class IdeationService {
return [];
}
const entries = (await secureFs.readdir(ideasDir, { withFileTypes: true })) as any[];
const entries = (await secureFs.readdir(ideasDir, {
withFileTypes: true,
})) as import('fs').Dirent[];
const ideaDirs = entries.filter((entry) => entry.isDirectory());
const ideas: Idea[] = [];
@@ -855,15 +856,26 @@ ${contextSection}${existingWorkSection}`;
}
return parsed
.map((item: any, index: number) => ({
id: this.generateId('sug'),
category,
title: item.title || `Suggestion ${index + 1}`,
description: item.description || '',
rationale: item.rationale || '',
priority: item.priority || 'medium',
relatedFiles: item.relatedFiles || [],
}))
.map(
(
item: {
title?: string;
description?: string;
rationale?: string;
priority?: 'low' | 'medium' | 'high';
relatedFiles?: string[];
},
index: number
) => ({
id: this.generateId('sug'),
category,
title: item.title || `Suggestion ${index + 1}`,
description: item.description || '',
rationale: item.rationale || '',
priority: item.priority || ('medium' as const),
relatedFiles: item.relatedFiles || [],
})
)
.slice(0, count);
} catch (error) {
logger.warn('Failed to parse JSON response:', error);
@@ -1705,7 +1717,9 @@ ${contextSection}${existingWorkSection}`;
const results: AnalysisFileInfo[] = [];
try {
const entries = (await secureFs.readdir(dirPath, { withFileTypes: true })) as any[];
const entries = (await secureFs.readdir(dirPath, {
withFileTypes: true,
})) as import('fs').Dirent[];
for (const entry of entries) {
if (entry.isDirectory()) {

View File

@@ -250,6 +250,14 @@ export class RecoveryService {
async resumeInterruptedFeatures(projectPath: string): Promise<void> {
const featuresDir = getFeaturesDir(projectPath);
try {
// Load execution state to find features that were running before restart.
// This is critical because reconcileAllFeatureStates() runs at server startup
// and resets in_progress/interrupted/pipeline_* features to ready/backlog
// BEFORE the UI connects and calls this method. Without checking execution state,
// we would find no features to resume since their statuses have already been reset.
const executionState = await this.loadExecutionState(projectPath);
const previouslyRunningIds = new Set(executionState.runningFeatureIds ?? []);
const entries = await secureFs.readdir(featuresDir, { withFileTypes: true });
const featuresWithContext: Feature[] = [];
const featuresWithoutContext: Feature[] = [];
@@ -263,18 +271,37 @@ export class RecoveryService {
logRecoveryWarning(result, `Feature ${entry.name}`, logger);
const feature = result.data;
if (!feature) continue;
if (
// Check if the feature should be resumed:
// 1. Features still in active states (in_progress, pipeline_*) - not yet reconciled
// 2. Features in interrupted state - explicitly marked for resume
// 3. Features that were previously running (from execution state) and are now
// in ready/backlog due to reconciliation resetting their status
const isActiveState =
feature.status === 'in_progress' ||
(feature.status && feature.status.startsWith('pipeline_'))
) {
(await this.contextExists(projectPath, feature.id))
? featuresWithContext.push(feature)
: featuresWithoutContext.push(feature);
feature.status === 'interrupted' ||
(feature.status && feature.status.startsWith('pipeline_'));
const wasReconciledFromRunning =
previouslyRunningIds.has(feature.id) &&
(feature.status === 'ready' || feature.status === 'backlog');
if (isActiveState || wasReconciledFromRunning) {
if (await this.contextExists(projectPath, feature.id)) {
featuresWithContext.push(feature);
} else {
featuresWithoutContext.push(feature);
}
}
}
}
const allInterruptedFeatures = [...featuresWithContext, ...featuresWithoutContext];
if (allInterruptedFeatures.length === 0) return;
logger.info(
`[resumeInterruptedFeatures] Found ${allInterruptedFeatures.length} feature(s) to resume ` +
`(${previouslyRunningIds.size} from execution state, statuses: ${allInterruptedFeatures.map((f) => `${f.id}=${f.status}`).join(', ')})`
);
this.eventBus.emitAutoModeEvent('auto_mode_resuming_features', {
message: `Resuming ${allInterruptedFeatures.length} interrupted feature(s)`,
projectPath,
@@ -295,6 +322,10 @@ export class RecoveryService {
/* continue */
}
}
// Clear execution state after successful resume to prevent
// re-resuming the same features on subsequent calls
await this.clearExecutionState(projectPath);
} catch {
/* ignore */
}

View File

@@ -1,7 +1,12 @@
import { createLogger } from '@automaker/utils';
import { createEventEmitter } from '../lib/events.js';
import type { SettingsService } from './settings-service.js';
const logger = createLogger('ZaiUsage');
/** Default timeout for fetch requests in milliseconds */
const FETCH_TIMEOUT_MS = 10_000;
/**
* z.ai quota limit entry from the API
*/
@@ -112,6 +117,21 @@ interface ZaiApiResponse {
message?: string;
}
/** Result from configure method */
interface ConfigureResult {
// True when the configuration update was applied.
success: boolean;
// Human-readable summary of the configuration change.
message: string;
// Whether a usable z.ai token is present after the update (see isAvailable()).
isAvailable: boolean;
}
/** Result from verifyApiKey method */
interface VerifyResult {
// True when the verification request completed and the key was accepted.
success: boolean;
// True when the z.ai API accepted the key (HTTP 2xx response).
authenticated: boolean;
// Optional success message set when verification passed.
message?: string;
// Error description set when verification failed.
error?: string;
}
/**
* z.ai Usage Service
*
@@ -162,16 +182,163 @@ export class ZaiUsageService {
return Boolean(token && token.length > 0);
}
/**
 * Configure z.ai API token and host.
 *
 * Updates the in-memory token/host, persists the token through the provided
 * settings service (best-effort), and emits a `notification:created` event
 * reflecting the resulting availability state.
 *
 * @param options - Optional new API token and/or API host.
 * @param settingsService - Service used to persist the token to credentials.
 * @returns A ConfigureResult describing the applied configuration.
 */
async configure(
  options: { apiToken?: string; apiHost?: string },
  settingsService: SettingsService
): Promise<ConfigureResult> {
  const notifier = createEventEmitter();
  const { apiToken, apiHost } = options;

  if (apiToken !== undefined) {
    const tokenValue = apiToken || '';
    // Update in-memory state first so isAvailable() below reflects the new token.
    this.setApiToken(tokenValue);

    // Best-effort persistence: a failure is logged but does not fail configure().
    try {
      const patch = {
        apiKeys: { zai: tokenValue },
      } as Parameters<typeof settingsService.updateCredentials>[0];
      await settingsService.updateCredentials(patch);
      logger.info('[configure] Saved z.ai API key to credentials');
    } catch (err) {
      logger.error('[configure] Failed to persist z.ai API key:', err);
    }
  }

  if (apiHost) {
    this.setApiHost(apiHost);
  }

  const outcome: ConfigureResult = {
    success: true,
    message: 'z.ai configuration updated',
    isAvailable: this.isAvailable(),
  };

  notifier.emit('notification:created', {
    type: 'zai.configured',
    success: outcome.success,
    isAvailable: outcome.isAvailable,
  });

  return outcome;
}
/**
 * Verify an API key without storing it.
 * Makes a test request to the z.ai quota URL with the given key.
 *
 * @param apiKey - Candidate key to test; may be undefined or blank.
 * @returns A VerifyResult describing whether the key was accepted.
 */
async verifyApiKey(apiKey: string | undefined): Promise<VerifyResult> {
  const emitter = createEventEmitter();

  if (!apiKey || typeof apiKey !== 'string' || apiKey.trim().length === 0) {
    return {
      success: false,
      authenticated: false,
      error: 'Please provide an API key to test.',
    };
  }

  // URL resolution: explicit quota URL override, then API host, then default host.
  const quotaUrl =
    process.env.Z_AI_QUOTA_URL ||
    `${process.env.Z_AI_API_HOST ? `https://${process.env.Z_AI_API_HOST}` : 'https://api.z.ai'}/api/monitor/usage/quota/limit`;

  logger.info(`[verify] Testing API key against: ${quotaUrl}`);

  try {
    const response = await fetch(quotaUrl, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${apiKey.trim()}`,
        Accept: 'application/json',
      },
      signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
    });

    let result: VerifyResult;
    if (response.ok) {
      result = {
        success: true,
        authenticated: true,
        message: 'Connection successful! z.ai API responded.',
      };
    } else if (response.status === 401 || response.status === 403) {
      result = {
        success: false,
        authenticated: false,
        error: 'Invalid API key. Please check your key and try again.',
      };
    } else {
      result = {
        success: false,
        authenticated: false,
        error: `API request failed: ${response.status} ${response.statusText}`,
      };
    }

    emitter.emit('notification:created', {
      type: 'zai.verify.result',
      success: result.success,
      authenticated: result.authenticated,
    });
    return result;
  } catch (error) {
    // BUGFIX: AbortSignal.timeout() rejects with a DOMException named
    // 'TimeoutError', not 'AbortError' — the previous check never matched,
    // so timeouts fell through to the generic "Network error" branch.
    // Check both names to also cover manual aborts.
    if (
      error instanceof Error &&
      (error.name === 'TimeoutError' || error.name === 'AbortError')
    ) {
      const result: VerifyResult = {
        success: false,
        authenticated: false,
        error: 'Request timed out. The z.ai API did not respond in time.',
      };
      emitter.emit('notification:created', {
        type: 'zai.verify.result',
        success: false,
        error: 'timeout',
      });
      return result;
    }

    const message = error instanceof Error ? error.message : 'Unknown error';
    logger.error('Error verifying z.ai API key:', error);
    emitter.emit('notification:created', {
      type: 'zai.verify.result',
      success: false,
      error: message,
    });
    return {
      success: false,
      authenticated: false,
      error: `Network error: ${message}`,
    };
  }
}
/**
* Fetch usage data from z.ai API
*/
async fetchUsageData(): Promise<ZaiUsageData> {
logger.info('[fetchUsageData] Starting...');
const emitter = createEventEmitter();
emitter.emit('notification:created', { type: 'zai.usage.start' });
const token = this.getApiToken();
if (!token) {
logger.error('[fetchUsageData] No API token configured');
throw new Error('z.ai API token not configured. Set Z_AI_API_KEY environment variable.');
const error = new Error(
'z.ai API token not configured. Set Z_AI_API_KEY environment variable.'
);
emitter.emit('notification:created', {
type: 'zai.usage.error',
error: error.message,
});
throw error;
}
const quotaUrl =
@@ -180,31 +347,68 @@ export class ZaiUsageService {
logger.info(`[fetchUsageData] Fetching from: ${quotaUrl}`);
try {
const response = await fetch(quotaUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${token}`,
Accept: 'application/json',
},
});
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
if (!response.ok) {
logger.error(`[fetchUsageData] HTTP ${response.status}: ${response.statusText}`);
throw new Error(`z.ai API request failed: ${response.status} ${response.statusText}`);
try {
const response = await fetch(quotaUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${token}`,
Accept: 'application/json',
},
signal: controller.signal,
});
clearTimeout(timeoutId);
if (!response.ok) {
logger.error(`[fetchUsageData] HTTP ${response.status}: ${response.statusText}`);
throw new Error(`z.ai API request failed: ${response.status} ${response.statusText}`);
}
const data = (await response.json()) as unknown as ZaiApiResponse;
logger.info('[fetchUsageData] Response received:', JSON.stringify(data, null, 2));
const result = this.parseApiResponse(data);
emitter.emit('notification:created', {
type: 'zai.usage.success',
data: result,
});
return result;
} finally {
clearTimeout(timeoutId);
}
} catch (error) {
// Handle abort/timeout errors
if (error instanceof Error && error.name === 'AbortError') {
const timeoutError = new Error(`z.ai API request timed out after ${FETCH_TIMEOUT_MS}ms`);
emitter.emit('notification:created', {
type: 'zai.usage.error',
error: timeoutError.message,
});
throw timeoutError;
}
const data = (await response.json()) as unknown as ZaiApiResponse;
logger.info('[fetchUsageData] Response received:', JSON.stringify(data, null, 2));
return this.parseApiResponse(data);
} catch (error) {
if (error instanceof Error && error.message.includes('z.ai API')) {
emitter.emit('notification:created', {
type: 'zai.usage.error',
error: error.message,
});
throw error;
}
logger.error('[fetchUsageData] Failed to fetch:', error);
throw new Error(
const fetchError = new Error(
`Failed to fetch z.ai usage data: ${error instanceof Error ? error.message : String(error)}`
);
emitter.emit('notification:created', {
type: 'zai.usage.error',
error: fetchError.message,
});
throw fetchError;
}
}

View File

@@ -5,7 +5,7 @@
* across all providers (Claude, Codex, Cursor)
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { describe, it, expect } from 'vitest';
import {
detectCli,
detectAllCLis,
@@ -270,7 +270,7 @@ describe('Error Recovery Tests', () => {
expect(results).toHaveProperty('cursor');
// Should provide error information for failures
Object.entries(results).forEach(([provider, result]) => {
Object.entries(results).forEach(([_provider, result]) => {
if (!result.detected && result.issues.length > 0) {
expect(result.issues.length).toBeGreaterThan(0);
expect(result.issues[0]).toBeTruthy();