mirror of
https://github.com/AutoMaker-Org/automaker.git
synced 2026-02-02 20:43:36 +00:00
feat: implement modular provider architecture with Codex CLI support
Implements a flexible provider pattern that supports both the Claude Agent SDK and the OpenAI Codex CLI, enabling future expansion to other AI providers (Cursor, OpenCode, etc.) with minimal changes.

## Architecture Changes

### New Provider System
- Created a provider abstraction layer with a `BaseProvider` interface
- Model-based routing: the model prefix determines the provider (see the sketch after this commit message)
  - `gpt-*`, `o*` → CodexProvider (subprocess CLI)
  - `claude-*`, `opus/sonnet/haiku` → ClaudeProvider (SDK)
- Providers implement a common `ExecuteOptions` interface

### New Files Created
- `providers/types.ts` - Shared interfaces (ExecuteOptions, ProviderMessage, etc.)
- `providers/base-provider.ts` - Abstract base class
- `providers/claude-provider.ts` - Claude Agent SDK wrapper
- `providers/codex-provider.ts` - Codex CLI subprocess executor
- `providers/codex-cli-detector.ts` - Installation & auth detection
- `providers/codex-config-manager.ts` - TOML config management
- `providers/provider-factory.ts` - Model-based provider routing
- `lib/subprocess-manager.ts` - Reusable subprocess utilities

## Features Implemented

### Codex CLI Integration
- Spawns the Codex CLI as a subprocess with JSONL output
- Converts Codex events to a Claude SDK-compatible format
- Supports both `codex login` and OPENAI_API_KEY auth methods
- Handles: reasoning, messages, commands, todos, file changes
- Extracts text from content blocks for the non-vision CLI

### Conversation History
- Added `conversationHistory` support to ExecuteOptions
- ClaudeProvider: yields previous messages to the SDK
- CodexProvider: prepends history as text context
- Follow-up prompts maintain full conversation context

### Image Upload Support
- Images embedded as base64 for vision models
- Image paths appended to prompt text for Read tool access
- Auto-mode: copies images to the feature folder
- Follow-up: combines original + new images
- Updates feature.json with image metadata

### Session Model Persistence
- Added a `model` field to Session and SessionMetadata
- Sessions remember their model preference across interactions
- API endpoints accept a model parameter
- Auto-mode respects the feature's model setting

## Modified Files

### Services
- `agent-service.ts`:
  - Added conversation history building
  - Uses ProviderFactory instead of direct SDK calls
  - Appends image paths to prompts
  - Added model parameter and persistence
- `auto-mode-service.ts`:
  - Removed the OpenAI model block restriction
  - Uses ProviderFactory for all models
  - Added image support in buildFeaturePrompt
  - Follow-up: loads context, copies images, updates feature.json
  - Returns to waiting_approval after follow-up

### Routes
- `agent.ts`: Added model parameter to the /send endpoint
- `sessions.ts`: Added model field to create/update
- `models.ts`: Added Codex models (gpt-5.2, gpt-5.1-codex*)

### Configuration
- `.env.example`: Added OPENAI_API_KEY and CODEX_CLI_PATH
- `.gitignore`: Added provider-specific ignores

## Bug Fixes
- Fixed image path resolution (relative → absolute)
- Fixed Codex empty prompt when images are attached
- Fixed follow-up status management (in_progress → waiting_approval)
- Fixed follow-up images not appearing in prompt text
- Removed OpenAI model restrictions in auto-mode

## Testing Notes
- Codex CLI authentication verified with both methods
- Image uploads work for both Claude (vision) and Codex (Read tool)
- Follow-up prompts maintain full context
- Conversation history persists across turns
- Model switching works per session

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
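The `provider-factory.ts` internals are not shown on this page; a minimal sketch of the prefix-based routing described above might look like the following. The method name `getProviderForModel`, the constructor signatures, and the ordering of the checks are assumptions for illustration, not the actual implementation.

```typescript
// Hypothetical sketch of providers/provider-factory.ts routing logic.
// Only the prefix rules (gpt-*/o* -> Codex, claude-*/opus/sonnet/haiku -> Claude)
// come from the commit message; everything else here is assumed.
import { ClaudeProvider } from "./claude-provider.js";
import { CodexProvider } from "./codex-provider.js";
import type { BaseProvider } from "./base-provider.js";

export class ProviderFactory {
  static getProviderForModel(model: string): BaseProvider {
    const normalized = model.toLowerCase();

    // Check Claude aliases first so the "o*" rule does not swallow "opus"
    // (resolving that overlap this way is an assumption on our part).
    if (
      normalized.startsWith("claude-") ||
      ["opus", "sonnet", "haiku"].some((alias) => normalized.startsWith(alias))
    ) {
      return new ClaudeProvider();
    }

    // gpt-* and o-series models route to the Codex CLI subprocess provider.
    if (normalized.startsWith("gpt-") || normalized.startsWith("o")) {
      return new CodexProvider();
    }

    throw new Error(`No provider registered for model: ${model}`);
  }
}
```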
apps/server/src/providers/claude-provider.ts (new file, 202 lines added)
@@ -0,0 +1,202 @@
/**
 * Claude Provider - Executes queries using Claude Agent SDK
 *
 * Wraps the @anthropic-ai/claude-agent-sdk for seamless integration
 * with the provider architecture.
 */

import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
import { BaseProvider } from "./base-provider.js";
import type {
  ExecuteOptions,
  ProviderMessage,
  InstallationStatus,
  ModelDefinition,
} from "./types.js";

export class ClaudeProvider extends BaseProvider {
  getName(): string {
    return "claude";
  }

  /**
   * Execute a query using Claude Agent SDK
   */
  async *executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage> {
    const {
      prompt,
      model,
      cwd,
      systemPrompt,
      maxTurns = 20,
      allowedTools,
      abortController,
      conversationHistory,
    } = options;

    // Build Claude SDK options
    const sdkOptions: Options = {
      model,
      systemPrompt,
      maxTurns,
      cwd,
      allowedTools: allowedTools || [
        "Read",
        "Write",
        "Edit",
        "Glob",
        "Grep",
        "Bash",
        "WebSearch",
        "WebFetch",
      ],
      permissionMode: "acceptEdits",
      sandbox: {
        enabled: true,
        autoAllowBashIfSandboxed: true,
      },
      abortController,
    };

    // Build prompt payload with conversation history
    let promptPayload: string | AsyncGenerator<any, void, unknown>;

    if (conversationHistory && conversationHistory.length > 0) {
      // Multi-turn conversation with history
      promptPayload = (async function* () {
        // Yield all previous messages first
        for (const historyMsg of conversationHistory) {
          yield {
            type: historyMsg.role,
            session_id: "",
            message: {
              role: historyMsg.role,
              content: Array.isArray(historyMsg.content)
                ? historyMsg.content
                : [{ type: "text", text: historyMsg.content }],
            },
            parent_tool_use_id: null,
          };
        }

        // Yield current prompt
        yield {
          type: "user" as const,
          session_id: "",
          message: {
            role: "user" as const,
            content: Array.isArray(prompt)
              ? prompt
              : [{ type: "text", text: prompt }],
          },
          parent_tool_use_id: null,
        };
      })();
    } else if (Array.isArray(prompt)) {
      // Multi-part prompt (with images) - no history
      promptPayload = (async function* () {
        yield {
          type: "user" as const,
          session_id: "",
          message: {
            role: "user" as const,
            content: prompt,
          },
          parent_tool_use_id: null,
        };
      })();
    } else {
      // Simple text prompt - no history
      promptPayload = prompt;
    }

    // Execute via Claude Agent SDK
    const stream = query({ prompt: promptPayload, options: sdkOptions });

    // Stream messages directly - they're already in the correct format
    for await (const msg of stream) {
      yield msg as ProviderMessage;
    }
  }

  /**
   * Detect Claude SDK installation (always available via npm)
   */
  async detectInstallation(): Promise<InstallationStatus> {
    // Claude SDK is always available since it's a dependency
    const hasApiKey =
      !!process.env.ANTHROPIC_API_KEY || !!process.env.CLAUDE_CODE_OAUTH_TOKEN;

    return {
      installed: true,
      method: "sdk",
      hasApiKey,
      authenticated: hasApiKey,
    };
  }

  /**
   * Get available Claude models
   */
  getAvailableModels(): ModelDefinition[] {
    return [
      {
        id: "claude-opus-4-5-20251101",
        name: "Claude Opus 4.5",
        modelString: "claude-opus-4-5-20251101",
        provider: "anthropic",
        description: "Most capable Claude model",
        contextWindow: 200000,
        maxOutputTokens: 16000,
        supportsVision: true,
        supportsTools: true,
        tier: "premium",
        default: true,
      },
      {
        id: "claude-sonnet-4-20250514",
        name: "Claude Sonnet 4",
        modelString: "claude-sonnet-4-20250514",
        provider: "anthropic",
        description: "Balanced performance and cost",
        contextWindow: 200000,
        maxOutputTokens: 16000,
        supportsVision: true,
        supportsTools: true,
        tier: "standard",
      },
      {
        id: "claude-3-5-sonnet-20241022",
        name: "Claude 3.5 Sonnet",
        modelString: "claude-3-5-sonnet-20241022",
        provider: "anthropic",
        description: "Fast and capable",
        contextWindow: 200000,
        maxOutputTokens: 8000,
        supportsVision: true,
        supportsTools: true,
        tier: "standard",
      },
      {
        id: "claude-3-5-haiku-20241022",
        name: "Claude 3.5 Haiku",
        modelString: "claude-3-5-haiku-20241022",
        provider: "anthropic",
        description: "Fastest Claude model",
        contextWindow: 200000,
        maxOutputTokens: 8000,
        supportsVision: true,
        supportsTools: true,
        tier: "basic",
      },
    ];
  }

  /**
   * Check if the provider supports a specific feature
   */
  supportsFeature(feature: string): boolean {
    const supportedFeatures = ["tools", "text", "vision", "thinking"];
    return supportedFeatures.includes(feature);
  }
}
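For context, a minimal usage sketch of this provider follows. The commit message says callers go through ProviderFactory via `agent-service.ts`, so this direct call site is purely illustrative; the prompt text, model choice, and the subset of ExecuteOptions fields used here are assumptions based on the destructuring shown in `executeQuery` above.

```typescript
// Hypothetical usage sketch; not the actual agent-service.ts call site.
import { ClaudeProvider } from "./providers/claude-provider.js";

async function runExample(): Promise<void> {
  const provider = new ClaudeProvider();

  // Fields mirror the ExecuteOptions destructured in executeQuery above.
  const stream = provider.executeQuery({
    prompt: "Summarize the TODOs in this repository.",
    model: "claude-sonnet-4-20250514",
    cwd: process.cwd(),
    systemPrompt: "You are a concise coding assistant.",
    maxTurns: 5,
    abortController: new AbortController(),
  });

  for await (const msg of stream) {
    // Each ProviderMessage is passed through from the Claude Agent SDK.
    console.log(msg.type);
  }
}

runExample().catch(console.error);
```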