Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-01 20:23:36 +00:00)
feat: implement modular provider architecture with Codex CLI support
Implements a flexible provider pattern that supports both the Claude Agent SDK and the OpenAI Codex CLI, enabling future expansion to other AI providers (Cursor, OpenCode, etc.) with minimal changes.

## Architecture Changes

### New Provider System
- Created provider abstraction layer with BaseProvider interface
- Model-based routing: the model prefix determines the provider
  - `gpt-*`, `o*` → CodexProvider (subprocess CLI)
  - `claude-*`, `opus/sonnet/haiku` → ClaudeProvider (SDK)
- Providers implement a common ExecuteOptions interface

### New Files Created
- `providers/types.ts` - Shared interfaces (ExecuteOptions, ProviderMessage, etc.)
- `providers/base-provider.ts` - Abstract base class
- `providers/claude-provider.ts` - Claude Agent SDK wrapper
- `providers/codex-provider.ts` - Codex CLI subprocess executor
- `providers/codex-cli-detector.ts` - Installation & auth detection
- `providers/codex-config-manager.ts` - TOML config management
- `providers/provider-factory.ts` - Model-based provider routing
- `lib/subprocess-manager.ts` - Reusable subprocess utilities

## Features Implemented

### Codex CLI Integration
- Spawns the Codex CLI as a subprocess with JSONL output
- Converts Codex events to a Claude SDK-compatible format
- Supports both `codex login` and OPENAI_API_KEY auth methods
- Handles: reasoning, messages, commands, todos, file changes
- Extracts text from content blocks for the non-vision CLI

### Conversation History
- Added conversationHistory support to ExecuteOptions
- ClaudeProvider: yields previous messages to the SDK
- CodexProvider: prepends history as text context
- Follow-up prompts maintain full conversation context

### Image Upload Support
- Images embedded as base64 for vision models
- Image paths appended to prompt text for Read tool access
- Auto-mode: copies images to the feature folder
- Follow-up: combines original + new images
- Updates feature.json with image metadata

### Session Model Persistence
- Added `model` field to Session and SessionMetadata
- Sessions remember the model preference across interactions
- API endpoints accept a model parameter
- Auto-mode respects the feature's model setting

## Modified Files

### Services
- `agent-service.ts`:
  - Added conversation history building
  - Uses ProviderFactory instead of direct SDK calls
  - Appends image paths to prompts
  - Added model parameter and persistence
- `auto-mode-service.ts`:
  - Removed the OpenAI model block restriction
  - Uses ProviderFactory for all models
  - Added image support in buildFeaturePrompt
  - Follow-up: loads context, copies images, updates feature.json
  - Returns to waiting_approval after follow-up

### Routes
- `agent.ts`: Added model parameter to the /send endpoint
- `sessions.ts`: Added model field to create/update
- `models.ts`: Added Codex models (gpt-5.2, gpt-5.1-codex*)

### Configuration
- `.env.example`: Added OPENAI_API_KEY and CODEX_CLI_PATH
- `.gitignore`: Added provider-specific ignores

## Bug Fixes
- Fixed image path resolution (relative → absolute)
- Fixed Codex empty prompt when images are attached
- Fixed follow-up status management (in_progress → waiting_approval)
- Fixed follow-up images not appearing in prompt text
- Removed OpenAI model restrictions in auto-mode

## Testing Notes
- Codex CLI authentication verified with both methods
- Image uploads work for both Claude (vision) and Codex (Read tool)
- Follow-up prompts maintain full context
- Conversation history persists across turns
- Model switching works per-session

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
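For orientation, here is a minimal sketch of the shared provider contract described above. The real definitions live in `providers/types.ts` and `providers/base-provider.ts`; beyond the names mentioned in the commit message, the field names and method signature below are assumptions.

```typescript
// Sketch of the shared provider contract (assumed shapes; see providers/types.ts).
export interface ProviderMessage {
  role: "user" | "assistant" | "system";
  content: string;
}

export interface ExecuteOptions {
  prompt: string;
  model: string;
  workingDirectory?: string;
  imagePaths?: string[];                    // appended to the prompt for Read-tool access
  conversationHistory?: ProviderMessage[];  // prior turns, replayed (Claude) or prepended as text (Codex)
}

// Each concrete provider (ClaudeProvider, CodexProvider) extends this base
// and streams back messages in a Claude SDK-compatible shape.
export abstract class BaseProvider {
  abstract executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage>;
}
```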
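The model-prefix routing could then look roughly like the following. The prefixes come from the commit message, but the method name, the o-series check, and the fallback behaviour are assumptions about `providers/provider-factory.ts`.

```typescript
// Hypothetical sketch of model-based routing in provider-factory.ts.
import { BaseProvider } from "./base-provider.js";
import { ClaudeProvider } from "./claude-provider.js";
import { CodexProvider } from "./codex-provider.js";

export class ProviderFactory {
  static getProviderForModel(model: string): BaseProvider {
    const m = model.toLowerCase();

    // gpt-* and o-series (o1, o3, ...) route to the Codex CLI subprocess.
    // The digit check keeps aliases like "opus" from matching "o*".
    if (m.startsWith("gpt-") || /^o\d/.test(m)) {
      return new CodexProvider();
    }

    // claude-* and the opus/sonnet/haiku aliases route to the Claude Agent SDK.
    if (m.startsWith("claude-") || ["opus", "sonnet", "haiku"].some((alias) => m.includes(alias))) {
      return new ClaudeProvider();
    }

    // Fallback for unknown models (assumption: default to Claude).
    return new ClaudeProvider();
  }
}
```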
agent.ts:

@@ -40,11 +40,12 @@ export function createAgentRoutes(
   // Send a message
   router.post("/send", async (req: Request, res: Response) => {
     try {
-      const { sessionId, message, workingDirectory, imagePaths } = req.body as {
+      const { sessionId, message, workingDirectory, imagePaths, model } = req.body as {
         sessionId: string;
         message: string;
         workingDirectory?: string;
         imagePaths?: string[];
+        model?: string;
       };

       if (!sessionId || !message) {
@@ -61,6 +62,7 @@ export function createAgentRoutes(
         message,
         workingDirectory,
         imagePaths,
+        model,
       })
         .catch((error) => {
           console.error("[Agent Route] Error sending message:", error);
@@ -128,5 +130,26 @@ export function createAgentRoutes(
     }
   });

+  // Set session model
+  router.post("/model", async (req: Request, res: Response) => {
+    try {
+      const { sessionId, model } = req.body as {
+        sessionId: string;
+        model: string;
+      };
+
+      if (!sessionId || !model) {
+        res.status(400).json({ success: false, error: "sessionId and model are required" });
+        return;
+      }
+
+      const result = await agentService.setSessionModel(sessionId, model);
+      res.json({ success: result });
+    } catch (error) {
+      const message = error instanceof Error ? error.message : "Unknown error";
+      res.status(500).json({ success: false, error: message });
+    }
+  });
+
   return router;
 }
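A hypothetical client-side call against the new "set session model" endpoint added above. The `/api/agent` mount path is an assumption, since only the router itself appears in this diff.

```typescript
// Illustrative client helper; the /api/agent prefix is assumed, not shown in the diff.
async function setSessionModel(sessionId: string, model: string): Promise<boolean> {
  const res = await fetch("/api/agent/model", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ sessionId, model }),
  });
  const data = (await res.json()) as { success: boolean; error?: string };
  return data.success;
}

// e.g. switch an existing session to a Codex model:
// await setSessionModel("session-123", "gpt-5.1-codex");
```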
models.ts:

@@ -3,6 +3,7 @@
  */

 import { Router, type Request, type Response } from "express";
+import { ProviderFactory } from "../providers/provider-factory.js";

 interface ModelDefinition {
   id: string;
@@ -93,7 +94,25 @@ export function createModelsRoutes(): Router {
     {
       id: "gpt-5.2",
       name: "GPT-5.2 (Codex)",
-      provider: "openai",
+      provider: "openai-codex",
+      contextWindow: 256000,
+      maxOutputTokens: 32768,
+      supportsVision: true,
+      supportsTools: true,
+    },
+    {
+      id: "gpt-5.1-codex-max",
+      name: "GPT-5.1 Codex Max",
+      provider: "openai-codex",
+      contextWindow: 256000,
+      maxOutputTokens: 32768,
+      supportsVision: true,
+      supportsTools: true,
+    },
+    {
+      id: "gpt-5.1-codex",
+      name: "GPT-5.1 Codex",
+      provider: "openai-codex",
       contextWindow: 256000,
       maxOutputTokens: 32768,
       supportsVision: true,
@@ -111,15 +130,25 @@ export function createModelsRoutes(): Router {
   // Check provider status
   router.get("/providers", async (_req: Request, res: Response) => {
     try {
-      const providers: Record<string, ProviderStatus> = {
+      // Get installation status from all providers
+      const statuses = await ProviderFactory.checkAllProviders();
+
+      const providers: Record<string, any> = {
         anthropic: {
-          available: !!process.env.ANTHROPIC_API_KEY,
-          hasApiKey: !!process.env.ANTHROPIC_API_KEY,
+          available: statuses.claude?.installed || false,
+          hasApiKey: !!process.env.ANTHROPIC_API_KEY || !!process.env.CLAUDE_CODE_OAUTH_TOKEN,
         },
         openai: {
           available: !!process.env.OPENAI_API_KEY,
           hasApiKey: !!process.env.OPENAI_API_KEY,
         },
+        "openai-codex": {
+          available: statuses.codex?.installed || false,
+          hasApiKey: !!process.env.OPENAI_API_KEY,
+          cliInstalled: statuses.codex?.installed,
+          cliVersion: statuses.codex?.version,
+          cliPath: statuses.codex?.path,
+        },
         google: {
           available: !!process.env.GOOGLE_API_KEY,
           hasApiKey: !!process.env.GOOGLE_API_KEY,
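The `/providers` handler above implies roughly the following response shape. The field names follow the diff; the interface names and any wrapper object around `providers` are illustrative.

```typescript
// Assumed shape of the per-provider entries built in the GET /providers handler.
interface CodexProviderStatus {
  available: boolean;     // Codex CLI detected on this machine
  hasApiKey: boolean;     // OPENAI_API_KEY present
  cliInstalled?: boolean;
  cliVersion?: string;
  cliPath?: string;
}

interface ProvidersResponse {
  anthropic: { available: boolean; hasApiKey: boolean };
  openai: { available: boolean; hasApiKey: boolean };
  "openai-codex": CodexProviderStatus;
  google: { available: boolean; hasApiKey: boolean };
}
```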
sessions.ts:

@@ -46,10 +46,11 @@ export function createSessionsRoutes(agentService: AgentService): Router {
   // Create a new session
   router.post("/", async (req: Request, res: Response) => {
     try {
-      const { name, projectPath, workingDirectory } = req.body as {
+      const { name, projectPath, workingDirectory, model } = req.body as {
         name: string;
         projectPath?: string;
         workingDirectory?: string;
+        model?: string;
       };

       if (!name) {
@@ -60,7 +61,8 @@ export function createSessionsRoutes(agentService: AgentService): Router {
       const session = await agentService.createSession(
         name,
         projectPath,
-        workingDirectory
+        workingDirectory,
+        model
       );
       res.json({ success: true, session });
     } catch (error) {
@@ -73,12 +75,13 @@ export function createSessionsRoutes(agentService: AgentService): Router {
   router.put("/:sessionId", async (req: Request, res: Response) => {
     try {
       const { sessionId } = req.params;
-      const { name, tags } = req.body as {
+      const { name, tags, model } = req.body as {
         name?: string;
         tags?: string[];
+        model?: string;
       };

-      const session = await agentService.updateSession(sessionId, { name, tags });
+      const session = await agentService.updateSession(sessionId, { name, tags, model });
       if (!session) {
         res.status(404).json({ success: false, error: "Session not found" });
         return;
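A hypothetical client sketch for the updated session routes. The `/api/sessions` mount path is an assumption; the request and response fields follow the diff above.

```typescript
// Illustrative call: create a session with a preferred model (mount path assumed).
async function createSessionWithModel(name: string, model?: string) {
  const res = await fetch("/api/sessions", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ name, model }),
  });
  return (await res.json()) as { success: boolean; session?: unknown; error?: string };
}

// Later, switch the session's model via the PUT /:sessionId route:
// await fetch(`/api/sessions/${sessionId}`, {
//   method: "PUT",
//   headers: { "Content-Type": "application/json" },
//   body: JSON.stringify({ model: "gpt-5.2" }),
// });
```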