Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-30 06:12:03 +00:00)
refactor: restrict model checks to gpt-* for OpenAI/Codex models
- Updated model resolution logic to only check for gpt-* models, removing references to unsupported o1/o3 models in both the model-resolver and provider-factory files.
- Enhanced comments for clarity regarding model support in the Codex CLI.
This commit is contained in:
@@ -42,7 +42,8 @@ export function resolveModelString(
|
||||
}
|
||||
|
||||
// OpenAI/Codex models - pass through unchanged
|
||||
if (modelKey.startsWith("gpt-") || modelKey.startsWith("o")) {
|
||||
// Only check for gpt-* models (Codex CLI doesn't support o1/o3)
|
||||
if (modelKey.startsWith("gpt-")) {
|
||||
console.log(`[ModelResolver] Using OpenAI/Codex model: ${modelKey}`);
|
||||
return modelKey;
|
||||
}
|
||||
|
||||
@@ -21,8 +21,9 @@ export class ProviderFactory {
|
||||
static getProviderForModel(modelId: string): BaseProvider {
|
||||
const lowerModel = modelId.toLowerCase();
|
||||
|
||||
// OpenAI/Codex models (gpt-*, o1, o3, etc.)
|
||||
if (lowerModel.startsWith("gpt-") || lowerModel.startsWith("o")) {
|
||||
// OpenAI/Codex models (gpt-*)
|
||||
// Note: o1/o3 models are not supported by Codex CLI
|
||||
if (lowerModel.startsWith("gpt-")) {
|
||||
return new CodexProvider();
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user