/**
 * AdvancedConfig Documentation Section
 *
 * Covers Vertex AI setup, Ollama local models, environment variables,
 * CLI arguments, webhook support, and the project registry.
 */
import { Badge } from '@/components/ui/badge'

/** Environment variable descriptor for the reference table. */
interface EnvVar {
  name: string
  description: string
}

const ENV_VARS: EnvVar[] = [
  { name: 'CLAUDE_CODE_USE_VERTEX', description: 'Enable Vertex AI (1)' },
  { name: 'CLOUD_ML_REGION', description: 'GCP region' },
  { name: 'ANTHROPIC_VERTEX_PROJECT_ID', description: 'GCP project ID' },
  { name: 'ANTHROPIC_BASE_URL', description: 'Custom API base URL (for Ollama)' },
  { name: 'ANTHROPIC_AUTH_TOKEN', description: 'API auth token' },
  { name: 'API_TIMEOUT_MS', description: 'API timeout in milliseconds' },
  { name: 'EXTRA_READ_PATHS', description: 'Comma-separated extra read directories' },
  { name: 'ANTHROPIC_DEFAULT_OPUS_MODEL', description: 'Override Opus model name' },
  { name: 'ANTHROPIC_DEFAULT_SONNET_MODEL', description: 'Override Sonnet model name' },
  { name: 'ANTHROPIC_DEFAULT_HAIKU_MODEL', description: 'Override Haiku model name' },
]

/** CLI argument descriptor for the reference table. */
interface CliArg {
  name: string
  description: string
}

const CLI_ARGS: CliArg[] = [
  { name: '--project-dir', description: 'Project directory path or registered name' },
  { name: '--yolo', description: 'Enable YOLO mode' },
  { name: '--parallel', description: 'Enable parallel mode' },
  { name: '--max-concurrency N', description: 'Max concurrent agents (1-5)' },
  { name: '--batch-size N', description: 'Features per coding agent (1-3)' },
  { name: '--batch-features 1,2,3', description: 'Specific feature IDs to implement' },
  { name: '--testing-batch-size N', description: 'Features per testing batch (1-5)' },
  { name: '--testing-batch-features 1,2,3', description: 'Specific testing feature IDs' },
]

/**
 * Renders the "Advanced Configuration" documentation section.
 *
 * Sections: Vertex AI setup, Ollama local models, environment-variable
 * reference (from {@link ENV_VARS}), CLI-argument reference (from
 * {@link CLI_ARGS}), webhook support, and the project registry.
 */
export function AdvancedConfig() {
  return (
    <section className="space-y-8">
      {/* Vertex AI Setup */}
      <div>
        <h3>Vertex AI Setup</h3>
        <p>Run coding agents via Google Cloud Vertex AI:</p>
        <ol>
          <li>
            Install and authenticate the gcloud CLI:{' '}
            <code>gcloud auth application-default login</code>
          </li>
          <li>
            Configure your{' '}
            <code>.env</code> file:
          </li>
        </ol>
        {/* Template literal content is rendered verbatim — keep it flush-left. */}
        <pre>
          <code>{`CLAUDE_CODE_USE_VERTEX=1
CLOUD_ML_REGION=us-east5
ANTHROPIC_VERTEX_PROJECT_ID=your-gcp-project-id
ANTHROPIC_DEFAULT_OPUS_MODEL=claude-opus-4-5@20251101
ANTHROPIC_DEFAULT_SONNET_MODEL=claude-sonnet-4-5@20250929
ANTHROPIC_DEFAULT_HAIKU_MODEL=claude-3-5-haiku@20241022`}</code>
        </pre>
        <p>
          Use <code>@</code>{' '}
          instead of <code>-</code>{' '}
          in model names for Vertex AI.
        </p>
      </div>

      {/* Ollama Local Models */}
      <div>
        <h3>Ollama Local Models</h3>
        <p>Run coding agents using local models via Ollama v0.14.0+:</p>
        <ol>
          <li>
            Install Ollama from{' '}
            <a href="https://ollama.com">ollama.com</a>
          </li>
          <li>
            Start Ollama:{' '}
            <code>ollama serve</code>
          </li>
          <li>
            Pull a coding model:{' '}
            <code>ollama pull qwen3-coder</code>
          </li>
          <li>
            Configure your{' '}
            <code>.env</code>:
          </li>
        </ol>
        <pre>
          <code>{`ANTHROPIC_BASE_URL=http://localhost:11434
ANTHROPIC_AUTH_TOKEN=ollama
API_TIMEOUT_MS=3000000
ANTHROPIC_DEFAULT_SONNET_MODEL=qwen3-coder`}</code>
        </pre>
        <p>
          Recommended models:{' '}
          <Badge>qwen3-coder</Badge>{' '}
          <Badge>deepseek-coder-v2</Badge>{' '}
          <Badge>codellama</Badge>
        </p>
        <p>
          Limitations: Smaller context windows than Claude (model-dependent),
          extended context beta disabled (not supported by Ollama), and
          performance depends on local hardware (GPU recommended).
        </p>
      </div>

      {/* Environment Variables */}
      <div>
        <h3>Environment Variables</h3>
        <p>Key environment variables for configuring AutoForge:</p>
        <table>
          <thead>
            <tr>
              <th>Variable</th>
              <th>Description</th>
            </tr>
          </thead>
          <tbody>
            {/* Names are unique, so they double as stable React keys. */}
            {ENV_VARS.map((v) => (
              <tr key={v.name}>
                <td>
                  <code>{v.name}</code>
                </td>
                <td>{v.description}</td>
              </tr>
            ))}
          </tbody>
        </table>
      </div>

      {/* CLI Arguments */}
      <div>
        <h3>CLI Arguments</h3>
        <p>
          Command-line arguments for{' '}
          <code>autonomous_agent_demo.py</code>:
        </p>
        <table>
          <thead>
            <tr>
              <th>Argument</th>
              <th>Description</th>
            </tr>
          </thead>
          <tbody>
            {CLI_ARGS.map((arg) => (
              <tr key={arg.name}>
                <td>
                  <code>{arg.name}</code>
                </td>
                <td>{arg.description}</td>
              </tr>
            ))}
          </tbody>
        </table>
      </div>

      {/* Webhook Support */}
      <div>
        <h3>Webhook Support</h3>
        {/* NOTE(review): section body missing from source — TODO restore content. */}
      </div>

      {/* Project Registry */}
      <div>
        <h3>Project Registry</h3>
        {/* NOTE(review): section body missing from source — TODO restore content. */}
      </div>
    </section>
  )
}