# Optional: N8N webhook for progress notifications
# PROGRESS_N8N_WEBHOOK_URL=https://your-n8n-instance.com/webhook/...

# Playwright Browser Configuration
#
# PLAYWRIGHT_BROWSER: Which browser to use for testing
# - firefox: Lower CPU usage, recommended (default)
# - chrome: Google Chrome
# - webkit: Safari engine
# - msedge: Microsoft Edge
# PLAYWRIGHT_BROWSER=firefox
#
# PLAYWRIGHT_HEADLESS: Run browser without visible window
# - true: Browser runs in background, saves CPU (default)
# - false: Browser opens a visible window (useful for debugging)
# PLAYWRIGHT_HEADLESS=true

# Extra Read Paths (Optional)
# Comma-separated list of absolute paths for read-only access to external directories.
# The agent can read files from these paths but cannot write to them.
# Useful for referencing documentation, shared libraries, or other projects.
# Example: EXTRA_READ_PATHS=/Volumes/Data/dev,/Users/shared/libs
# EXTRA_READ_PATHS=

# Google Cloud Vertex AI Configuration (Optional)
# To use Claude via Vertex AI on Google Cloud Platform, uncomment and set these variables.
# Requires: gcloud CLI installed and authenticated (run: gcloud auth application-default login)
# Note: Use @ instead of - in model names (e.g., claude-opus-4-5@20251101)
#
# CLAUDE_CODE_USE_VERTEX=1
# CLOUD_ML_REGION=us-east5
# ANTHROPIC_VERTEX_PROJECT_ID=your-gcp-project-id
# ANTHROPIC_DEFAULT_OPUS_MODEL=claude-opus-4-5@20251101
# ANTHROPIC_DEFAULT_SONNET_MODEL=claude-sonnet-4-5@20250929
# ANTHROPIC_DEFAULT_HAIKU_MODEL=claude-3-5-haiku@20241022

# GLM/Alternative API Configuration (Optional)
# To use Zhipu AI's GLM models instead of Claude, uncomment and set these variables.
# This only affects AutoCoder - your global Claude Code settings remain unchanged.
# Get an API key at: https://z.ai/subscribe
#
# ANTHROPIC_BASE_URL=https://api.z.ai/api/anthropic
# ANTHROPIC_AUTH_TOKEN=your-zhipu-api-key
# API_TIMEOUT_MS=3000000
# ANTHROPIC_DEFAULT_SONNET_MODEL=glm-4.7
# ANTHROPIC_DEFAULT_OPUS_MODEL=glm-4.7
# ANTHROPIC_DEFAULT_HAIKU_MODEL=glm-4.5-air

# Ollama Local Model Configuration (Optional)
# To use local models via Ollama instead of Claude, uncomment and set these variables.
# Requires Ollama v0.14.0+ with Anthropic API compatibility.
# See: https://ollama.com/blog/claude
#
# ANTHROPIC_BASE_URL=http://localhost:11434
# ANTHROPIC_AUTH_TOKEN=ollama
# API_TIMEOUT_MS=3000000
# ANTHROPIC_DEFAULT_SONNET_MODEL=qwen3-coder
# ANTHROPIC_DEFAULT_OPUS_MODEL=qwen3-coder
# ANTHROPIC_DEFAULT_HAIKU_MODEL=qwen3-coder
#
# Model recommendations:
# - For best results, use a capable coding model like qwen3-coder or deepseek-coder-v2
# - You can use the same model for all tiers, or different models per tier
# - Larger models (70B+) work best for Opus tier, smaller (7B-20B) for Haiku