Merge remote-tracking branch 'origin/main' into worktree-select

Cody Seibert committed 2025-12-19 21:34:59 -05:00
333 changed files with 17444 additions and 14308 deletions

View File

@@ -0,0 +1,66 @@
name: "Setup Project"
description: "Common setup steps for CI workflows - checkout, Node.js, dependencies, and native modules"
inputs:
node-version:
description: "Node.js version to use"
required: false
default: "22"
check-lockfile:
description: "Run lockfile lint check for SSH URLs"
required: false
default: "false"
rebuild-node-pty-path:
description: "Working directory for node-pty rebuild (empty = root)"
required: false
default: ""
runs:
using: "composite"
steps:
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
cache: "npm"
cache-dependency-path: package-lock.json
- name: Check for SSH URLs in lockfile
if: inputs.check-lockfile == 'true'
shell: bash
run: npm run lint:lockfile
- name: Configure Git for HTTPS
shell: bash
# Convert SSH URLs to HTTPS for git dependencies (e.g., @electron/node-gyp)
# This is needed because SSH authentication isn't available in CI
run: git config --global url."https://github.com/".insteadOf "git@github.com:"
- name: Install dependencies
shell: bash
# Use npm install instead of npm ci to correctly resolve platform-specific
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
# Skip scripts to avoid electron-builder install-app-deps which uses too much memory
run: npm install --ignore-scripts
- name: Install Linux native bindings
shell: bash
# Workaround for npm optional dependencies bug (npm/cli#4828)
# Explicitly install Linux bindings needed for build tools
run: |
npm install --no-save --force --ignore-scripts \
@rollup/rollup-linux-x64-gnu@4.53.3 \
@tailwindcss/oxide-linux-x64-gnu@4.1.17
- name: Rebuild native modules (root)
if: inputs.rebuild-node-pty-path == ''
shell: bash
# Rebuild node-pty and other native modules for Electron
run: npm rebuild node-pty
- name: Rebuild native modules (workspace)
if: inputs.rebuild-node-pty-path != ''
shell: bash
# Rebuild node-pty and other native modules needed for server
run: npm rebuild node-pty
working-directory: ${{ inputs.rebuild-node-pty-path }}

View File

@@ -18,34 +18,15 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
- name: Setup project
uses: ./.github/actions/setup-project
with:
node-version: "22"
cache: "npm"
cache-dependency-path: package-lock.json
- name: Configure Git for HTTPS
# Convert SSH URLs to HTTPS for git dependencies (e.g., @electron/node-gyp)
# This is needed because SSH authentication isn't available in CI
run: git config --global url."https://github.com/".insteadOf "git@github.com:"
- name: Install dependencies
# Use npm install instead of npm ci to correctly resolve platform-specific
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
run: npm install
- name: Install Linux native bindings
# Workaround for npm optional dependencies bug (npm/cli#4828)
# Explicitly install Linux bindings needed for build tools
run: |
npm install --no-save --force \
@rollup/rollup-linux-x64-gnu@4.53.3 \
@tailwindcss/oxide-linux-x64-gnu@4.1.17
check-lockfile: "true"
rebuild-node-pty-path: "apps/server"
- name: Install Playwright browsers
run: npx playwright install --with-deps chromium
working-directory: apps/app
working-directory: apps/ui
- name: Build server
run: npm run build --workspace=apps/server
@@ -71,20 +52,20 @@ jobs:
exit 1
- name: Run E2E tests
# Playwright automatically starts the Next.js frontend via webServer config
# (see apps/app/playwright.config.ts) - no need to start it manually
run: npm run test --workspace=apps/app
# Playwright automatically starts the Vite frontend via webServer config
# (see apps/ui/playwright.config.ts) - no need to start it manually
run: npm run test --workspace=apps/ui
env:
CI: true
NEXT_PUBLIC_SERVER_URL: http://localhost:3008
NEXT_PUBLIC_SKIP_SETUP: "true"
VITE_SERVER_URL: http://localhost:3008
VITE_SKIP_SETUP: "true"
- name: Upload Playwright report
uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report
path: apps/app/playwright-report/
path: apps/ui/playwright-report/
retention-days: 7
- name: Upload test results
@@ -92,5 +73,5 @@ jobs:
if: failure()
with:
name: test-results
path: apps/app/test-results/
path: apps/ui/test-results/
retention-days: 7

View File

@@ -17,33 +17,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
- name: Setup project
uses: ./.github/actions/setup-project
with:
node-version: "22"
cache: "npm"
cache-dependency-path: package-lock.json
check-lockfile: "true"
- name: Check for SSH URLs in lockfile
run: npm run lint:lockfile
- name: Configure Git for HTTPS
# Convert SSH URLs to HTTPS for git dependencies (e.g., @electron/node-gyp)
# This is needed because SSH authentication isn't available in CI
run: git config --global url."https://github.com/".insteadOf "git@github.com:"
- name: Install dependencies
# Use npm install instead of npm ci to correctly resolve platform-specific
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
run: npm install
- name: Install Linux native bindings
# Workaround for npm optional dependencies bug (npm/cli#4828)
# Explicitly install Linux bindings needed for build tools
run: |
npm install --no-save --force \
@rollup/rollup-linux-x64-gnu@4.53.3 \
@tailwindcss/oxide-linux-x64-gnu@4.1.17
- name: Run build:electron
run: npm run build:electron
- name: Run build:electron (dir only - faster CI)
run: npm run build:electron:dir

View File

@@ -1,180 +0,0 @@
name: Build and Release Electron App
on:
push:
tags:
- "v*.*.*" # Triggers on version tags like v1.0.0
workflow_dispatch: # Allows manual triggering
inputs:
version:
description: "Version to release (e.g., v1.0.0)"
required: true
default: "v0.1.0"
jobs:
build-and-release:
strategy:
fail-fast: false
matrix:
include:
- os: macos-latest
name: macOS
artifact-name: macos-builds
- os: windows-latest
name: Windows
artifact-name: windows-builds
- os: ubuntu-latest
name: Linux
artifact-name: linux-builds
runs-on: ${{ matrix.os }}
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "22"
cache: "npm"
cache-dependency-path: package-lock.json
- name: Configure Git for HTTPS
# Convert SSH URLs to HTTPS for git dependencies (e.g., @electron/node-gyp)
# This is needed because SSH authentication isn't available in CI
run: git config --global url."https://github.com/".insteadOf "git@github.com:"
- name: Install dependencies
# Use npm install instead of npm ci to correctly resolve platform-specific
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
run: npm install
- name: Install Linux native bindings
# Workaround for npm optional dependencies bug (npm/cli#4828)
# Only needed on Linux - macOS and Windows get their bindings automatically
if: matrix.os == 'ubuntu-latest'
run: |
npm install --no-save --force \
@rollup/rollup-linux-x64-gnu@4.53.3 \
@tailwindcss/oxide-linux-x64-gnu@4.1.17
- name: Extract and set version
id: version
shell: bash
run: |
VERSION_TAG="${{ github.event.inputs.version || github.ref_name }}"
# Remove 'v' prefix if present (e.g., v1.0.0 -> 1.0.0)
VERSION="${VERSION_TAG#v}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Extracted version: $VERSION from tag: $VERSION_TAG"
# Update the app's package.json version
cd apps/app
npm version $VERSION --no-git-tag-version
cd ../..
echo "Updated apps/app/package.json to version $VERSION"
- name: Build Electron App (macOS)
if: matrix.os == 'macos-latest'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run build:electron -- --mac --x64 --arm64
- name: Build Electron App (Windows)
if: matrix.os == 'windows-latest'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run build:electron -- --win --x64
- name: Build Electron App (Linux)
if: matrix.os == 'ubuntu-latest'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run build:electron -- --linux --x64
- name: Upload Release Assets
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ github.event.inputs.version || github.ref_name }}
files: |
apps/app/dist/*.exe
apps/app/dist/*.dmg
apps/app/dist/*.AppImage
apps/app/dist/*.zip
apps/app/dist/*.deb
apps/app/dist/*.rpm
draft: false
prerelease: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload macOS artifacts for R2
if: matrix.os == 'macos-latest'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.artifact-name }}
path: apps/app/dist/*.dmg
retention-days: 1
- name: Upload Windows artifacts for R2
if: matrix.os == 'windows-latest'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.artifact-name }}
path: apps/app/dist/*.exe
retention-days: 1
- name: Upload Linux artifacts for R2
if: matrix.os == 'ubuntu-latest'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.artifact-name }}
path: apps/app/dist/*.AppImage
retention-days: 1
upload-to-r2:
needs: build-and-release
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: artifacts
- name: Install AWS SDK
run: npm install @aws-sdk/client-s3
- name: Extract version
id: version
shell: bash
run: |
VERSION_TAG="${{ github.event.inputs.version || github.ref_name }}"
# Remove 'v' prefix if present (e.g., v1.0.0 -> 1.0.0)
VERSION="${VERSION_TAG#v}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "version_tag=$VERSION_TAG" >> $GITHUB_OUTPUT
echo "Extracted version: $VERSION from tag: $VERSION_TAG"
- name: Upload to R2 and update releases.json
env:
R2_ENDPOINT: ${{ secrets.R2_ENDPOINT }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
R2_BUCKET_NAME: ${{ secrets.R2_BUCKET_NAME }}
R2_PUBLIC_URL: ${{ secrets.R2_PUBLIC_URL }}
RELEASE_VERSION: ${{ steps.version.outputs.version }}
RELEASE_TAG: ${{ steps.version.outputs.version_tag }}
GITHUB_REPOSITORY: ${{ github.repository }}
run: node .github/scripts/upload-to-r2.js

View File

@@ -17,30 +17,11 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
- name: Setup project
uses: ./.github/actions/setup-project
with:
node-version: "22"
cache: "npm"
cache-dependency-path: package-lock.json
- name: Configure Git for HTTPS
# Convert SSH URLs to HTTPS for git dependencies (e.g., @electron/node-gyp)
# This is needed because SSH authentication isn't available in CI
run: git config --global url."https://github.com/".insteadOf "git@github.com:"
- name: Install dependencies
# Use npm install instead of npm ci to correctly resolve platform-specific
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
run: npm install
- name: Install Linux native bindings
# Workaround for npm optional dependencies bug (npm/cli#4828)
# Explicitly install Linux bindings needed for build tools
run: |
npm install --no-save --force \
@rollup/rollup-linux-x64-gnu@4.53.3 \
@tailwindcss/oxide-linux-x64-gnu@4.1.17
check-lockfile: "true"
rebuild-node-pty-path: "apps/server"
- name: Run server tests with coverage
run: npm run test:server:coverage

View File

@@ -1,5 +1,5 @@
<p align="center">
<img src="apps/app/public/readme_logo.png" alt="Automaker Logo" height="80" />
<img src="apps/ui/public/readme_logo.png" alt="Automaker Logo" height="80" />
</p>
> **[!TIP]**
@@ -88,6 +88,7 @@ The future of software development is **agentic coding**—where developers beco
Join the **Agentic Jumpstart** to connect with other builders exploring **agentic coding** and autonomous development workflows.
In the Discord, you can:
- 💬 Discuss agentic coding patterns and best practices
- 🧠 Share ideas for AI-driven development workflows
- 🛠️ Get help setting up or extending Automaker
@@ -252,19 +253,16 @@ This project is licensed under the **Automaker License Agreement**. See [LICENSE
**Summary of Terms:**
- **Allowed:**
- **Build Anything:** You can clone and use Automaker locally or in your organization to build ANY product (commercial or free).
- **Internal Use:** You can use it internally within your company (commercial or non-profit) without restriction.
- **Modify:** You can modify the code for internal use within your organization (commercial or non-profit).
- **Restricted (The "No Monetization of the Tool" Rule):**
- **No Resale:** You cannot resell Automaker itself.
- **No SaaS:** You cannot host Automaker as a service for others.
- **No Monetizing Mods:** You cannot distribute modified versions of Automaker for money.
- **Liability:**
- **Use at Own Risk:** This tool uses AI. We are **NOT** responsible if it breaks your computer, deletes your files, or generates bad code. You assume all risk.
- **Contributing:**

View File

@@ -1,310 +0,0 @@
# Large Files - Refactoring Candidates
This document tracks files in the AutoMaker codebase that exceed 3000 lines or are significantly large (1000+ lines) and should be considered for refactoring into smaller, more maintainable components.
**Last Updated:** 2025-12-15
**Total Large Files:** 8
**Combined Size:** 15,027 lines
---
## 🔴 CRITICAL - Over 3000 Lines
### 1. board-view.tsx - 3,325 lines
**Path:** `apps/app/src/components/views/board-view.tsx`
**Type:** React Component (TSX)
**Priority:** VERY HIGH
**Description:**
Main Kanban board view component that serves as the centerpiece of the application.
**Current Responsibilities:**
- Feature/task card management and drag-and-drop operations using @dnd-kit
- Adding, editing, and deleting features
- Running autonomous agents to implement features
- Displaying feature status across multiple columns (Backlog, In Progress, Waiting Approval, Verified)
- Model/AI profile selection for feature implementation
- Advanced options configuration (thinking level, model selection, skip tests)
- Search/filtering functionality for cards
- Output modal for viewing agent results
- Feature suggestions dialog
- Board background customization
- Integration with Electron APIs for IPC communication
- Keyboard shortcuts support
- 40+ state variables for managing UI state
**Refactoring Recommendations:**
Extract into smaller components:
- `AddFeatureDialog.tsx` - Feature creation dialog with image upload
- `EditFeatureDialog.tsx` - Feature editing dialog
- `AgentOutputModal.tsx` - Already exists, verify separation
- `FeatureSuggestionsDialog.tsx` - Already exists, verify separation
- `BoardHeader.tsx` - Header with controls and search
- `BoardSearchBar.tsx` - Search and filter functionality
- `ConcurrencyControl.tsx` - Concurrency slider component
- `BoardActions.tsx` - Action buttons (add feature, auto mode, etc.)
- `DragDropContext.tsx` - Wrap drag-and-drop logic
- Custom hooks (one is sketched after this list):
- `useBoardFeatures.ts` - Feature loading and management
- `useBoardDragDrop.ts` - Drag and drop handlers
- `useBoardActions.ts` - Feature action handlers (run, verify, delete, etc.)
- `useBoardKeyboardShortcuts.ts` - Keyboard shortcut logic
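
A minimal sketch of the first custom hook in the list above (the hook name comes from the list; the Feature shape and the loader signature are assumptions, not the actual app code):

// useBoardFeatures.ts (hypothetical) - owns feature loading so board-view.tsx only renders UI
import { useCallback, useEffect, useState } from "react";

export interface Feature {
  id: string;
  title: string;
  status: "backlog" | "in_progress" | "waiting_approval" | "verified";
}

// `loadFeatures` stands in for whatever IPC/HTTP call the app actually uses.
export function useBoardFeatures(
  loadFeatures: (projectPath: string) => Promise<Feature[]>,
  projectPath: string
) {
  const [features, setFeatures] = useState<Feature[]>([]);
  const [isLoading, setIsLoading] = useState(false);

  const refresh = useCallback(async () => {
    setIsLoading(true);
    try {
      setFeatures(await loadFeatures(projectPath));
    } finally {
      setIsLoading(false);
    }
  }, [loadFeatures, projectPath]);

  useEffect(() => {
    void refresh();
  }, [refresh]);

  return { features, isLoading, refresh, setFeatures };
}
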
---
## 🟡 HIGH PRIORITY - 2000+ Lines
### 2. sidebar.tsx - 2,396 lines
**Path:** `apps/app/src/components/layout/sidebar.tsx`
**Type:** React Component (TSX)
**Priority:** HIGH
**Description:**
Main navigation sidebar with comprehensive project management.
**Current Responsibilities:**
- Project folder navigation and selection
- View mode switching (Board, Agent, Settings, etc.)
- Project operations (create, delete, rename)
- Theme and appearance controls
- Terminal, Wiki, and other view launchers
- Drag-and-drop project reordering
- Settings and configuration access
**Refactoring Recommendations:**
Split into focused components:
- `ProjectSelector.tsx` - Project list and selection
- `NavigationTabs.tsx` - View mode tabs
- `ProjectActions.tsx` - Create, delete, rename operations
- `SettingsMenu.tsx` - Settings dropdown
- `ThemeSelector.tsx` - Theme controls
- `ViewLaunchers.tsx` - Terminal, Wiki launchers
- Custom hooks:
- `useProjectManagement.ts` - Project CRUD operations
- `useSidebarState.ts` - Sidebar state management
---
### 3. electron.ts - 2,356 lines
**Path:** `apps/app/src/lib/electron.ts`
**Type:** TypeScript Utility/API Bridge
**Priority:** HIGH
**Description:**
Electron IPC bridge and type definitions for frontend-backend communication.
**Current Responsibilities:**
- File system operations (read, write, directory listing)
- Project management APIs
- Feature management APIs
- Terminal/shell execution
- Auto mode and agent execution APIs
- Worktree management
- Provider status APIs
- Event handling and subscriptions
**Refactoring Recommendations:**
Modularize into domain-specific API modules:
- `api/file-system-api.ts` - File operations
- `api/project-api.ts` - Project CRUD
- `api/feature-api.ts` - Feature management
- `api/execution-api.ts` - Auto mode and agent execution
- `api/provider-api.ts` - Provider status and management
- `api/worktree-api.ts` - Git worktree operations
- `api/terminal-api.ts` - Terminal/shell APIs
- `types/electron-types.ts` - Shared type definitions
- `electron.ts` - Main export aggregator
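
A minimal sketch of that aggregator, assuming the module layout listed above (re-exports keep existing imports of `electron.ts` compiling while call sites migrate):

// electron.ts (hypothetical aggregator) - re-export the proposed domain modules
export * from "./api/file-system-api";
export * from "./api/project-api";
export * from "./api/feature-api";
export * from "./api/execution-api";
export * from "./api/provider-api";
export * from "./api/worktree-api";
export * from "./api/terminal-api";
export * from "./types/electron-types";
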
---
### 4. app-store.ts - 2,174 lines
**Path:** `apps/app/src/store/app-store.ts`
**Type:** TypeScript State Management (Zustand Store)
**Priority:** HIGH
**Description:**
Centralized application state store using Zustand.
**Current Responsibilities:**
- Global app state types and interfaces
- Project and feature management state
- Theme and appearance settings
- API keys configuration
- Keyboard shortcuts configuration
- Terminal themes configuration
- Auto mode settings
- All store mutations and selectors
**Refactoring Recommendations:**
Split into domain-specific stores:
- `stores/projects-store.ts` - Project state and actions (see the sketch after this list)
- `stores/features-store.ts` - Feature state and actions
- `stores/ui-store.ts` - UI state (theme, sidebar, modals)
- `stores/settings-store.ts` - User settings and preferences
- `stores/execution-store.ts` - Auto mode and running tasks
- `stores/provider-store.ts` - Provider configuration
- `types/store-types.ts` - Shared type definitions
- `app-store.ts` - Main store aggregator with combined selectors
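
One of those domain stores might look roughly like this (a sketch; the Project shape and the action set are assumptions, while `create` is the standard Zustand store factory already used by app-store.ts):

// stores/projects-store.ts (hypothetical) - project state carved out of app-store.ts
import { create } from "zustand";

interface Project {
  id: string;
  name: string;
  path: string;
}

interface ProjectsState {
  projects: Project[];
  currentProjectId: string | null;
  addProject: (project: Project) => void;
  removeProject: (id: string) => void;
  setCurrentProject: (id: string | null) => void;
}

export const useProjectsStore = create<ProjectsState>()((set) => ({
  projects: [],
  currentProjectId: null,
  addProject: (project) =>
    set((state) => ({ projects: [...state.projects, project] })),
  removeProject: (id) =>
    set((state) => ({ projects: state.projects.filter((p) => p.id !== id) })),
  setCurrentProject: (id) => set({ currentProjectId: id }),
}));
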
---
## 🟢 MEDIUM PRIORITY - 1000-2000 Lines
### 5. auto-mode-service.ts - 1,232 lines
**Path:** `apps/server/src/services/auto-mode-service.ts`
**Type:** TypeScript Service (Backend)
**Priority:** MEDIUM-HIGH
**Description:**
Core autonomous feature implementation service.
**Current Responsibilities:**
- Worktree creation and management
- Feature execution with Claude Agent SDK
- Concurrent execution with concurrency limits
- Progress streaming via events
- Verification and merge workflows
- Provider management
- Error handling and classification
**Refactoring Recommendations:**
Extract into service modules:
- `services/worktree-manager.ts` - Worktree operations
- `services/feature-executor.ts` - Feature execution logic
- `services/concurrency-manager.ts` - Concurrency control (see the sketch after this list)
- `services/verification-service.ts` - Verification workflows
- `utils/error-classifier.ts` - Error handling utilities
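
For the concurrency piece, a promise-based limiter is one possible shape (a sketch only; the real service also streams progress events and handles cancellation, which this omits):

// services/concurrency-manager.ts (hypothetical) - run async jobs with at most `limit` in flight
export class ConcurrencyManager {
  private active = 0;
  private readonly queue: Array<() => void> = [];

  constructor(private readonly limit: number) {}

  async run<T>(task: () => Promise<T>): Promise<T> {
    if (this.active >= this.limit) {
      // Park until a finishing task hands over its slot.
      await new Promise<void>((resolve) => this.queue.push(resolve));
    } else {
      this.active++;
    }
    try {
      return await task();
    } finally {
      const next = this.queue.shift();
      if (next) {
        next(); // keep the slot occupied and wake the next queued task
      } else {
        this.active--;
      }
    }
  }
}
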
---
### 6. spec-view.tsx - 1,230 lines
**Path:** `apps/app/src/components/views/spec-view.tsx`
**Type:** React Component (TSX)
**Priority:** MEDIUM
**Description:**
Specification editor view component for feature specification management.
**Refactoring Recommendations:**
Extract editor components and hooks:
- `SpecEditor.tsx` - Main editor component
- `SpecToolbar.tsx` - Editor toolbar
- `SpecSidebar.tsx` - Spec navigation sidebar
- `useSpecEditor.ts` - Editor state management
---
### 7. kanban-card.tsx - 1,180 lines
**Path:** `apps/app/src/components/views/kanban-card.tsx`
**Type:** React Component (TSX)
**Priority:** MEDIUM
**Description:**
Individual Kanban card component with rich feature display and interaction.
**Refactoring Recommendations:**
Split into smaller card components:
- `KanbanCardHeader.tsx` - Card title and metadata
- `KanbanCardBody.tsx` - Card content
- `KanbanCardActions.tsx` - Action buttons
- `KanbanCardStatus.tsx` - Status indicators
- `useKanbanCard.ts` - Card interaction logic
---
### 8. analysis-view.tsx - 1,134 lines
**Path:** `apps/app/src/components/views/analysis-view.tsx`
**Type:** React Component (TSX)
**Priority:** MEDIUM
**Description:**
Analysis view component for displaying and managing feature analysis data.
**Refactoring Recommendations:**
Extract visualization and data components:
- `AnalysisChart.tsx` - Chart/graph components
- `AnalysisTable.tsx` - Data table
- `AnalysisFilters.tsx` - Filter controls
- `useAnalysisData.ts` - Data fetching and processing
---
## Refactoring Strategy
### Phase 1: Critical (Immediate)
1. **board-view.tsx** - Break into dialogs, header, and custom hooks
- Extract all dialogs first (AddFeature, EditFeature)
- Move to custom hooks for business logic
- Split remaining UI into smaller components
### Phase 2: High Priority (Next Sprint)
2. **sidebar.tsx** - Componentize navigation and project management
3. **electron.ts** - Modularize into API domains
4. **app-store.ts** - Split into domain stores
### Phase 3: Medium Priority (Future)
5. **auto-mode-service.ts** - Extract service modules
6. **spec-view.tsx** - Break into editor components
7. **kanban-card.tsx** - Split card into sub-components
8. **analysis-view.tsx** - Extract visualization components
---
## General Refactoring Guidelines
### When Refactoring Large Components:
1. **Extract Dialogs/Modals First**
- Move dialog components to separate files
- Keep dialog state management in parent initially
- Later extract to custom hooks if complex
2. **Create Custom Hooks for Business Logic**
- Move data fetching to `useFetch*` hooks
- Move complex state logic to `use*State` hooks
- Move side effects to `use*Effect` hooks
3. **Split UI into Presentational Components**
- Header/toolbar components
- Content area components
- Footer/action components
4. **Move Utils and Helpers**
- Extract pure functions to utility files
- Move constants to separate constant files
- Create type files for shared interfaces
### When Refactoring Large Files:
1. **Identify Domains/Concerns**
- Group related functionality
- Find natural boundaries
2. **Extract Gradually**
- Start with least coupled code
- Work towards core functionality
- Test after each extraction
3. **Maintain Type Safety**
- Export types from extracted modules
- Use shared type files for common interfaces
- Ensure no type errors after refactoring
---
## Progress Tracking
- [ ] board-view.tsx (3,325 lines)
- [ ] sidebar.tsx (2,396 lines)
- [ ] electron.ts (2,356 lines)
- [ ] app-store.ts (2,174 lines)
- [ ] auto-mode-service.ts (1,232 lines)
- [ ] spec-view.tsx (1,230 lines)
- [ ] kanban-card.tsx (1,180 lines)
- [ ] analysis-view.tsx (1,134 lines)
**Target:** All files under 500 lines, most under 300 lines
---
*Generated: 2025-12-15*

View File

@@ -1,5 +0,0 @@
module.exports = {
rules: {
"@typescript-eslint/no-require-imports": "off",
},
};

View File

@@ -1,37 +0,0 @@
/**
* Simplified Electron preload script
*
* Only exposes native features (dialogs, shell) and server URL.
* All other operations go through HTTP API.
*/
const { contextBridge, ipcRenderer } = require("electron");
// Expose minimal API for native features
contextBridge.exposeInMainWorld("electronAPI", {
// Platform info
platform: process.platform,
isElectron: true,
// Connection check
ping: () => ipcRenderer.invoke("ping"),
// Get server URL for HTTP client
getServerUrl: () => ipcRenderer.invoke("server:getUrl"),
// Native dialogs - better UX than prompt()
openDirectory: () => ipcRenderer.invoke("dialog:openDirectory"),
openFile: (options) => ipcRenderer.invoke("dialog:openFile", options),
saveFile: (options) => ipcRenderer.invoke("dialog:saveFile", options),
// Shell operations
openExternalLink: (url) => ipcRenderer.invoke("shell:openExternal", url),
openPath: (filePath) => ipcRenderer.invoke("shell:openPath", filePath),
// App info
getPath: (name) => ipcRenderer.invoke("app:getPath", name),
getVersion: () => ipcRenderer.invoke("app:getVersion"),
isPackaged: () => ipcRenderer.invoke("app:isPackaged"),
});
console.log("[Preload] Electron API exposed (simplified mode)");

View File

@@ -1,20 +0,0 @@
import { defineConfig, globalIgnores } from "eslint/config";
import nextVitals from "eslint-config-next/core-web-vitals";
import nextTs from "eslint-config-next/typescript";
const eslintConfig = defineConfig([
...nextVitals,
...nextTs,
// Override default ignores of eslint-config-next.
globalIgnores([
// Default ignores of eslint-config-next:
".next/**",
"out/**",
"build/**",
"next-env.d.ts",
// Electron files use CommonJS
"electron/**",
]),
]);
export default eslintConfig;

View File

@@ -1,7 +0,0 @@
import type { NextConfig } from "next";
const nextConfig: NextConfig = {
output: "export",
};
export default nextConfig;

View File

@@ -1,7 +0,0 @@
const config = {
plugins: {
"@tailwindcss/postcss": {},
},
};
export default config;

View File

@@ -1,97 +0,0 @@
import { NextRequest, NextResponse } from "next/server";
interface AnthropicResponse {
content?: Array<{ type: string; text?: string }>;
model?: string;
error?: { message?: string };
}
export async function POST(request: NextRequest) {
try {
const { apiKey } = await request.json();
// Use provided API key or fall back to environment variable
const effectiveApiKey = apiKey || process.env.ANTHROPIC_API_KEY;
if (!effectiveApiKey) {
return NextResponse.json(
{ success: false, error: "No API key provided or configured in environment" },
{ status: 400 }
);
}
// Send a simple test prompt to the Anthropic API
const response = await fetch("https://api.anthropic.com/v1/messages", {
method: "POST",
headers: {
"Content-Type": "application/json",
"x-api-key": effectiveApiKey,
"anthropic-version": "2023-06-01",
},
body: JSON.stringify({
model: "claude-sonnet-4-20250514",
max_tokens: 100,
messages: [
{
role: "user",
content: "Respond with exactly: 'Claude API connection successful!' and nothing else.",
},
],
}),
});
if (!response.ok) {
const errorData = (await response.json()) as AnthropicResponse;
const errorMessage = errorData.error?.message || `HTTP ${response.status}`;
if (response.status === 401) {
return NextResponse.json(
{ success: false, error: "Invalid API key. Please check your Anthropic API key." },
{ status: 401 }
);
}
if (response.status === 429) {
return NextResponse.json(
{ success: false, error: "Rate limit exceeded. Please try again later." },
{ status: 429 }
);
}
return NextResponse.json(
{ success: false, error: `API error: ${errorMessage}` },
{ status: response.status }
);
}
const data = (await response.json()) as AnthropicResponse;
// Check if we got a valid response
if (data.content && data.content.length > 0) {
const textContent = data.content.find((block) => block.type === "text");
if (textContent && textContent.type === "text" && textContent.text) {
return NextResponse.json({
success: true,
message: `Connection successful! Response: "${textContent.text}"`,
model: data.model,
});
}
}
return NextResponse.json({
success: true,
message: "Connection successful! Claude responded.",
model: data.model,
});
} catch (error: unknown) {
console.error("Claude API test error:", error);
const errorMessage =
error instanceof Error ? error.message : "Failed to connect to Claude API";
return NextResponse.json(
{ success: false, error: errorMessage },
{ status: 500 }
);
}
}

View File

@@ -1,191 +0,0 @@
import { NextRequest, NextResponse } from "next/server";
interface GeminiContent {
parts: Array<{
text?: string;
inlineData?: {
mimeType: string;
data: string;
};
}>;
role?: string;
}
interface GeminiRequest {
contents: GeminiContent[];
generationConfig?: {
maxOutputTokens?: number;
temperature?: number;
};
}
interface GeminiResponse {
candidates?: Array<{
content: {
parts: Array<{
text: string;
}>;
role: string;
};
finishReason: string;
safetyRatings?: Array<{
category: string;
probability: string;
}>;
}>;
promptFeedback?: {
safetyRatings?: Array<{
category: string;
probability: string;
}>;
};
error?: {
code: number;
message: string;
status: string;
};
}
export async function POST(request: NextRequest) {
try {
const { apiKey, imageData, mimeType, prompt } = await request.json();
// Use provided API key or fall back to environment variable
const effectiveApiKey = apiKey || process.env.GOOGLE_API_KEY;
if (!effectiveApiKey) {
return NextResponse.json(
{ success: false, error: "No API key provided or configured in environment" },
{ status: 400 }
);
}
// Build the request body
const requestBody: GeminiRequest = {
contents: [
{
parts: [],
},
],
generationConfig: {
maxOutputTokens: 150,
temperature: 0.4,
},
};
// Add image if provided
if (imageData && mimeType) {
requestBody.contents[0].parts.push({
inlineData: {
mimeType: mimeType,
data: imageData,
},
});
}
// Add text prompt
const textPrompt = prompt || (imageData
? "Describe what you see in this image briefly."
: "Respond with exactly: 'Gemini SDK connection successful!' and nothing else.");
requestBody.contents[0].parts.push({
text: textPrompt,
});
// Call Gemini API - using gemini-1.5-flash as it supports both text and vision
const model = imageData ? "gemini-1.5-flash" : "gemini-1.5-flash";
const geminiUrl = `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${effectiveApiKey}`;
const response = await fetch(geminiUrl, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(requestBody),
});
const data: GeminiResponse = await response.json();
// Check for API errors
if (data.error) {
const errorMessage = data.error.message || "Unknown Gemini API error";
const statusCode = data.error.code || 500;
if (statusCode === 400 && errorMessage.includes("API key")) {
return NextResponse.json(
{ success: false, error: "Invalid API key. Please check your Google API key." },
{ status: 401 }
);
}
if (statusCode === 429) {
return NextResponse.json(
{ success: false, error: "Rate limit exceeded. Please try again later." },
{ status: 429 }
);
}
return NextResponse.json(
{ success: false, error: `API error: ${errorMessage}` },
{ status: statusCode }
);
}
// Check for valid response
if (!response.ok) {
return NextResponse.json(
{ success: false, error: `HTTP error: ${response.status} ${response.statusText}` },
{ status: response.status }
);
}
// Extract response text
if (data.candidates && data.candidates.length > 0 && data.candidates[0].content?.parts?.length > 0) {
const responseText = data.candidates[0].content.parts
.filter((part) => part.text)
.map((part) => part.text)
.join("");
return NextResponse.json({
success: true,
message: `Connection successful! Response: "${responseText.substring(0, 200)}${responseText.length > 200 ? '...' : ''}"`,
model: model,
hasImage: !!imageData,
});
}
// Handle blocked responses
if (data.promptFeedback?.safetyRatings) {
return NextResponse.json({
success: true,
message: "Connection successful! Gemini responded (response may have been filtered).",
model: model,
hasImage: !!imageData,
});
}
return NextResponse.json({
success: true,
message: "Connection successful! Gemini responded.",
model: model,
hasImage: !!imageData,
});
} catch (error: unknown) {
console.error("Gemini API test error:", error);
if (error instanceof TypeError && error.message.includes("fetch")) {
return NextResponse.json(
{ success: false, error: "Network error. Unable to reach Gemini API." },
{ status: 503 }
);
}
const errorMessage =
error instanceof Error ? error.message : "Failed to connect to Gemini API";
return NextResponse.json(
{ success: false, error: errorMessage },
{ status: 500 }
);
}
}

Binary file not shown (image; previously 25 KiB).

View File

@@ -1,26 +0,0 @@
import type { Metadata } from "next";
import { GeistSans } from "geist/font/sans";
import { GeistMono } from "geist/font/mono";
import { Toaster } from "sonner";
import "./globals.css";
export const metadata: Metadata = {
title: "Automaker - Autonomous AI Development Studio",
description: "Build software autonomously with intelligent orchestration",
};
export default function RootLayout({
children,
}: Readonly<{
children: React.ReactNode;
}>) {
return (
<html lang="en" suppressHydrationWarning>
<body
className={`${GeistSans.variable} ${GeistMono.variable} antialiased`}
>
{children}
<Toaster richColors position="bottom-right" />
</body>
</html>
);
}

View File

@@ -1,234 +0,0 @@
"use client";
import { useEffect, useState, useCallback } from "react";
import { Sidebar } from "@/components/layout/sidebar";
import { WelcomeView } from "@/components/views/welcome-view";
import { BoardView } from "@/components/views/board-view";
import { SpecView } from "@/components/views/spec-view";
import { AgentView } from "@/components/views/agent-view";
import { SettingsView } from "@/components/views/settings-view";
import { InterviewView } from "@/components/views/interview-view";
import { ContextView } from "@/components/views/context-view";
import { ProfilesView } from "@/components/views/profiles-view";
import { SetupView } from "@/components/views/setup-view";
import { RunningAgentsView } from "@/components/views/running-agents-view";
import { TerminalView } from "@/components/views/terminal-view";
import { WikiView } from "@/components/views/wiki-view";
import { useAppStore } from "@/store/app-store";
import { useSetupStore } from "@/store/setup-store";
import { getElectronAPI, isElectron } from "@/lib/electron";
import {
FileBrowserProvider,
useFileBrowser,
setGlobalFileBrowser,
} from "@/contexts/file-browser-context";
function HomeContent() {
const {
currentView,
setCurrentView,
setIpcConnected,
theme,
currentProject,
previewTheme,
getEffectiveTheme,
} = useAppStore();
const { isFirstRun, setupComplete } = useSetupStore();
const [isMounted, setIsMounted] = useState(false);
const [streamerPanelOpen, setStreamerPanelOpen] = useState(false);
const { openFileBrowser } = useFileBrowser();
// Hidden streamer panel - opens with "\" key
const handleStreamerPanelShortcut = useCallback((event: KeyboardEvent) => {
// Don't trigger when typing in inputs
const activeElement = document.activeElement;
if (activeElement) {
const tagName = activeElement.tagName.toLowerCase();
if (
tagName === "input" ||
tagName === "textarea" ||
tagName === "select"
) {
return;
}
if (activeElement.getAttribute("contenteditable") === "true") {
return;
}
const role = activeElement.getAttribute("role");
if (role === "textbox" || role === "searchbox" || role === "combobox") {
return;
}
}
// Don't trigger with modifier keys
if (event.ctrlKey || event.altKey || event.metaKey) {
return;
}
// Check for "\" key (backslash)
if (event.key === "\\") {
event.preventDefault();
setStreamerPanelOpen((prev) => !prev);
}
}, []);
// Register the "\" shortcut for streamer panel
useEffect(() => {
window.addEventListener("keydown", handleStreamerPanelShortcut);
return () => {
window.removeEventListener("keydown", handleStreamerPanelShortcut);
};
}, [handleStreamerPanelShortcut]);
// Compute the effective theme: previewTheme takes priority, then project theme, then global theme
// This is reactive because it depends on previewTheme, currentProject, and theme from the store
const effectiveTheme = getEffectiveTheme();
// Prevent hydration issues
useEffect(() => {
setIsMounted(true);
}, []);
// Initialize global file browser for HttpApiClient
useEffect(() => {
setGlobalFileBrowser(openFileBrowser);
}, [openFileBrowser]);
// Check if this is first run and redirect to setup if needed
useEffect(() => {
console.log("[Setup Flow] Checking setup state:", {
isMounted,
isFirstRun,
setupComplete,
currentView,
shouldShowSetup: isMounted && isFirstRun && !setupComplete,
});
if (isMounted && isFirstRun && !setupComplete) {
console.log(
"[Setup Flow] Redirecting to setup wizard (first run, not complete)"
);
setCurrentView("setup");
} else if (isMounted && setupComplete) {
console.log("[Setup Flow] Setup already complete, showing normal view");
}
}, [isMounted, isFirstRun, setupComplete, setCurrentView, currentView]);
// Test IPC connection on mount
useEffect(() => {
const testConnection = async () => {
try {
const api = getElectronAPI();
const result = await api.ping();
setIpcConnected(result === "pong");
} catch (error) {
console.error("IPC connection failed:", error);
setIpcConnected(false);
}
};
testConnection();
}, [setIpcConnected]);
// Apply theme class to document (uses effective theme - preview, project-specific, or global)
useEffect(() => {
const root = document.documentElement;
const themeClasses = [
"dark",
"light",
"retro",
"dracula",
"nord",
"monokai",
"tokyonight",
"solarized",
"gruvbox",
"catppuccin",
"onedark",
"synthwave",
"red",
"cream",
"sunset",
"gray",
];
// Remove all theme classes
root.classList.remove(...themeClasses);
// Apply the effective theme
if (themeClasses.includes(effectiveTheme)) {
root.classList.add(effectiveTheme);
} else if (effectiveTheme === "system") {
// System theme - detect OS preference
const isDark = window.matchMedia("(prefers-color-scheme: dark)").matches;
root.classList.add(isDark ? "dark" : "light");
}
}, [effectiveTheme, previewTheme, currentProject, theme]);
const renderView = () => {
switch (currentView) {
case "welcome":
return <WelcomeView />;
case "setup":
return <SetupView />;
case "board":
return <BoardView />;
case "spec":
return <SpecView />;
case "agent":
return <AgentView />;
case "settings":
return <SettingsView />;
case "interview":
return <InterviewView />;
case "context":
return <ContextView />;
case "profiles":
return <ProfilesView />;
case "running-agents":
return <RunningAgentsView />;
case "terminal":
return <TerminalView />;
case "wiki":
return <WikiView />;
default:
return <WelcomeView />;
}
};
// Setup view is full-screen without sidebar
if (currentView === "setup") {
return (
<main className="h-screen overflow-hidden" data-testid="app-container">
<SetupView />
</main>
);
}
return (
<main className="flex h-screen overflow-hidden" data-testid="app-container">
<Sidebar />
<div
className="flex-1 flex flex-col overflow-hidden transition-all duration-300"
style={{ marginRight: streamerPanelOpen ? "250px" : "0" }}
>
{renderView()}
</div>
{/* Hidden streamer panel - opens with "\" key, pushes content */}
<div
className={`fixed top-0 right-0 h-full w-[250px] bg-background border-l border-border transition-transform duration-300 ${
streamerPanelOpen ? "translate-x-0" : "translate-x-full"
}`}
/>
</main>
);
}
export default function Home() {
return (
<FileBrowserProvider>
<HomeContent />
</FileBrowserProvider>
);
}

View File

@@ -1,88 +0,0 @@
"use client";
import * as React from "react";
import { Sparkles, X } from "lucide-react";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
interface CoursePromoBadgeProps {
sidebarOpen?: boolean;
}
export function CoursePromoBadge({ sidebarOpen = true }: CoursePromoBadgeProps) {
const [dismissed, setDismissed] = React.useState(false);
if (dismissed) {
return null;
}
// Collapsed state - show only icon with tooltip
if (!sidebarOpen) {
return (
<div className="p-2 pb-0 flex justify-center">
<TooltipProvider delayDuration={300}>
<Tooltip>
<TooltipTrigger asChild>
<a
href="https://agenticjumpstart.com"
target="_blank"
rel="noopener noreferrer"
className="group cursor-pointer flex items-center justify-center w-10 h-10 bg-primary/10 text-primary rounded-lg hover:bg-primary/20 transition-all border border-primary/30"
data-testid="course-promo-badge-collapsed"
>
<Sparkles className="size-4 shrink-0" />
</a>
</TooltipTrigger>
<TooltipContent side="right" className="flex items-center gap-2">
<span>Become a 10x Dev</span>
<span
onClick={(e) => {
e.preventDefault();
e.stopPropagation();
setDismissed(true);
}}
className="p-0.5 rounded-full hover:bg-primary/30 transition-colors cursor-pointer"
aria-label="Dismiss"
>
<X className="size-3" />
</span>
</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
);
}
// Expanded state - show full badge
return (
<div className="p-2 pb-0">
<a
href="https://agenticjumpstart.com"
target="_blank"
rel="noopener noreferrer"
className="group cursor-pointer flex items-center justify-between w-full px-2 lg:px-3 py-2.5 bg-primary/10 text-primary rounded-lg font-medium text-sm hover:bg-primary/20 transition-all border border-primary/30"
data-testid="course-promo-badge"
>
<div className="flex items-center gap-2">
<Sparkles className="size-4 shrink-0" />
<span className="hidden lg:block">Become a 10x Dev</span>
</div>
<span
onClick={(e) => {
e.preventDefault();
e.stopPropagation();
setDismissed(true);
}}
className="hidden lg:block p-1 rounded-full hover:bg-primary/30 transition-colors cursor-pointer"
aria-label="Dismiss"
>
<X className="size-3.5" />
</span>
</a>
</div>
);
}

View File

@@ -1,194 +0,0 @@
"use client";
import { Button } from "@/components/ui/button";
import { RefreshCw, Globe, Loader2 } from "lucide-react";
import { cn } from "@/lib/utils";
import type { WorktreeInfo, BranchInfo, DevServerInfo } from "../types";
import { BranchSwitchDropdown } from "./branch-switch-dropdown";
import { WorktreeActionsDropdown } from "./worktree-actions-dropdown";
interface WorktreeTabProps {
worktree: WorktreeInfo;
cardCount?: number; // Number of unarchived cards for this branch
isSelected: boolean;
isRunning: boolean;
isActivating: boolean;
isDevServerRunning: boolean;
devServerInfo?: DevServerInfo;
defaultEditorName: string;
branches: BranchInfo[];
filteredBranches: BranchInfo[];
branchFilter: string;
isLoadingBranches: boolean;
isSwitching: boolean;
isPulling: boolean;
isPushing: boolean;
isStartingDevServer: boolean;
aheadCount: number;
behindCount: number;
onSelectWorktree: (worktree: WorktreeInfo) => void;
onBranchDropdownOpenChange: (open: boolean) => void;
onActionsDropdownOpenChange: (open: boolean) => void;
onBranchFilterChange: (value: string) => void;
onSwitchBranch: (worktree: WorktreeInfo, branchName: string) => void;
onCreateBranch: (worktree: WorktreeInfo) => void;
onPull: (worktree: WorktreeInfo) => void;
onPush: (worktree: WorktreeInfo) => void;
onOpenInEditor: (worktree: WorktreeInfo) => void;
onCommit: (worktree: WorktreeInfo) => void;
onCreatePR: (worktree: WorktreeInfo) => void;
onDeleteWorktree: (worktree: WorktreeInfo) => void;
onStartDevServer: (worktree: WorktreeInfo) => void;
onStopDevServer: (worktree: WorktreeInfo) => void;
onOpenDevServerUrl: (worktree: WorktreeInfo) => void;
}
export function WorktreeTab({
worktree,
cardCount,
isSelected,
isRunning,
isActivating,
isDevServerRunning,
devServerInfo,
defaultEditorName,
branches,
filteredBranches,
branchFilter,
isLoadingBranches,
isSwitching,
isPulling,
isPushing,
isStartingDevServer,
aheadCount,
behindCount,
onSelectWorktree,
onBranchDropdownOpenChange,
onActionsDropdownOpenChange,
onBranchFilterChange,
onSwitchBranch,
onCreateBranch,
onPull,
onPush,
onOpenInEditor,
onCommit,
onCreatePR,
onDeleteWorktree,
onStartDevServer,
onStopDevServer,
onOpenDevServerUrl,
}: WorktreeTabProps) {
return (
<div className="flex items-center">
{worktree.isMain ? (
<>
<Button
variant={isSelected ? "default" : "outline"}
size="sm"
className={cn(
"h-7 px-3 text-xs font-mono gap-1.5 border-r-0 rounded-l-md rounded-r-none",
isSelected && "bg-primary text-primary-foreground",
!isSelected && "bg-secondary/50 hover:bg-secondary"
)}
onClick={() => onSelectWorktree(worktree)}
disabled={isActivating}
title="Click to preview main"
>
{isRunning && <Loader2 className="w-3 h-3 animate-spin" />}
{isActivating && !isRunning && (
<RefreshCw className="w-3 h-3 animate-spin" />
)}
{worktree.branch}
{cardCount !== undefined && cardCount > 0 && (
<span className="inline-flex items-center justify-center h-4 min-w-[1rem] px-1 text-[10px] font-medium rounded bg-background/80 text-foreground border border-border">
{cardCount}
</span>
)}
</Button>
<BranchSwitchDropdown
worktree={worktree}
isSelected={isSelected}
branches={branches}
filteredBranches={filteredBranches}
branchFilter={branchFilter}
isLoadingBranches={isLoadingBranches}
isSwitching={isSwitching}
onOpenChange={onBranchDropdownOpenChange}
onFilterChange={onBranchFilterChange}
onSwitchBranch={onSwitchBranch}
onCreateBranch={onCreateBranch}
/>
</>
) : (
<Button
variant={isSelected ? "default" : "outline"}
size="sm"
className={cn(
"h-7 px-3 text-xs font-mono gap-1.5 rounded-l-md rounded-r-none border-r-0",
isSelected && "bg-primary text-primary-foreground",
!isSelected && "bg-secondary/50 hover:bg-secondary",
!worktree.hasWorktree && !isSelected && "opacity-70"
)}
onClick={() => onSelectWorktree(worktree)}
disabled={isActivating}
title={
worktree.hasWorktree
? "Click to switch to this worktree's branch"
: "Click to switch to this branch"
}
>
{isRunning && <Loader2 className="w-3 h-3 animate-spin" />}
{isActivating && !isRunning && (
<RefreshCw className="w-3 h-3 animate-spin" />
)}
{worktree.branch}
{cardCount !== undefined && cardCount > 0 && (
<span className="inline-flex items-center justify-center h-4 min-w-[1rem] px-1 text-[10px] font-medium rounded bg-background/80 text-foreground border border-border">
{cardCount}
</span>
)}
</Button>
)}
{isDevServerRunning && (
<Button
variant={isSelected ? "default" : "outline"}
size="sm"
className={cn(
"h-7 w-7 p-0 rounded-none border-r-0",
isSelected && "bg-primary text-primary-foreground",
!isSelected && "bg-secondary/50 hover:bg-secondary",
"text-green-500"
)}
onClick={() => onOpenDevServerUrl(worktree)}
title={`Open dev server (port ${devServerInfo?.port})`}
>
<Globe className="w-3 h-3" />
</Button>
)}
<WorktreeActionsDropdown
worktree={worktree}
isSelected={isSelected}
defaultEditorName={defaultEditorName}
aheadCount={aheadCount}
behindCount={behindCount}
isPulling={isPulling}
isPushing={isPushing}
isStartingDevServer={isStartingDevServer}
isDevServerRunning={isDevServerRunning}
devServerInfo={devServerInfo}
onOpenChange={onActionsDropdownOpenChange}
onPull={onPull}
onPush={onPush}
onOpenInEditor={onOpenInEditor}
onCommit={onCommit}
onCreatePR={onCreatePR}
onDeleteWorktree={onDeleteWorktree}
onStartDevServer={onStartDevServer}
onStopDevServer={onStopDevServer}
onOpenDevServerUrl={onOpenDevServerUrl}
/>
</div>
);
}

File diff suppressed because it is too large.

View File

@@ -1,34 +0,0 @@
{
"compilerOptions": {
"target": "ES2017",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "react-jsx",
"incremental": true,
"plugins": [
{
"name": "next"
}
],
"paths": {
"@/*": ["./src/*"]
}
},
"include": [
"next-env.d.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts",
".next/dev/types/**/*.ts",
"**/*.mts"
],
"exclude": ["node_modules"]
}

View File

@@ -18,24 +18,24 @@
"test:unit": "vitest run tests/unit"
},
"dependencies": {
"@anthropic-ai/claude-agent-sdk": "^0.1.61",
"@anthropic-ai/claude-agent-sdk": "^0.1.72",
"cors": "^2.8.5",
"dotenv": "^17.2.3",
"express": "^5.1.0",
"express": "^5.2.1",
"morgan": "^1.10.1",
"node-pty": "1.1.0-beta41",
"ws": "^8.18.0"
"ws": "^8.18.3"
},
"devDependencies": {
"@types/cors": "^2.8.18",
"@types/express": "^5.0.1",
"@types/cors": "^2.8.19",
"@types/express": "^5.0.6",
"@types/morgan": "^1.9.10",
"@types/node": "^20",
"@types/node": "^22",
"@types/ws": "^8.18.1",
"@vitest/coverage-v8": "^4.0.15",
"@vitest/ui": "^4.0.15",
"tsx": "^4.19.4",
"@vitest/coverage-v8": "^4.0.16",
"@vitest/ui": "^4.0.16",
"tsx": "^4.21.0",
"typescript": "^5",
"vitest": "^4.0.15"
"vitest": "^4.0.16"
}
}

View File

@@ -4,6 +4,235 @@
* This format must be included in all prompts that generate, modify, or regenerate
* app specifications to ensure consistency across the application.
*/
/**
* TypeScript interface for structured spec output
*/
export interface SpecOutput {
project_name: string;
overview: string;
technology_stack: string[];
core_capabilities: string[];
implemented_features: Array<{
name: string;
description: string;
file_locations?: string[];
}>;
additional_requirements?: string[];
development_guidelines?: string[];
implementation_roadmap?: Array<{
phase: string;
status: "completed" | "in_progress" | "pending";
description: string;
}>;
}
/**
* JSON Schema for structured spec output
* Used with Claude's structured output feature for reliable parsing
*/
export const specOutputSchema = {
type: "object",
properties: {
project_name: {
type: "string",
description: "The name of the project",
},
overview: {
type: "string",
description:
"A comprehensive description of what the project does, its purpose, and key goals",
},
technology_stack: {
type: "array",
items: { type: "string" },
description:
"List of all technologies, frameworks, libraries, and tools used",
},
core_capabilities: {
type: "array",
items: { type: "string" },
description: "List of main features and capabilities the project provides",
},
implemented_features: {
type: "array",
items: {
type: "object",
properties: {
name: {
type: "string",
description: "Name of the implemented feature",
},
description: {
type: "string",
description: "Description of what the feature does",
},
file_locations: {
type: "array",
items: { type: "string" },
description: "File paths where this feature is implemented",
},
},
required: ["name", "description"],
},
description: "Features that have been implemented based on code analysis",
},
additional_requirements: {
type: "array",
items: { type: "string" },
description: "Any additional requirements or constraints",
},
development_guidelines: {
type: "array",
items: { type: "string" },
description: "Development standards and practices",
},
implementation_roadmap: {
type: "array",
items: {
type: "object",
properties: {
phase: {
type: "string",
description: "Name of the implementation phase",
},
status: {
type: "string",
enum: ["completed", "in_progress", "pending"],
description: "Current status of this phase",
},
description: {
type: "string",
description: "Description of what this phase involves",
},
},
required: ["phase", "status", "description"],
},
description: "Phases or roadmap items for implementation",
},
},
required: [
"project_name",
"overview",
"technology_stack",
"core_capabilities",
"implemented_features",
],
additionalProperties: false,
};
/**
* Escape special XML characters
*/
function escapeXml(str: string): string {
return str
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&apos;");
}
/**
* Convert structured spec output to XML format
*/
export function specToXml(spec: SpecOutput): string {
const indent = " ";
let xml = `<?xml version="1.0" encoding="UTF-8"?>
<project_specification>
${indent}<project_name>${escapeXml(spec.project_name)}</project_name>
${indent}<overview>
${indent}${indent}${escapeXml(spec.overview)}
${indent}</overview>
${indent}<technology_stack>
${spec.technology_stack.map((t) => `${indent}${indent}<technology>${escapeXml(t)}</technology>`).join("\n")}
${indent}</technology_stack>
${indent}<core_capabilities>
${spec.core_capabilities.map((c) => `${indent}${indent}<capability>${escapeXml(c)}</capability>`).join("\n")}
${indent}</core_capabilities>
${indent}<implemented_features>
${spec.implemented_features
.map(
(f) => `${indent}${indent}<feature>
${indent}${indent}${indent}<name>${escapeXml(f.name)}</name>
${indent}${indent}${indent}<description>${escapeXml(f.description)}</description>${
f.file_locations && f.file_locations.length > 0
? `\n${indent}${indent}${indent}<file_locations>
${f.file_locations.map((loc) => `${indent}${indent}${indent}${indent}<location>${escapeXml(loc)}</location>`).join("\n")}
${indent}${indent}${indent}</file_locations>`
: ""
}
${indent}${indent}</feature>`
)
.join("\n")}
${indent}</implemented_features>`;
// Optional sections
if (spec.additional_requirements && spec.additional_requirements.length > 0) {
xml += `
${indent}<additional_requirements>
${spec.additional_requirements.map((r) => `${indent}${indent}<requirement>${escapeXml(r)}</requirement>`).join("\n")}
${indent}</additional_requirements>`;
}
if (spec.development_guidelines && spec.development_guidelines.length > 0) {
xml += `
${indent}<development_guidelines>
${spec.development_guidelines.map((g) => `${indent}${indent}<guideline>${escapeXml(g)}</guideline>`).join("\n")}
${indent}</development_guidelines>`;
}
if (spec.implementation_roadmap && spec.implementation_roadmap.length > 0) {
xml += `
${indent}<implementation_roadmap>
${spec.implementation_roadmap
.map(
(r) => `${indent}${indent}<phase>
${indent}${indent}${indent}<name>${escapeXml(r.phase)}</name>
${indent}${indent}${indent}<status>${escapeXml(r.status)}</status>
${indent}${indent}${indent}<description>${escapeXml(r.description)}</description>
${indent}${indent}</phase>`
)
.join("\n")}
${indent}</implementation_roadmap>`;
}
xml += `
</project_specification>`;
return xml;
}
/**
* Get prompt instruction for structured output (simpler than XML instructions)
*/
export function getStructuredSpecPromptInstruction(): string {
return `
Analyze the project and provide a comprehensive specification with:
1. **project_name**: The name of the project
2. **overview**: A comprehensive description of what the project does, its purpose, and key goals
3. **technology_stack**: List all technologies, frameworks, libraries, and tools used
4. **core_capabilities**: List the main features and capabilities the project provides
5. **implemented_features**: For each implemented feature, provide:
- name: Feature name
- description: What it does
- file_locations: Key files where it's implemented (optional)
6. **additional_requirements**: Any system requirements, dependencies, or constraints (optional)
7. **development_guidelines**: Development standards and best practices (optional)
8. **implementation_roadmap**: Project phases with status (completed/in_progress/pending) (optional)
Be thorough in your analysis. The output will be automatically formatted as structured JSON.
`;
}
export const APP_SPEC_XML_FORMAT = `
The app_spec.txt file MUST follow this exact XML format:
@@ -63,10 +292,11 @@ export function getAppSpecFormatInstruction(): string {
${APP_SPEC_XML_FORMAT}
CRITICAL FORMATTING REQUIREMENTS:
- Do NOT use the Write, Edit, or Bash tools to create files - just OUTPUT the XML in your response
- Your ENTIRE response MUST be valid XML following the exact template structure above
- Do NOT use markdown formatting (no # headers, no **bold**, no - lists, etc.)
- Do NOT include any explanatory text, prefix, or suffix outside the XML tags
- Do NOT include phrases like "Based on my analysis..." or "I'll create..." before the XML
- Do NOT include phrases like "Based on my analysis...", "I'll create...", "Let me analyze..." before the XML
- Do NOT include any text before <project_specification> or after </project_specification>
- Your response must start IMMEDIATELY with <project_specification> with no preceding text
- Your response must end IMMEDIATELY with </project_specification> with no following text

View File

@@ -53,6 +53,13 @@ export function getImagesDir(projectPath: string): string {
return path.join(getAutomakerDir(projectPath), "images");
}
/**
* Get the context files directory for a project (user-added context files)
*/
export function getContextDir(projectPath: string): string {
return path.join(getAutomakerDir(projectPath), "context");
}
/**
* Get the worktrees metadata directory for a project
*/

View File

@@ -58,13 +58,13 @@ export const TOOL_PRESETS = {
*/
export const MAX_TURNS = {
/** Quick operations that shouldn't need many iterations */
quick: 5,
quick: 50,
/** Standard operations */
standard: 20,
standard: 100,
/** Long-running operations like full spec generation */
extended: 50,
extended: 250,
/** Very long operations that may require extensive exploration */
maximum: 1000,
@@ -143,6 +143,12 @@ export interface CreateSdkOptionsConfig {
/** Optional abort controller for cancellation */
abortController?: AbortController;
/** Optional output format for structured outputs */
outputFormat?: {
type: "json_schema";
schema: Record<string, unknown>;
};
}
/**
@@ -158,12 +164,17 @@ export function createSpecGenerationOptions(
): Options {
return {
...getBaseOptions(),
// Override permissionMode - spec generation only needs read-only tools
// Using "acceptEdits" can cause Claude to write files to unexpected locations
// See: https://github.com/AutoMaker-Org/automaker/issues/149
permissionMode: "default",
model: getModelForUseCase("spec", config.model),
maxTurns: MAX_TURNS.maximum,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.specGeneration],
...(config.systemPrompt && { systemPrompt: config.systemPrompt }),
...(config.abortController && { abortController: config.abortController }),
...(config.outputFormat && { outputFormat: config.outputFormat }),
};
}
@@ -180,6 +191,8 @@ export function createFeatureGenerationOptions(
): Options {
return {
...getBaseOptions(),
// Override permissionMode - feature generation only needs read-only tools
permissionMode: "default",
model: getModelForUseCase("features", config.model),
maxTurns: MAX_TURNS.quick,
cwd: config.cwd,
@@ -194,7 +207,7 @@ export function createFeatureGenerationOptions(
*
* Configuration:
* - Uses read-only tools for analysis
* - Quick turns for focused suggestions
* - Extended turns to allow thorough codebase exploration and structured output generation
* - Opus model by default for thorough analysis
*/
export function createSuggestionsOptions(
@@ -203,11 +216,12 @@ export function createSuggestionsOptions(
return {
...getBaseOptions(),
model: getModelForUseCase("suggestions", config.model),
maxTurns: MAX_TURNS.quick,
maxTurns: MAX_TURNS.extended,
cwd: config.cwd,
allowedTools: [...TOOL_PRESETS.readOnly],
...(config.systemPrompt && { systemPrompt: config.systemPrompt }),
...(config.abortController && { abortController: config.abortController }),
...(config.outputFormat && { outputFormat: config.outputFormat }),
};
}

View File

@@ -0,0 +1,183 @@
/**
* Worktree metadata storage utilities
* Stores worktree-specific data in .automaker/worktrees/:branch/worktree.json
*/
import * as fs from "fs/promises";
import * as path from "path";
/** Maximum length for sanitized branch names in filesystem paths */
const MAX_SANITIZED_BRANCH_PATH_LENGTH = 200;
export interface WorktreePRInfo {
number: number;
url: string;
title: string;
state: string;
createdAt: string;
}
export interface WorktreeMetadata {
branch: string;
createdAt: string;
pr?: WorktreePRInfo;
}
/**
* Sanitize branch name for cross-platform filesystem safety
*/
function sanitizeBranchName(branch: string): string {
// Replace characters that are invalid or problematic on various filesystems:
// - Forward and backslashes (path separators)
// - Windows invalid chars: : * ? " < > |
// - Other potentially problematic chars
let safeBranch = branch
.replace(/[/\\:*?"<>|]/g, "-") // Replace invalid chars with dash
.replace(/\s+/g, "_") // Replace spaces with underscores
.replace(/\.+$/g, "") // Remove trailing dots (Windows issue)
.replace(/-+/g, "-") // Collapse multiple dashes
.replace(/^-|-$/g, ""); // Remove leading/trailing dashes
// Truncate to safe length (leave room for path components)
safeBranch = safeBranch.substring(0, MAX_SANITIZED_BRANCH_PATH_LENGTH);
// Handle Windows reserved names (CON, PRN, AUX, NUL, COM1-9, LPT1-9)
const windowsReserved = /^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i;
if (windowsReserved.test(safeBranch) || safeBranch.length === 0) {
safeBranch = `_${safeBranch || "branch"}`;
}
return safeBranch;
}
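To make the sanitization rules concrete, a few traced input/output pairs follow. sanitizeBranchName is module-private, so these are shown as expected values rather than an exported API.
// sanitizeBranchName("feature/login:v2")  -> "feature-login-v2"  (path separator and ":" become "-")
// sanitizeBranchName("release notes v1.") -> "release_notes_v1"  (spaces -> "_", trailing dot removed)
// sanitizeBranchName("CON")               -> "_CON"              (Windows reserved name gets a "_" prefix)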
/**
* Get the path to the worktree metadata directory
*/
function getWorktreeMetadataDir(projectPath: string, branch: string): string {
const safeBranch = sanitizeBranchName(branch);
return path.join(projectPath, ".automaker", "worktrees", safeBranch);
}
/**
* Get the path to the worktree metadata file
*/
function getWorktreeMetadataPath(projectPath: string, branch: string): string {
return path.join(getWorktreeMetadataDir(projectPath, branch), "worktree.json");
}
/**
* Read worktree metadata for a branch
*/
export async function readWorktreeMetadata(
projectPath: string,
branch: string
): Promise<WorktreeMetadata | null> {
try {
const metadataPath = getWorktreeMetadataPath(projectPath, branch);
const content = await fs.readFile(metadataPath, "utf-8");
return JSON.parse(content) as WorktreeMetadata;
} catch (error) {
// File doesn't exist or can't be read
return null;
}
}
/**
* Write worktree metadata for a branch
*/
export async function writeWorktreeMetadata(
projectPath: string,
branch: string,
metadata: WorktreeMetadata
): Promise<void> {
const metadataDir = getWorktreeMetadataDir(projectPath, branch);
const metadataPath = getWorktreeMetadataPath(projectPath, branch);
// Ensure directory exists
await fs.mkdir(metadataDir, { recursive: true });
// Write metadata
await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2), "utf-8");
}
/**
* Update PR info in worktree metadata
*/
export async function updateWorktreePRInfo(
projectPath: string,
branch: string,
prInfo: WorktreePRInfo
): Promise<void> {
// Read existing metadata or create new
let metadata = await readWorktreeMetadata(projectPath, branch);
if (!metadata) {
metadata = {
branch,
createdAt: new Date().toISOString(),
};
}
// Update PR info
metadata.pr = prInfo;
// Write back
await writeWorktreeMetadata(projectPath, branch, metadata);
}
/**
* Get PR info for a branch from metadata
*/
export async function getWorktreePRInfo(
projectPath: string,
branch: string
): Promise<WorktreePRInfo | null> {
const metadata = await readWorktreeMetadata(projectPath, branch);
return metadata?.pr || null;
}
/**
* Read all worktree metadata for a project
*/
export async function readAllWorktreeMetadata(
projectPath: string
): Promise<Map<string, WorktreeMetadata>> {
const result = new Map<string, WorktreeMetadata>();
const worktreesDir = path.join(projectPath, ".automaker", "worktrees");
try {
const dirs = await fs.readdir(worktreesDir, { withFileTypes: true });
for (const dir of dirs) {
if (dir.isDirectory()) {
const metadataPath = path.join(worktreesDir, dir.name, "worktree.json");
try {
const content = await fs.readFile(metadataPath, "utf-8");
const metadata = JSON.parse(content) as WorktreeMetadata;
result.set(metadata.branch, metadata);
} catch {
// Skip if file doesn't exist or can't be read
}
}
}
} catch {
// Directory doesn't exist
}
return result;
}
/**
* Delete worktree metadata for a branch
*/
export async function deleteWorktreeMetadata(
projectPath: string,
branch: string
): Promise<void> {
const metadataDir = getWorktreeMetadataDir(projectPath, branch);
try {
await fs.rm(metadataDir, { recursive: true, force: true });
} catch {
// Ignore errors if directory doesn't exist
}
}
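A minimal usage sketch for the helpers above; the project path, branch, and PR values are placeholders.
const projectPath = "/path/to/project"; // illustrative
const branch = "feature/login";         // illustrative
await updateWorktreePRInfo(projectPath, branch, {
  number: 42,
  url: "https://github.com/example/repo/pull/42",
  title: "Add login",
  state: "open",
  createdAt: new Date().toISOString(),
});
const pr = await getWorktreePRInfo(projectPath, branch);
// pr?.number === 42; the metadata file lives at .automaker/worktrees/feature-login/worktree.json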

View File

@@ -6,7 +6,12 @@ import { query } from "@anthropic-ai/claude-agent-sdk";
import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../../lib/events.js";
import { getAppSpecFormatInstruction } from "../../lib/app-spec-format.js";
import {
specOutputSchema,
specToXml,
getStructuredSpecPromptInstruction,
type SpecOutput,
} from "../../lib/app-spec-format.js";
import { createLogger } from "../../lib/logger.js";
import { createSpecGenerationOptions } from "../../lib/sdk-options.js";
import { logAuthStatus } from "./common.js";
@@ -38,7 +43,7 @@ export async function generateSpec(
if (analyzeProject !== false) {
// Default to true - analyze the project
analysisInstructions = `Based on this overview, analyze the project directory (if it exists) and create a comprehensive specification. Use the Read, Glob, and Grep tools to explore the codebase and understand:
analysisInstructions = `Based on this overview, analyze the project directory (if it exists) using the Read, Glob, and Grep tools to understand:
- Existing technologies and frameworks
- Project structure and architecture
- Current features and capabilities
@@ -66,7 +71,7 @@ ${techStackDefaults}
${analysisInstructions}
${getAppSpecFormatInstruction()}`;
${getStructuredSpecPromptInstruction()}`;
logger.info("========== PROMPT BEING SENT ==========");
logger.info(`Prompt length: ${prompt.length} chars`);
@@ -81,6 +86,10 @@ ${getAppSpecFormatInstruction()}`;
const options = createSpecGenerationOptions({
cwd: projectPath,
abortController,
outputFormat: {
type: "json_schema",
schema: specOutputSchema,
},
});
logger.debug("SDK Options:", JSON.stringify(options, null, 2));
@@ -101,6 +110,7 @@ ${getAppSpecFormatInstruction()}`;
let responseText = "";
let messageCount = 0;
let structuredOutput: SpecOutput | null = null;
logger.info("Starting to iterate over stream...");
@@ -114,75 +124,49 @@ ${getAppSpecFormatInstruction()}`;
);
if (msg.type === "assistant") {
// Log the full message structure to debug
logger.info(`Assistant msg keys: ${Object.keys(msg).join(", ")}`);
const msgAny = msg as any;
if (msgAny.message) {
logger.info(
`msg.message keys: ${Object.keys(msgAny.message).join(", ")}`
);
if (msgAny.message.content) {
logger.info(
`msg.message.content length: ${msgAny.message.content.length}`
);
for (const block of msgAny.message.content) {
if (msgAny.message?.content) {
for (const block of msgAny.message.content) {
if (block.type === "text") {
responseText += block.text;
logger.info(
`Block keys: ${Object.keys(block).join(", ")}, type: ${
block.type
}`
`Text block received (${block.text.length} chars), total now: ${responseText.length} chars`
);
if (block.type === "text") {
responseText += block.text;
logger.info(
`Text block received (${block.text.length} chars), total now: ${responseText.length} chars`
);
logger.info(`Text preview: ${block.text.substring(0, 200)}...`);
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
content: block.text,
projectPath: projectPath,
});
} else if (block.type === "tool_use") {
logger.info("Tool use:", block.name);
events.emit("spec-regeneration:event", {
type: "spec_tool",
tool: block.name,
input: block.input,
});
}
events.emit("spec-regeneration:event", {
type: "spec_regeneration_progress",
content: block.text,
projectPath: projectPath,
});
} else if (block.type === "tool_use") {
logger.info("Tool use:", block.name);
events.emit("spec-regeneration:event", {
type: "spec_tool",
tool: block.name,
input: block.input,
});
}
} else {
logger.warn("msg.message.content is falsy");
}
} else {
logger.warn("msg.message is falsy");
// Log full message to see structure
logger.info(
`Full assistant msg: ${JSON.stringify(msg).substring(0, 1000)}`
);
}
} else if (msg.type === "result" && (msg as any).subtype === "success") {
logger.info("Received success result");
logger.info(`Result value: "${(msg as any).result}"`);
logger.info(
`Current responseText length before result: ${responseText.length}`
);
// Only use result if it has content, otherwise keep accumulated text
if ((msg as any).result && (msg as any).result.length > 0) {
logger.info("Using result value as responseText");
responseText = (msg as any).result;
// Check for structured output - this is the reliable way to get spec data
const resultMsg = msg as any;
if (resultMsg.structured_output) {
structuredOutput = resultMsg.structured_output as SpecOutput;
logger.info("✅ Received structured output");
logger.debug("Structured output:", JSON.stringify(structuredOutput, null, 2));
} else {
logger.info("Result is empty, keeping accumulated responseText");
logger.warn("⚠️ No structured output in result, will fall back to text parsing");
}
} else if (msg.type === "result") {
// Handle all result types
// Handle error result types
const subtype = (msg as any).subtype;
logger.info(`Result message: subtype=${subtype}`);
if (subtype === "error_max_turns") {
logger.error(
"❌ Hit max turns limit! Claude used too many tool calls."
);
logger.info(`responseText so far: ${responseText.length} chars`);
logger.error("❌ Hit max turns limit!");
} else if (subtype === "error_max_structured_output_retries") {
logger.error("❌ Failed to produce valid structured output after retries");
throw new Error("Could not produce valid spec output");
}
} else if ((msg as { type: string }).type === "error") {
logger.error("❌ Received error message from stream:");
@@ -202,22 +186,58 @@ ${getAppSpecFormatInstruction()}`;
logger.info(`Stream iteration complete. Total messages: ${messageCount}`);
logger.info(`Response text length: ${responseText.length} chars`);
logger.info("========== FINAL RESPONSE TEXT ==========");
logger.info(responseText || "(empty)");
logger.info("========== END RESPONSE TEXT ==========");
if (!responseText || responseText.trim().length === 0) {
logger.error("❌ WARNING: responseText is empty! Nothing to save.");
// Determine XML content to save
let xmlContent: string;
if (structuredOutput) {
// Use structured output - convert JSON to XML
logger.info("✅ Using structured output for XML generation");
xmlContent = specToXml(structuredOutput);
logger.info(`Generated XML from structured output: ${xmlContent.length} chars`);
} else {
// Fallback: Extract XML content from response text
// Claude might include conversational text before/after
// See: https://github.com/AutoMaker-Org/automaker/issues/149
logger.warn("⚠️ No structured output, falling back to text parsing");
logger.info("========== FINAL RESPONSE TEXT ==========");
logger.info(responseText || "(empty)");
logger.info("========== END RESPONSE TEXT ==========");
if (!responseText || responseText.trim().length === 0) {
throw new Error("No response text and no structured output - cannot generate spec");
}
const xmlStart = responseText.indexOf("<project_specification>");
const xmlEnd = responseText.lastIndexOf("</project_specification>");
if (xmlStart !== -1 && xmlEnd !== -1) {
// Extract just the XML content, discarding any conversational text before/after
xmlContent = responseText.substring(xmlStart, xmlEnd + "</project_specification>".length);
logger.info(`Extracted XML content: ${xmlContent.length} chars (from position ${xmlStart})`);
} else {
// No valid XML structure found in the response text
// This happens when structured output was expected but not received, and the agent
// output conversational text instead of XML (e.g., "The project directory appears to be empty...")
// We should NOT save this conversational text as it's not a valid spec
logger.error("❌ Response does not contain valid <project_specification> XML structure");
logger.error("This typically happens when structured output failed and the agent produced conversational text instead of XML");
throw new Error(
"Failed to generate spec: No valid XML structure found in response. " +
"The response contained conversational text but no <project_specification> tags. " +
"Please try again."
);
}
}
// Save spec to .automaker directory
const specDir = await ensureAutomakerDir(projectPath);
await ensureAutomakerDir(projectPath);
const specPath = getAppSpecPath(projectPath);
logger.info("Saving spec to:", specPath);
logger.info(`Content to save (${responseText.length} chars)`);
logger.info(`Content to save (${xmlContent.length} chars)`);
await fs.writeFile(specPath, responseText);
await fs.writeFile(specPath, xmlContent);
// Verify the file was written
const savedContent = await fs.readFile(specPath, "utf-8");

View File

@@ -29,6 +29,7 @@ const BINARY_EXTENSIONS = new Set([
]);
// Status map for git status codes
// Git porcelain format uses XY where X=staging area, Y=working tree
const GIT_STATUS_MAP: Record<string, string> = {
M: "Modified",
A: "Added",
@@ -37,8 +38,42 @@ const GIT_STATUS_MAP: Record<string, string> = {
C: "Copied",
U: "Updated",
"?": "Untracked",
"!": "Ignored",
" ": "Unmodified",
};
/**
* Get a readable status text from git status codes
* Handles both single character and XY format status codes
*/
function getStatusText(indexStatus: string, workTreeStatus: string): string {
// Untracked files
if (indexStatus === "?" && workTreeStatus === "?") {
return "Untracked";
}
// Ignored files
if (indexStatus === "!" && workTreeStatus === "!") {
return "Ignored";
}
// Prioritize staging area status, then working tree
const primaryStatus = indexStatus !== " " && indexStatus !== "?" ? indexStatus : workTreeStatus;
// Handle combined statuses
if (indexStatus !== " " && indexStatus !== "?" && workTreeStatus !== " " && workTreeStatus !== "?") {
// Both staging and working tree have changes
const indexText = GIT_STATUS_MAP[indexStatus] || "Changed";
const workText = GIT_STATUS_MAP[workTreeStatus] || "Changed";
if (indexText === workText) {
return indexText;
}
return `${indexText} (staged), ${workText} (unstaged)`;
}
return GIT_STATUS_MAP[primaryStatus] || "Changed";
}
/**
* File status interface for git status results
*/
@@ -70,18 +105,46 @@ export async function isGitRepo(repoPath: string): Promise<boolean> {
/**
* Parse the output of `git status --porcelain` into FileStatus array
* Git porcelain format: XY PATH where X=staging area status, Y=working tree status
* For renamed files: XY ORIG_PATH -> NEW_PATH
*/
export function parseGitStatus(statusOutput: string): FileStatus[] {
return statusOutput
.split("\n")
.filter(Boolean)
.map((line) => {
const statusChar = line[0];
const filePath = line.slice(3);
// Git porcelain format uses two status characters: XY
// X = status in staging area (index)
// Y = status in working tree
const indexStatus = line[0] || " ";
const workTreeStatus = line[1] || " ";
// File path starts at position 3 (after "XY ")
let filePath = line.slice(3);
// Handle renamed files (format: "R old_path -> new_path")
if (indexStatus === "R" || workTreeStatus === "R") {
const arrowIndex = filePath.indexOf(" -> ");
if (arrowIndex !== -1) {
filePath = filePath.slice(arrowIndex + 4); // Use new path
}
}
// Determine the primary status character for backwards compatibility
// Prioritize staging area status, then working tree
let primaryStatus: string;
if (indexStatus === "?" && workTreeStatus === "?") {
primaryStatus = "?"; // Untracked
} else if (indexStatus !== " " && indexStatus !== "?") {
primaryStatus = indexStatus; // Staged change
} else {
primaryStatus = workTreeStatus; // Working tree change
}
return {
status: statusChar,
status: primaryStatus,
path: filePath,
statusText: GIT_STATUS_MAP[statusChar] || "Unknown",
statusText: getStatusText(indexStatus, workTreeStatus),
};
});
}
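A quick trace of the parser above on a few illustrative porcelain lines:
// parseGitStatus("AM src/app.ts\n?? notes.txt\n M README.md") yields:
// [
//   { status: "A", path: "src/app.ts", statusText: "Added (staged), Modified (unstaged)" },
//   { status: "?", path: "notes.txt",  statusText: "Untracked" },
//   { status: "M", path: "README.md",  statusText: "Modified" },
// ]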

View File

@@ -9,7 +9,7 @@ import { getErrorMessage, logError } from "../common.js";
// Optional files that are expected to not exist in new projects
// Don't log ENOENT errors for these to reduce noise
const OPTIONAL_FILES = ["categories.json"];
const OPTIONAL_FILES = ["categories.json", "app_spec.txt"];
function isOptionalFile(filePath: string): boolean {
return OPTIONAL_FILES.some((optionalFile) => filePath.endsWith(optionalFile));

View File

@@ -9,6 +9,39 @@ import { createSuggestionsOptions } from "../../lib/sdk-options.js";
const logger = createLogger("Suggestions");
/**
* JSON Schema for suggestions output
*/
const suggestionsSchema = {
type: "object",
properties: {
suggestions: {
type: "array",
items: {
type: "object",
properties: {
id: { type: "string" },
category: { type: "string" },
description: { type: "string" },
steps: {
type: "array",
items: { type: "string" },
},
priority: {
type: "number",
minimum: 1,
maximum: 3,
},
reasoning: { type: "string" },
},
required: ["category", "description", "steps", "priority", "reasoning"],
},
},
},
required: ["suggestions"],
additionalProperties: false,
};
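For orientation, a payload that matches the schema above might look like this sketch (all values are illustrative):
const exampleSuggestionsOutput = {
  suggestions: [
    {
      id: "suggestion-1",
      category: "Testing",
      description: "Add integration tests for the worktree routes",
      steps: ["Set up a temp git repo fixture", "Cover create/delete/list endpoints"],
      priority: 1,
      reasoning: "These routes shell out to git and regress easily",
    },
  ],
};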
export async function generateSuggestions(
projectPath: string,
suggestionType: string,
@@ -36,19 +69,7 @@ For each suggestion, provide:
4. Priority (1=high, 2=medium, 3=low)
5. Brief reasoning for why this would help
Format your response as JSON:
{
"suggestions": [
{
"id": "suggestion-123",
"category": "Category",
"description": "What to implement",
"steps": ["Step 1", "Step 2"],
"priority": 1,
"reasoning": "Why this helps"
}
]
}`;
The response will be automatically formatted as structured JSON.`;
events.emit("suggestions:event", {
type: "suggestions_progress",
@@ -58,16 +79,21 @@ Format your response as JSON:
const options = createSuggestionsOptions({
cwd: projectPath,
abortController,
outputFormat: {
type: "json_schema",
schema: suggestionsSchema,
},
});
const stream = query({ prompt, options });
let responseText = "";
let structuredOutput: { suggestions: Array<Record<string, unknown>> } | null = null;
for await (const msg of stream) {
if (msg.type === "assistant" && msg.message.content) {
for (const block of msg.message.content) {
if (block.type === "text") {
responseText = block.text;
responseText += block.text;
events.emit("suggestions:event", {
type: "suggestions_progress",
content: block.text,
@@ -81,18 +107,34 @@ Format your response as JSON:
}
}
} else if (msg.type === "result" && msg.subtype === "success") {
responseText = msg.result || responseText;
// Check for structured output
const resultMsg = msg as any;
if (resultMsg.structured_output) {
structuredOutput = resultMsg.structured_output as {
suggestions: Array<Record<string, unknown>>;
};
logger.debug("Received structured output:", structuredOutput);
}
} else if (msg.type === "result") {
const resultMsg = msg as any;
if (resultMsg.subtype === "error_max_structured_output_retries") {
logger.error("Failed to produce valid structured output after retries");
throw new Error("Could not produce valid suggestions output");
} else if (resultMsg.subtype === "error_max_turns") {
logger.error("Hit max turns limit before completing suggestions generation");
logger.warn(`Response text length: ${responseText.length} chars`);
// Still try to parse what we have
}
}
}
// Parse suggestions from response
// Use structured output if available, otherwise fall back to parsing text
try {
const jsonMatch = responseText.match(/\{[\s\S]*"suggestions"[\s\S]*\}/);
if (jsonMatch) {
const parsed = JSON.parse(jsonMatch[0]);
if (structuredOutput && structuredOutput.suggestions) {
// Use structured output directly
events.emit("suggestions:event", {
type: "suggestions_complete",
suggestions: parsed.suggestions.map(
suggestions: structuredOutput.suggestions.map(
(s: Record<string, unknown>, i: number) => ({
...s,
id: s.id || `suggestion-${Date.now()}-${i}`,
@@ -100,7 +142,23 @@ Format your response as JSON:
),
});
} else {
throw new Error("No valid JSON found in response");
// Fallback: try to parse from text (for backwards compatibility)
logger.warn("No structured output received, attempting to parse from text");
const jsonMatch = responseText.match(/\{[\s\S]*"suggestions"[\s\S]*\}/);
if (jsonMatch) {
const parsed = JSON.parse(jsonMatch[0]);
events.emit("suggestions:event", {
type: "suggestions_complete",
suggestions: parsed.suggestions.map(
(s: Record<string, unknown>, i: number) => ({
...s,
id: s.id || `suggestion-${Date.now()}-${i}`,
})
),
});
} else {
throw new Error("No valid JSON found in response");
}
}
} catch (error) {
// Log the parsing error for debugging

View File

@@ -5,13 +5,95 @@
import { createLogger } from "../../lib/logger.js";
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import fs from "fs/promises";
import {
getErrorMessage as getErrorMessageShared,
createLogError,
} from "../common.js";
import { FeatureLoader } from "../../services/feature-loader.js";
const logger = createLogger("Worktree");
const execAsync = promisify(exec);
export const execAsync = promisify(exec);
const featureLoader = new FeatureLoader();
// ============================================================================
// Constants
// ============================================================================
/** Maximum allowed length for git branch names */
export const MAX_BRANCH_NAME_LENGTH = 250;
// ============================================================================
// Extended PATH configuration for Electron apps
// ============================================================================
const pathSeparator = process.platform === "win32" ? ";" : ":";
const additionalPaths: string[] = [];
if (process.platform === "win32") {
// Windows paths
if (process.env.LOCALAPPDATA) {
additionalPaths.push(`${process.env.LOCALAPPDATA}\\Programs\\Git\\cmd`);
}
if (process.env.PROGRAMFILES) {
additionalPaths.push(`${process.env.PROGRAMFILES}\\Git\\cmd`);
}
if (process.env["ProgramFiles(x86)"]) {
additionalPaths.push(`${process.env["ProgramFiles(x86)"]}\\Git\\cmd`);
}
} else {
// Unix/Mac paths
additionalPaths.push(
"/opt/homebrew/bin", // Homebrew on Apple Silicon
"/usr/local/bin", // Homebrew on Intel Mac, common Linux location
"/home/linuxbrew/.linuxbrew/bin", // Linuxbrew
`${process.env.HOME}/.local/bin`, // pipx, other user installs
);
}
const extendedPath = [
process.env.PATH,
...additionalPaths.filter(Boolean),
].filter(Boolean).join(pathSeparator);
/**
* Environment variables with extended PATH for executing shell commands.
* Electron apps don't inherit the user's shell PATH, so we need to add
* common tool installation locations.
*/
export const execEnv = {
...process.env,
PATH: extendedPath,
};
// ============================================================================
// Validation utilities
// ============================================================================
/**
* Validate branch name to prevent command injection.
* Git branch names cannot contain: space, ~, ^, :, ?, *, [, \, or control chars.
* We also reject shell metacharacters for safety.
*/
export function isValidBranchName(name: string): boolean {
return /^[a-zA-Z0-9._\-/]+$/.test(name) && name.length < MAX_BRANCH_NAME_LENGTH;
}
/**
* Check if gh CLI is available on the system
*/
export async function isGhCliAvailable(): Promise<boolean> {
try {
const checkCommand = process.platform === "win32"
? "where gh"
: "command -v gh";
await execAsync(checkCommand, { env: execEnv });
return true;
} catch {
return false;
}
}
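A short call-site sketch combining the exports above. The path and branch values are placeholders; the gh invocation mirrors the one used by the PR routes later in this diff.
const worktreePath = "/path/to/worktree"; // illustrative
const branch = "feature/login";           // illustrative
if (isValidBranchName(branch) && (await isGhCliAvailable())) {
  const { stdout } = await execAsync(
    `gh pr list --head "${branch}" --json number,url --limit 1`,
    { cwd: worktreePath, env: execEnv }
  );
  console.log(stdout);
}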
export const AUTOMAKER_INITIAL_COMMIT_MESSAGE =
"chore: automaker initial commit";

View File

@@ -12,6 +12,7 @@ import { createMergeHandler } from "./routes/merge.js";
import { createCreateHandler } from "./routes/create.js";
import { createDeleteHandler } from "./routes/delete.js";
import { createCreatePRHandler } from "./routes/create-pr.js";
import { createPRInfoHandler } from "./routes/pr-info.js";
import { createCommitHandler } from "./routes/commit.js";
import { createPushHandler } from "./routes/push.js";
import { createPullHandler } from "./routes/pull.js";
@@ -40,6 +41,7 @@ export function createWorktreeRoutes(): Router {
router.post("/create", createCreateHandler());
router.post("/delete", createDeleteHandler());
router.post("/create-pr", createCreatePRHandler());
router.post("/pr-info", createPRInfoHandler());
router.post("/commit", createCommitHandler());
router.post("/push", createPushHandler());
router.post("/pull", createPullHandler());

View File

@@ -3,53 +3,22 @@
*/
import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { getErrorMessage, logError } from "../common.js";
const execAsync = promisify(exec);
// Extended PATH to include common tool installation locations
// This is needed because Electron apps don't inherit the user's shell PATH
const pathSeparator = process.platform === "win32" ? ";" : ":";
const additionalPaths: string[] = [];
if (process.platform === "win32") {
// Windows paths
if (process.env.LOCALAPPDATA) {
additionalPaths.push(`${process.env.LOCALAPPDATA}\\Programs\\Git\\cmd`);
}
if (process.env.PROGRAMFILES) {
additionalPaths.push(`${process.env.PROGRAMFILES}\\Git\\cmd`);
}
if (process.env["ProgramFiles(x86)"]) {
additionalPaths.push(`${process.env["ProgramFiles(x86)"]}\\Git\\cmd`);
}
} else {
// Unix/Mac paths
additionalPaths.push(
"/opt/homebrew/bin", // Homebrew on Apple Silicon
"/usr/local/bin", // Homebrew on Intel Mac, common Linux location
"/home/linuxbrew/.linuxbrew/bin", // Linuxbrew
`${process.env.HOME}/.local/bin`, // pipx, other user installs
);
}
const extendedPath = [
process.env.PATH,
...additionalPaths.filter(Boolean),
].filter(Boolean).join(pathSeparator);
const execEnv = {
...process.env,
PATH: extendedPath,
};
import {
getErrorMessage,
logError,
execAsync,
execEnv,
isValidBranchName,
isGhCliAvailable,
} from "../common.js";
import { updateWorktreePRInfo } from "../../../lib/worktree-metadata.js";
export function createCreatePRHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { worktreePath, commitMessage, prTitle, prBody, baseBranch, draft } = req.body as {
const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } = req.body as {
worktreePath: string;
projectPath?: string;
commitMessage?: string;
prTitle?: string;
prBody?: string;
@@ -65,6 +34,10 @@ export function createCreatePRHandler() {
return;
}
// Use projectPath if provided, otherwise derive from worktreePath
// For worktrees, projectPath is needed to store metadata in the main project's .automaker folder
const effectiveProjectPath = projectPath || worktreePath;
// Get current branch name
const { stdout: branchOutput } = await execAsync(
"git rev-parse --abbrev-ref HEAD",
@@ -72,6 +45,15 @@ export function createCreatePRHandler() {
);
const branchName = branchOutput.trim();
// Validate branch name for security
if (!isValidBranchName(branchName)) {
res.status(400).json({
success: false,
error: "Invalid branch name contains unsafe characters",
});
return;
}
// Check for uncommitted changes
const { stdout: status } = await execAsync("git status --porcelain", {
cwd: worktreePath,
@@ -143,18 +125,8 @@ export function createCreatePRHandler() {
let browserUrl: string | null = null;
let ghCliAvailable = false;
// Check if gh CLI is available (cross-platform)
try {
const checkCommand = process.platform === "win32"
? "where gh"
: "command -v gh";
await execAsync(checkCommand, { env: execEnv });
ghCliAvailable = true;
} catch {
ghCliAvailable = false;
}
// Get repository URL for browser fallback
// Get repository URL and detect fork workflow FIRST
// This is needed for both the existing PR check and PR creation
let repoUrl: string | null = null;
let upstreamRepo: string | null = null;
let originOwner: string | null = null;
@@ -180,7 +152,7 @@ export function createCreatePRHandler() {
// Try HTTPS format: https://github.com/owner/repo.git
match = line.match(/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/);
}
if (match) {
const [, remoteName, owner, repo] = match;
if (remoteName === "upstream") {
@@ -206,7 +178,7 @@ export function createCreatePRHandler() {
env: execEnv,
});
const url = originUrl.trim();
// Parse URL to extract owner/repo
// Handle both SSH (git@github.com:owner/repo.git) and HTTPS (https://github.com/owner/repo.git)
let match = url.match(/[:/]([^/]+)\/([^/\s]+?)(?:\.git)?$/);
@@ -220,6 +192,9 @@ export function createCreatePRHandler() {
}
}
// Check if gh CLI is available (cross-platform)
ghCliAvailable = await isGhCliAvailable();
// Construct browser URL for PR creation
if (repoUrl) {
const encodedTitle = encodeURIComponent(title);
@@ -234,32 +209,136 @@ export function createCreatePRHandler() {
}
}
let prNumber: number | undefined;
let prAlreadyExisted = false;
if (ghCliAvailable) {
// First, check if a PR already exists for this branch using gh pr list
// This is more reliable than gh pr view as it explicitly searches by branch name
// For forks, we need to use owner:branch format for the head parameter
const headRef = upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
const repoArg = upstreamRepo ? ` --repo "${upstreamRepo}"` : "";
console.log(`[CreatePR] Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`);
try {
// Build gh pr create command
let prCmd = `gh pr create --base "${base}"`;
// If this is a fork (has upstream remote), specify the repo and head
if (upstreamRepo && originOwner) {
// For forks: --repo specifies where to create PR, --head specifies source
prCmd += ` --repo "${upstreamRepo}" --head "${originOwner}:${branchName}"`;
} else {
// Not a fork, just specify the head branch
prCmd += ` --head "${branchName}"`;
}
prCmd += ` --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"')}" ${draftFlag}`;
prCmd = prCmd.trim();
const { stdout: prOutput } = await execAsync(prCmd, {
const listCmd = `gh pr list${repoArg} --head "${headRef}" --json number,title,url,state --limit 1`;
console.log(`[CreatePR] Running: ${listCmd}`);
const { stdout: existingPrOutput } = await execAsync(listCmd, {
cwd: worktreePath,
env: execEnv,
});
prUrl = prOutput.trim();
} catch (ghError: unknown) {
// gh CLI failed
const err = ghError as { stderr?: string; message?: string };
prError = err.stderr || err.message || "PR creation failed";
console.log(`[CreatePR] gh pr list output: ${existingPrOutput}`);
const existingPrs = JSON.parse(existingPrOutput);
if (Array.isArray(existingPrs) && existingPrs.length > 0) {
const existingPr = existingPrs[0];
// PR already exists - use it and store metadata
console.log(`[CreatePR] PR already exists for branch ${branchName}: PR #${existingPr.number}`);
prUrl = existingPr.url;
prNumber = existingPr.number;
prAlreadyExisted = true;
// Store the existing PR info in metadata
await updateWorktreePRInfo(effectiveProjectPath, branchName, {
number: existingPr.number,
url: existingPr.url,
title: existingPr.title || title,
state: existingPr.state || "open",
createdAt: new Date().toISOString(),
});
console.log(`[CreatePR] Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`);
} else {
console.log(`[CreatePR] No existing PR found for branch ${branchName}`);
}
} catch (listError) {
// gh pr list failed - log but continue to try creating
console.log(`[CreatePR] gh pr list failed (this is ok, will try to create):`, listError);
}
// Only create a new PR if one doesn't already exist
if (!prUrl) {
try {
// Build gh pr create command
let prCmd = `gh pr create --base "${base}"`;
// If this is a fork (has upstream remote), specify the repo and head
if (upstreamRepo && originOwner) {
// For forks: --repo specifies where to create PR, --head specifies source
prCmd += ` --repo "${upstreamRepo}" --head "${originOwner}:${branchName}"`;
} else {
// Not a fork, just specify the head branch
prCmd += ` --head "${branchName}"`;
}
prCmd += ` --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"')}" ${draftFlag}`;
prCmd = prCmd.trim();
console.log(`[CreatePR] Creating PR with command: ${prCmd}`);
const { stdout: prOutput } = await execAsync(prCmd, {
cwd: worktreePath,
env: execEnv,
});
prUrl = prOutput.trim();
console.log(`[CreatePR] PR created: ${prUrl}`);
// Extract PR number and store metadata for newly created PR
if (prUrl) {
const prMatch = prUrl.match(/\/pull\/(\d+)/);
prNumber = prMatch ? parseInt(prMatch[1], 10) : undefined;
if (prNumber) {
try {
await updateWorktreePRInfo(effectiveProjectPath, branchName, {
number: prNumber,
url: prUrl,
title,
state: draft ? "draft" : "open",
createdAt: new Date().toISOString(),
});
console.log(`[CreatePR] Stored PR info for branch ${branchName}: PR #${prNumber}`);
} catch (metadataError) {
console.error("[CreatePR] Failed to store PR metadata:", metadataError);
}
}
}
} catch (ghError: unknown) {
// gh CLI failed - check if it's "already exists" error and try to fetch the PR
const err = ghError as { stderr?: string; message?: string };
const errorMessage = err.stderr || err.message || "PR creation failed";
console.log(`[CreatePR] gh pr create failed: ${errorMessage}`);
// If error indicates PR already exists, try to fetch it
if (errorMessage.toLowerCase().includes("already exists")) {
console.log(`[CreatePR] PR already exists error - trying to fetch existing PR`);
try {
const { stdout: viewOutput } = await execAsync(
`gh pr view --json number,title,url,state`,
{ cwd: worktreePath, env: execEnv }
);
const existingPr = JSON.parse(viewOutput);
if (existingPr.url) {
prUrl = existingPr.url;
prNumber = existingPr.number;
prAlreadyExisted = true;
await updateWorktreePRInfo(effectiveProjectPath, branchName, {
number: existingPr.number,
url: existingPr.url,
title: existingPr.title || title,
state: existingPr.state || "open",
createdAt: new Date().toISOString(),
});
console.log(`[CreatePR] Fetched and stored existing PR: #${existingPr.number}`);
}
} catch (viewError) {
console.error("[CreatePR] Failed to fetch existing PR:", viewError);
prError = errorMessage;
}
} else {
prError = errorMessage;
}
}
}
} else {
prError = "gh_cli_not_available";
@@ -274,7 +353,9 @@ export function createCreatePRHandler() {
commitHash,
pushed: true,
prUrl,
prNumber,
prCreated: !!prUrl,
prAlreadyExisted,
prError: prError || undefined,
browserUrl: browserUrl || undefined,
ghCliAvailable,

View File

@@ -38,8 +38,10 @@ export function createListBranchesHandler() {
const currentBranch = currentBranchOutput.trim();
// List all local branches
// Use double quotes around the format string for cross-platform compatibility
// Single quotes are preserved literally on Windows; double quotes work on both
const { stdout: branchesOutput } = await execAsync(
"git branch --format='%(refname:short)'",
'git branch --format="%(refname:short)"',
{ cwd: worktreePath }
);
@@ -47,11 +49,15 @@ export function createListBranchesHandler() {
.trim()
.split("\n")
.filter((b) => b.trim())
.map((name) => ({
name: name.trim(),
isCurrent: name.trim() === currentBranch,
isRemote: false,
}));
.map((name) => {
// Remove any surrounding quotes (Windows git may preserve them)
const cleanName = name.trim().replace(/^['"]|['"]$/g, "");
return {
name: cleanName,
isCurrent: cleanName === currentBranch,
isRemote: false,
};
});
// Get ahead/behind count for current branch
let aheadCount = 0;

View File

@@ -10,6 +10,7 @@ import { exec } from "child_process";
import { promisify } from "util";
import { existsSync } from "fs";
import { isGitRepo, getErrorMessage, logError, normalizePath } from "../common.js";
import { readAllWorktreeMetadata, type WorktreePRInfo } from "../../../lib/worktree-metadata.js";
const execAsync = promisify(exec);
@@ -21,6 +22,7 @@ interface WorktreeInfo {
hasWorktree: boolean; // Always true for items in this list
hasChanges?: boolean;
changedFilesCount?: number;
pr?: WorktreePRInfo; // PR info if a PR has been created for this branch
}
async function getCurrentBranch(cwd: string): Promise<string> {
@@ -106,6 +108,9 @@ export function createListHandler() {
}
}
// Read all worktree metadata to get PR info
const allMetadata = await readAllWorktreeMetadata(projectPath);
// If includeDetails is requested, fetch change status for each worktree
if (includeDetails) {
for (const worktree of worktrees) {
@@ -127,6 +132,14 @@ export function createListHandler() {
}
}
// Add PR info from metadata for each worktree
for (const worktree of worktrees) {
const metadata = allMetadata.get(worktree.branch);
if (metadata?.pr) {
worktree.pr = metadata.pr;
}
}
res.json({
success: true,
worktrees,

View File

@@ -0,0 +1,269 @@
/**
* POST /pr-info endpoint - Get PR info and comments for a branch
*/
import type { Request, Response } from "express";
import {
getErrorMessage,
logError,
execAsync,
execEnv,
isValidBranchName,
isGhCliAvailable,
} from "../common.js";
export interface PRComment {
id: number;
author: string;
body: string;
path?: string;
line?: number;
createdAt: string;
isReviewComment: boolean;
}
export interface PRInfo {
number: number;
title: string;
url: string;
state: string;
author: string;
body: string;
comments: PRComment[];
reviewComments: PRComment[];
}
export function createPRInfoHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
const { worktreePath, branchName } = req.body as {
worktreePath: string;
branchName: string;
};
if (!worktreePath || !branchName) {
res.status(400).json({
success: false,
error: "worktreePath and branchName required",
});
return;
}
// Validate branch name to prevent command injection
if (!isValidBranchName(branchName)) {
res.status(400).json({
success: false,
error: "Invalid branch name contains unsafe characters",
});
return;
}
// Check if gh CLI is available
const ghCliAvailable = await isGhCliAvailable();
if (!ghCliAvailable) {
res.json({
success: true,
result: {
hasPR: false,
ghCliAvailable: false,
error: "gh CLI not available",
},
});
return;
}
// Detect repository information (supports fork workflows)
let upstreamRepo: string | null = null;
let originOwner: string | null = null;
let originRepo: string | null = null;
try {
const { stdout: remotes } = await execAsync("git remote -v", {
cwd: worktreePath,
env: execEnv,
});
const lines = remotes.split(/\r?\n/);
for (const line of lines) {
let match =
line.match(
/^(\w+)\s+.*[:/]([^/]+)\/([^/\s]+?)(?:\.git)?\s+\(fetch\)/
) ||
line.match(
/^(\w+)\s+git@[^:]+:([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
) ||
line.match(
/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
);
if (match) {
const [, remoteName, owner, repo] = match;
if (remoteName === "upstream") {
upstreamRepo = `${owner}/${repo}`;
} else if (remoteName === "origin") {
originOwner = owner;
originRepo = repo;
}
}
}
} catch {
// Ignore remote parsing errors
}
if (!originOwner || !originRepo) {
try {
const { stdout: originUrl } = await execAsync(
"git config --get remote.origin.url",
{
cwd: worktreePath,
env: execEnv,
}
);
const match = originUrl
.trim()
.match(/[:/]([^/]+)\/([^/\s]+?)(?:\.git)?$/);
if (match) {
if (!originOwner) {
originOwner = match[1];
}
if (!originRepo) {
originRepo = match[2];
}
}
} catch {
// Ignore fallback errors
}
}
const targetRepo =
upstreamRepo || (originOwner && originRepo
? `${originOwner}/${originRepo}`
: null);
const repoFlag = targetRepo ? ` --repo "${targetRepo}"` : "";
const headRef =
upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
// Get PR info for the branch using gh CLI
try {
// First, find the PR associated with this branch
const listCmd = `gh pr list${repoFlag} --head "${headRef}" --json number,title,url,state,author,body --limit 1`;
const { stdout: prListOutput } = await execAsync(
listCmd,
{ cwd: worktreePath, env: execEnv }
);
const prList = JSON.parse(prListOutput);
if (prList.length === 0) {
res.json({
success: true,
result: {
hasPR: false,
ghCliAvailable: true,
},
});
return;
}
const pr = prList[0];
const prNumber = pr.number;
// Get regular PR comments (issue comments)
let comments: PRComment[] = [];
try {
const viewCmd = `gh pr view ${prNumber}${repoFlag} --json comments`;
const { stdout: commentsOutput } = await execAsync(
viewCmd,
{ cwd: worktreePath, env: execEnv }
);
const commentsData = JSON.parse(commentsOutput);
comments = (commentsData.comments || []).map((c: {
id: number;
author: { login: string };
body: string;
createdAt: string;
}) => ({
id: c.id,
author: c.author?.login || "unknown",
body: c.body,
createdAt: c.createdAt,
isReviewComment: false,
}));
} catch (error) {
console.warn("[PRInfo] Failed to fetch PR comments:", error);
}
// Get review comments (inline code comments)
let reviewComments: PRComment[] = [];
// Only fetch review comments if we have repository info
if (targetRepo) {
try {
const reviewsEndpoint = `repos/${targetRepo}/pulls/${prNumber}/comments`;
const reviewsCmd = `gh api ${reviewsEndpoint}`;
const { stdout: reviewsOutput } = await execAsync(
reviewsCmd,
{ cwd: worktreePath, env: execEnv }
);
const reviewsData = JSON.parse(reviewsOutput);
reviewComments = reviewsData.map((c: {
id: number;
user: { login: string };
body: string;
path: string;
line?: number;
original_line?: number;
created_at: string;
}) => ({
id: c.id,
author: c.user?.login || "unknown",
body: c.body,
path: c.path,
line: c.line || c.original_line,
createdAt: c.created_at,
isReviewComment: true,
}));
} catch (error) {
console.warn("[PRInfo] Failed to fetch review comments:", error);
}
} else {
console.warn("[PRInfo] Cannot fetch review comments: repository info not available");
}
const prInfo: PRInfo = {
number: prNumber,
title: pr.title,
url: pr.url,
state: pr.state,
author: pr.author?.login || "unknown",
body: pr.body || "",
comments,
reviewComments,
};
res.json({
success: true,
result: {
hasPR: true,
ghCliAvailable: true,
prInfo,
},
});
} catch (error) {
// gh CLI failed - might not be authenticated or no remote
logError(error, "Failed to get PR info");
res.json({
success: true,
result: {
hasPR: false,
ghCliAvailable: true,
error: getErrorMessage(error),
},
});
}
} catch (error) {
logError(error, "PR info handler failed");
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
}
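A hypothetical client call for this handler. The router is mounted by the server, but the mount prefix and port are not shown in this diff, so the URL below is illustrative.
const response = await fetch("http://localhost:3000/worktree/pr-info", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    worktreePath: "/path/to/worktree", // illustrative
    branchName: "feature/login",       // illustrative
  }),
});
const { result } = await response.json();
// On success: result.hasPR, result.ghCliAvailable, and (when hasPR) result.prInfo with
// number, title, url, state, author, body, comments, reviewComments.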

View File

@@ -23,7 +23,7 @@ import { isAbortError, classifyError } from "../lib/error-handler.js";
import { resolveDependencies, areDependenciesSatisfied } from "../lib/dependency-resolver.js";
import type { Feature } from "./feature-loader.js";
import { FeatureLoader } from "./feature-loader.js";
import { getFeatureDir, getAutomakerDir, getFeaturesDir } from "../lib/automaker-paths.js";
import { getFeatureDir, getAutomakerDir, getFeaturesDir, getContextDir } from "../lib/automaker-paths.js";
const execAsync = promisify(exec);
@@ -558,6 +558,9 @@ export class AutoModeService {
// Build the prompt - use continuation prompt if provided (for recovery after plan approval)
let prompt: string;
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) - passed as system prompt
const contextFiles = await this.loadContextFiles(projectPath);
if (options?.continuationPrompt) {
// Continuation prompt is used when recovering from a plan approval
// The plan was already approved, so skip the planning phase
@@ -591,6 +594,7 @@ export class AutoModeService {
);
// Run the agent with the feature's model and images
// Context files are passed as system prompt for higher priority
await this.runAgent(
workDir,
featureId,
@@ -603,6 +607,7 @@ export class AutoModeService {
projectPath,
planningMode: feature.planningMode,
requirePlanApproval: feature.requirePlanApproval,
systemPrompt: contextFiles || undefined,
}
);
@@ -755,6 +760,9 @@ export class AutoModeService {
// No previous context
}
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) - passed as system prompt
const contextFiles = await this.loadContextFiles(projectPath);
// Build complete prompt with feature info, previous context, and follow-up instructions
let fullPrompt = `## Follow-up on Feature Implementation
@@ -873,6 +881,7 @@ Address the follow-up instructions above. Review the previous work and make the
// Use fullPrompt (already built above) with model and all images
// Note: Follow-ups skip planning mode - they continue from previous work
// Pass previousContext so the history is preserved in the output file
// Context files are passed as system prompt for higher priority
await this.runAgent(
workDir,
featureId,
@@ -885,6 +894,7 @@ Address the follow-up instructions above. Review the previous work and make the
projectPath,
planningMode: 'skip', // Follow-ups don't require approval
previousContent: previousContext || undefined,
systemPrompt: contextFiles || undefined,
}
);
@@ -1083,6 +1093,65 @@ Address the follow-up instructions above. Review the previous work and make the
}
}
/**
* Load context files from .automaker/context/ directory
* These are user-defined context files (CLAUDE.md, CODE_QUALITY.md, etc.)
* that provide project-specific rules and guidelines for the agent.
*/
private async loadContextFiles(projectPath: string): Promise<string> {
// Use path.resolve for cross-platform absolute path handling
const contextDir = path.resolve(getContextDir(projectPath));
try {
// Check if directory exists first
await fs.access(contextDir);
const files = await fs.readdir(contextDir);
// Filter for text-based context files (case-insensitive for Windows)
const textFiles = files.filter((f) => {
const lower = f.toLowerCase();
return lower.endsWith(".md") || lower.endsWith(".txt");
});
if (textFiles.length === 0) return "";
const contents: string[] = [];
for (const file of textFiles) {
// Use path.join for cross-platform path construction
const filePath = path.join(contextDir, file);
const content = await fs.readFile(filePath, "utf-8");
contents.push(`## ${file}\n\n${content}`);
}
console.log(
`[AutoMode] Loaded ${textFiles.length} context file(s): ${textFiles.join(", ")}`
);
return `# ⚠️ CRITICAL: Project Context Files - READ AND FOLLOW STRICTLY
**IMPORTANT**: The following context files contain MANDATORY project-specific rules and conventions. You MUST:
1. Read these rules carefully before taking any action
2. Follow ALL commands exactly as shown (e.g., if the project uses \`pnpm\`, NEVER use \`npm\` or \`npx\`)
3. Follow ALL coding conventions, commit message formats, and architectural patterns specified
4. Reference these rules before running ANY shell commands or making commits
Failure to follow these rules will result in broken builds, failed CI, and rejected commits.
${contents.join("\n\n---\n\n")}
---
**REMINDER**: Before running any command, verify you are using the correct package manager and following the conventions above.
---
`;
} catch {
// Context directory doesn't exist or is empty - this is fine
return "";
}
}
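For orientation, the layout this loader reads; the file names are examples, and any .md or .txt file in the folder is picked up.
// <project>/.automaker/context/CLAUDE.md
// <project>/.automaker/context/CODE_QUALITY.md
// Each file is embedded under a "## <filename>" heading inside the returned system prompt.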
/**
* Analyze project to gather context
*/
@@ -1676,6 +1745,7 @@ This helps parse your summary correctly in the output logs.`;
planningMode?: PlanningMode;
requirePlanApproval?: boolean;
previousContent?: string;
systemPrompt?: string;
}
): Promise<void> {
const finalProjectPath = options?.projectPath || projectPath;
@@ -1783,6 +1853,13 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
false // don't duplicate paths in text
);
// Debug: Log if system prompt is provided
if (options?.systemPrompt) {
console.log(
`[AutoMode] System prompt provided (${options.systemPrompt.length} chars), first 200 chars:\n${options.systemPrompt.substring(0, 200)}...`
);
}
const executeOptions: ExecuteOptions = {
prompt: promptContent,
model: finalModel,
@@ -1790,6 +1867,7 @@ This mock response was generated because AUTOMAKER_MOCK_AGENT=true was set.
cwd: workDir,
allowedTools: allowedTools,
abortController,
systemPrompt: options?.systemPrompt,
};
// Execute via provider

View File

@@ -1,57 +1,189 @@
import { describe, it, expect } from "vitest";
import {
APP_SPEC_XML_FORMAT,
specToXml,
getStructuredSpecPromptInstruction,
getAppSpecFormatInstruction,
APP_SPEC_XML_FORMAT,
type SpecOutput,
} from "@/lib/app-spec-format.js";
describe("app-spec-format.ts", () => {
describe("APP_SPEC_XML_FORMAT", () => {
it("should export a non-empty string constant", () => {
expect(typeof APP_SPEC_XML_FORMAT).toBe("string");
expect(APP_SPEC_XML_FORMAT.length).toBeGreaterThan(0);
describe("specToXml", () => {
it("should convert minimal spec to XML", () => {
const spec: SpecOutput = {
project_name: "Test Project",
overview: "A test project",
technology_stack: ["TypeScript", "Node.js"],
core_capabilities: ["Testing", "Development"],
implemented_features: [
{ name: "Feature 1", description: "First feature" },
],
};
const xml = specToXml(spec);
expect(xml).toContain('<?xml version="1.0" encoding="UTF-8"?>');
expect(xml).toContain("<project_specification>");
expect(xml).toContain("</project_specification>");
expect(xml).toContain("<project_name>Test Project</project_name>");
expect(xml).toContain("<technology>TypeScript</technology>");
expect(xml).toContain("<capability>Testing</capability>");
});
it("should contain XML format documentation", () => {
expect(APP_SPEC_XML_FORMAT).toContain("<project_specification>");
expect(APP_SPEC_XML_FORMAT).toContain("</project_specification>");
expect(APP_SPEC_XML_FORMAT).toContain("<project_name>");
expect(APP_SPEC_XML_FORMAT).toContain("<overview>");
expect(APP_SPEC_XML_FORMAT).toContain("<technology_stack>");
expect(APP_SPEC_XML_FORMAT).toContain("<core_capabilities>");
it("should escape XML special characters", () => {
const spec: SpecOutput = {
project_name: "Test & Project",
overview: "Description with <tags>",
technology_stack: ["TypeScript"],
core_capabilities: ["Cap"],
implemented_features: [],
};
const xml = specToXml(spec);
expect(xml).toContain("Test &amp; Project");
expect(xml).toContain("&lt;tags&gt;");
});
it("should contain XML escaping instructions", () => {
expect(APP_SPEC_XML_FORMAT).toContain("&lt;");
expect(APP_SPEC_XML_FORMAT).toContain("&gt;");
expect(APP_SPEC_XML_FORMAT).toContain("&amp;");
it("should include file_locations when provided", () => {
const spec: SpecOutput = {
project_name: "Test",
overview: "Test",
technology_stack: ["TS"],
core_capabilities: ["Cap"],
implemented_features: [
{
name: "Feature",
description: "Desc",
file_locations: ["src/index.ts"],
},
],
};
const xml = specToXml(spec);
expect(xml).toContain("<file_locations>");
expect(xml).toContain("<location>src/index.ts</location>");
});
it("should not include file_locations when empty", () => {
const spec: SpecOutput = {
project_name: "Test",
overview: "Test",
technology_stack: ["TS"],
core_capabilities: ["Cap"],
implemented_features: [
{ name: "Feature", description: "Desc", file_locations: [] },
],
};
const xml = specToXml(spec);
expect(xml).not.toContain("<file_locations>");
});
it("should include additional_requirements when provided", () => {
const spec: SpecOutput = {
project_name: "Test",
overview: "Test",
technology_stack: ["TS"],
core_capabilities: ["Cap"],
implemented_features: [],
additional_requirements: ["Node.js 18+"],
};
const xml = specToXml(spec);
expect(xml).toContain("<additional_requirements>");
expect(xml).toContain("<requirement>Node.js 18+</requirement>");
});
it("should include development_guidelines when provided", () => {
const spec: SpecOutput = {
project_name: "Test",
overview: "Test",
technology_stack: ["TS"],
core_capabilities: ["Cap"],
implemented_features: [],
development_guidelines: ["Use ESLint"],
};
const xml = specToXml(spec);
expect(xml).toContain("<development_guidelines>");
expect(xml).toContain("<guideline>Use ESLint</guideline>");
});
it("should include implementation_roadmap when provided", () => {
const spec: SpecOutput = {
project_name: "Test",
overview: "Test",
technology_stack: ["TS"],
core_capabilities: ["Cap"],
implemented_features: [],
implementation_roadmap: [
{ phase: "Phase 1", status: "completed", description: "Setup" },
],
};
const xml = specToXml(spec);
expect(xml).toContain("<implementation_roadmap>");
expect(xml).toContain("<status>completed</status>");
});
it("should not include optional sections when empty", () => {
const spec: SpecOutput = {
project_name: "Test",
overview: "Test",
technology_stack: ["TS"],
core_capabilities: ["Cap"],
implemented_features: [],
additional_requirements: [],
development_guidelines: [],
implementation_roadmap: [],
};
const xml = specToXml(spec);
expect(xml).not.toContain("<additional_requirements>");
expect(xml).not.toContain("<development_guidelines>");
expect(xml).not.toContain("<implementation_roadmap>");
});
});
describe("getStructuredSpecPromptInstruction", () => {
it("should return non-empty prompt instruction", () => {
const instruction = getStructuredSpecPromptInstruction();
expect(instruction).toBeTruthy();
expect(instruction.length).toBeGreaterThan(100);
});
it("should mention required fields", () => {
const instruction = getStructuredSpecPromptInstruction();
expect(instruction).toContain("project_name");
expect(instruction).toContain("overview");
expect(instruction).toContain("technology_stack");
});
});
describe("getAppSpecFormatInstruction", () => {
it("should return a string containing the XML format", () => {
it("should return non-empty format instruction", () => {
const instruction = getAppSpecFormatInstruction();
expect(typeof instruction).toBe("string");
expect(instruction).toContain(APP_SPEC_XML_FORMAT);
expect(instruction).toBeTruthy();
expect(instruction.length).toBeGreaterThan(100);
});
it("should contain critical formatting requirements", () => {
it("should include critical formatting requirements", () => {
const instruction = getAppSpecFormatInstruction();
expect(instruction).toContain("CRITICAL FORMATTING REQUIREMENTS");
expect(instruction).toContain("<project_specification>");
expect(instruction).toContain("</project_specification>");
});
});
it("should contain verification instructions", () => {
const instruction = getAppSpecFormatInstruction();
expect(instruction).toContain("VERIFICATION");
expect(instruction).toContain("exactly one root XML element");
});
it("should instruct not to use markdown", () => {
const instruction = getAppSpecFormatInstruction();
expect(instruction).toContain("Do NOT use markdown");
expect(instruction).toContain("no # headers");
expect(instruction).toContain("no **bold**");
describe("APP_SPEC_XML_FORMAT", () => {
it("should contain valid XML template structure", () => {
expect(APP_SPEC_XML_FORMAT).toContain("<project_specification>");
expect(APP_SPEC_XML_FORMAT).toContain("</project_specification>");
});
});
});

View File

@@ -16,16 +16,19 @@ import {
} from "@/lib/automaker-paths.js";
describe("automaker-paths.ts", () => {
const projectPath = "/test/project";
const projectPath = path.join("/test", "project");
describe("getAutomakerDir", () => {
it("should return path to .automaker directory", () => {
expect(getAutomakerDir(projectPath)).toBe("/test/project/.automaker");
expect(getAutomakerDir(projectPath)).toBe(
path.join(projectPath, ".automaker")
);
});
it("should handle paths with trailing slashes", () => {
expect(getAutomakerDir("/test/project/")).toBe(
path.join("/test/project/", ".automaker")
const pathWithSlash = path.join("/test", "project") + path.sep;
expect(getAutomakerDir(pathWithSlash)).toBe(
path.join(pathWithSlash, ".automaker")
);
});
});
@@ -33,7 +36,7 @@ describe("automaker-paths.ts", () => {
describe("getFeaturesDir", () => {
it("should return path to features directory", () => {
expect(getFeaturesDir(projectPath)).toBe(
"/test/project/.automaker/features"
path.join(projectPath, ".automaker", "features")
);
});
});
@@ -41,13 +44,13 @@ describe("automaker-paths.ts", () => {
describe("getFeatureDir", () => {
it("should return path to specific feature directory", () => {
expect(getFeatureDir(projectPath, "feature-123")).toBe(
"/test/project/.automaker/features/feature-123"
path.join(projectPath, ".automaker", "features", "feature-123")
);
});
it("should handle feature IDs with special characters", () => {
expect(getFeatureDir(projectPath, "my-feature_v2")).toBe(
"/test/project/.automaker/features/my-feature_v2"
path.join(projectPath, ".automaker", "features", "my-feature_v2")
);
});
});
@@ -55,27 +58,31 @@ describe("automaker-paths.ts", () => {
describe("getFeatureImagesDir", () => {
it("should return path to feature images directory", () => {
expect(getFeatureImagesDir(projectPath, "feature-123")).toBe(
"/test/project/.automaker/features/feature-123/images"
path.join(projectPath, ".automaker", "features", "feature-123", "images")
);
});
});
describe("getBoardDir", () => {
it("should return path to board directory", () => {
expect(getBoardDir(projectPath)).toBe("/test/project/.automaker/board");
expect(getBoardDir(projectPath)).toBe(
path.join(projectPath, ".automaker", "board")
);
});
});
describe("getImagesDir", () => {
it("should return path to images directory", () => {
expect(getImagesDir(projectPath)).toBe("/test/project/.automaker/images");
expect(getImagesDir(projectPath)).toBe(
path.join(projectPath, ".automaker", "images")
);
});
});
describe("getWorktreesDir", () => {
it("should return path to worktrees directory", () => {
expect(getWorktreesDir(projectPath)).toBe(
"/test/project/.automaker/worktrees"
path.join(projectPath, ".automaker", "worktrees")
);
});
});
@@ -83,7 +90,7 @@ describe("automaker-paths.ts", () => {
describe("getAppSpecPath", () => {
it("should return path to app_spec.txt file", () => {
expect(getAppSpecPath(projectPath)).toBe(
"/test/project/.automaker/app_spec.txt"
path.join(projectPath, ".automaker", "app_spec.txt")
);
});
});
@@ -91,7 +98,7 @@ describe("automaker-paths.ts", () => {
describe("getBranchTrackingPath", () => {
it("should return path to active-branches.json file", () => {
expect(getBranchTrackingPath(projectPath)).toBe(
"/test/project/.automaker/active-branches.json"
path.join(projectPath, ".automaker", "active-branches.json")
);
});
});
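
For orientation, a minimal sketch of the path helpers these assertions pin down, inferred only from the expected values above (the real @/lib/automaker-paths.js may differ in details):

// Hedged sketch - names and signatures taken from the tests, not from the source file
import path from "path";

export const getAutomakerDir = (projectPath: string) =>
  path.join(projectPath, ".automaker");
export const getFeaturesDir = (projectPath: string) =>
  path.join(getAutomakerDir(projectPath), "features");
export const getFeatureDir = (projectPath: string, featureId: string) =>
  path.join(getFeaturesDir(projectPath), featureId);
export const getFeatureImagesDir = (projectPath: string, featureId: string) =>
  path.join(getFeatureDir(projectPath, featureId), "images");
export const getAppSpecPath = (projectPath: string) =>
  path.join(getAutomakerDir(projectPath), "app_spec.txt");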

View File

@@ -65,6 +65,47 @@ describe("fs-utils.ts", () => {
// Should not throw
await expect(mkdirSafe(symlinkPath)).resolves.toBeUndefined();
});
it("should handle ELOOP error gracefully when checking path", async () => {
// Mock lstat to throw ELOOP error
const mkdirSafePath = path.join(testDir, "eloop-path");
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });
// Should not throw, should return gracefully
await expect(mkdirSafe(mkdirSafePath)).resolves.toBeUndefined();
vi.restoreAllMocks();
});
it("should handle EEXIST error gracefully when creating directory", async () => {
const newDir = path.join(testDir, "race-condition-dir");
// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw EEXIST (race condition)
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "EEXIST" });
// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();
vi.restoreAllMocks();
});
it("should handle ELOOP error gracefully when creating directory", async () => {
const newDir = path.join(testDir, "eloop-create-dir");
// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw ELOOP
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "ELOOP" });
// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();
vi.restoreAllMocks();
});
});
describe("existsSafe", () => {
@@ -109,5 +150,24 @@ describe("fs-utils.ts", () => {
const exists = await existsSafe(symlinkPath);
expect(exists).toBe(true);
});
it("should return true for ELOOP error (symlink loop)", async () => {
// Mock lstat to throw ELOOP error
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });
const exists = await existsSafe("/some/path/with/loop");
expect(exists).toBe(true);
vi.restoreAllMocks();
});
it("should throw for other errors", async () => {
// Mock lstat to throw a non-ENOENT, non-ELOOP error
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "EACCES" });
await expect(existsSafe("/some/path")).rejects.toMatchObject({ code: "EACCES" });
vi.restoreAllMocks();
});
});
});
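
For context, a minimal sketch of the two helpers these mocks exercise, written only from the assertions above (the actual @/lib/fs-utils.js implementation may differ):

// Hedged sketch - behavior inferred from the ELOOP/EEXIST/EACCES tests, not copied from the source
import fs from "fs/promises";

export async function existsSafe(p: string): Promise<boolean> {
  try {
    await fs.lstat(p);
    return true;
  } catch (err) {
    const code = (err as NodeJS.ErrnoException).code;
    if (code === "ENOENT") return false; // nothing at the path
    if (code === "ELOOP") return true; // a symlink loop still means the path entry exists
    throw err; // anything else (e.g. EACCES) propagates to the caller
  }
}

export async function mkdirSafe(p: string): Promise<void> {
  try {
    await fs.lstat(p);
    return; // something already exists at the path - nothing to create
  } catch (err) {
    const code = (err as NodeJS.ErrnoException).code;
    if (code === "ELOOP") return; // symlink loop: bail out quietly
    if (code !== "ENOENT") throw err;
  }
  try {
    await fs.mkdir(p, { recursive: true });
  } catch (err) {
    const code = (err as NodeJS.ErrnoException).code;
    if (code === "EEXIST" || code === "ELOOP") return; // lost a race or hit a loop: treat as done
    throw err;
  }
}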

View File

@@ -40,9 +40,9 @@ describe("sdk-options.ts", () => {
describe("MAX_TURNS", () => {
it("should export turn presets", async () => {
const { MAX_TURNS } = await import("@/lib/sdk-options.js");
expect(MAX_TURNS.quick).toBe(5);
expect(MAX_TURNS.standard).toBe(20);
expect(MAX_TURNS.extended).toBe(50);
expect(MAX_TURNS.quick).toBe(50);
expect(MAX_TURNS.standard).toBe(100);
expect(MAX_TURNS.extended).toBe(250);
expect(MAX_TURNS.maximum).toBe(1000);
});
});
@@ -88,7 +88,7 @@ describe("sdk-options.ts", () => {
expect(options.cwd).toBe("/test/path");
expect(options.maxTurns).toBe(MAX_TURNS.maximum);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.specGeneration]);
expect(options.permissionMode).toBe("acceptEdits");
expect(options.permissionMode).toBe("default");
});
it("should include system prompt when provided", async () => {
@@ -141,7 +141,7 @@ describe("sdk-options.ts", () => {
const options = createSuggestionsOptions({ cwd: "/test/path" });
expect(options.cwd).toBe("/test/path");
expect(options.maxTurns).toBe(MAX_TURNS.quick);
expect(options.maxTurns).toBe(MAX_TURNS.extended);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
});
});
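
The updated expectations above imply turn presets along these lines (a hedged sketch; the actual constants live in @/lib/sdk-options.js and only the four asserted values are grounded in the tests):

// Hedged sketch of the preset object the assertions check
export const MAX_TURNS = {
  quick: 50,
  standard: 100,
  extended: 250,
  maximum: 1000,
} as const;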

View File

@@ -0,0 +1,368 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
readWorktreeMetadata,
writeWorktreeMetadata,
updateWorktreePRInfo,
getWorktreePRInfo,
readAllWorktreeMetadata,
deleteWorktreeMetadata,
type WorktreeMetadata,
type WorktreePRInfo,
} from "@/lib/worktree-metadata.js";
import fs from "fs/promises";
import path from "path";
import os from "os";
describe("worktree-metadata.ts", () => {
let testProjectPath: string;
beforeEach(async () => {
testProjectPath = path.join(os.tmpdir(), `worktree-metadata-test-${Date.now()}`);
await fs.mkdir(testProjectPath, { recursive: true });
});
afterEach(async () => {
try {
await fs.rm(testProjectPath, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
});
describe("sanitizeBranchName", () => {
// Test through readWorktreeMetadata and writeWorktreeMetadata
it("should sanitize branch names with invalid characters", async () => {
const branch = "feature/test-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
it("should sanitize branch names with Windows invalid characters", async () => {
const branch = "feature:test*branch?";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
it("should sanitize Windows reserved names", async () => {
const branch = "CON";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
});
describe("readWorktreeMetadata", () => {
it("should return null when metadata file doesn't exist", async () => {
const result = await readWorktreeMetadata(testProjectPath, "nonexistent-branch");
expect(result).toBeNull();
});
it("should read existing metadata", async () => {
const branch = "test-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
it("should read metadata with PR info", async () => {
const branch = "pr-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
pr: {
number: 123,
url: "https://github.com/owner/repo/pull/123",
title: "Test PR",
state: "open",
createdAt: new Date().toISOString(),
},
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
});
describe("writeWorktreeMetadata", () => {
it("should create metadata directory if it doesn't exist", async () => {
const branch = "new-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
it("should overwrite existing metadata", async () => {
const branch = "existing-branch";
const metadata1: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
const metadata2: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
pr: {
number: 456,
url: "https://github.com/owner/repo/pull/456",
title: "Updated PR",
state: "closed",
createdAt: new Date().toISOString(),
},
};
await writeWorktreeMetadata(testProjectPath, branch, metadata1);
await writeWorktreeMetadata(testProjectPath, branch, metadata2);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata2);
});
});
describe("updateWorktreePRInfo", () => {
it("should create new metadata if it doesn't exist", async () => {
const branch = "new-pr-branch";
const prInfo: WorktreePRInfo = {
number: 789,
url: "https://github.com/owner/repo/pull/789",
title: "New PR",
state: "open",
createdAt: new Date().toISOString(),
};
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).not.toBeNull();
expect(result?.branch).toBe(branch);
expect(result?.pr).toEqual(prInfo);
});
it("should update existing metadata with PR info", async () => {
const branch = "existing-pr-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const prInfo: WorktreePRInfo = {
number: 999,
url: "https://github.com/owner/repo/pull/999",
title: "Updated PR",
state: "merged",
createdAt: new Date().toISOString(),
};
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result?.pr).toEqual(prInfo);
});
it("should preserve existing metadata when updating PR info", async () => {
const branch = "preserve-branch";
const originalCreatedAt = new Date().toISOString();
const metadata: WorktreeMetadata = {
branch,
createdAt: originalCreatedAt,
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const prInfo: WorktreePRInfo = {
number: 111,
url: "https://github.com/owner/repo/pull/111",
title: "PR",
state: "open",
createdAt: new Date().toISOString(),
};
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result?.createdAt).toBe(originalCreatedAt);
expect(result?.pr).toEqual(prInfo);
});
});
describe("getWorktreePRInfo", () => {
it("should return null when metadata doesn't exist", async () => {
const result = await getWorktreePRInfo(testProjectPath, "nonexistent");
expect(result).toBeNull();
});
it("should return null when metadata exists but has no PR info", async () => {
const branch = "no-pr-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await getWorktreePRInfo(testProjectPath, branch);
expect(result).toBeNull();
});
it("should return PR info when it exists", async () => {
const branch = "has-pr-branch";
const prInfo: WorktreePRInfo = {
number: 222,
url: "https://github.com/owner/repo/pull/222",
title: "Has PR",
state: "open",
createdAt: new Date().toISOString(),
};
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await getWorktreePRInfo(testProjectPath, branch);
expect(result).toEqual(prInfo);
});
});
describe("readAllWorktreeMetadata", () => {
it("should return empty map when worktrees directory doesn't exist", async () => {
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(0);
});
it("should return empty map when worktrees directory is empty", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
await fs.mkdir(worktreesDir, { recursive: true });
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(0);
});
it("should read all worktree metadata", async () => {
const branch1 = "branch-1";
const branch2 = "branch-2";
const metadata1: WorktreeMetadata = {
branch: branch1,
createdAt: new Date().toISOString(),
};
const metadata2: WorktreeMetadata = {
branch: branch2,
createdAt: new Date().toISOString(),
pr: {
number: 333,
url: "https://github.com/owner/repo/pull/333",
title: "PR 3",
state: "open",
createdAt: new Date().toISOString(),
},
};
await writeWorktreeMetadata(testProjectPath, branch1, metadata1);
await writeWorktreeMetadata(testProjectPath, branch2, metadata2);
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(2);
expect(result.get(branch1)).toEqual(metadata1);
expect(result.get(branch2)).toEqual(metadata2);
});
it("should skip directories without worktree.json", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
const emptyDir = path.join(worktreesDir, "empty-dir");
await fs.mkdir(emptyDir, { recursive: true });
const branch = "valid-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(1);
expect(result.get(branch)).toEqual(metadata);
});
it("should skip files in worktrees directory", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
await fs.mkdir(worktreesDir, { recursive: true });
const filePath = path.join(worktreesDir, "not-a-dir.txt");
await fs.writeFile(filePath, "content");
const branch = "valid-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(1);
expect(result.get(branch)).toEqual(metadata);
});
it("should skip directories with malformed JSON", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
const badDir = path.join(worktreesDir, "bad-dir");
await fs.mkdir(badDir, { recursive: true });
const badJsonPath = path.join(badDir, "worktree.json");
await fs.writeFile(badJsonPath, "not valid json");
const branch = "valid-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(1);
expect(result.get(branch)).toEqual(metadata);
});
});
describe("deleteWorktreeMetadata", () => {
it("should delete worktree metadata directory", async () => {
const branch = "to-delete";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);
let result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).not.toBeNull();
await deleteWorktreeMetadata(testProjectPath, branch);
result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toBeNull();
});
it("should handle deletion when metadata doesn't exist", async () => {
// Should not throw
await expect(
deleteWorktreeMetadata(testProjectPath, "nonexistent")
).resolves.toBeUndefined();
});
});
});
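
For context, the tests above rely only on an on-disk layout of <project>/.automaker/worktrees/<sanitized-branch>/worktree.json plus a branch-name sanitizer that is safe on both POSIX and Windows. A hedged sketch of that shape (the real @/lib/worktree-metadata.js may sanitize differently):

// Hypothetical sketch - only the directory layout and the "round-trips safely" property are grounded in the tests
import path from "path";

function sanitizeBranchName(branch: string): string {
  // Replace path separators and Windows-invalid characters, e.g. feature/test-branch or feature:test*branch?
  const safe = branch.replace(/[\\/:*?"<>|]/g, "-");
  // Avoid bare Windows reserved device names such as CON
  return /^(CON|PRN|AUX|NUL|COM\d|LPT\d)$/i.test(safe) ? `${safe}_` : safe;
}

function worktreeMetadataPath(projectPath: string, branch: string): string {
  return path.join(
    projectPath,
    ".automaker",
    "worktrees",
    sanitizeBranchName(branch),
    "worktree.json"
  );
}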

View File

@@ -13,12 +13,9 @@
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# Vite
/dist/
/dist-electron/
# misc
.DS_Store
@@ -33,12 +30,8 @@ yarn-error.log*
# env files (can opt-in for committing if needed)
.env*
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
# Playwright
/test-results/
@@ -47,5 +40,8 @@ next-env.d.ts
/playwright/.cache/
# Electron
/dist/
/release/
/server-bundle/
# TanStack Router generated
src/routeTree.gen.ts

apps/ui/eslint.config.mjs Normal file
View File

@@ -0,0 +1,36 @@
import { defineConfig, globalIgnores } from "eslint/config";
import js from "@eslint/js";
import ts from "@typescript-eslint/eslint-plugin";
import tsParser from "@typescript-eslint/parser";
const eslintConfig = defineConfig([
js.configs.recommended,
{
files: ["**/*.ts", "**/*.tsx"],
languageOptions: {
parser: tsParser,
parserOptions: {
ecmaVersion: "latest",
sourceType: "module",
},
},
plugins: {
"@typescript-eslint": ts,
},
rules: {
...ts.configs.recommended.rules,
"@typescript-eslint/no-unused-vars": ["warn", { argsIgnorePattern: "^_" }],
"@typescript-eslint/no-explicit-any": "warn",
},
},
globalIgnores([
"dist/**",
"dist-electron/**",
"node_modules/**",
"server-bundle/**",
"release/**",
"src/routeTree.gen.ts",
]),
]);
export default eslintConfig;

apps/ui/index.html Normal file
View File

@@ -0,0 +1,32 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>Automaker - Autonomous AI Development Studio</title>
<meta name="description" content="Build software autonomously with AI agents" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
<script>
// Prevent theme flash - apply stored theme before React hydrates
(function() {
try {
const stored = localStorage.getItem('automaker-storage');
if (stored) {
const data = JSON.parse(stored);
const theme = data.state?.theme;
if (theme && theme !== 'system' && theme !== 'light') {
// Apply the actual theme class (dark, retro, dracula, nord, etc.)
document.documentElement.classList.add(theme);
} else if (theme === 'system' && window.matchMedia('(prefers-color-scheme: dark)').matches) {
document.documentElement.classList.add('dark');
}
}
} catch (e) {}
})();
</script>
</head>
<body class="antialiased">
<div id="app"></div>
<script type="module" src="/src/renderer.tsx"></script>
</body>
</html>
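
The inline script above assumes the app store is persisted (e.g. via a zustand-style persist middleware) under the "automaker-storage" key as a { state, version } envelope; a hedged sketch of the slice it reads:

// Hypothetical shape - only the "state.theme" field is actually read by the script above
interface PersistedAutomakerStorage {
  state: {
    theme?: string; // e.g. "system", "light", "dark", "retro", "dracula", "nord"
  };
  version?: number;
}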

View File

@@ -1,5 +1,5 @@
{
"name": "@automaker/app",
"name": "@automaker/ui",
"version": "0.1.0",
"description": "An autonomous AI development studio that helps you build software faster using AI-powered agents",
"homepage": "https://github.com/AutoMaker-Org/automaker",
@@ -13,25 +13,29 @@
},
"private": true,
"license": "Unlicense",
"main": "electron/main.js",
"main": "dist-electron/main.js",
"scripts": {
"dev": "next dev -p 3007",
"dev:web": "next dev -p 3007",
"dev:electron": "concurrently \"next dev -p 3007\" \"wait-on http://localhost:3007 && electron .\"",
"dev:electron:debug": "concurrently \"next dev -p 3007\" \"wait-on http://localhost:3007 && OPEN_DEVTOOLS=true electron .\"",
"build": "next build",
"build:electron": "node scripts/prepare-server.js && next build && electron-builder",
"build:electron:win": "node scripts/prepare-server.js && next build && electron-builder --win",
"build:electron:mac": "node scripts/prepare-server.js && next build && electron-builder --mac",
"build:electron:linux": "node scripts/prepare-server.js && next build && electron-builder --linux",
"dev": "vite",
"dev:web": "cross-env VITE_SKIP_ELECTRON=true vite",
"dev:electron": "vite",
"dev:electron:debug": "cross-env OPEN_DEVTOOLS=true vite",
"build": "vite build",
"build:electron": "node scripts/prepare-server.mjs && vite build && electron-builder",
"build:electron:dir": "node scripts/prepare-server.mjs && vite build && electron-builder --dir",
"build:electron:win": "node scripts/prepare-server.mjs && vite build && electron-builder --win",
"build:electron:win:dir": "node scripts/prepare-server.mjs && vite build && electron-builder --win --dir",
"build:electron:mac": "node scripts/prepare-server.mjs && vite build && electron-builder --mac",
"build:electron:mac:dir": "node scripts/prepare-server.mjs && vite build && electron-builder --mac --dir",
"build:electron:linux": "node scripts/prepare-server.mjs && vite build && electron-builder --linux",
"build:electron:linux:dir": "node scripts/prepare-server.mjs && vite build && electron-builder --linux --dir",
"postinstall": "electron-builder install-app-deps",
"start": "next start",
"preview": "vite preview",
"lint": "eslint",
"pretest": "node scripts/setup-e2e-fixtures.js",
"pretest": "node scripts/setup-e2e-fixtures.mjs",
"test": "playwright test",
"test:headed": "playwright test --headed",
"dev:electron:wsl": "concurrently \"next dev -p 3007\" \"wait-on http://localhost:3007 && electron . --no-sandbox --disable-gpu\"",
"dev:electron:wsl:gpu": "concurrently \"next dev -p 3007\" \"wait-on http://localhost:3007 && MESA_D3D12_DEFAULT_ADAPTER_NAME=NVIDIA electron . --no-sandbox --disable-gpu-sandbox\""
"dev:electron:wsl": "cross-env vite",
"dev:electron:wsl:gpu": "cross-env MESA_D3D12_DEFAULT_ADAPTER_NAME=NVIDIA vite"
},
"dependencies": {
"@codemirror/lang-xml": "^6.1.0",
@@ -53,6 +57,7 @@
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-tooltip": "^1.2.8",
"@tanstack/react-query": "^5.90.12",
"@tanstack/react-router": "^1.141.6",
"@uiw/react-codemirror": "^4.25.4",
"@xterm/addon-fit": "^0.10.0",
"@xterm/addon-webgl": "^0.18.0",
@@ -62,10 +67,9 @@
"cmdk": "^1.1.1",
"dotenv": "^17.2.3",
"geist": "^1.5.1",
"lucide-react": "^0.556.0",
"next": "^16.0.10",
"react": "19.2.0",
"react-dom": "19.2.0",
"lucide-react": "^0.562.0",
"react": "19.2.3",
"react-dom": "19.2.3",
"react-markdown": "^10.1.0",
"react-resizable-panels": "^3.0.6",
"sonner": "^2.0.7",
@@ -85,32 +89,39 @@
},
"devDependencies": {
"@electron/rebuild": "^4.0.2",
"@eslint/js": "^9.0.0",
"@playwright/test": "^1.57.0",
"@tailwindcss/postcss": "^4",
"@types/node": "^20",
"@types/react": "^19",
"@types/react-dom": "^19",
"concurrently": "^9.2.1",
"@tailwindcss/vite": "^4.1.18",
"@tanstack/router-plugin": "^1.141.7",
"@types/node": "^22",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@typescript-eslint/eslint-plugin": "^8.50.0",
"@typescript-eslint/parser": "^8.50.0",
"@vitejs/plugin-react": "^5.1.2",
"cross-env": "^10.1.0",
"electron": "39.2.7",
"electron-builder": "^26.0.12",
"eslint": "^9",
"eslint-config-next": "16.0.7",
"tailwindcss": "^4",
"eslint": "^9.39.2",
"tailwindcss": "^4.1.18",
"tw-animate-css": "^1.4.0",
"typescript": "5.9.3",
"wait-on": "^9.0.3"
"vite": "^7.3.0",
"vite-plugin-electron": "^0.29.0",
"vite-plugin-electron-renderer": "^0.14.6"
},
"build": {
"appId": "com.automaker.app",
"productName": "Automaker",
"artifactName": "${productName}-${version}-${arch}.${ext}",
"afterPack": "./scripts/rebuild-server-natives.js",
"npmRebuild": false,
"afterPack": "./scripts/rebuild-server-natives.cjs",
"directories": {
"output": "dist"
"output": "release"
},
"files": [
"electron/**/*",
"out/**/*",
"dist/**/*",
"dist-electron/**/*",
"public/**/*",
"!node_modules/**/*"
],

View File

@@ -44,15 +44,17 @@ export default defineConfig({
ALLOWED_PROJECT_DIRS: "/Users,/home,/tmp,/var/folders",
},
},
// Frontend Next.js server
// Frontend Vite dev server
{
command: `npx next dev -p ${port}`,
command: `npm run dev`,
url: `http://localhost:${port}`,
reuseExistingServer: true,
timeout: 120000,
env: {
...process.env,
NEXT_PUBLIC_SKIP_SETUP: "true",
VITE_SKIP_SETUP: "true",
// Skip electron plugin in CI - no display available for Electron
VITE_SKIP_ELECTRON: process.env.CI === "true" ? "true" : undefined,
},
},
],
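
The VITE_SKIP_ELECTRON flag set here presumably gates the Electron plugins in the UI's Vite config, which is not part of this diff; a hedged sketch of what that check could look like:

// vite.config.ts (hypothetical excerpt - file name and entry path are assumptions, not taken from this diff)
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
import electron from "vite-plugin-electron";

const skipElectron = process.env.VITE_SKIP_ELECTRON === "true";

export default defineConfig({
  plugins: [
    react(),
    // Omit the Electron plugin when no display is available (e.g. headless CI runs)
    ...(skipElectron ? [] : [electron({ entry: "electron/main.ts" })]),
  ],
});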

View File (binary image assets)

Ten image files changed with identical before/after sizes (1.1 KiB, 391 B, 1.0 KiB, 22 KiB, 97 KiB, 317 KiB, 1.3 KiB, 61 KiB, 128 B, 385 B); image previews omitted.
View File

@@ -12,7 +12,7 @@ import { fileURLToPath } from "url";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Resolve workspace root (apps/app/scripts -> workspace root)
// Resolve workspace root (apps/ui/scripts -> workspace root)
const WORKSPACE_ROOT = path.resolve(__dirname, "../../..");
const FIXTURE_PATH = path.join(WORKSPACE_ROOT, "test/fixtures/projectA");
const SPEC_FILE_PATH = path.join(FIXTURE_PATH, ".automaker/app_spec.txt");

apps/ui/src/App.tsx Normal file
View File

@@ -0,0 +1,7 @@
import { RouterProvider } from "@tanstack/react-router";
import { router } from "./utils/router";
import "./styles/global.css";
export default function App() {
return <RouterProvider router={router} />;
}

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useRef, useCallback, useEffect } from "react";
import { ImageIcon, Upload, Loader2, Trash2 } from "lucide-react";
@@ -72,7 +71,7 @@ export function BoardBackgroundModal({
useEffect(() => {
if (currentProject && backgroundSettings.imagePath) {
const serverUrl =
process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
import.meta.env.VITE_SERVER_URL || "http://localhost:3008";
// Add cache-busting query parameter to force browser to reload image
const cacheBuster = imageVersion
? `&v=${imageVersion}`

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useEffect, useRef, useCallback } from "react";
import {
@@ -129,7 +128,7 @@ export function FileBrowserDialog({
try {
// Get server URL from environment or default
const serverUrl =
process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
import.meta.env.VITE_SERVER_URL || "http://localhost:3008";
const response = await fetch(`${serverUrl}/api/fs/browse`, {
method: "POST",

View File

@@ -1,9 +1,7 @@
"use client";
import { useState, useMemo, useEffect, useCallback, useRef } from "react";
import { useNavigate, useLocation } from "@tanstack/react-router";
import { cn } from "@/lib/utils";
import { useAppStore, formatShortcut, type ThemeMode } from "@/store/app-store";
import { CoursePromoBadge } from "@/components/ui/course-promo-badge";
import {
FolderOpen,
Plus,
@@ -82,10 +80,8 @@ import { themeOptions } from "@/config/theme-options";
import type { SpecRegenerationEvent } from "@/types/electron";
import { DeleteProjectDialog } from "@/components/views/settings-view/components/delete-project-dialog";
import { NewProjectModal } from "@/components/new-project-modal";
import {
ProjectSetupDialog,
type FeatureCount,
} from "@/components/layout/project-setup-dialog";
import { CreateSpecDialog } from "@/components/views/spec-view/dialogs";
import type { FeatureCount } from "@/components/views/spec-view/types";
import {
DndContext,
DragEndEvent,
@@ -223,16 +219,17 @@ const BugReportButton = ({
};
export function Sidebar() {
const navigate = useNavigate();
const location = useLocation();
const {
projects,
trashedProjects,
currentProject,
currentView,
sidebarOpen,
projectHistory,
upsertAndSetCurrentProject,
setCurrentProject,
setCurrentView,
toggleSidebar,
restoreTrashedProject,
deleteTrashedProject,
@@ -251,14 +248,13 @@ export function Sidebar() {
} = useAppStore();
// Environment variable flags for hiding sidebar items
// Note: Vite requires static access to import.meta.env variables (no dynamic keys)
const hideTerminal = process.env.NEXT_PUBLIC_HIDE_TERMINAL === "true";
const hideWiki = process.env.NEXT_PUBLIC_HIDE_WIKI === "true";
const hideTerminal = import.meta.env.VITE_HIDE_TERMINAL === "true";
const hideWiki = import.meta.env.VITE_HIDE_WIKI === "true";
const hideRunningAgents =
process.env.NEXT_PUBLIC_HIDE_RUNNING_AGENTS === "true";
const hideContext = process.env.NEXT_PUBLIC_HIDE_CONTEXT === "true";
const hideSpecEditor = process.env.NEXT_PUBLIC_HIDE_SPEC_EDITOR === "true";
const hideAiProfiles = process.env.NEXT_PUBLIC_HIDE_AI_PROFILES === "true";
import.meta.env.VITE_HIDE_RUNNING_AGENTS === "true";
const hideContext = import.meta.env.VITE_HIDE_CONTEXT === "true";
const hideSpecEditor = import.meta.env.VITE_HIDE_SPEC_EDITOR === "true";
const hideAiProfiles = import.meta.env.VITE_HIDE_AI_PROFILES === "true";
// Get customizable keyboard shortcuts
const shortcuts = useKeyboardShortcutsConfig();
@@ -291,6 +287,7 @@ export function Sidebar() {
const [setupProjectPath, setSetupProjectPath] = useState("");
const [projectOverview, setProjectOverview] = useState("");
const [generateFeatures, setGenerateFeatures] = useState(true);
const [analyzeProject, setAnalyzeProject] = useState(true);
const [featureCount, setFeatureCount] = useState<FeatureCount>(50);
const [showSpecIndicator, setShowSpecIndicator] = useState(true);
@@ -429,7 +426,6 @@ export function Sidebar() {
unsubscribe();
};
}, [
setCurrentView,
creatingSpecProjectPath,
setupProjectPath,
setSpecCreatingForProject,
@@ -498,7 +494,7 @@ export function Sidebar() {
setupProjectPath,
projectOverview.trim(),
generateFeatures,
undefined, // analyzeProject - use default
analyzeProject,
generateFeatures ? featureCount : undefined // only pass maxFeatures if generating features
);
@@ -527,6 +523,7 @@ export function Sidebar() {
setupProjectPath,
projectOverview,
generateFeatures,
analyzeProject,
featureCount,
setSpecCreatingForProject,
]);
@@ -1177,7 +1174,7 @@ export function Sidebar() {
if (item.shortcut) {
shortcutsList.push({
key: item.shortcut,
action: () => setCurrentView(item.id as any),
action: () => navigate({ to: `/${item.id}` as const }),
description: `Navigate to ${item.label}`,
});
}
@@ -1187,7 +1184,7 @@ export function Sidebar() {
// Add settings shortcut
shortcutsList.push({
key: shortcuts.settings,
action: () => setCurrentView("settings"),
action: () => navigate({ to: "/settings" }),
description: "Navigate to Settings",
});
}
@@ -1196,7 +1193,7 @@ export function Sidebar() {
}, [
shortcuts,
currentProject,
setCurrentView,
navigate,
toggleSidebar,
projects.length,
handleOpenFolder,
@@ -1210,7 +1207,9 @@ export function Sidebar() {
useKeyboardShortcuts(navigationShortcuts);
const isActiveRoute = (id: string) => {
return currentView === id;
// Map view IDs to route paths
const routePath = id === "welcome" ? "/" : `/${id}`;
return location.pathname === routePath;
};
return (
@@ -1289,7 +1288,7 @@ export function Sidebar() {
"flex items-center gap-3 titlebar-no-drag cursor-pointer group",
!sidebarOpen && "flex-col gap-1"
)}
onClick={() => setCurrentView("welcome")}
onClick={() => navigate({ to: "/" })}
data-testid="logo-button"
>
{!sidebarOpen ? (
@@ -1847,7 +1846,7 @@ export function Sidebar() {
return (
<button
key={item.id}
onClick={() => setCurrentView(item.id as any)}
onClick={() => navigate({ to: `/${item.id}` as const })}
className={cn(
"group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag",
"transition-all duration-200 ease-out",
@@ -1872,9 +1871,6 @@ export function Sidebar() {
title={!sidebarOpen ? item.label : undefined}
data-testid={`nav-${item.id}`}
>
{isActive && (
<div className="absolute inset-y-0 left-0 w-1 bg-gradient-to-b from-brand-400 via-brand-500 to-brand-600 rounded-r-full shadow-sm shadow-brand-500/50"></div>
)}
<Icon
className={cn(
"w-[18px] h-[18px] shrink-0 transition-all duration-200",
@@ -1945,13 +1941,11 @@ export function Sidebar() {
"bg-gradient-to-t from-background/10 via-sidebar/50 to-transparent"
)}
>
{/* Course Promo Badge */}
<CoursePromoBadge sidebarOpen={sidebarOpen} />
{/* Wiki Link */}
{!hideWiki && (
<div className="p-2 pb-0">
<button
onClick={() => setCurrentView("wiki")}
onClick={() => navigate({ to: "/wiki" })}
className={cn(
"group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag",
"transition-all duration-200 ease-out",
@@ -1974,9 +1968,6 @@ export function Sidebar() {
title={!sidebarOpen ? "Wiki" : undefined}
data-testid="wiki-link"
>
{isActiveRoute("wiki") && (
<div className="absolute inset-y-0 left-0 w-1 bg-gradient-to-b from-brand-400 via-brand-500 to-brand-600 rounded-r-full shadow-sm shadow-brand-500/50"></div>
)}
<BookOpen
className={cn(
"w-[18px] h-[18px] shrink-0 transition-all duration-200",
@@ -2014,7 +2005,7 @@ export function Sidebar() {
{!hideRunningAgents && (
<div className="p-2 pb-0">
<button
onClick={() => setCurrentView("running-agents")}
onClick={() => navigate({ to: "/running-agents" })}
className={cn(
"group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag",
"transition-all duration-200 ease-out",
@@ -2037,9 +2028,6 @@ export function Sidebar() {
title={!sidebarOpen ? "Running Agents" : undefined}
data-testid="running-agents-link"
>
{isActiveRoute("running-agents") && (
<div className="absolute inset-y-0 left-0 w-1 bg-gradient-to-b from-brand-400 via-brand-500 to-brand-600 rounded-r-full shadow-sm shadow-brand-500/50"></div>
)}
<div className="relative">
<Activity
className={cn(
@@ -2112,7 +2100,7 @@ export function Sidebar() {
{/* Settings Link */}
<div className="p-2">
<button
onClick={() => setCurrentView("settings")}
onClick={() => navigate({ to: "/settings" })}
className={cn(
"group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag",
"transition-all duration-200 ease-out",
@@ -2135,9 +2123,6 @@ export function Sidebar() {
title={!sidebarOpen ? "Settings" : undefined}
data-testid="settings-button"
>
{isActiveRoute("settings") && (
<div className="absolute inset-y-0 left-0 w-1 bg-gradient-to-b from-brand-400 via-brand-500 to-brand-600 rounded-r-full shadow-sm shadow-brand-500/50"></div>
)}
<Settings
className={cn(
"w-[18px] h-[18px] shrink-0 transition-all duration-200",
@@ -2276,18 +2261,23 @@ export function Sidebar() {
</Dialog>
{/* New Project Setup Dialog */}
<ProjectSetupDialog
<CreateSpecDialog
open={showSetupDialog}
onOpenChange={setShowSetupDialog}
projectOverview={projectOverview}
onProjectOverviewChange={setProjectOverview}
generateFeatures={generateFeatures}
onGenerateFeaturesChange={setGenerateFeatures}
analyzeProject={analyzeProject}
onAnalyzeProjectChange={setAnalyzeProject}
featureCount={featureCount}
onFeatureCountChange={setFeatureCount}
onCreateSpec={handleCreateInitialSpec}
onSkip={handleSkipSetup}
isCreatingSpec={isCreatingSpec}
showSkipButton={true}
title="Set Up Your Project"
description="We didn't find an app_spec.txt file. Let us help you generate your app_spec.txt to help describe your project for our system. We'll analyze your project's tech stack and create a comprehensive specification."
/>
{/* New Project Onboarding Dialog */}

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useEffect } from "react";
import {

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useEffect } from "react";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import { ChevronDown } from "lucide-react";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import { Check, ChevronsUpDown, LucideIcon } from "lucide-react";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import { GitBranch } from "lucide-react";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import { Autocomplete } from "@/components/ui/autocomplete";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import * as CheckboxPrimitive from "@radix-ui/react-checkbox";

View File

@@ -1,4 +1,3 @@
"use client"
import * as React from "react"
import { Command as CommandPrimitive } from "cmdk"

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useEffect } from "react";
import { Clock } from "lucide-react";

View File

@@ -1,4 +1,3 @@
"use client";
import React, { useState, useRef, useCallback } from "react";
import { cn } from "@/lib/utils";
@@ -85,7 +84,7 @@ export function DescriptionImageDropZone({
// Construct server URL for loading saved images
const getImageServerUrl = useCallback((imagePath: string): string => {
const serverUrl = process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
const serverUrl = import.meta.env.VITE_SERVER_URL || "http://localhost:3008";
const projectPath = currentProject?.path || "";
return `${serverUrl}/api/fs/image?path=${encodeURIComponent(imagePath)}&projectPath=${encodeURIComponent(projectPath)}`;
}, [currentProject?.path]);

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import * as DialogPrimitive from "@radix-ui/react-dialog";

View File

@@ -1,4 +1,3 @@
"use client"
import * as React from "react"
import * as DropdownMenuPrimitive from "@radix-ui/react-dropdown-menu"

View File

@@ -1,4 +1,3 @@
"use client";
import React, { useState, useRef, useCallback } from "react";
import { cn } from "@/lib/utils";

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useEffect, useMemo, useCallback } from "react";
import { getElectronAPI } from "@/lib/electron";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import { useEffect, useCallback, useRef } from "react";

View File

@@ -1,4 +1,3 @@
"use client";
import React, { useState, useRef, useCallback } from "react";
import { cn } from "@/lib/utils";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import { useAppStore, DEFAULT_KEYBOARD_SHORTCUTS, parseShortcut, formatShortcut } from "@/store/app-store";

View File

@@ -1,4 +1,3 @@
"use client"
import * as React from "react"
import * as LabelPrimitive from "@radix-ui/react-label"

View File

@@ -1,4 +1,3 @@
"use client";
import { useState, useMemo, useEffect, useRef } from "react";
import {
@@ -326,7 +325,7 @@ function LogEntryItem({ entry, isExpanded, onToggle }: LogEntryItemProps) {
return (
<div
className={cn(
"rounded-lg border-l-4 transition-all duration-200",
"rounded-lg border transition-all duration-200",
bgColor,
borderColor,
"hover:brightness-110"
@@ -380,7 +379,7 @@ function LogEntryItem({ entry, isExpanded, onToggle }: LogEntryItemProps) {
{formattedContent.map((part, index) => (
<div key={index}>
{part.type === "json" ? (
<pre className="bg-zinc-900/50 rounded p-2 overflow-x-auto text-xs text-primary">
<pre className="bg-zinc-900/50 rounded p-2 overflow-x-auto scrollbar-styled text-xs text-primary">
{part.content}
</pre>
) : (
@@ -419,6 +418,8 @@ export function LogViewer({ output, className }: LogViewerProps) {
const [searchQuery, setSearchQuery] = useState("");
const [hiddenTypes, setHiddenTypes] = useState<Set<LogEntryType>>(new Set());
const [hiddenCategories, setHiddenCategories] = useState<Set<ToolCategory>>(new Set());
// Track if user has "Expand All" mode active - new entries will auto-expand when this is true
const [expandAllMode, setExpandAllMode] = useState(false);
// Parse entries and compute initial expanded state together
const { entries, initialExpandedIds } = useMemo(() => {
@@ -443,16 +444,27 @@ export function LogViewer({ output, className }: LogViewerProps) {
const appliedInitialRef = useRef<Set<string>>(new Set());
// Apply initial expanded state for new entries
// Also auto-expand all entries when expandAllMode is active
const effectiveExpandedIds = useMemo(() => {
const result = new Set(expandedIds);
initialExpandedIds.forEach((id) => {
if (!appliedInitialRef.current.has(id)) {
appliedInitialRef.current.add(id);
result.add(id);
}
});
// If expand all mode is active, expand all filtered entries
if (expandAllMode) {
entries.forEach((entry) => {
result.add(entry.id);
});
} else {
// Otherwise, only auto-expand entries based on initial state (shouldCollapseByDefault)
initialExpandedIds.forEach((id) => {
if (!appliedInitialRef.current.has(id)) {
appliedInitialRef.current.add(id);
result.add(id);
}
});
}
return result;
}, [expandedIds, initialExpandedIds]);
}, [expandedIds, initialExpandedIds, expandAllMode, entries]);
// Calculate stats for tool categories
const stats = useMemo(() => {
@@ -508,6 +520,10 @@ export function LogViewer({ output, className }: LogViewerProps) {
}, [entries, hiddenTypes, hiddenCategories, searchQuery]);
const toggleEntry = (id: string) => {
// When user manually collapses an entry, turn off expand all mode
if (effectiveExpandedIds.has(id)) {
setExpandAllMode(false);
}
setExpandedIds((prev) => {
const next = new Set(prev);
if (next.has(id)) {
@@ -520,10 +536,14 @@ export function LogViewer({ output, className }: LogViewerProps) {
};
const expandAll = () => {
// Enable expand all mode so new entries will also be expanded
setExpandAllMode(true);
setExpandedIds(new Set(filteredEntries.map((e) => e.id)));
};
const collapseAll = () => {
// Disable expand all mode when collapsing all
setExpandAllMode(false);
setExpandedIds(new Set());
};
@@ -566,7 +586,7 @@ export function LogViewer({ output, className }: LogViewerProps) {
<Info className="w-8 h-8 mx-auto mb-2 opacity-50" />
<p className="text-sm">No log entries yet. Logs will appear here as the process runs.</p>
{output && output.trim() && (
<div className="mt-4 p-3 bg-zinc-900/50 rounded text-xs font-mono text-left max-h-40 overflow-auto">
<div className="mt-4 p-3 bg-zinc-900/50 rounded text-xs font-mono text-left max-h-40 overflow-auto scrollbar-styled">
<pre className="whitespace-pre-wrap">{output}</pre>
</div>
)}
@@ -700,10 +720,16 @@ export function LogViewer({ output, className }: LogViewerProps) {
</span>
<button
onClick={expandAll}
className="text-xs text-zinc-400 hover:text-zinc-200 px-2 py-1 rounded hover:bg-zinc-800/50 transition-colors"
className={cn(
"text-xs px-2 py-1 rounded transition-colors",
expandAllMode
? "text-primary bg-primary/20 hover:bg-primary/30"
: "text-zinc-400 hover:text-zinc-200 hover:bg-zinc-800/50"
)}
data-testid="log-expand-all"
title={expandAllMode ? "Expand All (Active - new items will auto-expand)" : "Expand All"}
>
Expand All
Expand All{expandAllMode ? " (On)" : ""}
</button>
<button
onClick={collapseAll}

View File

@@ -1,4 +1,3 @@
"use client";
import ReactMarkdown from "react-markdown";
import { cn } from "@/lib/utils";

View File

@@ -1,4 +1,3 @@
"use client"
import * as React from "react"
import * as PopoverPrimitive from "@radix-ui/react-popover"

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import * as SheetPrimitive from "@radix-ui/react-dialog";

View File

@@ -1,4 +1,3 @@
"use client";
import * as React from "react";
import * as SliderPrimitive from "@radix-ui/react-slider";

Some files were not shown because too many files have changed in this diff.