Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-30 06:12:03 +00:00)

.github/actions/setup-project/action.yml (vendored)
@@ -41,7 +41,8 @@ runs:
# Use npm install instead of npm ci to correctly resolve platform-specific
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
# Skip scripts to avoid electron-builder install-app-deps which uses too much memory
run: npm install --ignore-scripts
# Use --force to allow platform-specific dev dependencies like dmg-license on non-darwin platforms
run: npm install --ignore-scripts --force

- name: Install Linux native bindings
shell: bash
.github/workflows/format-check.yml (vendored)
@@ -25,7 +25,7 @@ jobs:
cache-dependency-path: package-lock.json

- name: Install dependencies
run: npm install --ignore-scripts
run: npm install --ignore-scripts --force

- name: Check formatting
run: npm run format:check
.github/workflows/release.yml (vendored)
@@ -35,6 +35,11 @@ jobs:
with:
check-lockfile: 'true'

- name: Install RPM build tools (Linux)
if: matrix.os == 'ubuntu-latest'
shell: bash
run: sudo apt-get update && sudo apt-get install -y rpm

- name: Build Electron app (macOS)
if: matrix.os == 'macos-latest'
shell: bash
@@ -73,7 +78,7 @@
uses: actions/upload-artifact@v4
with:
name: linux-builds
path: apps/ui/release/*.{AppImage,deb}
path: apps/ui/release/*.{AppImage,deb,rpm}
retention-days: 30

upload:
@@ -104,8 +109,8 @@
uses: softprops/action-gh-release@v2
with:
files: |
artifacts/macos-builds/*
artifacts/windows-builds/*
artifacts/linux-builds/*
artifacts/macos-builds/*.{dmg,zip,blockmap}
artifacts/windows-builds/*.{exe,blockmap}
artifacts/linux-builds/*.{AppImage,deb,rpm,blockmap}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -166,7 +166,10 @@ Use `resolveModelString()` from `@automaker/model-resolver` to convert model ali

## Environment Variables

- `ANTHROPIC_API_KEY` - Anthropic API key (or use Claude Code CLI auth)
- `HOST` - Host to bind server to (default: 0.0.0.0)
- `HOSTNAME` - Hostname for user-facing URLs (default: localhost)
- `PORT` - Server port (default: 3008)
- `DATA_DIR` - Data storage directory (default: ./data)
- `ALLOWED_ROOT_DIRECTORY` - Restrict file operations to specific directory
- `AUTOMAKER_MOCK_AGENT=true` - Enable mock agent mode for CI testing
- `VITE_HOSTNAME` - Hostname for frontend API URLs (default: localhost)
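A minimal sketch of how the server consumes these values, mirroring the defaults applied in the server entry-point changes shown later in this diff (the variable names come from that diff; nothing here is new configuration):

```ts
// Sketch only – defaults mirror the server entry point later in this diff.
const PORT = parseInt(process.env.PORT || '3008', 10);
const HOST = process.env.HOST || '0.0.0.0'; // bind address
const HOSTNAME = process.env.HOSTNAME || 'localhost'; // used only for user-facing URLs
const DATA_DIR = process.env.DATA_DIR || './data';
```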
README.md
@@ -28,6 +28,7 @@

- [Quick Start](#quick-start)
- [How to Run](#how-to-run)
- [Development Mode](#development-mode)
- [Interactive TUI Launcher](#interactive-tui-launcher-recommended-for-new-users)
- [Building for Production](#building-for-production)
- [Testing](#testing)
- [Linting](#linting)
@@ -101,11 +102,9 @@ In the Discord, you can:

### Prerequisites

- **Node.js 18+** (tested with Node.js 22)
- **Node.js 22+** (required: >=22.0.0 <23.0.0)
- **npm** (comes with Node.js)
- **Authentication** (choose one):
- **[Claude Code CLI](https://code.claude.com/docs/en/overview)** (recommended) - Install and authenticate, credentials used automatically
- **Anthropic API Key** - Direct API key for Claude Agent SDK ([get one here](https://console.anthropic.com/))
- **[Claude Code CLI](https://code.claude.com/docs/en/overview)** - Install and authenticate with your Anthropic subscription. Automaker integrates with your authenticated Claude Code CLI to access Claude models.

### Quick Start

@@ -117,30 +116,14 @@ cd automaker
# 2. Install dependencies
npm install

# 3. Build shared packages (can be skipped - npm run dev does it automatically)
npm run build:packages

# 4. Start Automaker
# 3. Start Automaker
npm run dev
# Choose between:
# 1. Web Application (browser at localhost:3007)
# 2. Desktop Application (Electron - recommended)
```

**Authentication Setup:** On first run, Automaker will automatically show a setup wizard where you can configure authentication. You can choose to:

- Use **Claude Code CLI** (recommended) - Automaker will detect your CLI credentials automatically
- Enter an **API key** directly in the wizard

If you prefer to set up authentication before running (e.g., for headless deployments or CI/CD), you can set it manually:

```bash
# Option A: Environment variable
export ANTHROPIC_API_KEY="sk-ant-..."

# Option B: Create .env file in project root
echo "ANTHROPIC_API_KEY=sk-ant-..." > .env
```
**Authentication:** Automaker integrates with your authenticated Claude Code CLI. Make sure you have [installed and authenticated](https://code.claude.com/docs/en/quickstart) the Claude Code CLI before running Automaker. Your CLI credentials will be detected automatically.

**For Development:** `npm run dev` starts the development server with Vite live reload and hot module replacement for fast refresh and instant updates as you make changes.

@@ -179,6 +162,40 @@ npm run dev:electron:wsl:gpu
npm run dev:web
```

### Interactive TUI Launcher (Recommended for New Users)

For a user-friendly interactive menu, use the built-in TUI launcher script:

```bash
# Show interactive menu with all launch options
./start-automaker.sh

# Or launch directly without menu
./start-automaker.sh web # Web browser
./start-automaker.sh electron # Desktop app
./start-automaker.sh electron-debug # Desktop + DevTools

# Additional options
./start-automaker.sh --help # Show all available options
./start-automaker.sh --version # Show version information
./start-automaker.sh --check-deps # Verify project dependencies
./start-automaker.sh --no-colors # Disable colored output
./start-automaker.sh --no-history # Don't remember last choice
```

**Features:**

- 🎨 Beautiful terminal UI with gradient colors and ASCII art
- ⌨️ Interactive menu (press 1-3 to select, Q to exit)
- 💾 Remembers your last choice
- ✅ Pre-flight checks (validates Node.js, npm, dependencies)
- 📏 Responsive layout (adapts to terminal size)
- ⏱️ 30-second timeout for hands-free selection
- 🌐 Cross-shell compatible (bash/zsh)

**History File:**
Your last selected mode is saved in `~/.automaker_launcher_history` for quick re-runs.

### Building for Production

#### Web Application
@@ -197,11 +214,30 @@ npm run build:electron
# Platform-specific builds
npm run build:electron:mac # macOS (DMG + ZIP, x64 + arm64)
npm run build:electron:win # Windows (NSIS installer, x64)
npm run build:electron:linux # Linux (AppImage + DEB, x64)
npm run build:electron:linux # Linux (AppImage + DEB + RPM, x64)

# Output directory: apps/ui/release/
```

**Linux Distribution Packages:**

- **AppImage**: Universal format, works on any Linux distribution
- **DEB**: Ubuntu, Debian, Linux Mint, Pop!\_OS
- **RPM**: Fedora, RHEL, Rocky Linux, AlmaLinux, openSUSE

**Installing on Fedora/RHEL:**

```bash
# Download the RPM package
wget https://github.com/AutoMaker-Org/automaker/releases/latest/download/Automaker-<version>-x86_64.rpm

# Install with dnf (Fedora)
sudo dnf install ./Automaker-<version>-x86_64.rpm

# Or with yum (RHEL/CentOS)
sudo yum localinstall ./Automaker-<version>-x86_64.rpm
```

#### Docker Deployment

Docker provides the most secure way to run Automaker by isolating it from your host filesystem.
@@ -220,16 +256,9 @@ docker-compose logs -f
docker-compose down
```

##### Configuration
##### Authentication

Create a `.env` file in the project root if using API key authentication:

```bash
# Optional: Anthropic API key (not needed if using Claude CLI authentication)
ANTHROPIC_API_KEY=sk-ant-...
```

**Note:** Most users authenticate via Claude CLI instead of API keys. See [Claude CLI Authentication](#claude-cli-authentication-optional) below.
Automaker integrates with your authenticated Claude Code CLI. To use CLI authentication in Docker, mount your Claude CLI config directory (see [Claude CLI Authentication](#claude-cli-authentication) below).

##### Working with Projects (Host Directory Access)

@@ -243,9 +272,9 @@ services:
- /path/to/your/project:/projects/your-project
```

##### Claude CLI Authentication (Optional)
##### Claude CLI Authentication

To use Claude Code CLI authentication instead of an API key, mount your Claude CLI config directory:
Mount your Claude CLI config directory to use your authenticated CLI credentials:

```yaml
services:
@@ -343,10 +372,6 @@ npm run lint

### Environment Configuration

#### Authentication (if not using Claude Code CLI)

- `ANTHROPIC_API_KEY` - Your Anthropic API key for Claude Agent SDK (not needed if using Claude Code CLI)

#### Optional - Server

- `PORT` - Server port (default: 3008)
@@ -357,49 +382,22 @@ npm run lint

- `AUTOMAKER_API_KEY` - Optional API authentication for the server
- `ALLOWED_ROOT_DIRECTORY` - Restrict file operations to specific directory
- `CORS_ORIGIN` - CORS policy (default: \*)
- `CORS_ORIGIN` - CORS allowed origins (comma-separated list; defaults to localhost only)

#### Optional - Development

- `VITE_SKIP_ELECTRON` - Skip Electron in dev mode
- `OPEN_DEVTOOLS` - Auto-open DevTools in Electron
- `AUTOMAKER_SKIP_SANDBOX_WARNING` - Skip sandbox warning dialog (useful for dev/CI)

### Authentication Setup

#### Option 1: Claude Code CLI (Recommended)
Automaker integrates with your authenticated Claude Code CLI and uses your Anthropic subscription.

Install and authenticate the Claude Code CLI following the [official quickstart guide](https://code.claude.com/docs/en/quickstart).

Once authenticated, Automaker will automatically detect and use your CLI credentials. No additional configuration needed!

#### Option 2: Direct API Key

If you prefer not to use the CLI, you can provide an Anthropic API key directly using one of these methods:

##### 2a. Shell Configuration

Add to your `~/.bashrc` or `~/.zshrc`:

```bash
export ANTHROPIC_API_KEY="sk-ant-..."
```

Then restart your terminal or run `source ~/.bashrc` (or `source ~/.zshrc`).

##### 2b. .env File

Create a `.env` file in the project root (gitignored):

```bash
ANTHROPIC_API_KEY=sk-ant-...
PORT=3008
DATA_DIR=./data
```

##### 2c. In-App Storage

The application can store your API key securely in the settings UI. The key is persisted in the `DATA_DIR` directory.

## Features

### Core Workflow
@@ -508,20 +506,24 @@ Automaker provides several specialized views accessible via the sidebar or keybo
| **Agent** | `A` | Interactive chat sessions with AI agents for exploratory work and questions |
| **Spec** | `D` | Project specification editor with AI-powered generation and feature suggestions |
| **Context** | `C` | Manage context files (markdown, images) that AI agents automatically reference |
| **Profiles** | `M` | Create and manage AI agent profiles with custom prompts and configurations |
| **Settings** | `S` | Configure themes, shortcuts, defaults, authentication, and more |
| **Terminal** | `T` | Integrated terminal with tabs, splits, and persistent sessions |
| **GitHub Issues** | - | Import and validate GitHub issues, convert to tasks |
| **Graph** | `H` | Visualize feature dependencies with interactive graph visualization |
| **Ideation** | `I` | Brainstorm and generate ideas with AI assistance |
| **Memory** | `Y` | View and manage agent memory and conversation history |
| **GitHub Issues** | `G` | Import and validate GitHub issues, convert to tasks |
| **GitHub PRs** | `R` | View and manage GitHub pull requests |
| **Running Agents** | - | View all active agents across projects with status and progress |

### Keyboard Navigation

All shortcuts are customizable in Settings. Default shortcuts:

- **Navigation:** `K` (Board), `A` (Agent), `D` (Spec), `C` (Context), `S` (Settings), `M` (Profiles), `T` (Terminal)
- **Navigation:** `K` (Board), `A` (Agent), `D` (Spec), `C` (Context), `S` (Settings), `T` (Terminal), `H` (Graph), `I` (Ideation), `Y` (Memory), `G` (GitHub Issues), `R` (GitHub PRs)
- **UI:** `` ` `` (Toggle sidebar)
- **Actions:** `N` (New item in current view), `G` (Start next features), `O` (Open project), `P` (Project picker)
- **Actions:** `N` (New item in current view), `O` (Open project), `P` (Project picker)
- **Projects:** `Q`/`E` (Cycle previous/next project)
- **Terminal:** `Alt+D` (Split right), `Alt+S` (Split down), `Alt+W` (Close), `Alt+T` (New tab)

## Architecture

@@ -586,10 +588,16 @@ Stored in `{projectPath}/.automaker/`:
│ ├── agent-output.md # AI agent output log
│ └── images/ # Attached images
├── context/ # Context files for AI agents
├── worktrees/ # Git worktree metadata
├── validations/ # GitHub issue validation results
├── ideation/ # Brainstorming and analysis data
│ └── analysis.json # Project structure analysis
├── board/ # Board-related data
├── images/ # Project-level images
├── settings.json # Project-specific settings
├── spec.md # Project specification
├── analysis.json # Project structure analysis
└── feature-suggestions.json # AI-generated suggestions
├── app_spec.txt # Project specification (XML format)
├── active-branches.json # Active git branches tracking
└── execution-state.json # Auto-mode execution state
```

#### Global Data
@@ -627,7 +635,6 @@ data/

- [Contributing Guide](./CONTRIBUTING.md) - How to contribute to Automaker
- [Project Documentation](./docs/) - Architecture guides, patterns, and developer docs
- [Docker Isolation Guide](./docs/docker-isolation.md) - Security-focused Docker deployment
- [Shared Packages Guide](./docs/llm-shared-packages.md) - Using monorepo packages

### Community

@@ -44,6 +44,11 @@ CORS_ORIGIN=http://localhost:3007
# OPTIONAL - Server
# ============================================

# Host to bind the server to (default: 0.0.0.0)
# Use 0.0.0.0 to listen on all interfaces (recommended for Docker/remote access)
# Use 127.0.0.1 or localhost to restrict to local connections only
HOST=0.0.0.0

# Port to run the server on
PORT=3008

@@ -63,6 +68,14 @@ TERMINAL_PASSWORD=

ENABLE_REQUEST_LOGGING=false

# ============================================
# OPTIONAL - UI Behavior
# ============================================

# Skip the sandbox warning dialog on startup (default: false)
# Set to "true" to disable the warning entirely (useful for dev/CI environments)
AUTOMAKER_SKIP_SANDBOX_WARNING=false

# ============================================
# OPTIONAL - Debugging
# ============================================

@@ -1,6 +1,6 @@
{
"name": "@automaker/server",
"version": "0.11.0",
"version": "0.12.0",
"description": "Backend server for Automaker - provides API for both web and Electron modes",
"author": "AutoMaker Team",
"license": "SEE LICENSE IN LICENSE",

@@ -17,9 +17,19 @@ import dotenv from 'dotenv';

import { createEventEmitter, type EventEmitter } from './lib/events.js';
import { initAllowedPaths } from '@automaker/platform';
import { createLogger } from '@automaker/utils';
import { createLogger, setLogLevel, LogLevel } from '@automaker/utils';

const logger = createLogger('Server');

/**
 * Map server log level string to LogLevel enum
 */
const LOG_LEVEL_MAP: Record<string, LogLevel> = {
error: LogLevel.ERROR,
warn: LogLevel.WARN,
info: LogLevel.INFO,
debug: LogLevel.DEBUG,
};
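// Illustrative sketch (not part of this diff): resolve a saved settings string,
// falling back to INFO for unknown values.
// const exampleLevel = LOG_LEVEL_MAP['debug'] ?? LogLevel.INFO;
// setLogLevel(exampleLevel);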
import { authMiddleware, validateWsConnectionToken, checkRawAuthentication } from './lib/auth.js';
import { requireJsonContentType } from './middleware/require-json-content-type.js';
import { createAuthRoutes } from './routes/auth/index.js';
@@ -68,13 +78,37 @@ import { pipelineService } from './services/pipeline-service.js';
import { createIdeationRoutes } from './routes/ideation/index.js';
import { IdeationService } from './services/ideation-service.js';
import { getDevServerService } from './services/dev-server-service.js';
import { eventHookService } from './services/event-hook-service.js';
import { createNotificationsRoutes } from './routes/notifications/index.js';
import { getNotificationService } from './services/notification-service.js';
import { createEventHistoryRoutes } from './routes/event-history/index.js';
import { getEventHistoryService } from './services/event-history-service.js';

// Load environment variables
dotenv.config();

const PORT = parseInt(process.env.PORT || '3008', 10);
const HOST = process.env.HOST || '0.0.0.0';
const HOSTNAME = process.env.HOSTNAME || 'localhost';
const DATA_DIR = process.env.DATA_DIR || './data';
const ENABLE_REQUEST_LOGGING = process.env.ENABLE_REQUEST_LOGGING !== 'false'; // Default to true
const ENABLE_REQUEST_LOGGING_DEFAULT = process.env.ENABLE_REQUEST_LOGGING !== 'false'; // Default to true

// Runtime-configurable request logging flag (can be changed via settings)
let requestLoggingEnabled = ENABLE_REQUEST_LOGGING_DEFAULT;

/**
 * Enable or disable HTTP request logging at runtime
 */
export function setRequestLoggingEnabled(enabled: boolean): void {
requestLoggingEnabled = enabled;
}

/**
 * Get current request logging state
 */
export function isRequestLoggingEnabled(): boolean {
return requestLoggingEnabled;
}
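// Illustrative sketch (hypothetical settings handler, not part of this diff): toggling
// logging at runtime takes effect on the next request, because morgan's `skip` callback
// below reads the flag per request rather than once at startup.
// function exampleApplyLoggingSettings(enableRequestLogging?: boolean): void {
//   setRequestLoggingEnabled(enableRequestLogging ?? true);
// }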
// Check for required environment variables
const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;
@@ -103,22 +137,21 @@ initAllowedPaths();
const app = express();

// Middleware
// Custom colored logger showing only endpoint and status code (configurable via ENABLE_REQUEST_LOGGING env var)
if (ENABLE_REQUEST_LOGGING) {
morgan.token('status-colored', (_req, res) => {
const status = res.statusCode;
if (status >= 500) return `\x1b[31m${status}\x1b[0m`; // Red for server errors
if (status >= 400) return `\x1b[33m${status}\x1b[0m`; // Yellow for client errors
if (status >= 300) return `\x1b[36m${status}\x1b[0m`; // Cyan for redirects
return `\x1b[32m${status}\x1b[0m`; // Green for success
});
// Custom colored logger showing only endpoint and status code (dynamically configurable)
morgan.token('status-colored', (_req, res) => {
const status = res.statusCode;
if (status >= 500) return `\x1b[31m${status}\x1b[0m`; // Red for server errors
if (status >= 400) return `\x1b[33m${status}\x1b[0m`; // Yellow for client errors
if (status >= 300) return `\x1b[36m${status}\x1b[0m`; // Cyan for redirects
return `\x1b[32m${status}\x1b[0m`; // Green for success
});

app.use(
morgan(':method :url :status-colored', {
skip: (req) => req.url === '/api/health', // Skip health check logs
})
);
}
app.use(
morgan(':method :url :status-colored', {
// Skip when request logging is disabled or for health check endpoints
skip: (req) => !requestLoggingEnabled || req.url === '/api/health',
})
);
// CORS configuration
// When using credentials (cookies), origin cannot be '*'
// We dynamically allow the requesting origin for local development
@@ -181,8 +214,33 @@ const ideationService = new IdeationService(events, settingsService, featureLoad
const devServerService = getDevServerService();
devServerService.setEventEmitter(events);

// Initialize Notification Service with event emitter for real-time updates
const notificationService = getNotificationService();
notificationService.setEventEmitter(events);

// Initialize Event History Service
const eventHistoryService = getEventHistoryService();

// Initialize Event Hook Service for custom event triggers (with history storage)
eventHookService.initialize(events, settingsService, eventHistoryService);

// Initialize services
(async () => {
// Apply logging settings from saved settings
try {
const settings = await settingsService.getGlobalSettings();
if (settings.serverLogLevel && LOG_LEVEL_MAP[settings.serverLogLevel] !== undefined) {
setLogLevel(LOG_LEVEL_MAP[settings.serverLogLevel]);
logger.info(`Server log level set to: ${settings.serverLogLevel}`);
}
// Apply request logging setting (default true if not set)
const enableRequestLog = settings.enableRequestLogging ?? true;
setRequestLoggingEnabled(enableRequestLog);
logger.info(`HTTP request logging: ${enableRequestLog ? 'enabled' : 'disabled'}`);
} catch (err) {
logger.warn('Failed to load logging settings, using defaults');
}

await agentService.initialize();
logger.info('Agent service initialized');

@@ -219,7 +277,7 @@ app.get('/api/health/detailed', createDetailedHandler());
app.use('/api/fs', createFsRoutes(events));
app.use('/api/agent', createAgentRoutes(agentService, events));
app.use('/api/sessions', createSessionsRoutes(agentService));
app.use('/api/features', createFeaturesRoutes(featureLoader));
app.use('/api/features', createFeaturesRoutes(featureLoader, settingsService, events));
app.use('/api/auto-mode', createAutoModeRoutes(autoModeService));
app.use('/api/enhance-prompt', createEnhancePromptRoutes(settingsService));
app.use('/api/worktree', createWorktreeRoutes(events, settingsService));
@@ -240,6 +298,8 @@ app.use('/api/backlog-plan', createBacklogPlanRoutes(events, settingsService));
app.use('/api/mcp', createMCPRoutes(mcpTestService));
app.use('/api/pipeline', createPipelineRoutes(pipelineService));
app.use('/api/ideation', createIdeationRoutes(events, ideationService, featureLoader));
app.use('/api/notifications', createNotificationsRoutes(notificationService));
app.use('/api/event-history', createEventHistoryRoutes(eventHistoryService, settingsService));

// Create HTTP server
const server = createServer(app);
@@ -551,8 +611,8 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
});

// Start server with error handling for port conflicts
const startServer = (port: number) => {
server.listen(port, () => {
const startServer = (port: number, host: string) => {
server.listen(port, host, () => {
const terminalStatus = isTerminalEnabled()
? isTerminalPasswordRequired()
? 'enabled (password protected)'
@@ -563,10 +623,11 @@ const startServer = (port: number) => {
╔═══════════════════════════════════════════════════════╗
║ Automaker Backend Server ║
╠═══════════════════════════════════════════════════════╣
║ HTTP API: http://localhost:${portStr} ║
║ WebSocket: ws://localhost:${portStr}/api/events ║
║ Terminal: ws://localhost:${portStr}/api/terminal/ws ║
║ Health: http://localhost:${portStr}/api/health ║
║ Listening: ${host}:${port}${' '.repeat(Math.max(0, 34 - host.length - port.toString().length))}║
║ HTTP API: http://${HOSTNAME}:${portStr} ║
║ WebSocket: ws://${HOSTNAME}:${portStr}/api/events ║
║ Terminal: ws://${HOSTNAME}:${portStr}/api/terminal/ws ║
║ Health: http://${HOSTNAME}:${portStr}/api/health ║
║ Terminal: ${terminalStatus.padEnd(37)}║
╚═══════════════════════════════════════════════════════╝
`);
@@ -600,7 +661,7 @@ const startServer = (port: number) => {
});
};

startServer(PORT);
startServer(PORT, HOST);
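// Illustrative note (sketch, not part of this diff): HOST controls the bind address passed to
// server.listen(), while HOSTNAME only affects the URLs printed in the banner. For example,
// HOST=0.0.0.0 with HOSTNAME=localhost listens on all interfaces but still advertises
// http://localhost:3008 to the user, e.g.
//   startServer(3008, '0.0.0.0');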
// Global error handlers to prevent crashes from uncaught errors
process.on('unhandledRejection', (reason: unknown, _promise: Promise<unknown>) => {

@@ -11,8 +11,12 @@ export { specOutputSchema } from '@automaker/types';

/**
 * Escape special XML characters
 * Handles undefined/null values by converting them to empty strings
 */
function escapeXml(str: string): string {
export function escapeXml(str: string | undefined | null): string {
if (str == null) {
return '';
}
return str
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')

@@ -142,6 +142,8 @@ if (process.env.AUTOMAKER_HIDE_API_KEY !== 'true') {
║ ${API_KEY}
║ ║
║ In Electron mode, authentication is handled automatically. ║
║ ║
║ 💡 Tip: Set AUTOMAKER_API_KEY env var to use a fixed key for dev ║
╚═══════════════════════════════════════════════════════════════════════╝
`);
} else {

@@ -11,6 +11,14 @@ import {
mergeAgentPrompts,
mergeBacklogPlanPrompts,
mergeEnhancementPrompts,
mergeCommitMessagePrompts,
mergeTitleGenerationPrompts,
mergeIssueValidationPrompts,
mergeIdeationPrompts,
mergeAppSpecPrompts,
mergeContextDescriptionPrompts,
mergeSuggestionsPrompts,
mergeTaskExecutionPrompts,
} from '@automaker/prompts';

const logger = createLogger('SettingsHelper');
@@ -218,6 +226,14 @@ export async function getPromptCustomization(
agent: ReturnType<typeof mergeAgentPrompts>;
backlogPlan: ReturnType<typeof mergeBacklogPlanPrompts>;
enhancement: ReturnType<typeof mergeEnhancementPrompts>;
commitMessage: ReturnType<typeof mergeCommitMessagePrompts>;
titleGeneration: ReturnType<typeof mergeTitleGenerationPrompts>;
issueValidation: ReturnType<typeof mergeIssueValidationPrompts>;
ideation: ReturnType<typeof mergeIdeationPrompts>;
appSpec: ReturnType<typeof mergeAppSpecPrompts>;
contextDescription: ReturnType<typeof mergeContextDescriptionPrompts>;
suggestions: ReturnType<typeof mergeSuggestionsPrompts>;
taskExecution: ReturnType<typeof mergeTaskExecutionPrompts>;
}> {
let customization: PromptCustomization = {};

@@ -239,6 +255,14 @@ export async function getPromptCustomization(
agent: mergeAgentPrompts(customization.agent),
backlogPlan: mergeBacklogPlanPrompts(customization.backlogPlan),
enhancement: mergeEnhancementPrompts(customization.enhancement),
commitMessage: mergeCommitMessagePrompts(customization.commitMessage),
titleGeneration: mergeTitleGenerationPrompts(customization.titleGeneration),
issueValidation: mergeIssueValidationPrompts(customization.issueValidation),
ideation: mergeIdeationPrompts(customization.ideation),
appSpec: mergeAppSpecPrompts(customization.appSpec),
contextDescription: mergeContextDescriptionPrompts(customization.contextDescription),
suggestions: mergeSuggestionsPrompts(customization.suggestions),
taskExecution: mergeTaskExecutionPrompts(customization.taskExecution),
};
}

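// Illustrative usage (sketch, mirrors the feature-generation route later in this diff):
//   const prompts = await getPromptCustomization(settingsService, '[FeatureGeneration]');
//   // prompts.ideation, prompts.appSpec, prompts.suggestions, prompts.taskExecution are now
//   // available alongside the existing agent/backlogPlan/enhancement groups.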
apps/server/src/lib/xml-extractor.ts (new file, 611 lines)
@@ -0,0 +1,611 @@
/**
 * XML Extraction Utilities
 *
 * Robust XML parsing utilities for extracting and updating sections
 * from app_spec.txt XML content. Uses regex-based parsing which is
 * sufficient for our controlled XML structure.
 *
 * Note: If more complex XML parsing is needed in the future, consider
 * using a library like 'fast-xml-parser' or 'xml2js'.
 */

import { createLogger } from '@automaker/utils';
import type { SpecOutput } from '@automaker/types';

const logger = createLogger('XmlExtractor');

/**
 * Represents an implemented feature extracted from XML
 */
export interface ImplementedFeature {
name: string;
description: string;
file_locations?: string[];
}

/**
 * Logger interface for optional custom logging
 */
export interface XmlExtractorLogger {
debug: (message: string, ...args: unknown[]) => void;
warn?: (message: string, ...args: unknown[]) => void;
}

/**
 * Options for XML extraction operations
 */
export interface ExtractXmlOptions {
/** Custom logger (defaults to internal logger) */
logger?: XmlExtractorLogger;
}

/**
 * Escape special XML characters
 * Handles undefined/null values by converting them to empty strings
 */
export function escapeXml(str: string | undefined | null): string {
if (str == null) {
return '';
}
return str
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&apos;');
}

/**
 * Unescape XML entities back to regular characters
 */
export function unescapeXml(str: string): string {
return str
.replace(/&apos;/g, "'")
.replace(/&quot;/g, '"')
.replace(/&gt;/g, '>')
.replace(/&lt;/g, '<')
.replace(/&amp;/g, '&');
}

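// Illustrative usage (sketch, not part of the original 611-line file):
// const escapedExample = escapeXml('Fish & Chips <"deluxe">'); // 'Fish &amp; Chips &lt;&quot;deluxe&quot;&gt;'
// const roundTripExample = unescapeXml(escapedExample);        // back to 'Fish & Chips <"deluxe">'
// const emptyExample = escapeXml(undefined);                   // '' – null/undefined are tolerated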
/**
 * Extract the content of a specific XML section
 *
 * @param xmlContent - The full XML content
 * @param tagName - The tag name to extract (e.g., 'implemented_features')
 * @param options - Optional extraction options
 * @returns The content between the tags, or null if not found
 */
export function extractXmlSection(
xmlContent: string,
tagName: string,
options: ExtractXmlOptions = {}
): string | null {
const log = options.logger || logger;

const regex = new RegExp(`<${tagName}>([\\s\\S]*?)<\\/${tagName}>`, 'i');
const match = xmlContent.match(regex);

if (match) {
log.debug(`Extracted <${tagName}> section`);
return match[1];
}

log.debug(`Section <${tagName}> not found`);
return null;
}

/**
 * Extract all values from repeated XML elements
 *
 * @param xmlContent - The XML content to search
 * @param tagName - The tag name to extract values from
 * @param options - Optional extraction options
 * @returns Array of extracted values (unescaped)
 */
export function extractXmlElements(
xmlContent: string,
tagName: string,
options: ExtractXmlOptions = {}
): string[] {
const log = options.logger || logger;
const values: string[] = [];

const regex = new RegExp(`<${tagName}>([\\s\\S]*?)<\\/${tagName}>`, 'g');
const matches = xmlContent.matchAll(regex);

for (const match of matches) {
values.push(unescapeXml(match[1].trim()));
}

log.debug(`Extracted ${values.length} <${tagName}> elements`);
return values;
}

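// Illustrative usage (sketch): pull one section, then its repeated child elements.
// const exampleXml = '<doc><tags><tag>a &amp; b</tag><tag>c</tag></tags></doc>';
// const exampleSection = extractXmlSection(exampleXml, 'tags'); // '<tag>a &amp; b</tag><tag>c</tag>'
// const exampleValues = exampleSection ? extractXmlElements(exampleSection, 'tag') : []; // ['a & b', 'c']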
/**
|
||||
* Extract implemented features from app_spec.txt XML content
|
||||
*
|
||||
* @param specContent - The full XML content of app_spec.txt
|
||||
* @param options - Optional extraction options
|
||||
* @returns Array of implemented features with name, description, and optional file_locations
|
||||
*/
|
||||
export function extractImplementedFeatures(
|
||||
specContent: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): ImplementedFeature[] {
|
||||
const log = options.logger || logger;
|
||||
const features: ImplementedFeature[] = [];
|
||||
|
||||
// Match <implemented_features>...</implemented_features> section
|
||||
const implementedSection = extractXmlSection(specContent, 'implemented_features', options);
|
||||
|
||||
if (!implementedSection) {
|
||||
log.debug('No implemented_features section found');
|
||||
return features;
|
||||
}
|
||||
|
||||
// Extract individual feature blocks
|
||||
const featureRegex = /<feature>([\s\S]*?)<\/feature>/g;
|
||||
const featureMatches = implementedSection.matchAll(featureRegex);
|
||||
|
||||
for (const featureMatch of featureMatches) {
|
||||
const featureContent = featureMatch[1];
|
||||
|
||||
// Extract name
|
||||
const nameMatch = featureContent.match(/<name>([\s\S]*?)<\/name>/);
|
||||
const name = nameMatch ? unescapeXml(nameMatch[1].trim()) : '';
|
||||
|
||||
// Extract description
|
||||
const descMatch = featureContent.match(/<description>([\s\S]*?)<\/description>/);
|
||||
const description = descMatch ? unescapeXml(descMatch[1].trim()) : '';
|
||||
|
||||
// Extract file_locations if present
|
||||
const locationsSection = extractXmlSection(featureContent, 'file_locations', options);
|
||||
const file_locations = locationsSection
|
||||
? extractXmlElements(locationsSection, 'location', options)
|
||||
: undefined;
|
||||
|
||||
if (name) {
|
||||
features.push({
|
||||
name,
|
||||
description,
|
||||
...(file_locations && file_locations.length > 0 ? { file_locations } : {}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
log.debug(`Extracted ${features.length} implemented features`);
|
||||
return features;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract only the feature names from implemented_features section
|
||||
*
|
||||
* @param specContent - The full XML content of app_spec.txt
|
||||
* @param options - Optional extraction options
|
||||
* @returns Array of feature names
|
||||
*/
|
||||
export function extractImplementedFeatureNames(
|
||||
specContent: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): string[] {
|
||||
const features = extractImplementedFeatures(specContent, options);
|
||||
return features.map((f) => f.name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate XML for a single implemented feature
|
||||
*
|
||||
* @param feature - The feature to convert to XML
|
||||
* @param indent - The base indentation level (default: 2 spaces)
|
||||
* @returns XML string for the feature
|
||||
*/
|
||||
export function featureToXml(feature: ImplementedFeature, indent: string = ' '): string {
|
||||
const i2 = indent.repeat(2);
|
||||
const i3 = indent.repeat(3);
|
||||
const i4 = indent.repeat(4);
|
||||
|
||||
let xml = `${i2}<feature>
|
||||
${i3}<name>${escapeXml(feature.name)}</name>
|
||||
${i3}<description>${escapeXml(feature.description)}</description>`;
|
||||
|
||||
if (feature.file_locations && feature.file_locations.length > 0) {
|
||||
xml += `
|
||||
${i3}<file_locations>
|
||||
${feature.file_locations.map((loc) => `${i4}<location>${escapeXml(loc)}</location>`).join('\n')}
|
||||
${i3}</file_locations>`;
|
||||
}
|
||||
|
||||
xml += `
|
||||
${i2}</feature>`;
|
||||
|
||||
return xml;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate XML for an array of implemented features
|
||||
*
|
||||
* @param features - Array of features to convert to XML
|
||||
* @param indent - The base indentation level (default: 2 spaces)
|
||||
* @returns XML string for the implemented_features section content
|
||||
*/
|
||||
export function featuresToXml(features: ImplementedFeature[], indent: string = ' '): string {
|
||||
return features.map((f) => featureToXml(f, indent)).join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the implemented_features section in XML content
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param newFeatures - The new features to set
|
||||
* @param options - Optional extraction options
|
||||
* @returns Updated XML content with the new implemented_features section
|
||||
*/
|
||||
export function updateImplementedFeaturesSection(
|
||||
specContent: string,
|
||||
newFeatures: ImplementedFeature[],
|
||||
options: ExtractXmlOptions = {}
|
||||
): string {
|
||||
const log = options.logger || logger;
|
||||
const indent = ' ';
|
||||
|
||||
// Generate new section content
|
||||
const newSectionContent = featuresToXml(newFeatures, indent);
|
||||
|
||||
// Build the new section
|
||||
const newSection = `<implemented_features>
|
||||
${newSectionContent}
|
||||
${indent}</implemented_features>`;
|
||||
|
||||
// Check if section exists
|
||||
const sectionRegex = /<implemented_features>[\s\S]*?<\/implemented_features>/;
|
||||
|
||||
if (sectionRegex.test(specContent)) {
|
||||
log.debug('Replacing existing implemented_features section');
|
||||
return specContent.replace(sectionRegex, newSection);
|
||||
}
|
||||
|
||||
// If section doesn't exist, try to insert after core_capabilities
|
||||
const coreCapabilitiesEnd = '</core_capabilities>';
|
||||
const insertIndex = specContent.indexOf(coreCapabilitiesEnd);
|
||||
|
||||
if (insertIndex !== -1) {
|
||||
const insertPosition = insertIndex + coreCapabilitiesEnd.length;
|
||||
log.debug('Inserting implemented_features after core_capabilities');
|
||||
return (
|
||||
specContent.slice(0, insertPosition) +
|
||||
'\n\n' +
|
||||
indent +
|
||||
newSection +
|
||||
specContent.slice(insertPosition)
|
||||
);
|
||||
}
|
||||
|
||||
// As a fallback, insert before </project_specification>
|
||||
const projectSpecEnd = '</project_specification>';
|
||||
const fallbackIndex = specContent.indexOf(projectSpecEnd);
|
||||
|
||||
if (fallbackIndex !== -1) {
|
||||
log.debug('Inserting implemented_features before </project_specification>');
|
||||
return (
|
||||
specContent.slice(0, fallbackIndex) +
|
||||
indent +
|
||||
newSection +
|
||||
'\n' +
|
||||
specContent.slice(fallbackIndex)
|
||||
);
|
||||
}
|
||||
|
||||
log.warn?.('Could not find appropriate insertion point for implemented_features');
|
||||
log.debug('Could not find appropriate insertion point for implemented_features');
|
||||
return specContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new feature to the implemented_features section
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param newFeature - The feature to add
|
||||
* @param options - Optional extraction options
|
||||
* @returns Updated XML content with the new feature added
|
||||
*/
|
||||
export function addImplementedFeature(
|
||||
specContent: string,
|
||||
newFeature: ImplementedFeature,
|
||||
options: ExtractXmlOptions = {}
|
||||
): string {
|
||||
const log = options.logger || logger;
|
||||
|
||||
// Extract existing features
|
||||
const existingFeatures = extractImplementedFeatures(specContent, options);
|
||||
|
||||
// Check for duplicates by name
|
||||
const isDuplicate = existingFeatures.some(
|
||||
(f) => f.name.toLowerCase() === newFeature.name.toLowerCase()
|
||||
);
|
||||
|
||||
if (isDuplicate) {
|
||||
log.debug(`Feature "${newFeature.name}" already exists, skipping`);
|
||||
return specContent;
|
||||
}
|
||||
|
||||
// Add the new feature
|
||||
const updatedFeatures = [...existingFeatures, newFeature];
|
||||
|
||||
log.debug(`Adding feature "${newFeature.name}"`);
|
||||
return updateImplementedFeaturesSection(specContent, updatedFeatures, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a feature from the implemented_features section by name
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param featureName - The name of the feature to remove
|
||||
* @param options - Optional extraction options
|
||||
* @returns Updated XML content with the feature removed
|
||||
*/
|
||||
export function removeImplementedFeature(
|
||||
specContent: string,
|
||||
featureName: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): string {
|
||||
const log = options.logger || logger;
|
||||
|
||||
// Extract existing features
|
||||
const existingFeatures = extractImplementedFeatures(specContent, options);
|
||||
|
||||
// Filter out the feature to remove
|
||||
const updatedFeatures = existingFeatures.filter(
|
||||
(f) => f.name.toLowerCase() !== featureName.toLowerCase()
|
||||
);
|
||||
|
||||
if (updatedFeatures.length === existingFeatures.length) {
|
||||
log.debug(`Feature "${featureName}" not found, no changes made`);
|
||||
return specContent;
|
||||
}
|
||||
|
||||
log.debug(`Removing feature "${featureName}"`);
|
||||
return updateImplementedFeaturesSection(specContent, updatedFeatures, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an existing feature in the implemented_features section
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param featureName - The name of the feature to update
|
||||
* @param updates - Partial updates to apply to the feature
|
||||
* @param options - Optional extraction options
|
||||
* @returns Updated XML content with the feature modified
|
||||
*/
|
||||
export function updateImplementedFeature(
|
||||
specContent: string,
|
||||
featureName: string,
|
||||
updates: Partial<ImplementedFeature>,
|
||||
options: ExtractXmlOptions = {}
|
||||
): string {
|
||||
const log = options.logger || logger;
|
||||
|
||||
// Extract existing features
|
||||
const existingFeatures = extractImplementedFeatures(specContent, options);
|
||||
|
||||
// Find and update the feature
|
||||
let found = false;
|
||||
const updatedFeatures = existingFeatures.map((f) => {
|
||||
if (f.name.toLowerCase() === featureName.toLowerCase()) {
|
||||
found = true;
|
||||
return {
|
||||
...f,
|
||||
...updates,
|
||||
// Preserve the original name if not explicitly updated
|
||||
name: updates.name ?? f.name,
|
||||
};
|
||||
}
|
||||
return f;
|
||||
});
|
||||
|
||||
if (!found) {
|
||||
log.debug(`Feature "${featureName}" not found, no changes made`);
|
||||
return specContent;
|
||||
}
|
||||
|
||||
log.debug(`Updating feature "${featureName}"`);
|
||||
return updateImplementedFeaturesSection(specContent, updatedFeatures, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a feature exists in the implemented_features section
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param featureName - The name of the feature to check
|
||||
* @param options - Optional extraction options
|
||||
* @returns True if the feature exists
|
||||
*/
|
||||
export function hasImplementedFeature(
|
||||
specContent: string,
|
||||
featureName: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): boolean {
|
||||
const features = extractImplementedFeatures(specContent, options);
|
||||
return features.some((f) => f.name.toLowerCase() === featureName.toLowerCase());
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert extracted features to SpecOutput.implemented_features format
|
||||
*
|
||||
* @param features - Array of extracted features
|
||||
* @returns Features in SpecOutput format
|
||||
*/
|
||||
export function toSpecOutputFeatures(
|
||||
features: ImplementedFeature[]
|
||||
): SpecOutput['implemented_features'] {
|
||||
return features.map((f) => ({
|
||||
name: f.name,
|
||||
description: f.description,
|
||||
...(f.file_locations && f.file_locations.length > 0
|
||||
? { file_locations: f.file_locations }
|
||||
: {}),
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert SpecOutput.implemented_features to ImplementedFeature format
|
||||
*
|
||||
* @param specFeatures - Features from SpecOutput
|
||||
* @returns Features in ImplementedFeature format
|
||||
*/
|
||||
export function fromSpecOutputFeatures(
|
||||
specFeatures: SpecOutput['implemented_features']
|
||||
): ImplementedFeature[] {
|
||||
return specFeatures.map((f) => ({
|
||||
name: f.name,
|
||||
description: f.description,
|
||||
...(f.file_locations && f.file_locations.length > 0
|
||||
? { file_locations: f.file_locations }
|
||||
: {}),
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a roadmap phase extracted from XML
|
||||
*/
|
||||
export interface RoadmapPhase {
|
||||
name: string;
|
||||
status: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the technology stack from app_spec.txt XML content
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param options - Optional extraction options
|
||||
* @returns Array of technology names
|
||||
*/
|
||||
export function extractTechnologyStack(
|
||||
specContent: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): string[] {
|
||||
const log = options.logger || logger;
|
||||
|
||||
const techSection = extractXmlSection(specContent, 'technology_stack', options);
|
||||
if (!techSection) {
|
||||
log.debug('No technology_stack section found');
|
||||
return [];
|
||||
}
|
||||
|
||||
const technologies = extractXmlElements(techSection, 'technology', options);
|
||||
log.debug(`Extracted ${technologies.length} technologies`);
|
||||
return technologies;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the technology_stack section in XML content
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param technologies - The new technology list
|
||||
* @param options - Optional extraction options
|
||||
* @returns Updated XML content
|
||||
*/
|
||||
export function updateTechnologyStack(
|
||||
specContent: string,
|
||||
technologies: string[],
|
||||
options: ExtractXmlOptions = {}
|
||||
): string {
|
||||
const log = options.logger || logger;
|
||||
const indent = ' ';
|
||||
const i2 = indent.repeat(2);
|
||||
|
||||
// Generate new section content
|
||||
const techXml = technologies
|
||||
.map((t) => `${i2}<technology>${escapeXml(t)}</technology>`)
|
||||
.join('\n');
|
||||
const newSection = `<technology_stack>\n${techXml}\n${indent}</technology_stack>`;
|
||||
|
||||
// Check if section exists
|
||||
const sectionRegex = /<technology_stack>[\s\S]*?<\/technology_stack>/;
|
||||
|
||||
if (sectionRegex.test(specContent)) {
|
||||
log.debug('Replacing existing technology_stack section');
|
||||
return specContent.replace(sectionRegex, newSection);
|
||||
}
|
||||
|
||||
log.debug('No technology_stack section found to update');
|
||||
return specContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract roadmap phases from app_spec.txt XML content
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param options - Optional extraction options
|
||||
* @returns Array of roadmap phases
|
||||
*/
|
||||
export function extractRoadmapPhases(
|
||||
specContent: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): RoadmapPhase[] {
|
||||
const log = options.logger || logger;
|
||||
const phases: RoadmapPhase[] = [];
|
||||
|
||||
const roadmapSection = extractXmlSection(specContent, 'implementation_roadmap', options);
|
||||
if (!roadmapSection) {
|
||||
log.debug('No implementation_roadmap section found');
|
||||
return phases;
|
||||
}
|
||||
|
||||
// Extract individual phase blocks
|
||||
const phaseRegex = /<phase>([\s\S]*?)<\/phase>/g;
|
||||
const phaseMatches = roadmapSection.matchAll(phaseRegex);
|
||||
|
||||
for (const phaseMatch of phaseMatches) {
|
||||
const phaseContent = phaseMatch[1];
|
||||
|
||||
const nameMatch = phaseContent.match(/<name>([\s\S]*?)<\/name>/);
|
||||
const name = nameMatch ? unescapeXml(nameMatch[1].trim()) : '';
|
||||
|
||||
const statusMatch = phaseContent.match(/<status>([\s\S]*?)<\/status>/);
|
||||
const status = statusMatch ? unescapeXml(statusMatch[1].trim()) : 'pending';
|
||||
|
||||
const descMatch = phaseContent.match(/<description>([\s\S]*?)<\/description>/);
|
||||
const description = descMatch ? unescapeXml(descMatch[1].trim()) : undefined;
|
||||
|
||||
if (name) {
|
||||
phases.push({ name, status, description });
|
||||
}
|
||||
}
|
||||
|
||||
log.debug(`Extracted ${phases.length} roadmap phases`);
|
||||
return phases;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a roadmap phase status in XML content
|
||||
*
|
||||
* @param specContent - The full XML content
|
||||
* @param phaseName - The name of the phase to update
|
||||
* @param newStatus - The new status value
|
||||
* @param options - Optional extraction options
|
||||
* @returns Updated XML content
|
||||
*/
|
||||
export function updateRoadmapPhaseStatus(
|
||||
specContent: string,
|
||||
phaseName: string,
|
||||
newStatus: string,
|
||||
options: ExtractXmlOptions = {}
|
||||
): string {
|
||||
const log = options.logger || logger;
|
||||
|
||||
// Find the phase and update its status
|
||||
// Match the phase block containing the specific name
|
||||
const phaseRegex = new RegExp(
|
||||
`(<phase>\\s*<name>\\s*${escapeXml(phaseName)}\\s*<\\/name>\\s*<status>)[\\s\\S]*?(<\\/status>)`,
|
||||
'i'
|
||||
);
|
||||
|
||||
if (phaseRegex.test(specContent)) {
|
||||
log.debug(`Updating phase "${phaseName}" status to "${newStatus}"`);
|
||||
return specContent.replace(phaseRegex, `$1${escapeXml(newStatus)}$2`);
|
||||
}
|
||||
|
||||
log.debug(`Phase "${phaseName}" not found`);
|
||||
return specContent;
|
||||
}
|
||||
@@ -35,6 +35,7 @@ import {
type SubprocessOptions,
type WslCliResult,
} from '@automaker/platform';
import { calculateReasoningTimeout } from '@automaker/types';
import { createLogger, isAbortError } from '@automaker/utils';
import { execSync } from 'child_process';
import * as fs from 'fs';
@@ -107,6 +108,15 @@ export interface CliDetectionResult {
// Create logger for CLI operations
const cliLogger = createLogger('CliProvider');

/**
 * Base timeout for CLI operations in milliseconds.
 * CLI tools have longer startup and processing times compared to direct API calls,
 * so we use a higher base timeout (120s) than the default provider timeout (30s).
 * This is multiplied by reasoning effort multipliers when applicable.
 * @see calculateReasoningTimeout from @automaker/types
 */
const CLI_BASE_TIMEOUT_MS = 120000;

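// Illustrative sketch (the exact multipliers are an assumption, not shown in this diff):
// calculateReasoningTimeout() scales the base timeout with reasoning effort, e.g.
//   calculateReasoningTimeout(undefined, CLI_BASE_TIMEOUT_MS) -> CLI_BASE_TIMEOUT_MS
//   calculateReasoningTimeout('xhigh', CLI_BASE_TIMEOUT_MS)   -> a larger value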
/**
 * Abstract base class for CLI-based providers
 *
@@ -450,6 +460,10 @@ export abstract class CliProvider extends BaseProvider {
}
}

// Calculate dynamic timeout based on reasoning effort.
// This addresses GitHub issue #530 where reasoning models with 'xhigh' effort would timeout.
const timeout = calculateReasoningTimeout(options.reasoningEffort, CLI_BASE_TIMEOUT_MS);

// WSL strategy
if (this.useWsl && this.wslCliPath) {
const wslCwd = windowsToWslPath(cwd);
@@ -473,7 +487,7 @@ export abstract class CliProvider extends BaseProvider {
cwd, // Windows cwd for spawn
env: filteredEnv,
abortController: options.abortController,
timeout: 120000, // CLI operations may take longer
timeout,
};
}

@@ -488,7 +502,7 @@ export abstract class CliProvider extends BaseProvider {
cwd,
env: filteredEnv,
abortController: options.abortController,
timeout: 120000,
timeout,
};
}

@@ -501,7 +515,7 @@ export abstract class CliProvider extends BaseProvider {
cwd,
env: filteredEnv,
abortController: options.abortController,
timeout: 120000,
timeout,
};
}

@@ -33,6 +33,8 @@ import {
CODEX_MODEL_MAP,
supportsReasoningEffort,
validateBareModelId,
calculateReasoningTimeout,
DEFAULT_TIMEOUT_MS,
type CodexApprovalPolicy,
type CodexSandboxMode,
type CodexAuthStatus,
@@ -91,7 +93,14 @@ const CODEX_ITEM_TYPES = {
const SYSTEM_PROMPT_LABEL = 'System instructions';
const HISTORY_HEADER = 'Current request:\n';
const TEXT_ENCODING = 'utf-8';
const DEFAULT_TIMEOUT_MS = 30000;
/**
 * Default timeout for Codex CLI operations in milliseconds.
 * This is the "no output" timeout - if the CLI doesn't produce any JSONL output
 * for this duration, the process is killed. For reasoning models with high
 * reasoning effort, this timeout is dynamically extended via calculateReasoningTimeout().
 * @see calculateReasoningTimeout from @automaker/types
 */
const CODEX_CLI_TIMEOUT_MS = DEFAULT_TIMEOUT_MS;
const CONTEXT_WINDOW_256K = 256000;
const MAX_OUTPUT_32K = 32000;
const MAX_OUTPUT_16K = 16000;
@@ -814,13 +823,19 @@ export class CodexProvider extends BaseProvider {
envOverrides[OPENAI_API_KEY_ENV] = executionPlan.openAiApiKey;
}

// Calculate dynamic timeout based on reasoning effort.
// Higher reasoning effort (e.g., 'xhigh' for "xtra thinking" mode) requires more time
// for the model to generate reasoning tokens before producing output.
// This fixes GitHub issue #530 where features would get stuck with reasoning models.
const timeout = calculateReasoningTimeout(options.reasoningEffort, CODEX_CLI_TIMEOUT_MS);

const stream = spawnJSONLProcess({
command: commandPath,
args,
cwd: options.cwd,
env: envOverrides,
abortController: options.abortController,
timeout: DEFAULT_TIMEOUT_MS,
timeout,
stdinData: promptText, // Pass prompt via stdin
});

@@ -6,8 +6,17 @@ import { createLogger } from '@automaker/utils';

const logger = createLogger('SpecRegeneration');

// Types for running generation
export type GenerationType = 'spec_regeneration' | 'feature_generation' | 'sync';

interface RunningGeneration {
isRunning: boolean;
type: GenerationType;
startedAt: string;
}

// Shared state for tracking generation status - scoped by project path
const runningProjects = new Map<string, boolean>();
const runningProjects = new Map<string, RunningGeneration>();
const abortControllers = new Map<string, AbortController>();

/**
@@ -17,16 +26,21 @@ export function getSpecRegenerationStatus(projectPath?: string): {
isRunning: boolean;
currentAbortController: AbortController | null;
projectPath?: string;
type?: GenerationType;
startedAt?: string;
} {
if (projectPath) {
const generation = runningProjects.get(projectPath);
return {
isRunning: runningProjects.get(projectPath) || false,
isRunning: generation?.isRunning || false,
currentAbortController: abortControllers.get(projectPath) || null,
projectPath,
type: generation?.type,
startedAt: generation?.startedAt,
};
}
// Fallback: check if any project is running (for backward compatibility)
const isAnyRunning = Array.from(runningProjects.values()).some((running) => running);
const isAnyRunning = Array.from(runningProjects.values()).some((g) => g.isRunning);
return { isRunning: isAnyRunning, currentAbortController: null };
}

@@ -46,10 +60,15 @@ export function getRunningProjectPath(): string | null {
|
||||
export function setRunningState(
|
||||
projectPath: string,
|
||||
running: boolean,
|
||||
controller: AbortController | null = null
|
||||
controller: AbortController | null = null,
|
||||
type: GenerationType = 'spec_regeneration'
|
||||
): void {
|
||||
if (running) {
|
||||
runningProjects.set(projectPath, true);
|
||||
runningProjects.set(projectPath, {
|
||||
isRunning: true,
|
||||
type,
|
||||
startedAt: new Date().toISOString(),
|
||||
});
|
||||
if (controller) {
|
||||
abortControllers.set(projectPath, controller);
|
||||
}
|
||||
@@ -59,6 +78,33 @@ export function setRunningState(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all running spec/feature generations for the running agents view
|
||||
*/
|
||||
export function getAllRunningGenerations(): Array<{
|
||||
projectPath: string;
|
||||
type: GenerationType;
|
||||
startedAt: string;
|
||||
}> {
|
||||
const results: Array<{
|
||||
projectPath: string;
|
||||
type: GenerationType;
|
||||
startedAt: string;
|
||||
}> = [];
|
||||
|
||||
for (const [projectPath, generation] of runningProjects.entries()) {
|
||||
if (generation.isRunning) {
|
||||
results.push({
|
||||
projectPath,
|
||||
type: generation.type,
|
||||
startedAt: generation.startedAt,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to log authentication status
|
||||
*/
|
||||
|
||||
@@ -14,7 +14,8 @@ import { streamingQuery } from '../../providers/simple-query-service.js';
|
||||
import { parseAndCreateFeatures } from './parse-and-create-features.js';
|
||||
import { getAppSpecPath } from '@automaker/platform';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../lib/settings-helpers.js';
|
||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
|
||||
const logger = createLogger('SpecRegeneration');
|
||||
|
||||
@@ -53,38 +54,48 @@ export async function generateFeaturesFromSpec(
|
||||
return;
|
||||
}
|
||||
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(settingsService, '[FeatureGeneration]');
|
||||
|
||||
// Load existing features to prevent duplicates
|
||||
const featureLoader = new FeatureLoader();
|
||||
const existingFeatures = await featureLoader.getAll(projectPath);
|
||||
|
||||
logger.info(`Found ${existingFeatures.length} existing features to exclude from generation`);
|
||||
|
||||
// Build existing features context for the prompt
|
||||
let existingFeaturesContext = '';
|
||||
if (existingFeatures.length > 0) {
|
||||
const featuresList = existingFeatures
|
||||
.map(
|
||||
(f) =>
|
||||
`- "${f.title}" (ID: ${f.id}): ${f.description?.substring(0, 100) || 'No description'}`
|
||||
)
|
||||
.join('\n');
|
||||
existingFeaturesContext = `
|
||||
|
||||
## EXISTING FEATURES (DO NOT REGENERATE THESE)
|
||||
|
||||
The following ${existingFeatures.length} features already exist in the project. You MUST NOT generate features that duplicate or overlap with these:
|
||||
|
||||
${featuresList}
|
||||
|
||||
CRITICAL INSTRUCTIONS:
|
||||
- DO NOT generate any features with the same or similar titles as the existing features listed above
|
||||
- DO NOT generate features that cover the same functionality as existing features
|
||||
- ONLY generate NEW features that are not yet in the system
|
||||
- If a feature from the roadmap already exists, skip it entirely
|
||||
- Generate unique feature IDs that do not conflict with existing IDs: ${existingFeatures.map((f) => f.id).join(', ')}
|
||||
`;
|
||||
}
|
||||
|
||||
const prompt = `Based on this project specification:
|
||||
|
||||
${spec}
|
||||
${existingFeaturesContext}
|
||||
${prompts.appSpec.generateFeaturesFromSpecPrompt}
|
||||
|
||||
Generate a prioritized list of implementable features. For each feature provide:
|
||||
|
||||
1. **id**: A unique lowercase-hyphenated identifier
|
||||
2. **category**: Functional category (e.g., "Core", "UI", "API", "Authentication", "Database")
|
||||
3. **title**: Short descriptive title
|
||||
4. **description**: What this feature does (2-3 sentences)
|
||||
5. **priority**: 1 (high), 2 (medium), or 3 (low)
|
||||
6. **complexity**: "simple", "moderate", or "complex"
|
||||
7. **dependencies**: Array of feature IDs this depends on (can be empty)
|
||||
|
||||
Format as JSON:
|
||||
{
|
||||
"features": [
|
||||
{
|
||||
"id": "feature-id",
|
||||
"category": "Feature Category",
|
||||
"title": "Feature Title",
|
||||
"description": "What it does",
|
||||
"priority": 1,
|
||||
"complexity": "moderate",
|
||||
"dependencies": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Generate ${featureCount} features that build on each other logically.
|
||||
|
||||
IMPORTANT: Do not ask for clarification. The specification is provided above. Generate the JSON immediately.`;
|
||||
Generate ${featureCount} NEW features that build on each other logically. Remember: ONLY generate features that DO NOT already exist.`;
|
||||
|
||||
logger.info('========== PROMPT BEING SENT ==========');
|
||||
logger.info(`Prompt length: ${prompt.length} chars`);
|
||||
|
||||
@@ -7,12 +7,7 @@
|
||||
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import {
|
||||
specOutputSchema,
|
||||
specToXml,
|
||||
getStructuredSpecPromptInstruction,
|
||||
type SpecOutput,
|
||||
} from '../../lib/app-spec-format.js';
|
||||
import { specOutputSchema, specToXml, type SpecOutput } from '../../lib/app-spec-format.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { DEFAULT_PHASE_MODELS, isCursorModel } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
@@ -21,7 +16,7 @@ import { streamingQuery } from '../../providers/simple-query-service.js';
|
||||
import { generateFeaturesFromSpec } from './generate-features-from-spec.js';
|
||||
import { ensureAutomakerDir, getAppSpecPath } from '@automaker/platform';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../lib/settings-helpers.js';
|
||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('SpecRegeneration');
|
||||
|
||||
@@ -43,6 +38,9 @@ export async function generateSpec(
|
||||
logger.info('analyzeProject:', analyzeProject);
|
||||
logger.info('maxFeatures:', maxFeatures);
|
||||
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(settingsService, '[SpecRegeneration]');
|
||||
|
||||
// Build the prompt based on whether we should analyze the project
|
||||
let analysisInstructions = '';
|
||||
let techStackDefaults = '';
|
||||
@@ -66,9 +64,7 @@ export async function generateSpec(
|
||||
Use these technologies as the foundation for the specification.`;
|
||||
}
|
||||
|
||||
const prompt = `You are helping to define a software project specification.
|
||||
|
||||
IMPORTANT: Never ask for clarification or additional information. Use the information provided and make reasonable assumptions to create the best possible specification. If details are missing, infer them based on common patterns and best practices.
|
||||
const prompt = `${prompts.appSpec.generateSpecSystemPrompt}
|
||||
|
||||
Project Overview:
|
||||
${projectOverview}
|
||||
@@ -77,7 +73,7 @@ ${techStackDefaults}
|
||||
|
||||
${analysisInstructions}
|
||||
|
||||
${getStructuredSpecPromptInstruction()}`;
|
||||
${prompts.appSpec.structuredSpecInstructions}`;
|
||||
|
||||
logger.info('========== PROMPT BEING SENT ==========');
|
||||
logger.info(`Prompt length: ${prompt.length} chars`);
|
||||
@@ -205,19 +201,33 @@ Your entire response should be valid JSON starting with { and ending with }. No
|
||||
xmlContent = responseText.substring(xmlStart, xmlEnd + '</project_specification>'.length);
|
||||
logger.info(`Extracted XML content: ${xmlContent.length} chars (from position ${xmlStart})`);
|
||||
} else {
|
||||
// No valid XML structure found in the response text
|
||||
// This happens when structured output was expected but not received, and the agent
|
||||
// output conversational text instead of XML (e.g., "The project directory appears to be empty...")
|
||||
// We should NOT save this conversational text as it's not a valid spec
|
||||
logger.error('❌ Response does not contain valid <project_specification> XML structure');
|
||||
logger.error(
|
||||
'This typically happens when structured output failed and the agent produced conversational text instead of XML'
|
||||
);
|
||||
throw new Error(
|
||||
'Failed to generate spec: No valid XML structure found in response. ' +
|
||||
'The response contained conversational text but no <project_specification> tags. ' +
|
||||
'Please try again.'
|
||||
);
|
||||
// No XML found, try JSON extraction
|
||||
logger.warn('⚠️ No XML tags found, attempting JSON extraction...');
|
||||
const extractedJson = extractJson<SpecOutput>(responseText, { logger });
|
||||
|
||||
if (
|
||||
extractedJson &&
|
||||
typeof extractedJson.project_name === 'string' &&
|
||||
typeof extractedJson.overview === 'string' &&
|
||||
Array.isArray(extractedJson.technology_stack) &&
|
||||
Array.isArray(extractedJson.core_capabilities) &&
|
||||
Array.isArray(extractedJson.implemented_features)
|
||||
) {
|
||||
logger.info('✅ Successfully extracted JSON from response text');
|
||||
xmlContent = specToXml(extractedJson);
|
||||
logger.info(`✅ Converted extracted JSON to XML: ${xmlContent.length} chars`);
|
||||
} else {
|
||||
// Neither XML nor valid JSON found
|
||||
logger.error('❌ Response does not contain valid XML or JSON structure');
|
||||
logger.error(
|
||||
'This typically happens when structured output failed and the agent produced conversational text instead of structured output'
|
||||
);
|
||||
throw new Error(
|
||||
'Failed to generate spec: No valid XML or JSON structure found in response. ' +
|
||||
'The response contained conversational text but no <project_specification> tags or valid JSON. ' +
|
||||
'Please try again.'
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createCreateHandler } from './routes/create.js';
|
||||
import { createGenerateHandler } from './routes/generate.js';
|
||||
import { createGenerateFeaturesHandler } from './routes/generate-features.js';
|
||||
import { createSyncHandler } from './routes/sync.js';
|
||||
import { createStopHandler } from './routes/stop.js';
|
||||
import { createStatusHandler } from './routes/status.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
@@ -20,6 +21,7 @@ export function createSpecRegenerationRoutes(
|
||||
router.post('/create', createCreateHandler(events));
|
||||
router.post('/generate', createGenerateHandler(events, settingsService));
|
||||
router.post('/generate-features', createGenerateFeaturesHandler(events, settingsService));
|
||||
router.post('/sync', createSyncHandler(events, settingsService));
|
||||
router.post('/stop', createStopHandler());
|
||||
router.get('/status', createStatusHandler());
|
||||
|
||||
|
||||
@@ -5,9 +5,10 @@
|
||||
import path from 'path';
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
|
||||
import { getFeaturesDir } from '@automaker/platform';
|
||||
import { extractJsonWithArray } from '../../lib/json-extractor.js';
|
||||
import { getNotificationService } from '../../services/notification-service.js';
|
||||
|
||||
const logger = createLogger('SpecRegeneration');
|
||||
|
||||
@@ -73,10 +74,10 @@ export async function parseAndCreateFeatures(
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await secureFs.writeFile(
|
||||
path.join(featureDir, 'feature.json'),
|
||||
JSON.stringify(featureData, null, 2)
|
||||
);
|
||||
// Use atomic write with backup support for crash protection
|
||||
await atomicWriteJson(path.join(featureDir, 'feature.json'), featureData, {
|
||||
backupCount: DEFAULT_BACKUP_COUNT,
|
||||
});
|
||||
|
||||
createdFeatures.push({ id: feature.id, title: feature.title });
|
||||
}
|
||||
@@ -88,6 +89,15 @@ export async function parseAndCreateFeatures(
|
||||
message: `Spec regeneration complete! Created ${createdFeatures.length} features.`,
|
||||
projectPath: projectPath,
|
||||
});
|
||||
|
||||
// Create notification for spec generation completion
|
||||
const notificationService = getNotificationService();
|
||||
await notificationService.createNotification({
|
||||
type: 'spec_regeneration_complete',
|
||||
title: 'Spec Generation Complete',
|
||||
message: `Created ${createdFeatures.length} features from the project specification.`,
|
||||
projectPath: projectPath,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('❌ parseAndCreateFeatures() failed:');
|
||||
logger.error('Error:', error);
|
||||
|
||||
@@ -50,7 +50,7 @@ export function createGenerateFeaturesHandler(
|
||||
logAuthStatus('Before starting feature generation');
|
||||
|
||||
const abortController = new AbortController();
|
||||
setRunningState(projectPath, true, abortController);
|
||||
setRunningState(projectPath, true, abortController, 'feature_generation');
|
||||
logger.info('Starting background feature generation task...');
|
||||
|
||||
generateFeaturesFromSpec(projectPath, events, abortController, maxFeatures, settingsService)
|
||||
|
||||
76
apps/server/src/routes/app-spec/routes/sync.ts
Normal file
76
apps/server/src/routes/app-spec/routes/sync.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
/**
|
||||
* POST /sync endpoint - Sync spec with codebase and features
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import {
|
||||
getSpecRegenerationStatus,
|
||||
setRunningState,
|
||||
logAuthStatus,
|
||||
logError,
|
||||
getErrorMessage,
|
||||
} from '../common.js';
|
||||
import { syncSpec } from '../sync-spec.js';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
|
||||
const logger = createLogger('SpecSync');
|
||||
|
||||
export function createSyncHandler(events: EventEmitter, settingsService?: SettingsService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
logger.info('========== /sync endpoint called ==========');
|
||||
logger.debug('Request body:', JSON.stringify(req.body, null, 2));
|
||||
|
||||
try {
|
||||
const { projectPath } = req.body as {
|
||||
projectPath: string;
|
||||
};
|
||||
|
||||
logger.debug('projectPath:', projectPath);
|
||||
|
||||
if (!projectPath) {
|
||||
logger.error('Missing projectPath parameter');
|
||||
res.status(400).json({ success: false, error: 'projectPath required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const { isRunning } = getSpecRegenerationStatus(projectPath);
|
||||
if (isRunning) {
|
||||
logger.warn('Generation/sync already running for project:', projectPath);
|
||||
res.json({ success: false, error: 'Operation already running for this project' });
|
||||
return;
|
||||
}
|
||||
|
||||
logAuthStatus('Before starting spec sync');
|
||||
|
||||
const abortController = new AbortController();
|
||||
setRunningState(projectPath, true, abortController, 'sync');
|
||||
logger.info('Starting background spec sync task...');
|
||||
|
||||
syncSpec(projectPath, events, abortController, settingsService)
|
||||
.then((result) => {
|
||||
logger.info('Spec sync completed successfully');
|
||||
logger.info('Result:', JSON.stringify(result, null, 2));
|
||||
})
|
||||
.catch((error) => {
|
||||
logError(error, 'Spec sync failed with error');
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_error',
|
||||
error: getErrorMessage(error),
|
||||
projectPath,
|
||||
});
|
||||
})
|
||||
.finally(() => {
|
||||
logger.info('Spec sync task finished (success or error)');
|
||||
setRunningState(projectPath, false, null);
|
||||
});
|
||||
|
||||
logger.info('Returning success response (sync running in background)');
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, 'Sync route handler failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
307
apps/server/src/routes/app-spec/sync-spec.ts
Normal file
307
apps/server/src/routes/app-spec/sync-spec.ts
Normal file
@@ -0,0 +1,307 @@
|
||||
/**
|
||||
* Sync spec with current codebase and feature state
|
||||
*
|
||||
* Updates the spec file based on:
|
||||
* - Completed Automaker features
|
||||
* - Code analysis for tech stack and implementations
|
||||
* - Roadmap phase status updates
|
||||
*/
|
||||
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { DEFAULT_PHASE_MODELS } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { streamingQuery } from '../../providers/simple-query-service.js';
|
||||
import { getAppSpecPath } from '@automaker/platform';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../lib/settings-helpers.js';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
import {
|
||||
extractImplementedFeatures,
|
||||
extractTechnologyStack,
|
||||
extractRoadmapPhases,
|
||||
updateImplementedFeaturesSection,
|
||||
updateTechnologyStack,
|
||||
updateRoadmapPhaseStatus,
|
||||
type ImplementedFeature,
|
||||
type RoadmapPhase,
|
||||
} from '../../lib/xml-extractor.js';
|
||||
import { getNotificationService } from '../../services/notification-service.js';
|
||||
|
||||
const logger = createLogger('SpecSync');
|
||||
|
||||
/**
|
||||
* Result of a sync operation
|
||||
*/
|
||||
export interface SyncResult {
|
||||
techStackUpdates: {
|
||||
added: string[];
|
||||
removed: string[];
|
||||
};
|
||||
implementedFeaturesUpdates: {
|
||||
addedFromFeatures: string[];
|
||||
removed: string[];
|
||||
};
|
||||
roadmapUpdates: Array<{ phaseName: string; newStatus: string }>;
|
||||
summary: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync the spec with current codebase and feature state
|
||||
*/
|
||||
export async function syncSpec(
|
||||
projectPath: string,
|
||||
events: EventEmitter,
|
||||
abortController: AbortController,
|
||||
settingsService?: SettingsService
|
||||
): Promise<SyncResult> {
|
||||
logger.info('========== syncSpec() started ==========');
|
||||
logger.info('projectPath:', projectPath);
|
||||
|
||||
const result: SyncResult = {
|
||||
techStackUpdates: { added: [], removed: [] },
|
||||
implementedFeaturesUpdates: { addedFromFeatures: [], removed: [] },
|
||||
roadmapUpdates: [],
|
||||
summary: '',
|
||||
};
|
||||
|
||||
// Read existing spec
|
||||
const specPath = getAppSpecPath(projectPath);
|
||||
let specContent: string;
|
||||
|
||||
try {
|
||||
specContent = (await secureFs.readFile(specPath, 'utf-8')) as string;
|
||||
logger.info(`Spec loaded successfully (${specContent.length} chars)`);
|
||||
} catch (readError) {
|
||||
logger.error('Failed to read spec file:', readError);
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_error',
|
||||
error: 'No project spec found. Create or regenerate spec first.',
|
||||
projectPath,
|
||||
});
|
||||
throw new Error('No project spec found');
|
||||
}
|
||||
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: '[Phase: sync] Starting spec sync...\n',
|
||||
projectPath,
|
||||
});
|
||||
|
||||
// Extract current state from spec
|
||||
const currentImplementedFeatures = extractImplementedFeatures(specContent);
|
||||
const currentTechStack = extractTechnologyStack(specContent);
|
||||
const currentRoadmapPhases = extractRoadmapPhases(specContent);
|
||||
|
||||
logger.info(`Current spec has ${currentImplementedFeatures.length} implemented features`);
|
||||
logger.info(`Current spec has ${currentTechStack.length} technologies`);
|
||||
logger.info(`Current spec has ${currentRoadmapPhases.length} roadmap phases`);
|
||||
|
||||
// Load completed Automaker features
|
||||
const featureLoader = new FeatureLoader();
|
||||
const allFeatures = await featureLoader.getAll(projectPath);
|
||||
const completedFeatures = allFeatures.filter(
|
||||
(f) => f.status === 'completed' || f.status === 'verified'
|
||||
);
|
||||
|
||||
logger.info(`Found ${completedFeatures.length} completed/verified features in Automaker`);
|
||||
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: `Found ${completedFeatures.length} completed features to sync...\n`,
|
||||
projectPath,
|
||||
});
|
||||
|
||||
// Build new implemented features list from completed Automaker features
|
||||
const newImplementedFeatures: ImplementedFeature[] = [];
|
||||
const existingNames = new Set(currentImplementedFeatures.map((f) => f.name.toLowerCase()));
|
||||
|
||||
for (const feature of completedFeatures) {
|
||||
const name = feature.title || `Feature: ${feature.id}`;
|
||||
if (!existingNames.has(name.toLowerCase())) {
|
||||
newImplementedFeatures.push({
|
||||
name,
|
||||
description: feature.description || '',
|
||||
});
|
||||
result.implementedFeaturesUpdates.addedFromFeatures.push(name);
|
||||
}
|
||||
}
|
||||
|
||||
// Merge: keep existing + add new from completed features
|
||||
const mergedFeatures = [...currentImplementedFeatures, ...newImplementedFeatures];
|
||||
|
||||
// Update spec with merged features
|
||||
if (result.implementedFeaturesUpdates.addedFromFeatures.length > 0) {
|
||||
specContent = updateImplementedFeaturesSection(specContent, mergedFeatures);
|
||||
logger.info(
|
||||
`Added ${result.implementedFeaturesUpdates.addedFromFeatures.length} features to spec`
|
||||
);
|
||||
}
|
||||
|
||||
// Analyze codebase for tech stack updates using AI
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: 'Analyzing codebase for technology updates...\n',
|
||||
projectPath,
|
||||
});
|
||||
|
||||
const autoLoadClaudeMd = await getAutoLoadClaudeMdSetting(
|
||||
projectPath,
|
||||
settingsService,
|
||||
'[SpecSync]'
|
||||
);
|
||||
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.specGenerationModel || DEFAULT_PHASE_MODELS.specGenerationModel;
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
// Use AI to analyze tech stack
|
||||
const techAnalysisPrompt = `Analyze this project and return ONLY a JSON object with the current technology stack.
|
||||
|
||||
Current known technologies: ${currentTechStack.join(', ')}
|
||||
|
||||
Look at package.json, config files, and source code to identify:
|
||||
- Frameworks (React, Vue, Express, etc.)
|
||||
- Languages (TypeScript, JavaScript, Python, etc.)
|
||||
- Build tools (Vite, Webpack, etc.)
|
||||
- Databases (PostgreSQL, MongoDB, etc.)
|
||||
- Key libraries and tools
|
||||
|
||||
Return ONLY this JSON format, no other text:
|
||||
{
|
||||
"technologies": ["Technology 1", "Technology 2", ...]
|
||||
}`;
|
||||
|
||||
try {
|
||||
const techResult = await streamingQuery({
|
||||
prompt: techAnalysisPrompt,
|
||||
model,
|
||||
cwd: projectPath,
|
||||
maxTurns: 10,
|
||||
allowedTools: ['Read', 'Glob', 'Grep'],
|
||||
abortController,
|
||||
thinkingLevel,
|
||||
readOnly: true,
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
onText: (text) => {
|
||||
logger.debug(`Tech analysis text: ${text.substring(0, 100)}`);
|
||||
},
|
||||
});
|
||||
|
||||
// Parse tech stack from response
|
||||
const jsonMatch = techResult.text.match(/\{[\s\S]*"technologies"[\s\S]*\}/);
|
||||
if (jsonMatch) {
|
||||
const parsed = JSON.parse(jsonMatch[0]);
|
||||
if (Array.isArray(parsed.technologies)) {
|
||||
const newTechStack = parsed.technologies as string[];
|
||||
|
||||
// Calculate differences
|
||||
const currentSet = new Set(currentTechStack.map((t) => t.toLowerCase()));
|
||||
const newSet = new Set(newTechStack.map((t) => t.toLowerCase()));
|
||||
|
||||
for (const tech of newTechStack) {
|
||||
if (!currentSet.has(tech.toLowerCase())) {
|
||||
result.techStackUpdates.added.push(tech);
|
||||
}
|
||||
}
|
||||
|
||||
for (const tech of currentTechStack) {
|
||||
if (!newSet.has(tech.toLowerCase())) {
|
||||
result.techStackUpdates.removed.push(tech);
|
||||
}
|
||||
}
|
||||
|
||||
// Update spec with new tech stack if there are changes
|
||||
if (
|
||||
result.techStackUpdates.added.length > 0 ||
|
||||
result.techStackUpdates.removed.length > 0
|
||||
) {
|
||||
specContent = updateTechnologyStack(specContent, newTechStack);
|
||||
logger.info(
|
||||
`Updated tech stack: +${result.techStackUpdates.added.length}, -${result.techStackUpdates.removed.length}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn('Failed to analyze tech stack:', error);
|
||||
// Continue with other sync operations
|
||||
}
|
||||
|
||||
// Update roadmap phase statuses based on completed features
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: 'Checking roadmap phase statuses...\n',
|
||||
projectPath,
|
||||
});
|
||||
|
||||
// For each phase, check if all its features are completed
|
||||
// This is a heuristic - we check if the phase name appears in any feature titles/descriptions
|
||||
for (const phase of currentRoadmapPhases) {
|
||||
if (phase.status === 'completed') continue; // Already completed
|
||||
|
||||
// Check if this phase should be marked as completed
|
||||
// A phase is considered complete if we have completed features that mention it
|
||||
const phaseNameLower = phase.name.toLowerCase();
|
||||
const relatedCompletedFeatures = completedFeatures.filter(
|
||||
(f) =>
|
||||
f.title?.toLowerCase().includes(phaseNameLower) ||
|
||||
f.description?.toLowerCase().includes(phaseNameLower) ||
|
||||
f.category?.toLowerCase().includes(phaseNameLower)
|
||||
);
|
||||
|
||||
// If we have related completed features and the phase is still pending/in_progress,
|
||||
// update it to in_progress or completed based on feature count
|
||||
if (relatedCompletedFeatures.length > 0 && phase.status !== 'completed') {
|
||||
const newStatus = 'in_progress';
|
||||
specContent = updateRoadmapPhaseStatus(specContent, phase.name, newStatus);
|
||||
result.roadmapUpdates.push({ phaseName: phase.name, newStatus });
|
||||
logger.info(`Updated phase "${phase.name}" to ${newStatus}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Save updated spec
|
||||
await secureFs.writeFile(specPath, specContent, 'utf-8');
|
||||
logger.info('Spec saved successfully');
|
||||
|
||||
// Build summary
|
||||
const summaryParts: string[] = [];
|
||||
if (result.implementedFeaturesUpdates.addedFromFeatures.length > 0) {
|
||||
summaryParts.push(
|
||||
`Added ${result.implementedFeaturesUpdates.addedFromFeatures.length} implemented features`
|
||||
);
|
||||
}
|
||||
if (result.techStackUpdates.added.length > 0) {
|
||||
summaryParts.push(`Added ${result.techStackUpdates.added.length} technologies`);
|
||||
}
|
||||
if (result.techStackUpdates.removed.length > 0) {
|
||||
summaryParts.push(`Removed ${result.techStackUpdates.removed.length} technologies`);
|
||||
}
|
||||
if (result.roadmapUpdates.length > 0) {
|
||||
summaryParts.push(`Updated ${result.roadmapUpdates.length} roadmap phases`);
|
||||
}
|
||||
|
||||
result.summary = summaryParts.length > 0 ? summaryParts.join(', ') : 'Spec is already up to date';
|
||||
|
||||
// Create notification
|
||||
const notificationService = getNotificationService();
|
||||
await notificationService.createNotification({
|
||||
type: 'spec_regeneration_complete',
|
||||
title: 'Spec Sync Complete',
|
||||
message: result.summary,
|
||||
projectPath,
|
||||
});
|
||||
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_complete',
|
||||
message: `Spec sync complete! ${result.summary}`,
|
||||
projectPath,
|
||||
});
|
||||
|
||||
logger.info('========== syncSpec() completed ==========');
|
||||
logger.info('Summary:', result.summary);
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -3,12 +3,31 @@
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { ensureAutomakerDir, getAutomakerDir } from '@automaker/platform';
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import path from 'path';
|
||||
import type { BacklogPlanResult } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('BacklogPlan');
|
||||
|
||||
// State for tracking running generation
|
||||
let isRunning = false;
|
||||
let currentAbortController: AbortController | null = null;
|
||||
let runningDetails: {
|
||||
projectPath: string;
|
||||
prompt: string;
|
||||
model?: string;
|
||||
startedAt: string;
|
||||
} | null = null;
|
||||
|
||||
const BACKLOG_PLAN_FILENAME = 'backlog-plan.json';
|
||||
|
||||
export interface StoredBacklogPlan {
|
||||
savedAt: string;
|
||||
prompt: string;
|
||||
model?: string;
|
||||
result: BacklogPlanResult;
|
||||
}
|
||||
|
||||
export function getBacklogPlanStatus(): { isRunning: boolean } {
|
||||
return { isRunning };
|
||||
@@ -16,11 +35,67 @@ export function getBacklogPlanStatus(): { isRunning: boolean } {
|
||||
|
||||
export function setRunningState(running: boolean, abortController?: AbortController | null): void {
|
||||
isRunning = running;
|
||||
if (!running) {
|
||||
runningDetails = null;
|
||||
}
|
||||
if (abortController !== undefined) {
|
||||
currentAbortController = abortController;
|
||||
}
|
||||
}
|
||||
|
||||
export function setRunningDetails(
|
||||
details: {
|
||||
projectPath: string;
|
||||
prompt: string;
|
||||
model?: string;
|
||||
startedAt: string;
|
||||
} | null
|
||||
): void {
|
||||
runningDetails = details;
|
||||
}
|
||||
|
||||
export function getRunningDetails(): {
|
||||
projectPath: string;
|
||||
prompt: string;
|
||||
model?: string;
|
||||
startedAt: string;
|
||||
} | null {
|
||||
return runningDetails;
|
||||
}
|
||||
|
||||
function getBacklogPlanPath(projectPath: string): string {
|
||||
return path.join(getAutomakerDir(projectPath), BACKLOG_PLAN_FILENAME);
|
||||
}
|
||||
|
||||
export async function saveBacklogPlan(projectPath: string, plan: StoredBacklogPlan): Promise<void> {
|
||||
await ensureAutomakerDir(projectPath);
|
||||
const filePath = getBacklogPlanPath(projectPath);
|
||||
await secureFs.writeFile(filePath, JSON.stringify(plan, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
export async function loadBacklogPlan(projectPath: string): Promise<StoredBacklogPlan | null> {
|
||||
try {
|
||||
const filePath = getBacklogPlanPath(projectPath);
|
||||
const raw = await secureFs.readFile(filePath, 'utf-8');
|
||||
const parsed = JSON.parse(raw as string) as StoredBacklogPlan;
|
||||
if (!Array.isArray(parsed?.result?.changes)) {
|
||||
return null;
|
||||
}
|
||||
return parsed;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export async function clearBacklogPlan(projectPath: string): Promise<void> {
|
||||
try {
|
||||
const filePath = getBacklogPlanPath(projectPath);
|
||||
await secureFs.unlink(filePath);
|
||||
} catch {
|
||||
// ignore missing file
|
||||
}
|
||||
}
|
||||
|
||||
export function getAbortController(): AbortController | null {
|
||||
return currentAbortController;
|
||||
}
|
||||
|
||||
@@ -17,7 +17,13 @@ import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
import { ProviderFactory } from '../../providers/provider-factory.js';
|
||||
import { extractJsonWithArray } from '../../lib/json-extractor.js';
|
||||
import { logger, setRunningState, getErrorMessage } from './common.js';
|
||||
import {
|
||||
logger,
|
||||
setRunningState,
|
||||
setRunningDetails,
|
||||
getErrorMessage,
|
||||
saveBacklogPlan,
|
||||
} from './common.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
||||
|
||||
@@ -200,6 +206,13 @@ ${userPrompt}`;
|
||||
// Parse the response
|
||||
const result = parsePlanResponse(responseText);
|
||||
|
||||
await saveBacklogPlan(projectPath, {
|
||||
savedAt: new Date().toISOString(),
|
||||
prompt,
|
||||
model: effectiveModel,
|
||||
result,
|
||||
});
|
||||
|
||||
events.emit('backlog-plan:event', {
|
||||
type: 'backlog_plan_complete',
|
||||
result,
|
||||
@@ -218,5 +231,6 @@ ${userPrompt}`;
|
||||
throw error;
|
||||
} finally {
|
||||
setRunningState(false, null);
|
||||
setRunningDetails(null);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import { createGenerateHandler } from './routes/generate.js';
|
||||
import { createStopHandler } from './routes/stop.js';
|
||||
import { createStatusHandler } from './routes/status.js';
|
||||
import { createApplyHandler } from './routes/apply.js';
|
||||
import { createClearHandler } from './routes/clear.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
|
||||
export function createBacklogPlanRoutes(
|
||||
@@ -23,8 +24,9 @@ export function createBacklogPlanRoutes(
|
||||
createGenerateHandler(events, settingsService)
|
||||
);
|
||||
router.post('/stop', createStopHandler());
|
||||
router.get('/status', createStatusHandler());
|
||||
router.get('/status', validatePathParams('projectPath'), createStatusHandler());
|
||||
router.post('/apply', validatePathParams('projectPath'), createApplyHandler());
|
||||
router.post('/clear', validatePathParams('projectPath'), createClearHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
import type { Request, Response } from 'express';
|
||||
import type { BacklogPlanResult, BacklogChange, Feature } from '@automaker/types';
|
||||
import { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import { getErrorMessage, logError, logger } from '../common.js';
|
||||
import { clearBacklogPlan, getErrorMessage, logError, logger } from '../common.js';
|
||||
|
||||
const featureLoader = new FeatureLoader();
|
||||
|
||||
@@ -147,6 +147,17 @@ export function createApplyHandler() {
|
||||
}
|
||||
}
|
||||
|
||||
// Clear the plan before responding
|
||||
try {
|
||||
await clearBacklogPlan(projectPath);
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`[BacklogPlan] Failed to clear backlog plan after apply:`,
|
||||
getErrorMessage(error)
|
||||
);
|
||||
// Don't throw - operation succeeded, just cleanup failed
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
appliedChanges,
|
||||
|
||||
25
apps/server/src/routes/backlog-plan/routes/clear.ts
Normal file
25
apps/server/src/routes/backlog-plan/routes/clear.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* POST /clear endpoint - Clear saved backlog plan
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { clearBacklogPlan, getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createClearHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath required' });
|
||||
return;
|
||||
}
|
||||
|
||||
await clearBacklogPlan(projectPath);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, 'Clear backlog plan failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -4,7 +4,13 @@
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { getBacklogPlanStatus, setRunningState, getErrorMessage, logError } from '../common.js';
|
||||
import {
|
||||
getBacklogPlanStatus,
|
||||
setRunningState,
|
||||
setRunningDetails,
|
||||
getErrorMessage,
|
||||
logError,
|
||||
} from '../common.js';
|
||||
import { generateBacklogPlan } from '../generate-plan.js';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
|
||||
@@ -37,6 +43,12 @@ export function createGenerateHandler(events: EventEmitter, settingsService?: Se
|
||||
}
|
||||
|
||||
setRunningState(true);
|
||||
setRunningDetails({
|
||||
projectPath,
|
||||
prompt,
|
||||
model,
|
||||
startedAt: new Date().toISOString(),
|
||||
});
|
||||
const abortController = new AbortController();
|
||||
setRunningState(true, abortController);
|
||||
|
||||
@@ -51,6 +63,7 @@ export function createGenerateHandler(events: EventEmitter, settingsService?: Se
|
||||
})
|
||||
.finally(() => {
|
||||
setRunningState(false, null);
|
||||
setRunningDetails(null);
|
||||
});
|
||||
|
||||
res.json({ success: true });
|
||||
|
||||
@@ -3,13 +3,15 @@
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { getBacklogPlanStatus, getErrorMessage, logError } from '../common.js';
|
||||
import { getBacklogPlanStatus, loadBacklogPlan, getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStatusHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const status = getBacklogPlanStatus();
|
||||
res.json({ success: true, ...status });
|
||||
const projectPath = typeof req.query.projectPath === 'string' ? req.query.projectPath : '';
|
||||
const savedPlan = projectPath ? await loadBacklogPlan(projectPath) : null;
|
||||
res.json({ success: true, ...status, savedPlan });
|
||||
} catch (error) {
|
||||
logError(error, 'Get backlog plan status failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
|
||||
@@ -3,7 +3,13 @@
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { getAbortController, setRunningState, getErrorMessage, logError } from '../common.js';
|
||||
import {
|
||||
getAbortController,
|
||||
setRunningState,
|
||||
setRunningDetails,
|
||||
getErrorMessage,
|
||||
logError,
|
||||
} from '../common.js';
|
||||
|
||||
export function createStopHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -12,6 +18,7 @@ export function createStopHandler() {
|
||||
if (abortController) {
|
||||
abortController.abort();
|
||||
setRunningState(false, null);
|
||||
setRunningDetails(null);
|
||||
}
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
|
||||
@@ -19,7 +19,10 @@ import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
import * as path from 'path';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../../lib/settings-helpers.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
} from '../../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('DescribeFile');
|
||||
|
||||
@@ -130,11 +133,12 @@ export function createDescribeFileHandler(
|
||||
// Get the filename for context
|
||||
const fileName = path.basename(resolvedPath);
|
||||
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(settingsService, '[DescribeFile]');
|
||||
|
||||
// Build prompt with file content passed as structured data
|
||||
// The file content is included directly, not via tool invocation
|
||||
const prompt = `Analyze the following file and provide a 1-2 sentence description suitable for use as context in an AI coding assistant. Focus on what the file contains, its purpose, and why an AI agent might want to use this context in the future (e.g., "API documentation for the authentication endpoints", "Configuration file for database connections", "Coding style guidelines for the project").
|
||||
|
||||
Respond with ONLY the description text, no additional formatting, preamble, or explanation.
|
||||
const prompt = `${prompts.contextDescription.describeFilePrompt}
|
||||
|
||||
File: ${fileName}${truncated ? ' (truncated)' : ''}
|
||||
|
||||
|
||||
@@ -19,7 +19,10 @@ import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
import * as path from 'path';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../../lib/settings-helpers.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
} from '../../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('DescribeImage');
|
||||
|
||||
@@ -278,12 +281,11 @@ export function createDescribeImageHandler(
|
||||
|
||||
logger.info(`[${requestId}] Using model: ${model}`);
|
||||
|
||||
// Build the instruction text
|
||||
const instructionText =
|
||||
`Describe this image in 1-2 sentences suitable for use as context in an AI coding assistant. ` +
|
||||
`Focus on what the image shows and its purpose (e.g., "UI mockup showing login form with email/password fields", ` +
|
||||
`"Architecture diagram of microservices", "Screenshot of error message in terminal").\n\n` +
|
||||
`Respond with ONLY the description text, no additional formatting, preamble, or explanation.`;
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(settingsService, '[DescribeImage]');
|
||||
|
||||
// Build the instruction text from centralized prompts
|
||||
const instructionText = prompts.contextDescription.describeImagePrompt;
|
||||
|
||||
// Build prompt based on provider capability
|
||||
// Some providers (like Cursor) may not support image content blocks
|
||||
|
||||
19
apps/server/src/routes/event-history/common.ts
Normal file
19
apps/server/src/routes/event-history/common.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
/**
|
||||
* Common utilities for event history routes
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
||||
|
||||
/** Logger instance for event history operations */
|
||||
export const logger = createLogger('EventHistory');
|
||||
|
||||
/**
|
||||
* Extract user-friendly error message from error objects
|
||||
*/
|
||||
export { getErrorMessageShared as getErrorMessage };
|
||||
|
||||
/**
|
||||
* Log error with automatic logger binding
|
||||
*/
|
||||
export const logError = createLogError(logger);
|
||||
68
apps/server/src/routes/event-history/index.ts
Normal file
68
apps/server/src/routes/event-history/index.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
/**
|
||||
* Event History routes - HTTP API for event history management
|
||||
*
|
||||
* Provides endpoints for:
|
||||
* - Listing events with filtering
|
||||
* - Getting individual event details
|
||||
* - Deleting events
|
||||
* - Clearing all events
|
||||
* - Replaying events to test hooks
|
||||
*
|
||||
* Mounted at /api/event-history in the main server.
|
||||
*/
|
||||
|
||||
import { Router } from 'express';
|
||||
import type { EventHistoryService } from '../../services/event-history-service.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import { validatePathParams } from '../../middleware/validate-paths.js';
|
||||
import { createListHandler } from './routes/list.js';
|
||||
import { createGetHandler } from './routes/get.js';
|
||||
import { createDeleteHandler } from './routes/delete.js';
|
||||
import { createClearHandler } from './routes/clear.js';
|
||||
import { createReplayHandler } from './routes/replay.js';
|
||||
|
||||
/**
|
||||
* Create event history router with all endpoints
|
||||
*
|
||||
* Endpoints:
|
||||
* - POST /list - List events with optional filtering
|
||||
* - POST /get - Get a single event by ID
|
||||
* - POST /delete - Delete an event by ID
|
||||
* - POST /clear - Clear all events for a project
|
||||
* - POST /replay - Replay an event to trigger hooks
|
||||
*
|
||||
* @param eventHistoryService - Instance of EventHistoryService
|
||||
* @param settingsService - Instance of SettingsService (for replay)
|
||||
* @returns Express Router configured with all event history endpoints
|
||||
*/
|
||||
export function createEventHistoryRoutes(
|
||||
eventHistoryService: EventHistoryService,
|
||||
settingsService: SettingsService
|
||||
): Router {
|
||||
const router = Router();
|
||||
|
||||
// List events with filtering
|
||||
router.post('/list', validatePathParams('projectPath'), createListHandler(eventHistoryService));
|
||||
|
||||
// Get single event
|
||||
router.post('/get', validatePathParams('projectPath'), createGetHandler(eventHistoryService));
|
||||
|
||||
// Delete event
|
||||
router.post(
|
||||
'/delete',
|
||||
validatePathParams('projectPath'),
|
||||
createDeleteHandler(eventHistoryService)
|
||||
);
|
||||
|
||||
// Clear all events
|
||||
router.post('/clear', validatePathParams('projectPath'), createClearHandler(eventHistoryService));
|
||||
|
||||
// Replay event
|
||||
router.post(
|
||||
'/replay',
|
||||
validatePathParams('projectPath'),
|
||||
createReplayHandler(eventHistoryService, settingsService)
|
||||
);
|
||||
|
||||
return router;
|
||||
}
|
||||
33
apps/server/src/routes/event-history/routes/clear.ts
Normal file
33
apps/server/src/routes/event-history/routes/clear.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
|
||||
* POST /api/event-history/clear - Clear all events for a project
|
||||
*
|
||||
* Request body: { projectPath: string }
|
||||
* Response: { success: true, cleared: number }
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventHistoryService } from '../../../services/event-history-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createClearHandler(eventHistoryService: EventHistoryService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath || typeof projectPath !== 'string') {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const cleared = await eventHistoryService.clearEvents(projectPath);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
cleared,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Clear events failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
43
apps/server/src/routes/event-history/routes/delete.ts
Normal file
43
apps/server/src/routes/event-history/routes/delete.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
/**
|
||||
* POST /api/event-history/delete - Delete an event by ID
|
||||
*
|
||||
* Request body: { projectPath: string, eventId: string }
|
||||
* Response: { success: true } or { success: false, error: string }
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventHistoryService } from '../../../services/event-history-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createDeleteHandler(eventHistoryService: EventHistoryService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, eventId } = req.body as {
|
||||
projectPath: string;
|
||||
eventId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || typeof projectPath !== 'string') {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!eventId || typeof eventId !== 'string') {
|
||||
res.status(400).json({ success: false, error: 'eventId is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const deleted = await eventHistoryService.deleteEvent(projectPath, eventId);
|
||||
|
||||
if (!deleted) {
|
||||
res.status(404).json({ success: false, error: 'Event not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, 'Delete event failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
46
apps/server/src/routes/event-history/routes/get.ts
Normal file
46
apps/server/src/routes/event-history/routes/get.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* POST /api/event-history/get - Get a single event by ID
|
||||
*
|
||||
* Request body: { projectPath: string, eventId: string }
|
||||
* Response: { success: true, event: StoredEvent } or { success: false, error: string }
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventHistoryService } from '../../../services/event-history-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createGetHandler(eventHistoryService: EventHistoryService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, eventId } = req.body as {
|
||||
projectPath: string;
|
||||
eventId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || typeof projectPath !== 'string') {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!eventId || typeof eventId !== 'string') {
|
||||
res.status(400).json({ success: false, error: 'eventId is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const event = await eventHistoryService.getEvent(projectPath, eventId);
|
||||
|
||||
if (!event) {
|
||||
res.status(404).json({ success: false, error: 'Event not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
event,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Get event failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
53
apps/server/src/routes/event-history/routes/list.ts
Normal file
53
apps/server/src/routes/event-history/routes/list.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
/**
|
||||
* POST /api/event-history/list - List events for a project
|
||||
*
|
||||
* Request body: {
|
||||
* projectPath: string,
|
||||
* filter?: {
|
||||
* trigger?: EventHookTrigger,
|
||||
* featureId?: string,
|
||||
* since?: string,
|
||||
* until?: string,
|
||||
* limit?: number,
|
||||
* offset?: number
|
||||
* }
|
||||
* }
|
||||
* Response: { success: true, events: StoredEventSummary[], total: number }
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventHistoryService } from '../../../services/event-history-service.js';
|
||||
import type { EventHistoryFilter } from '@automaker/types';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createListHandler(eventHistoryService: EventHistoryService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, filter } = req.body as {
|
||||
projectPath: string;
|
||||
filter?: EventHistoryFilter;
|
||||
};
|
||||
|
||||
if (!projectPath || typeof projectPath !== 'string') {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const events = await eventHistoryService.getEvents(projectPath, filter);
|
||||
const total = await eventHistoryService.getEventCount(projectPath, {
|
||||
...filter,
|
||||
limit: undefined,
|
||||
offset: undefined,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
events,
|
||||
total,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'List events failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
234
apps/server/src/routes/event-history/routes/replay.ts
Normal file
@@ -0,0 +1,234 @@
/**
 * POST /api/event-history/replay - Replay an event to trigger hooks
 *
 * Request body: {
 *   projectPath: string,
 *   eventId: string,
 *   hookIds?: string[] // Optional: specific hooks to run (if not provided, runs all enabled matching hooks)
 * }
 * Response: { success: true, result: EventReplayResult }
 */

import type { Request, Response } from 'express';
import type { EventHistoryService } from '../../../services/event-history-service.js';
import type { SettingsService } from '../../../services/settings-service.js';
import type { EventReplayResult, EventReplayHookResult, EventHook } from '@automaker/types';
import { exec } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError, logger } from '../common.js';

const execAsync = promisify(exec);

/** Default timeout for shell commands (30 seconds) */
const DEFAULT_SHELL_TIMEOUT = 30000;

/** Default timeout for HTTP requests (10 seconds) */
const DEFAULT_HTTP_TIMEOUT = 10000;

interface HookContext {
  featureId?: string;
  featureName?: string;
  projectPath?: string;
  projectName?: string;
  error?: string;
  errorType?: string;
  timestamp: string;
  eventType: string;
}

/**
 * Substitute {{variable}} placeholders in a string
 */
function substituteVariables(template: string, context: HookContext): string {
  return template.replace(/\{\{(\w+)\}\}/g, (match, variable) => {
    const value = context[variable as keyof HookContext];
    if (value === undefined || value === null) {
      return '';
    }
    return String(value);
  });
}

/**
 * Execute a single hook and return the result
 */
async function executeHook(hook: EventHook, context: HookContext): Promise<EventReplayHookResult> {
  const hookName = hook.name || hook.id;
  const startTime = Date.now();

  try {
    if (hook.action.type === 'shell') {
      const command = substituteVariables(hook.action.command, context);
      const timeout = hook.action.timeout || DEFAULT_SHELL_TIMEOUT;

      logger.info(`Replaying shell hook "${hookName}": ${command}`);

      await execAsync(command, {
        timeout,
        maxBuffer: 1024 * 1024,
      });

      return {
        hookId: hook.id,
        hookName: hook.name,
        success: true,
        durationMs: Date.now() - startTime,
      };
    } else if (hook.action.type === 'http') {
      const url = substituteVariables(hook.action.url, context);
      const method = hook.action.method || 'POST';

      const headers: Record<string, string> = {
        'Content-Type': 'application/json',
      };
      if (hook.action.headers) {
        for (const [key, value] of Object.entries(hook.action.headers)) {
          headers[key] = substituteVariables(value, context);
        }
      }

      let body: string | undefined;
      if (hook.action.body) {
        body = substituteVariables(hook.action.body, context);
      } else if (method !== 'GET') {
        body = JSON.stringify({
          eventType: context.eventType,
          timestamp: context.timestamp,
          featureId: context.featureId,
          projectPath: context.projectPath,
          projectName: context.projectName,
          error: context.error,
        });
      }

      logger.info(`Replaying HTTP hook "${hookName}": ${method} ${url}`);

      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), DEFAULT_HTTP_TIMEOUT);

      const response = await fetch(url, {
        method,
        headers,
        body: method !== 'GET' ? body : undefined,
        signal: controller.signal,
      });

      clearTimeout(timeoutId);

      if (!response.ok) {
        return {
          hookId: hook.id,
          hookName: hook.name,
          success: false,
          error: `HTTP ${response.status}: ${response.statusText}`,
          durationMs: Date.now() - startTime,
        };
      }

      return {
        hookId: hook.id,
        hookName: hook.name,
        success: true,
        durationMs: Date.now() - startTime,
      };
    }

    return {
      hookId: hook.id,
      hookName: hook.name,
      success: false,
      error: 'Unknown hook action type',
      durationMs: Date.now() - startTime,
    };
  } catch (error) {
    const errorMessage =
      error instanceof Error
        ? error.name === 'AbortError'
          ? 'Request timed out'
          : error.message
        : String(error);

    return {
      hookId: hook.id,
      hookName: hook.name,
      success: false,
      error: errorMessage,
      durationMs: Date.now() - startTime,
    };
  }
}

export function createReplayHandler(
  eventHistoryService: EventHistoryService,
  settingsService: SettingsService
) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, eventId, hookIds } = req.body as {
        projectPath: string;
        eventId: string;
        hookIds?: string[];
      };

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      if (!eventId || typeof eventId !== 'string') {
        res.status(400).json({ success: false, error: 'eventId is required' });
        return;
      }

      // Get the event
      const event = await eventHistoryService.getEvent(projectPath, eventId);
      if (!event) {
        res.status(404).json({ success: false, error: 'Event not found' });
        return;
      }

      // Get hooks from settings
      const settings = await settingsService.getGlobalSettings();
      let hooks = settings.eventHooks || [];

      // Filter to matching trigger and enabled hooks
      hooks = hooks.filter((h) => h.enabled && h.trigger === event.trigger);

      // If specific hook IDs requested, filter to those
      if (hookIds && hookIds.length > 0) {
        hooks = hooks.filter((h) => hookIds.includes(h.id));
      }

      // Build context for variable substitution
      const context: HookContext = {
        featureId: event.featureId,
        featureName: event.featureName,
        projectPath: event.projectPath,
        projectName: event.projectName,
        error: event.error,
        errorType: event.errorType,
        timestamp: event.timestamp,
        eventType: event.trigger,
      };

      // Execute all hooks in parallel
      const hookResults = await Promise.all(hooks.map((hook) => executeHook(hook, context)));

      const result: EventReplayResult = {
        eventId,
        hooksTriggered: hooks.length,
        hookResults,
      };

      logger.info(`Replayed event ${eventId}: ${hooks.length} hooks triggered`);

      res.json({
        success: true,
        result,
      });
    } catch (error) {
      logError(error, 'Replay event failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
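A minimal sketch of calling the new replay endpoint from a client. The base URL (the server's default port 3008), the project path, and the IDs below are placeholders, not values taken from this diff:

// Hypothetical client call for POST /api/event-history/replay
async function replayEvent() {
  const res = await fetch('http://localhost:3008/api/event-history/replay', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      projectPath: '/path/to/project', // placeholder
      eventId: 'evt-123',              // placeholder
      hookIds: ['notify-slack'],       // optional: replay only these hooks
    }),
  });
  const { result } = await res.json();
  // result.hookResults carries per-hook success, error, and durationMs
  return result;
}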
@@ -4,6 +4,8 @@

import { Router } from 'express';
import { FeatureLoader } from '../../services/feature-loader.js';
import type { SettingsService } from '../../services/settings-service.js';
import type { EventEmitter } from '../../lib/events.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createListHandler } from './routes/list.js';
import { createGetHandler } from './routes/get.js';
@@ -15,12 +17,20 @@ import { createDeleteHandler } from './routes/delete.js';
import { createAgentOutputHandler, createRawOutputHandler } from './routes/agent-output.js';
import { createGenerateTitleHandler } from './routes/generate-title.js';

export function createFeaturesRoutes(featureLoader: FeatureLoader): Router {
export function createFeaturesRoutes(
  featureLoader: FeatureLoader,
  settingsService?: SettingsService,
  events?: EventEmitter
): Router {
  const router = Router();

  router.post('/list', validatePathParams('projectPath'), createListHandler(featureLoader));
  router.post('/get', validatePathParams('projectPath'), createGetHandler(featureLoader));
  router.post('/create', validatePathParams('projectPath'), createCreateHandler(featureLoader));
  router.post(
    '/create',
    validatePathParams('projectPath'),
    createCreateHandler(featureLoader, events)
  );
  router.post('/update', validatePathParams('projectPath'), createUpdateHandler(featureLoader));
  router.post(
    '/bulk-update',
@@ -35,7 +45,7 @@ export function createFeaturesRoutes(featureLoader: FeatureLoader): Router {
  router.post('/delete', validatePathParams('projectPath'), createDeleteHandler(featureLoader));
  router.post('/agent-output', createAgentOutputHandler(featureLoader));
  router.post('/raw-output', createRawOutputHandler(featureLoader));
  router.post('/generate-title', createGenerateTitleHandler());
  router.post('/generate-title', createGenerateTitleHandler(settingsService));

  return router;
}
@@ -30,19 +30,27 @@ export function createBulkDeleteHandler(featureLoader: FeatureLoader) {
      return;
    }

    const results = await Promise.all(
      featureIds.map(async (featureId) => {
        const success = await featureLoader.delete(projectPath, featureId);
        if (success) {
          return { featureId, success: true };
        }
        return {
          featureId,
          success: false,
          error: 'Deletion failed. Check server logs for details.',
        };
      })
    );
    // Process in parallel batches of 20 for efficiency
    const BATCH_SIZE = 20;
    const results: BulkDeleteResult[] = [];

    for (let i = 0; i < featureIds.length; i += BATCH_SIZE) {
      const batch = featureIds.slice(i, i + BATCH_SIZE);
      const batchResults = await Promise.all(
        batch.map(async (featureId) => {
          const success = await featureLoader.delete(projectPath, featureId);
          if (success) {
            return { featureId, success: true };
          }
          return {
            featureId,
            success: false,
            error: 'Deletion failed. Check server logs for details.',
          };
        })
      );
      results.push(...batchResults);
    }

    const successCount = results.reduce((count, r) => count + (r.success ? 1 : 0), 0);
    const failureCount = results.length - successCount;

@@ -43,17 +43,36 @@ export function createBulkUpdateHandler(featureLoader: FeatureLoader) {
    const results: BulkUpdateResult[] = [];
    const updatedFeatures: Feature[] = [];

    for (const featureId of featureIds) {
      try {
        const updated = await featureLoader.update(projectPath, featureId, updates);
        results.push({ featureId, success: true });
        updatedFeatures.push(updated);
      } catch (error) {
        results.push({
          featureId,
          success: false,
          error: getErrorMessage(error),
        });
    // Process in parallel batches of 20 for efficiency
    const BATCH_SIZE = 20;
    for (let i = 0; i < featureIds.length; i += BATCH_SIZE) {
      const batch = featureIds.slice(i, i + BATCH_SIZE);
      const batchResults = await Promise.all(
        batch.map(async (featureId) => {
          try {
            const updated = await featureLoader.update(projectPath, featureId, updates);
            return { featureId, success: true as const, feature: updated };
          } catch (error) {
            return {
              featureId,
              success: false as const,
              error: getErrorMessage(error),
            };
          }
        })
      );

      for (const result of batchResults) {
        if (result.success) {
          results.push({ featureId: result.featureId, success: true });
          updatedFeatures.push(result.feature);
        } else {
          results.push({
            featureId: result.featureId,
            success: false,
            error: result.error,
          });
        }
      }
    }

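Both bulk handlers above now bound concurrency by processing feature IDs in batches of 20. An illustrative, generic version of that pattern (the helper name and generics are not part of the codebase):

// Sketch: run a worker over items in parallel batches of `batchSize`.
async function processInBatches<T, R>(
  items: T[],
  batchSize: number,
  worker: (item: T) => Promise<R>
): Promise<R[]> {
  const results: R[] = [];
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize);
    // Items within a batch run concurrently; batches run one after another.
    results.push(...(await Promise.all(batch.map(worker))));
  }
  return results;
}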
@@ -4,10 +4,11 @@

import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import type { EventEmitter } from '../../../lib/events.js';
import type { Feature } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';

export function createCreateHandler(featureLoader: FeatureLoader) {
export function createCreateHandler(featureLoader: FeatureLoader, events?: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, feature } = req.body as {
@@ -23,7 +24,30 @@ export function createCreateHandler(featureLoader: FeatureLoader) {
        return;
      }

      // Check for duplicate title if title is provided
      if (feature.title && feature.title.trim()) {
        const duplicate = await featureLoader.findDuplicateTitle(projectPath, feature.title);
        if (duplicate) {
          res.status(409).json({
            success: false,
            error: `A feature with title "${feature.title}" already exists`,
            duplicateFeatureId: duplicate.id,
          });
          return;
        }
      }

      const created = await featureLoader.create(projectPath, feature);

      // Emit feature_created event for hooks
      if (events) {
        events.emit('feature:created', {
          featureId: created.id,
          featureName: created.name,
          projectPath,
        });
      }

      res.json({ success: true, feature: created });
    } catch (error) {
      logError(error, 'Create feature failed');

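A hedged example of how a caller might react to the new duplicate-title conflict; the URL, mount point, and payload values are assumptions:

// Hypothetical create call handling the 409 response added above
async function createFeature() {
  const res = await fetch('http://localhost:3008/api/features/create', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      projectPath: '/path/to/project',                // placeholder
      feature: { title: 'Add dark mode toggle' },     // placeholder
    }),
  });
  if (res.status === 409) {
    const { error, duplicateFeatureId } = await res.json();
    console.warn(`${error} (existing feature: ${duplicateFeatureId})`);
  }
}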
@@ -9,6 +9,8 @@ import type { Request, Response } from 'express';
import { createLogger } from '@automaker/utils';
import { CLAUDE_MODEL_MAP } from '@automaker/model-resolver';
import { simpleQuery } from '../../../providers/simple-query-service.js';
import type { SettingsService } from '../../../services/settings-service.js';
import { getPromptCustomization } from '../../../lib/settings-helpers.js';

const logger = createLogger('GenerateTitle');

@@ -26,16 +28,9 @@
  error: string;
}

const SYSTEM_PROMPT = `You are a title generator. Your task is to create a concise, descriptive title (5-10 words max) for a software feature based on its description.

Rules:
- Output ONLY the title, nothing else
- Keep it short and action-oriented (e.g., "Add dark mode toggle", "Fix login validation")
- Start with a verb when possible (Add, Fix, Update, Implement, Create, etc.)
- No quotes, periods, or extra formatting
- Capture the essence of the feature in a scannable way`;

export function createGenerateTitleHandler(): (req: Request, res: Response) => Promise<void> {
export function createGenerateTitleHandler(
  settingsService?: SettingsService
): (req: Request, res: Response) => Promise<void> {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { description } = req.body as GenerateTitleRequestBody;
@@ -61,11 +56,15 @@ export function createGenerateTitleHandler(): (req: Request, res: Response) => P

      logger.info(`Generating title for description: ${trimmedDescription.substring(0, 50)}...`);

      // Get customized prompts from settings
      const prompts = await getPromptCustomization(settingsService, '[GenerateTitle]');
      const systemPrompt = prompts.titleGeneration.systemPrompt;

      const userPrompt = `Generate a concise title for this feature:\n\n${trimmedDescription}`;

      // Use simpleQuery - provider abstraction handles all the streaming/extraction
      const result = await simpleQuery({
        prompt: `${SYSTEM_PROMPT}\n\n${userPrompt}`,
        prompt: `${systemPrompt}\n\n${userPrompt}`,
        model: CLAUDE_MODEL_MAP.haiku,
        cwd: process.cwd(),
        maxTurns: 1,

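For reference, a sketch of exercising the title-generation route once it receives the settings service; the mount path and response shape are assumptions:

// Hypothetical request to the generate-title handler shown above
async function generateTitle(description: string) {
  const res = await fetch('http://localhost:3008/api/features/generate-title', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ description }),
  });
  return res.json(); // assumed to contain the generated title on success
}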
@@ -4,8 +4,14 @@

import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import type { Feature } from '@automaker/types';
import type { Feature, FeatureStatus } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';
import { createLogger } from '@automaker/utils';

const logger = createLogger('features/update');

// Statuses that should trigger syncing to app_spec.txt
const SYNC_TRIGGER_STATUSES: FeatureStatus[] = ['verified', 'completed'];

export function createUpdateHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -34,6 +40,28 @@ export function createUpdateHandler(featureLoader: FeatureLoader) {
        return;
      }

      // Check for duplicate title if title is being updated
      if (updates.title && updates.title.trim()) {
        const duplicate = await featureLoader.findDuplicateTitle(
          projectPath,
          updates.title,
          featureId // Exclude the current feature from duplicate check
        );
        if (duplicate) {
          res.status(409).json({
            success: false,
            error: `A feature with title "${updates.title}" already exists`,
            duplicateFeatureId: duplicate.id,
          });
          return;
        }
      }

      // Get the current feature to detect status changes
      const currentFeature = await featureLoader.get(projectPath, featureId);
      const previousStatus = currentFeature?.status as FeatureStatus | undefined;
      const newStatus = updates.status as FeatureStatus | undefined;

      const updated = await featureLoader.update(
        projectPath,
        featureId,
@@ -42,6 +70,22 @@ export function createUpdateHandler(featureLoader: FeatureLoader) {
        enhancementMode,
        preEnhancementDescription
      );

      // Trigger sync to app_spec.txt when status changes to verified or completed
      if (newStatus && SYNC_TRIGGER_STATUSES.includes(newStatus) && previousStatus !== newStatus) {
        try {
          const synced = await featureLoader.syncFeatureToAppSpec(projectPath, updated);
          if (synced) {
            logger.info(
              `Synced feature "${updated.title || updated.id}" to app_spec.txt on status change to ${newStatus}`
            );
          }
        } catch (syncError) {
          // Log the sync error but don't fail the update operation
          logger.error(`Failed to sync feature to app_spec.txt:`, syncError);
        }
      }

      res.json({ success: true, feature: updated });
    } catch (error) {
      logError(error, 'Update feature failed');

@@ -30,11 +30,11 @@ import { writeValidation } from '../../../lib/validation-storage.js';
import { streamingQuery } from '../../../providers/simple-query-service.js';
import {
  issueValidationSchema,
  ISSUE_VALIDATION_SYSTEM_PROMPT,
  buildValidationPrompt,
  ValidationComment,
  ValidationLinkedPR,
} from './validation-schema.js';
import { getPromptCustomization } from '../../../lib/settings-helpers.js';
import {
  trySetValidationRunning,
  clearValidationStatus,
@@ -117,13 +117,17 @@ async function runValidation(

  let responseText = '';

  // Get customized prompts from settings
  const prompts = await getPromptCustomization(settingsService, '[ValidateIssue]');
  const issueValidationSystemPrompt = prompts.issueValidation.systemPrompt;

  // Determine if we should use structured output (Claude/Codex support it, Cursor/OpenCode don't)
  const useStructuredOutput = isClaudeModel(model) || isCodexModel(model);

  // Build the final prompt - for Cursor, include system prompt and JSON schema instructions
  let finalPrompt = basePrompt;
  if (!useStructuredOutput) {
    finalPrompt = `${ISSUE_VALIDATION_SYSTEM_PROMPT}
    finalPrompt = `${issueValidationSystemPrompt}

CRITICAL INSTRUCTIONS:
1. DO NOT write any files. Return the JSON in your response only.
@@ -167,7 +171,7 @@ ${basePrompt}`;
    prompt: finalPrompt,
    model: model as string,
    cwd: projectPath,
    systemPrompt: useStructuredOutput ? ISSUE_VALIDATION_SYSTEM_PROMPT : undefined,
    systemPrompt: useStructuredOutput ? issueValidationSystemPrompt : undefined,
    abortController,
    thinkingLevel: effectiveThinkingLevel,
    reasoningEffort: effectiveReasoningEffort,

@@ -1,8 +1,11 @@
/**
 * Issue Validation Schema and System Prompt
 * Issue Validation Schema and Prompt Building
 *
 * Defines the JSON schema for Claude's structured output and
 * the system prompt that guides the validation process.
 * helper functions for building validation prompts.
 *
 * Note: The system prompt is now centralized in @automaker/prompts
 * and accessed via getPromptCustomization() in validate-issue.ts
 */

/**
@@ -82,76 +85,6 @@ export const issueValidationSchema = {
  additionalProperties: false,
} as const;

/**
 * System prompt that guides Claude in validating GitHub issues.
 * Instructs the model to use read-only tools to analyze the codebase.
 */
export const ISSUE_VALIDATION_SYSTEM_PROMPT = `You are an expert code analyst validating GitHub issues against a codebase.

Your task is to analyze a GitHub issue and determine if it's valid by scanning the codebase.

## Validation Process

1. **Read the issue carefully** - Understand what is being reported or requested
2. **Search the codebase** - Use Glob to find relevant files by pattern, Grep to search for keywords
3. **Examine the code** - Use Read to look at the actual implementation in relevant files
4. **Check linked PRs** - If there are linked pull requests, use \`gh pr diff <PR_NUMBER>\` to review the changes
5. **Form your verdict** - Based on your analysis, determine if the issue is valid

## Verdicts

- **valid**: The issue describes a real problem that exists in the codebase, or a clear feature request that can be implemented. The referenced files/components exist and the issue is actionable.

- **invalid**: The issue describes behavior that doesn't exist, references non-existent files or components, is based on a misunderstanding of the code, or the described "bug" is actually expected behavior.

- **needs_clarification**: The issue lacks sufficient detail to verify. Specify what additional information is needed in the missingInfo field.

## For Bug Reports, Check:
- Do the referenced files/components exist?
- Does the code match what the issue describes?
- Is the described behavior actually a bug or expected?
- Can you locate the code that would cause the reported issue?

## For Feature Requests, Check:
- Does the feature already exist?
- Is the implementation location clear?
- Is the request technically feasible given the codebase structure?

## Analyzing Linked Pull Requests

When an issue has linked PRs (especially open ones), you MUST analyze them:

1. **Run \`gh pr diff <PR_NUMBER>\`** to see what changes the PR makes
2. **Run \`gh pr view <PR_NUMBER>\`** to see PR description and status
3. **Evaluate if the PR fixes the issue** - Does the diff address the reported problem?
4. **Provide a recommendation**:
   - \`wait_for_merge\`: The PR appears to fix the issue correctly. No additional work needed - just wait for it to be merged.
   - \`pr_needs_work\`: The PR attempts to fix the issue but is incomplete or has problems.
   - \`no_pr\`: No relevant PR exists for this issue.

5. **Include prAnalysis in your response** with:
   - hasOpenPR: true/false
   - prFixesIssue: true/false (based on diff analysis)
   - prNumber: the PR number you analyzed
   - prSummary: brief description of what the PR changes
   - recommendation: one of the above values

## Response Guidelines

- **Always include relatedFiles** when you find relevant code
- **Set bugConfirmed to true** only if you can definitively confirm a bug exists in the code
- **Provide a suggestedFix** when you have a clear idea of how to address the issue
- **Use missingInfo** when the verdict is needs_clarification to list what's needed
- **Include prAnalysis** when there are linked PRs - this is critical for avoiding duplicate work
- **Set estimatedComplexity** to help prioritize:
  - trivial: Simple text changes, one-line fixes
  - simple: Small changes to one file
  - moderate: Changes to multiple files or moderate logic changes
  - complex: Significant refactoring or new feature implementation
  - very_complex: Major architectural changes or cross-cutting concerns

Be thorough in your analysis but focus on files that are directly relevant to the issue.`;

/**
 * Comment data structure for validation prompt
 */

@@ -9,12 +9,14 @@ import type { Request, Response } from 'express';

export interface EnvironmentResponse {
  isContainerized: boolean;
  skipSandboxWarning?: boolean;
}

export function createEnvironmentHandler() {
  return (_req: Request, res: Response): void => {
    res.json({
      isContainerized: process.env.IS_CONTAINERIZED === 'true',
      skipSandboxWarning: process.env.AUTOMAKER_SKIP_SANDBOX_WARNING === 'true',
    } satisfies EnvironmentResponse);
  };
}

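The handler above simply reflects two environment flags. For example, with IS_CONTAINERIZED=true and AUTOMAKER_SKIP_SANDBOX_WARNING unset, the response would look like this (illustrative, not taken from the diff):

// { "isContainerized": true, "skipSandboxWarning": false }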
21
apps/server/src/routes/notifications/common.ts
Normal file
@@ -0,0 +1,21 @@
/**
 * Common utilities for notification routes
 *
 * Provides logger and error handling utilities shared across all notification endpoints.
 */

import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

/** Logger instance for notification-related operations */
export const logger = createLogger('Notifications');

/**
 * Extract user-friendly error message from error objects
 */
export { getErrorMessageShared as getErrorMessage };

/**
 * Log error with automatic logger binding
 */
export const logError = createLogError(logger);
62
apps/server/src/routes/notifications/index.ts
Normal file
@@ -0,0 +1,62 @@
/**
 * Notifications routes - HTTP API for project-level notifications
 *
 * Provides endpoints for:
 * - Listing notifications
 * - Getting unread count
 * - Marking notifications as read
 * - Dismissing notifications
 *
 * All endpoints use handler factories that receive the NotificationService instance.
 * Mounted at /api/notifications in the main server.
 */

import { Router } from 'express';
import type { NotificationService } from '../../services/notification-service.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createListHandler } from './routes/list.js';
import { createUnreadCountHandler } from './routes/unread-count.js';
import { createMarkReadHandler } from './routes/mark-read.js';
import { createDismissHandler } from './routes/dismiss.js';

/**
 * Create notifications router with all endpoints
 *
 * Endpoints:
 * - POST /list - List all notifications for a project
 * - POST /unread-count - Get unread notification count
 * - POST /mark-read - Mark notification(s) as read
 * - POST /dismiss - Dismiss notification(s)
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express Router configured with all notification endpoints
 */
export function createNotificationsRoutes(notificationService: NotificationService): Router {
  const router = Router();

  // List notifications
  router.post('/list', validatePathParams('projectPath'), createListHandler(notificationService));

  // Get unread count
  router.post(
    '/unread-count',
    validatePathParams('projectPath'),
    createUnreadCountHandler(notificationService)
  );

  // Mark as read (single or all)
  router.post(
    '/mark-read',
    validatePathParams('projectPath'),
    createMarkReadHandler(notificationService)
  );

  // Dismiss (single or all)
  router.post(
    '/dismiss',
    validatePathParams('projectPath'),
    createDismissHandler(notificationService)
  );

  return router;
}
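A minimal sketch of driving the new notification endpoints from a client; the base URL and project path are placeholders:

// Hypothetical calls against the router mounted at /api/notifications
async function notificationsDemo() {
  const headers = { 'Content-Type': 'application/json' };
  const body = JSON.stringify({ projectPath: '/path/to/project' }); // placeholder
  const base = 'http://localhost:3008/api/notifications';

  // Unread badge count
  const { count } = await (await fetch(`${base}/unread-count`, { method: 'POST', headers, body })).json();
  if (count > 0) {
    // Mark everything as read
    await fetch(`${base}/mark-read`, { method: 'POST', headers, body });
  }
}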
53
apps/server/src/routes/notifications/routes/dismiss.ts
Normal file
@@ -0,0 +1,53 @@
/**
 * POST /api/notifications/dismiss - Dismiss notification(s)
 *
 * Request body: { projectPath: string, notificationId?: string }
 * - If notificationId provided: dismisses that notification
 * - If notificationId not provided: dismisses all notifications
 *
 * Response: { success: true, dismissed: boolean | count: number }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/dismiss
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createDismissHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, notificationId } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      // If notificationId provided, dismiss single notification
      if (notificationId) {
        const dismissed = await notificationService.dismissNotification(
          projectPath,
          notificationId
        );
        if (!dismissed) {
          res.status(404).json({ success: false, error: 'Notification not found' });
          return;
        }
        res.json({ success: true, dismissed: true });
        return;
      }

      // Otherwise dismiss all
      const count = await notificationService.dismissAll(projectPath);
      res.json({ success: true, count });
    } catch (error) {
      logError(error, 'Dismiss failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
39
apps/server/src/routes/notifications/routes/list.ts
Normal file
@@ -0,0 +1,39 @@
/**
 * POST /api/notifications/list - List all notifications for a project
 *
 * Request body: { projectPath: string }
 * Response: { success: true, notifications: Notification[] }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/list
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createListHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const notifications = await notificationService.getNotifications(projectPath);

      res.json({
        success: true,
        notifications,
      });
    } catch (error) {
      logError(error, 'List notifications failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
50
apps/server/src/routes/notifications/routes/mark-read.ts
Normal file
@@ -0,0 +1,50 @@
/**
 * POST /api/notifications/mark-read - Mark notification(s) as read
 *
 * Request body: { projectPath: string, notificationId?: string }
 * - If notificationId provided: marks that notification as read
 * - If notificationId not provided: marks all notifications as read
 *
 * Response: { success: true, count?: number, notification?: Notification }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/mark-read
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createMarkReadHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, notificationId } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      // If notificationId provided, mark single notification
      if (notificationId) {
        const notification = await notificationService.markAsRead(projectPath, notificationId);
        if (!notification) {
          res.status(404).json({ success: false, error: 'Notification not found' });
          return;
        }
        res.json({ success: true, notification });
        return;
      }

      // Otherwise mark all as read
      const count = await notificationService.markAllAsRead(projectPath);
      res.json({ success: true, count });
    } catch (error) {
      logError(error, 'Mark read failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
39
apps/server/src/routes/notifications/routes/unread-count.ts
Normal file
@@ -0,0 +1,39 @@
/**
 * POST /api/notifications/unread-count - Get unread notification count
 *
 * Request body: { projectPath: string }
 * Response: { success: true, count: number }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/unread-count
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createUnreadCountHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const count = await notificationService.getUnreadCount(projectPath);

      res.json({
        success: true,
        count,
      });
    } catch (error) {
      logError(error, 'Get unread count failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
@@ -4,12 +4,58 @@

import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { getBacklogPlanStatus, getRunningDetails } from '../../backlog-plan/common.js';
import { getAllRunningGenerations } from '../../app-spec/common.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';

export function createIndexHandler(autoModeService: AutoModeService) {
  return async (_req: Request, res: Response): Promise<void> => {
    try {
      const runningAgents = await autoModeService.getRunningAgents();
      const runningAgents = [...(await autoModeService.getRunningAgents())];
      const backlogPlanStatus = getBacklogPlanStatus();
      const backlogPlanDetails = getRunningDetails();

      if (backlogPlanStatus.isRunning && backlogPlanDetails) {
        runningAgents.push({
          featureId: `backlog-plan:${backlogPlanDetails.projectPath}`,
          projectPath: backlogPlanDetails.projectPath,
          projectName: path.basename(backlogPlanDetails.projectPath),
          isAutoMode: false,
          title: 'Backlog plan',
          description: backlogPlanDetails.prompt,
        });
      }

      // Add spec/feature generation tasks
      const specGenerations = getAllRunningGenerations();
      for (const generation of specGenerations) {
        let title: string;
        let description: string;

        switch (generation.type) {
          case 'feature_generation':
            title = 'Generating features from spec';
            description = 'Creating features from the project specification';
            break;
          case 'sync':
            title = 'Syncing spec with code';
            description = 'Updating spec from codebase and completed features';
            break;
          default:
            title = 'Regenerating spec';
            description = 'Analyzing project and generating specification';
        }

        runningAgents.push({
          featureId: `spec-generation:${generation.projectPath}`,
          projectPath: generation.projectPath,
          projectName: path.basename(generation.projectPath),
          isAutoMode: false,
          title,
          description,
        });
      }

      res.json({
        success: true,

@@ -12,6 +12,18 @@ import type { Request, Response } from 'express';
import type { SettingsService } from '../../../services/settings-service.js';
import type { GlobalSettings } from '../../../types/settings.js';
import { getErrorMessage, logError, logger } from '../common.js';
import { setLogLevel, LogLevel } from '@automaker/utils';
import { setRequestLoggingEnabled } from '../../../index.js';

/**
 * Map server log level string to LogLevel enum
 */
const LOG_LEVEL_MAP: Record<string, LogLevel> = {
  error: LogLevel.ERROR,
  warn: LogLevel.WARN,
  info: LogLevel.INFO,
  debug: LogLevel.DEBUG,
};

/**
 * Create handler factory for PUT /api/settings/global
@@ -46,6 +58,23 @@ export function createUpdateGlobalHandler(settingsService: SettingsService) {

      const settings = await settingsService.updateGlobalSettings(updates);

      // Apply server log level if it was updated
      if ('serverLogLevel' in updates && updates.serverLogLevel) {
        const level = LOG_LEVEL_MAP[updates.serverLogLevel];
        if (level !== undefined) {
          setLogLevel(level);
          logger.info(`Server log level changed to: ${updates.serverLogLevel}`);
        }
      }

      // Apply request logging setting if it was updated
      if ('enableRequestLogging' in updates && typeof updates.enableRequestLogging === 'boolean') {
        setRequestLoggingEnabled(updates.enableRequestLogging);
        logger.info(
          `HTTP request logging ${updates.enableRequestLogging ? 'enabled' : 'disabled'}`
        );
      }

      res.json({
        success: true,
        settings,

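A sketch of the settings update that would now take effect immediately thanks to the handler above; the endpoint path is taken from the handler's doc comment and the base URL is a placeholder:

// Hypothetical PUT /api/settings/global payload
async function enableDebugLogging() {
  await fetch('http://localhost:3008/api/settings/global', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ serverLogLevel: 'debug', enableRequestLogging: true }),
  });
}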
@@ -15,7 +15,7 @@ import { FeatureLoader } from '../../services/feature-loader.js';
import { getAppSpecPath } from '@automaker/platform';
import * as secureFs from '../../lib/secure-fs.js';
import type { SettingsService } from '../../services/settings-service.js';
import { getAutoLoadClaudeMdSetting } from '../../lib/settings-helpers.js';
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';

const logger = createLogger('Suggestions');

@@ -137,11 +137,15 @@ export async function generateSuggestions(
  modelOverride?: string,
  thinkingLevelOverride?: ThinkingLevel
): Promise<void> {
  // Get customized prompts from settings
  const prompts = await getPromptCustomization(settingsService, '[Suggestions]');

  // Map suggestion types to their prompts
  const typePrompts: Record<string, string> = {
    features: 'Analyze this project and suggest new features that would add value.',
    refactoring: 'Analyze this project and identify refactoring opportunities.',
    security: 'Analyze this project for security vulnerabilities and suggest fixes.',
    performance: 'Analyze this project for performance issues and suggest optimizations.',
    features: prompts.suggestions.featuresPrompt,
    refactoring: prompts.suggestions.refactoringPrompt,
    security: prompts.suggestions.securityPrompt,
    performance: prompts.suggestions.performancePrompt,
  };

  // Load existing context to avoid duplicates
@@ -151,15 +155,7 @@ export async function generateSuggestions(
${existingContext}

${existingContext ? '\nIMPORTANT: Do NOT suggest features that are already implemented or already in the backlog above. Focus on NEW ideas that complement what already exists.\n' : ''}
Look at the codebase and provide 3-5 concrete suggestions.

For each suggestion, provide:
1. A category (e.g., "User Experience", "Security", "Performance")
2. A clear description of what to implement
3. Priority (1=high, 2=medium, 3=low)
4. Brief reasoning for why this would help

The response will be automatically formatted as structured JSON.`;
${prompts.suggestions.baseTemplate}`;

  // Don't send initial message - let the agent output speak for itself
  // The first agent message will be captured as an info entry

@@ -34,6 +34,7 @@ export function createGetDevServerLogsHandler() {
        result: {
          worktreePath: result.result.worktreePath,
          port: result.result.port,
          url: result.result.url,
          logs: result.result.logs,
          startedAt: result.result.startedAt,
        },

@@ -16,10 +16,27 @@ import { isGitRepo } from '@automaker/git-utils';
import { getErrorMessage, logError, normalizePath, execEnv, isGhCliAvailable } from '../common.js';
import { readAllWorktreeMetadata, type WorktreePRInfo } from '../../../lib/worktree-metadata.js';
import { createLogger } from '@automaker/utils';
import {
  checkGitHubRemote,
  type GitHubRemoteStatus,
} from '../../github/routes/check-github-remote.js';

const execAsync = promisify(exec);
const logger = createLogger('Worktree');

/**
 * Cache for GitHub remote status per project path.
 * This prevents repeated "no git remotes found" warnings when polling
 * projects that don't have a GitHub remote configured.
 */
interface GitHubRemoteCacheEntry {
  status: GitHubRemoteStatus;
  checkedAt: number;
}

const githubRemoteCache = new Map<string, GitHubRemoteCacheEntry>();
const GITHUB_REMOTE_CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes

interface WorktreeInfo {
  path: string;
  branch: string;
@@ -121,23 +138,63 @@ async function scanWorktreesDirectory(
  return discovered;
}

/**
 * Get cached GitHub remote status for a project, or check and cache it.
 * Returns null if gh CLI is not available.
 */
async function getGitHubRemoteStatus(projectPath: string): Promise<GitHubRemoteStatus | null> {
  // Check if gh CLI is available first
  const ghAvailable = await isGhCliAvailable();
  if (!ghAvailable) {
    return null;
  }

  const now = Date.now();
  const cached = githubRemoteCache.get(projectPath);

  // Return cached result if still valid
  if (cached && now - cached.checkedAt < GITHUB_REMOTE_CACHE_TTL_MS) {
    return cached.status;
  }

  // Check GitHub remote and cache the result
  const status = await checkGitHubRemote(projectPath);
  githubRemoteCache.set(projectPath, {
    status,
    checkedAt: Date.now(),
  });

  return status;
}

/**
 * Fetch open PRs from GitHub and create a map of branch name to PR info.
 * This allows detecting PRs that were created outside the app.
 *
 * Uses cached GitHub remote status to avoid repeated warnings when the
 * project doesn't have a GitHub remote configured.
 */
async function fetchGitHubPRs(projectPath: string): Promise<Map<string, WorktreePRInfo>> {
  const prMap = new Map<string, WorktreePRInfo>();

  try {
    // Check if gh CLI is available
    const ghAvailable = await isGhCliAvailable();
    if (!ghAvailable) {
    // Check GitHub remote status (uses cache to avoid repeated warnings)
    const remoteStatus = await getGitHubRemoteStatus(projectPath);

    // If gh CLI not available or no GitHub remote, return empty silently
    if (!remoteStatus || !remoteStatus.hasGitHubRemote) {
      return prMap;
    }

    // Use -R flag with owner/repo for more reliable PR fetching
    const repoFlag =
      remoteStatus.owner && remoteStatus.repo
        ? `-R ${remoteStatus.owner}/${remoteStatus.repo}`
        : '';

    // Fetch open PRs from GitHub
    const { stdout } = await execAsync(
      'gh pr list --state open --json number,title,url,state,headRefName,createdAt --limit 1000',
      `gh pr list ${repoFlag} --state open --json number,title,url,state,headRefName,createdAt --limit 1000`,
      { cwd: projectPath, env: execEnv, timeout: 15000 }
    );

@@ -170,9 +227,10 @@ async function fetchGitHubPRs(projectPath: string): Promise<Map<string, Worktree
export function createListHandler() {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, includeDetails } = req.body as {
      const { projectPath, includeDetails, forceRefreshGitHub } = req.body as {
        projectPath: string;
        includeDetails?: boolean;
        forceRefreshGitHub?: boolean;
      };

      if (!projectPath) {
@@ -180,6 +238,12 @@ export function createListHandler() {
        return;
      }

      // Clear GitHub remote cache if force refresh requested
      // This allows users to re-check for GitHub remote after adding one
      if (forceRefreshGitHub) {
        githubRemoteCache.delete(projectPath);
      }

      if (!(await isGitRepo(projectPath))) {
        res.json({ success: true, worktrees: [] });
        return;

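A hedged example of the new force-refresh flag, which clears the cached GitHub remote status before listing worktrees; the URL and payload values are placeholders:

// Hypothetical list request after adding a GitHub remote to the project
async function refreshWorktrees() {
  const res = await fetch('http://localhost:3008/api/worktrees/list', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      projectPath: '/path/to/project', // placeholder
      includeDetails: true,
      forceRefreshGitHub: true,        // bypass the 5-minute cache
    }),
  });
  return res.json();
}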
@@ -29,6 +29,10 @@ import {
  appendLearning,
  recordMemoryUsage,
  createLogger,
  atomicWriteJson,
  readJsonWithRecovery,
  logRecoveryWarning,
  DEFAULT_BACKUP_COUNT,
} from '@automaker/utils';

const logger = createLogger('AutoMode');
@@ -60,6 +64,7 @@ import {
  getMCPServersFromSettings,
  getPromptCustomization,
} from '../lib/settings-helpers.js';
import { getNotificationService } from './notification-service.js';

const execAsync = promisify(exec);

@@ -386,6 +391,7 @@ export class AutoModeService {
      this.emitAutoModeEvent('auto_mode_error', {
        error: errorInfo.message,
        errorType: errorInfo.type,
        projectPath,
      });
    });
  }
@@ -579,6 +585,9 @@ export class AutoModeService {
      '[AutoMode]'
    );

    // Get customized prompts from settings
    const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');

    // Build the prompt - use continuation prompt if provided (for recovery after plan approval)
    let prompt: string;
    // Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) and memory files
@@ -604,7 +613,7 @@ export class AutoModeService {
      logger.info(`Using continuation prompt for feature ${featureId}`);
    } else {
      // Normal flow: build prompt with planning phase
      const featurePrompt = this.buildFeaturePrompt(feature);
      const featurePrompt = this.buildFeaturePrompt(feature, prompts.taskExecution);
      const planningPrefix = await this.getPlanningPromptPrefix(feature);
      prompt = planningPrefix + featurePrompt;

@@ -783,6 +792,9 @@ export class AutoModeService {
  ): Promise<void> {
    logger.info(`Executing ${steps.length} pipeline step(s) for feature ${featureId}`);

    // Get customized prompts from settings
    const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');

    // Load context files once with feature context for smart memory selection
    const contextResult = await loadContextFiles({
      projectPath,
@@ -827,7 +839,12 @@ export class AutoModeService {
      });

      // Build prompt for this pipeline step
      const prompt = this.buildPipelineStepPrompt(step, feature, previousContext);
      const prompt = this.buildPipelineStepPrompt(
        step,
        feature,
        previousContext,
        prompts.taskExecution
      );

      // Get model from feature
      const model = resolveModelString(feature.model, DEFAULT_MODELS.claude);
@@ -882,14 +899,18 @@ export class AutoModeService {
  private buildPipelineStepPrompt(
    step: PipelineStep,
    feature: Feature,
    previousContext: string
    previousContext: string,
    taskExecutionPrompts: {
      implementationInstructions: string;
      playwrightVerificationInstructions: string;
    }
  ): string {
    let prompt = `## Pipeline Step: ${step.name}

This is an automated pipeline step following the initial feature implementation.

### Feature Context
${this.buildFeaturePrompt(feature)}
${this.buildFeaturePrompt(feature, taskExecutionPrompts)}

`;

@@ -1279,6 +1300,9 @@ Complete the pipeline step instructions above. Review the previous work and appl
      '[AutoMode]'
    );

    // Get customized prompts from settings
    const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');

    // Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) - passed as system prompt
    const contextResult = await loadContextFiles({
      projectPath,
@@ -1296,7 +1320,7 @@ Complete the pipeline step instructions above. Review the previous work and appl
    // Build complete prompt with feature info, previous context, and follow-up instructions
    let fullPrompt = `## Follow-up on Feature Implementation

${feature ? this.buildFeaturePrompt(feature) : `**Feature ID:** ${featureId}`}
${feature ? this.buildFeaturePrompt(feature, prompts.taskExecution) : `**Feature ID:** ${featureId}`}
`;

    if (previousContext) {
@@ -1396,13 +1420,13 @@ Address the follow-up instructions above. Review the previous work and make the
      allImagePaths.push(...allPaths);
    }

    // Save updated feature.json with new images
    // Save updated feature.json with new images (atomic write with backup)
    if (copiedImagePaths.length > 0 && feature) {
      const featureDirForSave = getFeatureDir(projectPath, featureId);
      const featurePath = path.join(featureDirForSave, 'feature.json');

      try {
        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
      } catch (error) {
        logger.error(`Failed to save feature.json:`, error);
      }
@@ -1529,6 +1553,7 @@ Address the follow-up instructions above. Review the previous work and make the
      message: allPassed
        ? 'All verification checks passed'
        : `Verification failed: ${results.find((r) => !r.passed)?.check || 'Unknown'}`,
      projectPath,
    });

    return allPassed;
@@ -1602,6 +1627,7 @@ Address the follow-up instructions above. Review the previous work and make the
      featureId,
      passes: true,
      message: `Changes committed: ${hash.trim().substring(0, 8)}`,
      projectPath,
    });

    return hash.trim();
@@ -1888,13 +1914,17 @@ Format your response as a structured markdown document.`;
      content: editedPlan || feature.planSpec.content,
    });

    // Build continuation prompt and re-run the feature
    // Get customized prompts from settings
    const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');

    // Build continuation prompt using centralized template
    const planContent = editedPlan || feature.planSpec.content || '';
    let continuationPrompt = `The plan/specification has been approved. `;
    if (feedback) {
      continuationPrompt += `\n\nUser feedback: ${feedback}\n\n`;
    }
    continuationPrompt += `Now proceed with the implementation as specified in the plan:\n\n${planContent}\n\nImplement the feature now.`;
    let continuationPrompt = prompts.taskExecution.continuationAfterApprovalTemplate;
    continuationPrompt = continuationPrompt.replace(
      /\{\{userFeedback\}\}/g,
      feedback || ''
    );
    continuationPrompt = continuationPrompt.replace(/\{\{approvedPlan\}\}/g, planContent);

    logger.info(`Starting recovery execution for feature ${featureId}`);

@@ -2066,8 +2096,20 @@ Format your response as a structured markdown document.`;
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
      const feature = JSON.parse(data);
      // Use recovery-enabled read for corrupted file handling
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      feature.status = status;
      feature.updatedAt = new Date().toISOString();
      // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
@@ -2078,9 +2120,41 @@ Format your response as a structured markdown document.`;
        // Clear the timestamp when moving to other statuses
        feature.justFinishedAt = undefined;
      }
      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
    } catch {
      // Feature file may not exist

      // Use atomic write with backup support
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

      // Create notifications for important status changes
      const notificationService = getNotificationService();
      if (status === 'waiting_approval') {
        await notificationService.createNotification({
          type: 'feature_waiting_approval',
          title: 'Feature Ready for Review',
          message: `"${feature.name || featureId}" is ready for your review and approval.`,
          featureId,
          projectPath,
        });
      } else if (status === 'verified') {
        await notificationService.createNotification({
          type: 'feature_verified',
          title: 'Feature Verified',
          message: `"${feature.name || featureId}" has been verified and is complete.`,
          featureId,
          projectPath,
        });
      }

      // Sync completed/verified features to app_spec.txt
      if (status === 'verified' || status === 'completed') {
        try {
          await this.featureLoader.syncFeatureToAppSpec(projectPath, feature);
        } catch (syncError) {
          // Log but don't fail the status update if sync fails
          logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
        }
      }
    } catch (error) {
      logger.error(`Failed to update feature status for ${featureId}:`, error);
    }
  }

@@ -2092,11 +2166,24 @@ Format your response as a structured markdown document.`;
    featureId: string,
    updates: Partial<PlanSpec>
  ): Promise<void> {
    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
    // Use getFeatureDir helper for consistent path resolution
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
      const feature = JSON.parse(data);
      // Use recovery-enabled read for corrupted file handling
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      // Initialize planSpec if it doesn't exist
      if (!feature.planSpec) {
@@ -2116,7 +2203,9 @@ Format your response as a structured markdown document.`;
      }

      feature.updatedAt = new Date().toISOString();
      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));

      // Use atomic write with backup support
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
    } catch (error) {
      logger.error(`Failed to update planSpec for ${featureId}:`, error);
    }
@@ -2133,25 +2222,34 @@ Format your response as a structured markdown document.`;
    const allFeatures: Feature[] = [];
    const pendingFeatures: Feature[] = [];

    // Load all features (for dependency checking)
    // Load all features (for dependency checking) with recovery support
    for (const entry of entries) {
      if (entry.isDirectory()) {
        const featurePath = path.join(featuresDir, entry.name, 'feature.json');
        try {
          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
          const feature = JSON.parse(data);
          allFeatures.push(feature);

          // Track pending features separately
          if (
            feature.status === 'pending' ||
            feature.status === 'ready' ||
            feature.status === 'backlog'
          ) {
            pendingFeatures.push(feature);
          }
        } catch {
          // Skip invalid features
        // Use recovery-enabled read for corrupted file handling
        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
          maxBackups: DEFAULT_BACKUP_COUNT,
          autoRestore: true,
        });

        logRecoveryWarning(result, `Feature ${entry.name}`, logger);

        const feature = result.data;
        if (!feature) {
          // Skip features that couldn't be loaded or recovered
          continue;
        }

        allFeatures.push(feature);

        // Track pending features separately
        if (
          feature.status === 'pending' ||
          feature.status === 'ready' ||
          feature.status === 'backlog'
        ) {
          pendingFeatures.push(feature);
        }
      }
    }
@@ -2225,7 +2323,13 @@ Format your response as a structured markdown document.`;
    return planningPrompt + '\n\n---\n\n## Feature Request\n\n';
  }

  private buildFeaturePrompt(feature: Feature): string {
  private buildFeaturePrompt(
    feature: Feature,
    taskExecutionPrompts: {
      implementationInstructions: string;
      playwrightVerificationInstructions: string;
    }
  ): string {
    const title = this.extractTitleFromDescription(feature.description);

    let prompt = `## Feature Implementation Task
@@ -2267,80 +2371,10 @@ You can use the Read tool to view these images at any time during implementation
    // Add verification instructions based on testing mode
    if (feature.skipTests) {
      // Manual verification - just implement the feature
      prompt += `
## Instructions

Implement this feature by:
1. First, explore the codebase to understand the existing structure
2. Plan your implementation approach
3. Write the necessary code changes
4. Ensure the code follows existing patterns and conventions

When done, wrap your final summary in <summary> tags like this:

<summary>
## Summary: [Feature Title]

### Changes Implemented
- [List of changes made]

### Files Modified
- [List of files]

### Notes for Developer
- [Any important notes]
</summary>

This helps parse your summary correctly in the output logs.`;
      prompt += `\n${taskExecutionPrompts.implementationInstructions}`;
    } else {
      // Automated testing - implement and verify with Playwright
      prompt += `
## Instructions
|
||||
|
||||
Implement this feature by:
|
||||
1. First, explore the codebase to understand the existing structure
|
||||
2. Plan your implementation approach
|
||||
3. Write the necessary code changes
|
||||
4. Ensure the code follows existing patterns and conventions
|
||||
|
||||
## Verification with Playwright (REQUIRED)
|
||||
|
||||
After implementing the feature, you MUST verify it works correctly using Playwright:
|
||||
|
||||
1. **Create a temporary Playwright test** to verify the feature works as expected
|
||||
2. **Run the test** to confirm the feature is working
|
||||
3. **Delete the test file** after verification - this is a temporary verification test, not a permanent test suite addition
|
||||
|
||||
Example verification workflow:
|
||||
\`\`\`bash
|
||||
# Create a simple verification test
|
||||
npx playwright test my-verification-test.spec.ts
|
||||
|
||||
# After successful verification, delete the test
|
||||
rm my-verification-test.spec.ts
|
||||
\`\`\`
|
||||
|
||||
The test should verify the core functionality of the feature. If the test fails, fix the implementation and re-test.
|
||||
|
||||
When done, wrap your final summary in <summary> tags like this:
|
||||
|
||||
<summary>
|
||||
## Summary: [Feature Title]
|
||||
|
||||
### Changes Implemented
|
||||
- [List of changes made]
|
||||
|
||||
### Files Modified
|
||||
- [List of files]
|
||||
|
||||
### Verification Status
|
||||
- [Describe how the feature was verified with Playwright]
|
||||
|
||||
### Notes for Developer
|
||||
- [Any important notes]
|
||||
</summary>
|
||||
|
||||
This helps parse your summary correctly in the output logs.`;
|
||||
prompt += `\n${taskExecutionPrompts.implementationInstructions}\n\n${taskExecutionPrompts.playwrightVerificationInstructions}`;
|
||||
}
|
||||
|
||||
return prompt;
|
||||
@@ -2910,6 +2944,12 @@ After generating the revised spec, output:
|
||||
`Starting multi-agent execution: ${parsedTasks.length} tasks for feature ${featureId}`
|
||||
);
|
||||
|
||||
// Get customized prompts for task execution
|
||||
const taskPrompts = await getPromptCustomization(
|
||||
this.settingsService,
|
||||
'[AutoMode]'
|
||||
);
|
||||
|
||||
// Execute each task with a separate agent
|
||||
for (let taskIndex = 0; taskIndex < parsedTasks.length; taskIndex++) {
|
||||
const task = parsedTasks[taskIndex];
|
||||
@@ -2941,6 +2981,7 @@ After generating the revised spec, output:
|
||||
parsedTasks,
|
||||
taskIndex,
|
||||
approvedPlanContent,
|
||||
taskPrompts.taskExecution.taskPromptTemplate,
|
||||
userFeedback
|
||||
);
|
||||
|
||||
@@ -3023,15 +3064,21 @@ After generating the revised spec, output:
|
||||
`No parsed tasks, using single-agent execution for feature ${featureId}`
|
||||
);
|
||||
|
||||
const continuationPrompt = `The plan/specification has been approved. Now implement it.
|
||||
${userFeedback ? `\n## User Feedback\n${userFeedback}\n` : ''}
|
||||
## Approved Plan
|
||||
|
||||
${approvedPlanContent}
|
||||
|
||||
## Instructions
|
||||
|
||||
Implement all the changes described in the plan above.`;
|
||||
// Get customized prompts for continuation
|
||||
const taskPrompts = await getPromptCustomization(
|
||||
this.settingsService,
|
||||
'[AutoMode]'
|
||||
);
|
||||
let continuationPrompt =
|
||||
taskPrompts.taskExecution.continuationAfterApprovalTemplate;
|
||||
continuationPrompt = continuationPrompt.replace(
|
||||
/\{\{userFeedback\}\}/g,
|
||||
userFeedback || ''
|
||||
);
|
||||
continuationPrompt = continuationPrompt.replace(
|
||||
/\{\{approvedPlan\}\}/g,
|
||||
approvedPlanContent
|
||||
);
|
||||
|
||||
const continuationStream = provider.executeQuery({
|
||||
prompt: continuationPrompt,
|
||||
@@ -3151,17 +3198,16 @@ Implement all the changes described in the plan above.`;
|
||||
throw new Error(`Feature ${featureId} not found`);
|
||||
}
|
||||
|
||||
const prompt = `## Continuing Feature Implementation
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');
|
||||
|
||||
${this.buildFeaturePrompt(feature)}
|
||||
// Build the feature prompt
|
||||
const featurePrompt = this.buildFeaturePrompt(feature, prompts.taskExecution);
|
||||
|
||||
## Previous Context
|
||||
The following is the output from a previous implementation attempt. Continue from where you left off:
|
||||
|
||||
${context}
|
||||
|
||||
## Instructions
|
||||
Review the previous work and continue the implementation. If the feature appears complete, verify it works correctly.`;
|
||||
// Use the resume feature template with variable substitution
|
||||
let prompt = prompts.taskExecution.resumeFeatureTemplate;
|
||||
prompt = prompt.replace(/\{\{featurePrompt\}\}/g, featurePrompt);
|
||||
prompt = prompt.replace(/\{\{previousContext\}\}/g, context);
|
||||
|
||||
return this.executeFeature(projectPath, featureId, useWorktrees, false, undefined, {
|
||||
continuationPrompt: prompt,
|
||||
@@ -3282,68 +3328,42 @@ Review the previous work and continue the implementation. If the feature appears
|
||||
allTasks: ParsedTask[],
|
||||
taskIndex: number,
|
||||
planContent: string,
|
||||
taskPromptTemplate: string,
|
||||
userFeedback?: string
|
||||
): string {
|
||||
const completedTasks = allTasks.slice(0, taskIndex);
|
||||
const remainingTasks = allTasks.slice(taskIndex + 1);
|
||||
|
||||
let prompt = `# Task Execution: ${task.id}
|
||||
// Build completed tasks string
|
||||
const completedTasksStr =
|
||||
completedTasks.length > 0
|
||||
? `### Already Completed (${completedTasks.length} tasks)\n${completedTasks.map((t) => `- [x] ${t.id}: ${t.description}`).join('\n')}\n`
|
||||
: '';
|
||||
|
||||
You are executing a specific task as part of a larger feature implementation.
|
||||
// Build remaining tasks string
|
||||
const remainingTasksStr =
|
||||
remainingTasks.length > 0
|
||||
? `### Coming Up Next (${remainingTasks.length} tasks remaining)\n${remainingTasks
|
||||
.slice(0, 3)
|
||||
.map((t) => `- [ ] ${t.id}: ${t.description}`)
|
||||
.join(
|
||||
'\n'
|
||||
)}${remainingTasks.length > 3 ? `\n... and ${remainingTasks.length - 3} more tasks` : ''}\n`
|
||||
: '';
|
||||
|
||||
## Your Current Task
|
||||
// Build user feedback string
|
||||
const userFeedbackStr = userFeedback ? `### User Feedback\n${userFeedback}\n` : '';
|
||||
|
||||
**Task ID:** ${task.id}
|
||||
**Description:** ${task.description}
|
||||
${task.filePath ? `**Primary File:** ${task.filePath}` : ''}
|
||||
${task.phase ? `**Phase:** ${task.phase}` : ''}
|
||||
|
||||
## Context
|
||||
|
||||
`;
|
||||
|
||||
// Show what's already done
|
||||
if (completedTasks.length > 0) {
|
||||
prompt += `### Already Completed (${completedTasks.length} tasks)
|
||||
${completedTasks.map((t) => `- [x] ${t.id}: ${t.description}`).join('\n')}
|
||||
|
||||
`;
|
||||
}
|
||||
|
||||
// Show remaining tasks
|
||||
if (remainingTasks.length > 0) {
|
||||
prompt += `### Coming Up Next (${remainingTasks.length} tasks remaining)
|
||||
${remainingTasks
|
||||
.slice(0, 3)
|
||||
.map((t) => `- [ ] ${t.id}: ${t.description}`)
|
||||
.join('\n')}
|
||||
${remainingTasks.length > 3 ? `... and ${remainingTasks.length - 3} more tasks` : ''}
|
||||
|
||||
`;
|
||||
}
|
||||
|
||||
// Add user feedback if any
|
||||
if (userFeedback) {
|
||||
prompt += `### User Feedback
|
||||
${userFeedback}
|
||||
|
||||
`;
|
||||
}
|
||||
|
||||
// Add relevant excerpt from plan (just the task-related part to save context)
|
||||
prompt += `### Reference: Full Plan
|
||||
<details>
|
||||
${planContent}
|
||||
</details>
|
||||
|
||||
## Instructions
|
||||
|
||||
1. Focus ONLY on completing task ${task.id}: "${task.description}"
|
||||
2. Do not work on other tasks
|
||||
3. Use the existing codebase patterns
|
||||
4. When done, summarize what you implemented
|
||||
|
||||
Begin implementing task ${task.id} now.`;
|
||||
// Use centralized template with variable substitution
|
||||
let prompt = taskPromptTemplate;
|
||||
prompt = prompt.replace(/\{\{taskId\}\}/g, task.id);
|
||||
prompt = prompt.replace(/\{\{taskDescription\}\}/g, task.description);
|
||||
prompt = prompt.replace(/\{\{taskFilePath\}\}/g, task.filePath || '');
|
||||
prompt = prompt.replace(/\{\{taskPhase\}\}/g, task.phase || '');
|
||||
prompt = prompt.replace(/\{\{completedTasks\}\}/g, completedTasksStr);
|
||||
prompt = prompt.replace(/\{\{remainingTasks\}\}/g, remainingTasksStr);
|
||||
prompt = prompt.replace(/\{\{userFeedback\}\}/g, userFeedbackStr);
|
||||
prompt = prompt.replace(/\{\{planContent\}\}/g, planContent);
|
||||
|
||||
return prompt;
|
||||
}
|
||||
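A minimal sketch of how the substitution above plays out, assuming a hypothetical taskPromptTemplate and task; the real template text comes from getPromptCustomization via the settings service:

// Illustrative only - not part of the diff
const exampleTemplate = '# Task {{taskId}}\n\n{{taskDescription}}\n\n{{completedTasks}}{{remainingTasks}}';
const exampleTask = { id: 'T3', description: 'Add a logout button to the header' };
let examplePrompt = exampleTemplate;
examplePrompt = examplePrompt.replace(/\{\{taskId\}\}/g, exampleTask.id);
examplePrompt = examplePrompt.replace(/\{\{taskDescription\}\}/g, exampleTask.description);
examplePrompt = examplePrompt.replace(
  /\{\{completedTasks\}\}/g,
  '### Already Completed (2 tasks)\n- [x] T1: Add the login form\n- [x] T2: Wire up the auth API\n'
);
examplePrompt = examplePrompt.replace(/\{\{remainingTasks\}\}/g, '');
// examplePrompt now reads:
// # Task T3
//
// Add a logout button to the header
//
// ### Already Completed (2 tasks)
// - [x] T1: Add the login form
// - [x] T2: Wire up the auth API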
@@ -3461,31 +3481,39 @@ Begin implementing task ${task.id} now.`;
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
const featurePath = path.join(featuresDir, entry.name, 'feature.json');
|
||||
try {
|
||||
const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
|
||||
const feature = JSON.parse(data) as Feature;
|
||||
|
||||
// Check if feature was interrupted (in_progress or pipeline_*)
|
||||
if (
|
||||
feature.status === 'in_progress' ||
|
||||
(feature.status && feature.status.startsWith('pipeline_'))
|
||||
) {
|
||||
// Verify it has existing context (agent-output.md)
|
||||
const featureDir = getFeatureDir(projectPath, feature.id);
|
||||
const contextPath = path.join(featureDir, 'agent-output.md');
|
||||
try {
|
||||
await secureFs.access(contextPath);
|
||||
interruptedFeatures.push(feature);
|
||||
logger.info(
|
||||
`Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
|
||||
);
|
||||
} catch {
|
||||
// No context file, skip this feature - it will be restarted fresh
|
||||
logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
|
||||
}
|
||||
// Use recovery-enabled read for corrupted file handling
|
||||
const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
|
||||
maxBackups: DEFAULT_BACKUP_COUNT,
|
||||
autoRestore: true,
|
||||
});
|
||||
|
||||
logRecoveryWarning(result, `Feature ${entry.name}`, logger);
|
||||
|
||||
const feature = result.data;
|
||||
if (!feature) {
|
||||
// Skip features that couldn't be loaded or recovered
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if feature was interrupted (in_progress or pipeline_*)
|
||||
if (
|
||||
feature.status === 'in_progress' ||
|
||||
(feature.status && feature.status.startsWith('pipeline_'))
|
||||
) {
|
||||
// Verify it has existing context (agent-output.md)
|
||||
const featureDir = getFeatureDir(projectPath, feature.id);
|
||||
const contextPath = path.join(featureDir, 'agent-output.md');
|
||||
try {
|
||||
await secureFs.access(contextPath);
|
||||
interruptedFeatures.push(feature);
|
||||
logger.info(
|
||||
`Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
|
||||
);
|
||||
} catch {
|
||||
// No context file, skip this feature - it will be restarted fresh
|
||||
logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
|
||||
}
|
||||
} catch {
|
||||
// Skip invalid features
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3553,32 +3581,13 @@ Begin implementing task ${task.id} now.`;
|
||||
// Limit output to avoid token limits
|
||||
const truncatedOutput = agentOutput.length > 10000 ? agentOutput.slice(-10000) : agentOutput;
|
||||
|
||||
const userPrompt = `You are an Architecture Decision Record (ADR) extractor. Analyze this implementation and return ONLY JSON with learnings. No explanations.
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(this.settingsService, '[AutoMode]');
|
||||
|
||||
Feature: "${feature.title}"
|
||||
|
||||
Implementation log:
|
||||
${truncatedOutput}
|
||||
|
||||
Extract MEANINGFUL learnings - not obvious things. For each, capture:
|
||||
- DECISIONS: Why this approach vs alternatives? What would break if changed?
|
||||
- GOTCHAS: What was unexpected? What's the root cause? How to avoid?
|
||||
- PATTERNS: Why this pattern? What problem does it solve? Trade-offs?
|
||||
|
||||
JSON format ONLY (no markdown, no text):
|
||||
{"learnings": [{
|
||||
"category": "architecture|api|ui|database|auth|testing|performance|security|gotchas",
|
||||
"type": "decision|gotcha|pattern",
|
||||
"content": "What was done/learned",
|
||||
"context": "Problem being solved or situation faced",
|
||||
"why": "Reasoning - why this approach",
|
||||
"rejected": "Alternative considered and why rejected",
|
||||
"tradeoffs": "What became easier/harder",
|
||||
"breaking": "What breaks if this is changed/removed"
|
||||
}]}
|
||||
|
||||
IMPORTANT: Only include NON-OBVIOUS learnings with real reasoning. Skip trivial patterns.
|
||||
If nothing notable: {"learnings": []}`;
|
||||
// Build user prompt using centralized template with variable substitution
|
||||
let userPrompt = prompts.taskExecution.learningExtractionUserPromptTemplate;
|
||||
userPrompt = userPrompt.replace(/\{\{featureTitle\}\}/g, feature.title || '');
|
||||
userPrompt = userPrompt.replace(/\{\{implementationLog\}\}/g, truncatedOutput);
|
||||
|
||||
try {
|
||||
// Get model from phase settings
|
||||
@@ -3612,8 +3621,7 @@ If nothing notable: {"learnings": []}`;
|
||||
cwd: projectPath,
|
||||
maxTurns: 1,
|
||||
allowedTools: [],
|
||||
systemPrompt:
|
||||
'You are a JSON extraction assistant. You MUST respond with ONLY valid JSON, no explanations, no markdown, no other text. Extract learnings from the provided implementation context and return them as JSON.',
|
||||
systemPrompt: prompts.taskExecution.learningExtractionSystemPrompt,
|
||||
});
|
||||
|
||||
const responseText = result.text;
|
||||
|
||||
@@ -22,6 +22,29 @@ export class ClaudeUsageService {
|
||||
private timeout = 30000; // 30 second timeout
|
||||
private isWindows = os.platform() === 'win32';
|
||||
private isLinux = os.platform() === 'linux';
|
||||
// On Windows, ConPTY requires AttachConsole which fails in Electron/service mode
|
||||
// Detect Electron by checking for electron-specific env vars or process properties
|
||||
// When in Electron, always use winpty to avoid ConPTY's AttachConsole errors
|
||||
private isElectron =
|
||||
!!(process.versions && (process.versions as Record<string, string>).electron) ||
|
||||
!!process.env.ELECTRON_RUN_AS_NODE;
|
||||
private useConptyFallback = false; // Track if we need to use winpty fallback on Windows
|
||||
|
||||
/**
|
||||
* Kill a PTY process with platform-specific handling.
|
||||
* Windows doesn't support Unix signals like SIGTERM, so we call kill() without arguments.
|
||||
* On Unix-like systems (macOS, Linux), we can specify the signal.
|
||||
*
|
||||
* @param ptyProcess - The PTY process to kill
|
||||
* @param signal - The signal to send on Unix-like systems (default: 'SIGTERM')
|
||||
*/
|
||||
private killPtyProcess(ptyProcess: pty.IPty, signal: string = 'SIGTERM'): void {
|
||||
if (this.isWindows) {
|
||||
ptyProcess.kill();
|
||||
} else {
|
||||
ptyProcess.kill(signal);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if Claude CLI is available on the system
|
||||
@@ -181,37 +204,94 @@ export class ClaudeUsageService {
|
||||
? ['/c', 'claude', '--add-dir', workingDirectory]
|
||||
: ['-c', `claude --add-dir "${workingDirectory}"`];
|
||||
|
||||
// Using 'any' for ptyProcess because node-pty types don't include 'killed' property
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let ptyProcess: any = null;
|
||||
|
||||
// Build PTY spawn options
|
||||
const ptyOptions: pty.IPtyForkOptions = {
|
||||
name: 'xterm-256color',
|
||||
cols: 120,
|
||||
rows: 30,
|
||||
cwd: workingDirectory,
|
||||
env: {
|
||||
...process.env,
|
||||
TERM: 'xterm-256color',
|
||||
} as Record<string, string>,
|
||||
};
|
||||
|
||||
// On Windows, always use winpty instead of ConPTY
|
||||
// ConPTY requires AttachConsole which fails in many contexts:
|
||||
// - Electron apps without a console
|
||||
// - VS Code integrated terminal
|
||||
// - Spawned from other applications
|
||||
// The error happens in a subprocess so we can't catch it - must proactively disable
|
||||
if (this.isWindows) {
|
||||
(ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
|
||||
logger.info(
|
||||
'[executeClaudeUsageCommandPty] Using winpty on Windows (ConPTY disabled for compatibility)'
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
ptyProcess = pty.spawn(shell, args, {
|
||||
name: 'xterm-256color',
|
||||
cols: 120,
|
||||
rows: 30,
|
||||
cwd: workingDirectory,
|
||||
env: {
|
||||
...process.env,
|
||||
TERM: 'xterm-256color',
|
||||
} as Record<string, string>,
|
||||
});
|
||||
ptyProcess = pty.spawn(shell, args, ptyOptions);
|
||||
} catch (spawnError) {
|
||||
const errorMessage = spawnError instanceof Error ? spawnError.message : String(spawnError);
|
||||
logger.error('[executeClaudeUsageCommandPty] Failed to spawn PTY:', errorMessage);
|
||||
|
||||
// Return a user-friendly error instead of crashing
|
||||
reject(
|
||||
new Error(
|
||||
`Unable to access terminal: ${errorMessage}. Claude CLI may not be available or PTY support is limited in this environment.`
|
||||
)
|
||||
);
|
||||
return;
|
||||
// Check for Windows ConPTY-specific errors
|
||||
if (this.isWindows && errorMessage.includes('AttachConsole failed')) {
|
||||
// ConPTY failed - try winpty fallback
|
||||
if (!this.useConptyFallback) {
|
||||
logger.warn(
|
||||
'[executeClaudeUsageCommandPty] ConPTY AttachConsole failed, retrying with winpty fallback'
|
||||
);
|
||||
this.useConptyFallback = true;
|
||||
|
||||
try {
|
||||
(ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
|
||||
ptyProcess = pty.spawn(shell, args, ptyOptions);
|
||||
logger.info(
|
||||
'[executeClaudeUsageCommandPty] Successfully spawned with winpty fallback'
|
||||
);
|
||||
} catch (fallbackError) {
|
||||
const fallbackMessage =
|
||||
fallbackError instanceof Error ? fallbackError.message : String(fallbackError);
|
||||
logger.error(
|
||||
'[executeClaudeUsageCommandPty] Winpty fallback also failed:',
|
||||
fallbackMessage
|
||||
);
|
||||
reject(
|
||||
new Error(
|
||||
`Windows PTY unavailable: Both ConPTY and winpty failed. This typically happens when running in Electron without a console. ConPTY error: ${errorMessage}. Winpty error: ${fallbackMessage}`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
logger.error('[executeClaudeUsageCommandPty] Winpty fallback failed:', errorMessage);
|
||||
reject(
|
||||
new Error(
|
||||
`Windows PTY unavailable: ${errorMessage}. The application is running without console access (common in Electron). Try running from a terminal window.`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
logger.error('[executeClaudeUsageCommandPty] Failed to spawn PTY:', errorMessage);
|
||||
reject(
|
||||
new Error(
|
||||
`Unable to access terminal: ${errorMessage}. Claude CLI may not be available or PTY support is limited in this environment.`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
if (ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.kill();
|
||||
this.killPtyProcess(ptyProcess);
|
||||
}
|
||||
// Don't fail if we have data - return it instead
|
||||
if (output.includes('Current session')) {
|
||||
@@ -244,16 +324,23 @@ export class ClaudeUsageService {
|
||||
const cleanOutput = output.replace(/\x1B\[[0-9;]*[A-Za-z]/g, '');
|
||||
|
||||
// Check for specific authentication/permission errors
|
||||
if (
|
||||
cleanOutput.includes('OAuth token does not meet scope requirement') ||
|
||||
cleanOutput.includes('permission_error') ||
|
||||
cleanOutput.includes('token_expired') ||
|
||||
cleanOutput.includes('authentication_error')
|
||||
) {
|
||||
// Must be very specific to avoid false positives from garbled terminal encoding
|
||||
// Removed permission_error check as it was causing false positives with winpty encoding
|
||||
const authChecks = {
|
||||
oauth: cleanOutput.includes('OAuth token does not meet scope requirement'),
|
||||
tokenExpired: cleanOutput.includes('token_expired'),
|
||||
// Only match if it looks like a JSON API error response
|
||||
authError:
|
||||
cleanOutput.includes('"type":"authentication_error"') ||
|
||||
cleanOutput.includes('"type": "authentication_error"'),
|
||||
};
|
||||
const hasAuthError = authChecks.oauth || authChecks.tokenExpired || authChecks.authError;
|
||||
|
||||
if (hasAuthError) {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
if (ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.kill();
|
||||
this.killPtyProcess(ptyProcess);
|
||||
}
|
||||
reject(
|
||||
new Error(
|
||||
@@ -265,11 +352,16 @@ export class ClaudeUsageService {
|
||||
}
|
||||
|
||||
// Check if we've seen the usage data (look for "Current session" or the TUI Usage header)
|
||||
if (
|
||||
!hasSeenUsageData &&
|
||||
(cleanOutput.includes('Current session') ||
|
||||
(cleanOutput.includes('Usage') && cleanOutput.includes('% left')))
|
||||
) {
|
||||
// Also check for percentage patterns that appear in usage output
|
||||
const hasUsageIndicators =
|
||||
cleanOutput.includes('Current session') ||
|
||||
(cleanOutput.includes('Usage') && cleanOutput.includes('% left')) ||
|
||||
// Additional patterns for winpty - look for percentage patterns
|
||||
/\d+%\s*(left|used|remaining)/i.test(cleanOutput) ||
|
||||
cleanOutput.includes('Resets in') ||
|
||||
cleanOutput.includes('Current week');
|
||||
|
||||
if (!hasSeenUsageData && hasUsageIndicators) {
|
||||
hasSeenUsageData = true;
|
||||
// Wait for full output, then send escape to exit
|
||||
setTimeout(() => {
|
||||
@@ -277,9 +369,10 @@ export class ClaudeUsageService {
|
||||
ptyProcess.write('\x1b'); // Send escape key
|
||||
|
||||
// Fallback: if ESC doesn't exit (Linux), use SIGTERM after 2s
|
||||
// Windows doesn't support signals, so killPtyProcess handles platform differences
|
||||
setTimeout(() => {
|
||||
if (!settled && ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.kill('SIGTERM');
|
||||
this.killPtyProcess(ptyProcess);
|
||||
}
|
||||
}, 2000);
|
||||
}
|
||||
@@ -307,10 +400,18 @@ export class ClaudeUsageService {
|
||||
}
|
||||
|
||||
// Detect REPL prompt and send /usage command
|
||||
if (
|
||||
!hasSentCommand &&
|
||||
(cleanOutput.includes('❯') || cleanOutput.includes('? for shortcuts'))
|
||||
) {
|
||||
// On Windows with winpty, Unicode prompt char ❯ gets garbled, so also check for ASCII indicators
|
||||
const isReplReady =
|
||||
cleanOutput.includes('❯') ||
|
||||
cleanOutput.includes('? for shortcuts') ||
|
||||
// Fallback for winpty garbled encoding - detect CLI welcome screen elements
|
||||
(cleanOutput.includes('Welcome back') && cleanOutput.includes('Claude')) ||
|
||||
(cleanOutput.includes('Tips for getting started') && cleanOutput.includes('Claude')) ||
|
||||
// Detect model indicator which appears when REPL is ready
|
||||
(cleanOutput.includes('Opus') && cleanOutput.includes('Claude API')) ||
|
||||
(cleanOutput.includes('Sonnet') && cleanOutput.includes('Claude API'));
|
||||
|
||||
if (!hasSentCommand && isReplReady) {
|
||||
hasSentCommand = true;
|
||||
// Wait for REPL to fully settle
|
||||
setTimeout(() => {
|
||||
@@ -347,11 +448,9 @@ export class ClaudeUsageService {
|
||||
if (settled) return;
|
||||
settled = true;
|
||||
|
||||
if (
|
||||
output.includes('token_expired') ||
|
||||
output.includes('authentication_error') ||
|
||||
output.includes('permission_error')
|
||||
) {
|
||||
// Check for auth errors - must be specific to avoid false positives
|
||||
// Removed permission_error check as it was causing false positives with winpty encoding
|
||||
if (output.includes('token_expired') || output.includes('"type":"authentication_error"')) {
|
||||
reject(new Error("Authentication required - please run 'claude login'"));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -379,10 +379,11 @@ class DevServerService {
|
||||
|
||||
// Create server info early so we can reference it in handlers
|
||||
// We'll add it to runningServers after verifying the process started successfully
|
||||
const hostname = process.env.HOSTNAME || 'localhost';
|
||||
const serverInfo: DevServerInfo = {
|
||||
worktreePath,
|
||||
port,
|
||||
url: `http://localhost:${port}`,
|
||||
url: `http://${hostname}:${port}`,
|
||||
process: devProcess,
|
||||
startedAt: new Date(),
|
||||
scrollbackBuffer: '',
|
||||
@@ -474,7 +475,7 @@ class DevServerService {
|
||||
result: {
|
||||
worktreePath,
|
||||
port,
|
||||
url: `http://localhost:${port}`,
|
||||
url: `http://${hostname}:${port}`,
|
||||
message: `Dev server started on port ${port}`,
|
||||
},
|
||||
};
|
||||
@@ -594,6 +595,7 @@ class DevServerService {
|
||||
result?: {
|
||||
worktreePath: string;
|
||||
port: number;
|
||||
url: string;
|
||||
logs: string;
|
||||
startedAt: string;
|
||||
};
|
||||
@@ -613,6 +615,7 @@ class DevServerService {
|
||||
result: {
|
||||
worktreePath: server.worktreePath,
|
||||
port: server.port,
|
||||
url: server.url,
|
||||
logs: server.scrollbackBuffer,
|
||||
startedAt: server.startedAt.toISOString(),
|
||||
},
|
||||
|
||||
338 apps/server/src/services/event-history-service.ts (new file)
@@ -0,0 +1,338 @@
/**
 * Event History Service - Stores and retrieves event records for debugging and replay
 *
 * Provides persistent storage for events in {projectPath}/.automaker/events/
 * Each event is stored as a separate JSON file with an index for quick listing.
 *
 * Features:
 * - Store events when they occur
 * - List and filter historical events
 * - Replay events to test hook configurations
 * - Delete old events to manage disk space
 */
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import * as secureFs from '../lib/secure-fs.js';
|
||||
import {
|
||||
getEventHistoryDir,
|
||||
getEventHistoryIndexPath,
|
||||
getEventPath,
|
||||
ensureEventHistoryDir,
|
||||
} from '@automaker/platform';
|
||||
import type {
|
||||
StoredEvent,
|
||||
StoredEventIndex,
|
||||
StoredEventSummary,
|
||||
EventHistoryFilter,
|
||||
EventHookTrigger,
|
||||
} from '@automaker/types';
|
||||
import { DEFAULT_EVENT_HISTORY_INDEX } from '@automaker/types';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
const logger = createLogger('EventHistoryService');
|
||||
|
||||
/** Maximum events to keep in the index (oldest are pruned) */
|
||||
const MAX_EVENTS_IN_INDEX = 1000;
|
||||
|
||||
/**
|
||||
* Atomic file write - write to temp file then rename
|
||||
*/
|
||||
async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
|
||||
const tempPath = `${filePath}.tmp.${Date.now()}`;
|
||||
const content = JSON.stringify(data, null, 2);
|
||||
|
||||
try {
|
||||
await secureFs.writeFile(tempPath, content, 'utf-8');
|
||||
await secureFs.rename(tempPath, filePath);
|
||||
} catch (error) {
|
||||
try {
|
||||
await secureFs.unlink(tempPath);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely read JSON file with fallback to default
|
||||
*/
|
||||
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
|
||||
try {
|
||||
const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
|
||||
return JSON.parse(content) as T;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return defaultValue;
|
||||
}
|
||||
logger.error(`Error reading ${filePath}:`, error);
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Input for storing a new event
|
||||
*/
|
||||
export interface StoreEventInput {
|
||||
trigger: EventHookTrigger;
|
||||
projectPath: string;
|
||||
featureId?: string;
|
||||
featureName?: string;
|
||||
error?: string;
|
||||
errorType?: string;
|
||||
passes?: boolean;
|
||||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/**
|
||||
* EventHistoryService - Manages persistent storage of events
|
||||
*/
|
||||
export class EventHistoryService {
|
||||
/**
|
||||
* Store a new event to history
|
||||
*
|
||||
* @param input - Event data to store
|
||||
* @returns Promise resolving to the stored event
|
||||
*/
|
||||
async storeEvent(input: StoreEventInput): Promise<StoredEvent> {
|
||||
const { projectPath, trigger, featureId, featureName, error, errorType, passes, metadata } =
|
||||
input;
|
||||
|
||||
// Ensure events directory exists
|
||||
await ensureEventHistoryDir(projectPath);
|
||||
|
||||
const eventId = `evt-${Date.now()}-${randomUUID().slice(0, 8)}`;
|
||||
const timestamp = new Date().toISOString();
|
||||
const projectName = this.extractProjectName(projectPath);
|
||||
|
||||
const event: StoredEvent = {
|
||||
id: eventId,
|
||||
trigger,
|
||||
timestamp,
|
||||
projectPath,
|
||||
projectName,
|
||||
featureId,
|
||||
featureName,
|
||||
error,
|
||||
errorType,
|
||||
passes,
|
||||
metadata,
|
||||
};
|
||||
|
||||
// Write the full event to its own file
|
||||
const eventPath = getEventPath(projectPath, eventId);
|
||||
await atomicWriteJson(eventPath, event);
|
||||
|
||||
// Update the index
|
||||
await this.addToIndex(projectPath, event);
|
||||
|
||||
logger.info(`Stored event ${eventId} (${trigger}) for project ${projectName}`);
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all events for a project with optional filtering
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param filter - Optional filter criteria
|
||||
* @returns Promise resolving to array of event summaries
|
||||
*/
|
||||
async getEvents(projectPath: string, filter?: EventHistoryFilter): Promise<StoredEventSummary[]> {
|
||||
const indexPath = getEventHistoryIndexPath(projectPath);
|
||||
const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);
|
||||
|
||||
let events = [...index.events];
|
||||
|
||||
// Apply filters
|
||||
if (filter) {
|
||||
if (filter.trigger) {
|
||||
events = events.filter((e) => e.trigger === filter.trigger);
|
||||
}
|
||||
if (filter.featureId) {
|
||||
events = events.filter((e) => e.featureId === filter.featureId);
|
||||
}
|
||||
if (filter.since) {
|
||||
const sinceDate = new Date(filter.since).getTime();
|
||||
events = events.filter((e) => new Date(e.timestamp).getTime() >= sinceDate);
|
||||
}
|
||||
if (filter.until) {
|
||||
const untilDate = new Date(filter.until).getTime();
|
||||
events = events.filter((e) => new Date(e.timestamp).getTime() <= untilDate);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by timestamp (newest first)
|
||||
events.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
|
||||
|
||||
// Apply pagination
|
||||
if (filter?.offset) {
|
||||
events = events.slice(filter.offset);
|
||||
}
|
||||
if (filter?.limit) {
|
||||
events = events.slice(0, filter.limit);
|
||||
}
|
||||
|
||||
return events;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single event by ID
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param eventId - Event identifier
|
||||
* @returns Promise resolving to the full event or null if not found
|
||||
*/
|
||||
async getEvent(projectPath: string, eventId: string): Promise<StoredEvent | null> {
|
||||
const eventPath = getEventPath(projectPath, eventId);
|
||||
try {
|
||||
const content = (await secureFs.readFile(eventPath, 'utf-8')) as string;
|
||||
return JSON.parse(content) as StoredEvent;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return null;
|
||||
}
|
||||
logger.error(`Error reading event ${eventId}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an event by ID
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param eventId - Event identifier
|
||||
* @returns Promise resolving to true if deleted
|
||||
*/
|
||||
async deleteEvent(projectPath: string, eventId: string): Promise<boolean> {
|
||||
// Remove from index
|
||||
const indexPath = getEventHistoryIndexPath(projectPath);
|
||||
const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);
|
||||
|
||||
const initialLength = index.events.length;
|
||||
index.events = index.events.filter((e) => e.id !== eventId);
|
||||
|
||||
if (index.events.length === initialLength) {
|
||||
return false; // Event not found in index
|
||||
}
|
||||
|
||||
await atomicWriteJson(indexPath, index);
|
||||
|
||||
// Delete the event file
|
||||
const eventPath = getEventPath(projectPath, eventId);
|
||||
try {
|
||||
await secureFs.unlink(eventPath);
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
logger.error(`Error deleting event file ${eventId}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Deleted event ${eventId}`);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all events for a project
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Promise resolving to number of events cleared
|
||||
*/
|
||||
async clearEvents(projectPath: string): Promise<number> {
|
||||
const indexPath = getEventHistoryIndexPath(projectPath);
|
||||
const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);
|
||||
|
||||
const count = index.events.length;
|
||||
|
||||
// Delete all event files
|
||||
for (const event of index.events) {
|
||||
const eventPath = getEventPath(projectPath, event.id);
|
||||
try {
|
||||
await secureFs.unlink(eventPath);
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
logger.error(`Error deleting event file ${event.id}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reset the index
|
||||
await atomicWriteJson(indexPath, DEFAULT_EVENT_HISTORY_INDEX);
|
||||
|
||||
logger.info(`Cleared ${count} events for project`);
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get event count for a project
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param filter - Optional filter criteria
|
||||
* @returns Promise resolving to event count
|
||||
*/
|
||||
async getEventCount(projectPath: string, filter?: EventHistoryFilter): Promise<number> {
|
||||
const events = await this.getEvents(projectPath, {
|
||||
...filter,
|
||||
limit: undefined,
|
||||
offset: undefined,
|
||||
});
|
||||
return events.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an event to the index (internal)
|
||||
*/
|
||||
private async addToIndex(projectPath: string, event: StoredEvent): Promise<void> {
|
||||
const indexPath = getEventHistoryIndexPath(projectPath);
|
||||
const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);
|
||||
|
||||
const summary: StoredEventSummary = {
|
||||
id: event.id,
|
||||
trigger: event.trigger,
|
||||
timestamp: event.timestamp,
|
||||
featureName: event.featureName,
|
||||
featureId: event.featureId,
|
||||
};
|
||||
|
||||
// Add to beginning (newest first)
|
||||
index.events.unshift(summary);
|
||||
|
||||
// Prune old events if over limit
|
||||
if (index.events.length > MAX_EVENTS_IN_INDEX) {
|
||||
const removed = index.events.splice(MAX_EVENTS_IN_INDEX);
|
||||
// Delete the pruned event files
|
||||
for (const oldEvent of removed) {
|
||||
const eventPath = getEventPath(projectPath, oldEvent.id);
|
||||
try {
|
||||
await secureFs.unlink(eventPath);
|
||||
} catch {
|
||||
// Ignore deletion errors for pruned events
|
||||
}
|
||||
}
|
||||
logger.info(`Pruned ${removed.length} old events from history`);
|
||||
}
|
||||
|
||||
await atomicWriteJson(indexPath, index);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract project name from path
|
||||
*/
|
||||
private extractProjectName(projectPath: string): string {
|
||||
const parts = projectPath.split(/[/\\]/);
|
||||
return parts[parts.length - 1] || projectPath;
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
let eventHistoryServiceInstance: EventHistoryService | null = null;
|
||||
|
||||
/**
|
||||
* Get the singleton event history service instance
|
||||
*/
|
||||
export function getEventHistoryService(): EventHistoryService {
|
||||
if (!eventHistoryServiceInstance) {
|
||||
eventHistoryServiceInstance = new EventHistoryService();
|
||||
}
|
||||
return eventHistoryServiceInstance;
|
||||
}
|
||||
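A brief usage sketch for the service above (illustrative only; the feature id, name, and error text are hypothetical, and events are normally stored by EventHookService when hooks fire):

import { getEventHistoryService } from './event-history-service.js';

async function demoEventHistory(projectPath: string): Promise<void> {
  const history = getEventHistoryService();

  // Record a failed feature run
  const stored = await history.storeEvent({
    trigger: 'feature_error',
    projectPath,
    featureId: 'feature-123',
    featureName: 'Add dark mode',
    error: 'Type check failed',
  });

  // List the 20 most recent feature_error events for this project
  const recent = await history.getEvents(projectPath, { trigger: 'feature_error', limit: 20 });
  console.log(`Found ${recent.length} matching events`);

  // Fetch the full record by id, then delete it again
  const full = await history.getEvent(projectPath, stored.id);
  if (full) {
    await history.deleteEvent(projectPath, stored.id);
  }
}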
373 apps/server/src/services/event-hook-service.ts (new file)
@@ -0,0 +1,373 @@
/**
 * Event Hook Service - Executes custom actions when system events occur
 *
 * Listens to the event emitter and triggers configured hooks:
 * - Shell commands: Executed with configurable timeout
 * - HTTP webhooks: POST/GET/PUT/PATCH requests with variable substitution
 *
 * Also stores events to history for debugging and replay.
 *
 * Supported events:
 * - feature_created: A new feature was created
 * - feature_success: Feature completed successfully
 * - feature_error: Feature failed with an error
 * - auto_mode_complete: Auto mode finished all features (idle state)
 * - auto_mode_error: Auto mode encountered a critical error
 */
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { EventEmitter } from '../lib/events.js';
|
||||
import type { SettingsService } from './settings-service.js';
|
||||
import type { EventHistoryService } from './event-history-service.js';
|
||||
import type {
|
||||
EventHook,
|
||||
EventHookTrigger,
|
||||
EventHookShellAction,
|
||||
EventHookHttpAction,
|
||||
} from '@automaker/types';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
const logger = createLogger('EventHooks');
|
||||
|
||||
/** Default timeout for shell commands (30 seconds) */
|
||||
const DEFAULT_SHELL_TIMEOUT = 30000;
|
||||
|
||||
/** Default timeout for HTTP requests (10 seconds) */
|
||||
const DEFAULT_HTTP_TIMEOUT = 10000;
|
||||
|
||||
/**
|
||||
* Context available for variable substitution in hooks
|
||||
*/
|
||||
interface HookContext {
|
||||
featureId?: string;
|
||||
featureName?: string;
|
||||
projectPath?: string;
|
||||
projectName?: string;
|
||||
error?: string;
|
||||
errorType?: string;
|
||||
timestamp: string;
|
||||
eventType: EventHookTrigger;
|
||||
}
|
||||
|
||||
/**
|
||||
* Auto-mode event payload structure
|
||||
*/
|
||||
interface AutoModeEventPayload {
|
||||
type?: string;
|
||||
featureId?: string;
|
||||
passes?: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
errorType?: string;
|
||||
projectPath?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Feature created event payload structure
|
||||
*/
|
||||
interface FeatureCreatedPayload {
|
||||
featureId: string;
|
||||
featureName?: string;
|
||||
projectPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Event Hook Service
|
||||
*
|
||||
* Manages execution of user-configured event hooks in response to system events.
|
||||
* Also stores events to history for debugging and replay.
|
||||
*/
|
||||
export class EventHookService {
|
||||
private emitter: EventEmitter | null = null;
|
||||
private settingsService: SettingsService | null = null;
|
||||
private eventHistoryService: EventHistoryService | null = null;
|
||||
private unsubscribe: (() => void) | null = null;
|
||||
|
||||
/**
|
||||
* Initialize the service with event emitter, settings service, and event history service
|
||||
*/
|
||||
initialize(
|
||||
emitter: EventEmitter,
|
||||
settingsService: SettingsService,
|
||||
eventHistoryService?: EventHistoryService
|
||||
): void {
|
||||
this.emitter = emitter;
|
||||
this.settingsService = settingsService;
|
||||
this.eventHistoryService = eventHistoryService || null;
|
||||
|
||||
// Subscribe to events
|
||||
this.unsubscribe = emitter.subscribe((type, payload) => {
|
||||
if (type === 'auto-mode:event') {
|
||||
this.handleAutoModeEvent(payload as AutoModeEventPayload);
|
||||
} else if (type === 'feature:created') {
|
||||
this.handleFeatureCreatedEvent(payload as FeatureCreatedPayload);
|
||||
}
|
||||
});
|
||||
|
||||
logger.info('Event hook service initialized');
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup subscriptions
|
||||
*/
|
||||
destroy(): void {
|
||||
if (this.unsubscribe) {
|
||||
this.unsubscribe();
|
||||
this.unsubscribe = null;
|
||||
}
|
||||
this.emitter = null;
|
||||
this.settingsService = null;
|
||||
this.eventHistoryService = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle auto-mode events and trigger matching hooks
|
||||
*/
|
||||
private async handleAutoModeEvent(payload: AutoModeEventPayload): Promise<void> {
|
||||
if (!payload.type) return;
|
||||
|
||||
// Map internal event types to hook triggers
|
||||
let trigger: EventHookTrigger | null = null;
|
||||
|
||||
switch (payload.type) {
|
||||
case 'auto_mode_feature_complete':
|
||||
trigger = payload.passes ? 'feature_success' : 'feature_error';
|
||||
break;
|
||||
case 'auto_mode_error':
|
||||
// Feature-level error (has featureId) vs auto-mode level error
|
||||
trigger = payload.featureId ? 'feature_error' : 'auto_mode_error';
|
||||
break;
|
||||
case 'auto_mode_idle':
|
||||
trigger = 'auto_mode_complete';
|
||||
break;
|
||||
default:
|
||||
// Other event types don't trigger hooks
|
||||
return;
|
||||
}
|
||||
|
||||
if (!trigger) return;
|
||||
|
||||
// Build context for variable substitution
|
||||
const context: HookContext = {
|
||||
featureId: payload.featureId,
|
||||
projectPath: payload.projectPath,
|
||||
projectName: payload.projectPath ? this.extractProjectName(payload.projectPath) : undefined,
|
||||
error: payload.error || payload.message,
|
||||
errorType: payload.errorType,
|
||||
timestamp: new Date().toISOString(),
|
||||
eventType: trigger,
|
||||
};
|
||||
|
||||
// Execute matching hooks (pass passes for feature completion events)
|
||||
await this.executeHooksForTrigger(trigger, context, { passes: payload.passes });
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle feature:created events and trigger matching hooks
|
||||
*/
|
||||
private async handleFeatureCreatedEvent(payload: FeatureCreatedPayload): Promise<void> {
|
||||
const context: HookContext = {
|
||||
featureId: payload.featureId,
|
||||
featureName: payload.featureName,
|
||||
projectPath: payload.projectPath,
|
||||
projectName: this.extractProjectName(payload.projectPath),
|
||||
timestamp: new Date().toISOString(),
|
||||
eventType: 'feature_created',
|
||||
};
|
||||
|
||||
await this.executeHooksForTrigger('feature_created', context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute all enabled hooks matching the given trigger and store event to history
|
||||
*/
|
||||
private async executeHooksForTrigger(
|
||||
trigger: EventHookTrigger,
|
||||
context: HookContext,
|
||||
additionalData?: { passes?: boolean }
|
||||
): Promise<void> {
|
||||
// Store event to history (even if no hooks match)
|
||||
if (this.eventHistoryService && context.projectPath) {
|
||||
try {
|
||||
await this.eventHistoryService.storeEvent({
|
||||
trigger,
|
||||
projectPath: context.projectPath,
|
||||
featureId: context.featureId,
|
||||
featureName: context.featureName,
|
||||
error: context.error,
|
||||
errorType: context.errorType,
|
||||
passes: additionalData?.passes,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to store event to history:', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.settingsService) {
|
||||
logger.warn('Settings service not available');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const settings = await this.settingsService.getGlobalSettings();
|
||||
const hooks = settings.eventHooks || [];
|
||||
|
||||
// Filter to enabled hooks matching this trigger
|
||||
const matchingHooks = hooks.filter((hook) => hook.enabled && hook.trigger === trigger);
|
||||
|
||||
if (matchingHooks.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Executing ${matchingHooks.length} hook(s) for trigger: ${trigger}`);
|
||||
|
||||
// Execute hooks in parallel (don't wait for one to finish before starting the next)
|
||||
await Promise.allSettled(matchingHooks.map((hook) => this.executeHook(hook, context)));
|
||||
} catch (error) {
|
||||
logger.error('Error executing hooks:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a single hook
|
||||
*/
|
||||
private async executeHook(hook: EventHook, context: HookContext): Promise<void> {
|
||||
const hookName = hook.name || hook.id;
|
||||
|
||||
try {
|
||||
if (hook.action.type === 'shell') {
|
||||
await this.executeShellHook(hook.action, context, hookName);
|
||||
} else if (hook.action.type === 'http') {
|
||||
await this.executeHttpHook(hook.action, context, hookName);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Hook "${hookName}" failed:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a shell command hook
|
||||
*/
|
||||
private async executeShellHook(
|
||||
action: EventHookShellAction,
|
||||
context: HookContext,
|
||||
hookName: string
|
||||
): Promise<void> {
|
||||
const command = this.substituteVariables(action.command, context);
|
||||
const timeout = action.timeout || DEFAULT_SHELL_TIMEOUT;
|
||||
|
||||
logger.info(`Executing shell hook "${hookName}": ${command}`);
|
||||
|
||||
try {
|
||||
const { stdout, stderr } = await execAsync(command, {
|
||||
timeout,
|
||||
maxBuffer: 1024 * 1024, // 1MB buffer
|
||||
});
|
||||
|
||||
if (stdout) {
|
||||
logger.debug(`Hook "${hookName}" stdout: ${stdout.trim()}`);
|
||||
}
|
||||
if (stderr) {
|
||||
logger.warn(`Hook "${hookName}" stderr: ${stderr.trim()}`);
|
||||
}
|
||||
|
||||
logger.info(`Shell hook "${hookName}" completed successfully`);
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ETIMEDOUT') {
|
||||
logger.error(`Shell hook "${hookName}" timed out after ${timeout}ms`);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute an HTTP webhook hook
|
||||
*/
|
||||
private async executeHttpHook(
|
||||
action: EventHookHttpAction,
|
||||
context: HookContext,
|
||||
hookName: string
|
||||
): Promise<void> {
|
||||
const url = this.substituteVariables(action.url, context);
|
||||
const method = action.method || 'POST';
|
||||
|
||||
// Substitute variables in headers
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
};
|
||||
if (action.headers) {
|
||||
for (const [key, value] of Object.entries(action.headers)) {
|
||||
headers[key] = this.substituteVariables(value, context);
|
||||
}
|
||||
}
|
||||
|
||||
// Substitute variables in body
|
||||
let body: string | undefined;
|
||||
if (action.body) {
|
||||
body = this.substituteVariables(action.body, context);
|
||||
} else if (method !== 'GET') {
|
||||
// Default body with context information
|
||||
body = JSON.stringify({
|
||||
eventType: context.eventType,
|
||||
timestamp: context.timestamp,
|
||||
featureId: context.featureId,
|
||||
projectPath: context.projectPath,
|
||||
projectName: context.projectName,
|
||||
error: context.error,
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`Executing HTTP hook "${hookName}": ${method} ${url}`);
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), DEFAULT_HTTP_TIMEOUT);
|
||||
|
||||
const response = await fetch(url, {
|
||||
method,
|
||||
headers,
|
||||
body: method !== 'GET' ? body : undefined,
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn(`HTTP hook "${hookName}" received status ${response.status}`);
|
||||
} else {
|
||||
logger.info(`HTTP hook "${hookName}" completed successfully (status: ${response.status})`);
|
||||
}
|
||||
} catch (error) {
|
||||
if ((error as Error).name === 'AbortError') {
|
||||
logger.error(`HTTP hook "${hookName}" timed out after ${DEFAULT_HTTP_TIMEOUT}ms`);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Substitute {{variable}} placeholders in a string
|
||||
*/
|
||||
private substituteVariables(template: string, context: HookContext): string {
|
||||
return template.replace(/\{\{(\w+)\}\}/g, (match, variable) => {
|
||||
const value = context[variable as keyof HookContext];
|
||||
if (value === undefined || value === null) {
|
||||
return '';
|
||||
}
|
||||
return String(value);
|
||||
});
|
||||
}
|
||||
|
||||
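// Illustrative only (not part of the diff): given a hook action template such as
//   'notify-send "AutoMaker" "{{featureName}}: {{eventType}} at {{timestamp}}"'
// and a context like
//   { featureName: 'Dark mode', eventType: 'feature_success', timestamp: '2025-01-01T00:00:00.000Z', ... }
// substituteVariables() yields
//   notify-send "AutoMaker" "Dark mode: feature_success at 2025-01-01T00:00:00.000Z"
// Variables missing from the context (e.g. {{error}} on a success event) expand to ''.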
/**
|
||||
* Extract project name from path
|
||||
*/
|
||||
private extractProjectName(projectPath: string): string {
|
||||
const parts = projectPath.split(/[/\\]/);
|
||||
return parts[parts.length - 1] || projectPath;
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
export const eventHookService = new EventHookService();
|
||||
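For reference, a sketch of what entries in settings.eventHooks might look like (illustrative only; the ids, command, URL, and token are hypothetical, and only the fields this service reads are shown):

const exampleHooks = [
  {
    id: 'hook-notify-success',
    name: 'Desktop notification on success',
    enabled: true,
    trigger: 'feature_success',
    action: {
      type: 'shell',
      command: 'notify-send "AutoMaker" "{{featureName}} completed"',
      timeout: 15000,
    },
  },
  {
    id: 'hook-webhook-error',
    name: 'Post failures to a webhook',
    enabled: true,
    trigger: 'feature_error',
    action: {
      type: 'http',
      url: 'https://example.com/automaker-events',
      method: 'POST',
      headers: { Authorization: 'Bearer <token>' },
      body: '{"feature":"{{featureName}}","error":"{{error}}","at":"{{timestamp}}"}',
    },
  },
];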
@@ -5,14 +5,22 @@
|
||||
|
||||
import path from 'path';
|
||||
import type { Feature, DescriptionHistoryEntry } from '@automaker/types';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import {
|
||||
createLogger,
|
||||
atomicWriteJson,
|
||||
readJsonWithRecovery,
|
||||
logRecoveryWarning,
|
||||
DEFAULT_BACKUP_COUNT,
|
||||
} from '@automaker/utils';
|
||||
import * as secureFs from '../lib/secure-fs.js';
|
||||
import {
|
||||
getFeaturesDir,
|
||||
getFeatureDir,
|
||||
getFeatureImagesDir,
|
||||
getAppSpecPath,
|
||||
ensureAutomakerDir,
|
||||
} from '@automaker/platform';
|
||||
import { addImplementedFeature, type ImplementedFeature } from '../lib/xml-extractor.js';
|
||||
|
||||
const logger = createLogger('FeatureLoader');
|
||||
|
||||
@@ -192,31 +200,31 @@ export class FeatureLoader {
|
||||
})) as any[];
|
||||
const featureDirs = entries.filter((entry) => entry.isDirectory());
|
||||
|
||||
// Load all features concurrently (secureFs has built-in concurrency limiting)
|
||||
// Load all features concurrently with automatic recovery from backups
|
||||
const featurePromises = featureDirs.map(async (dir) => {
|
||||
const featureId = dir.name;
|
||||
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
|
||||
|
||||
try {
|
||||
const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
|
||||
const feature = JSON.parse(content);
|
||||
// Use recovery-enabled read to handle corrupted files
|
||||
const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
|
||||
maxBackups: DEFAULT_BACKUP_COUNT,
|
||||
autoRestore: true,
|
||||
});
|
||||
|
||||
if (!feature.id) {
|
||||
logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
|
||||
return null;
|
||||
}
|
||||
logRecoveryWarning(result, `Feature ${featureId}`, logger);
|
||||
|
||||
return feature as Feature;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return null;
|
||||
} else if (error instanceof SyntaxError) {
|
||||
logger.warn(`Failed to parse feature.json for ${featureId}: ${error.message}`);
|
||||
} else {
|
||||
logger.error(`Failed to load feature ${featureId}:`, (error as Error).message);
|
||||
}
|
||||
const feature = result.data;
|
||||
|
||||
if (!feature) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!feature.id) {
|
||||
logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return feature;
|
||||
});
|
||||
|
||||
const results = await Promise.all(featurePromises);
|
||||
@@ -236,21 +244,85 @@ export class FeatureLoader {
}
}

/**
* Normalize a title for comparison (case-insensitive, trimmed)
*/
private normalizeTitle(title: string): string {
return title.toLowerCase().trim();
}

/**
* Find a feature by its title (case-insensitive match)
* @param projectPath - Path to the project
* @param title - Title to search for
* @returns The matching feature or null if not found
*/
async findByTitle(projectPath: string, title: string): Promise<Feature | null> {
if (!title || !title.trim()) {
return null;
}

const normalizedTitle = this.normalizeTitle(title);
const features = await this.getAll(projectPath);

for (const feature of features) {
if (feature.title && this.normalizeTitle(feature.title) === normalizedTitle) {
return feature;
}
}

return null;
}

/**
* Check if a title already exists on another feature (for duplicate detection)
* @param projectPath - Path to the project
* @param title - Title to check
* @param excludeFeatureId - Optional feature ID to exclude from the check (for updates)
* @returns The duplicate feature if found, null otherwise
*/
async findDuplicateTitle(
projectPath: string,
title: string,
excludeFeatureId?: string
): Promise<Feature | null> {
if (!title || !title.trim()) {
return null;
}

const normalizedTitle = this.normalizeTitle(title);
const features = await this.getAll(projectPath);

for (const feature of features) {
// Skip the feature being updated (if provided)
if (excludeFeatureId && feature.id === excludeFeatureId) {
continue;
}

if (feature.title && this.normalizeTitle(feature.title) === normalizedTitle) {
return feature;
}
}

return null;
}

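A hypothetical call site for the duplicate check above, for example inside a create/update handler; the handler shape and error message are illustrative only.

async function assertTitleAvailable(
  loader: FeatureLoader,
  projectPath: string,
  title: string,
  excludeFeatureId?: string
): Promise<void> {
  // Pass the feature's own ID on update so it does not collide with itself
  const duplicate = await loader.findDuplicateTitle(projectPath, title, excludeFeatureId);
  if (duplicate) {
    throw new Error(`A feature titled "${duplicate.title}" already exists (${duplicate.id})`);
  }
}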
/**
* Get a single feature by ID
* Uses automatic recovery from backups if the main file is corrupted
*/
async get(projectPath: string, featureId: string): Promise<Feature | null> {
try {
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
return JSON.parse(content);
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
}
logger.error(`Failed to get feature ${featureId}:`, error);
throw error;
}
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

// Use recovery-enabled read to handle corrupted files
const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
maxBackups: DEFAULT_BACKUP_COUNT,
autoRestore: true,
});

logRecoveryWarning(result, `Feature ${featureId}`, logger);

return result.data;
}

/**
@@ -294,8 +366,8 @@ export class FeatureLoader {
descriptionHistory: initialHistory,
};

// Write feature.json
await secureFs.writeFile(featureJsonPath, JSON.stringify(feature, null, 2), 'utf-8');
// Write feature.json atomically with backup support
await atomicWriteJson(featureJsonPath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

logger.info(`Created feature ${featureId}`);
return feature;
@@ -379,9 +451,9 @@ export class FeatureLoader {
descriptionHistory: updatedHistory,
};

// Write back to file
// Write back to file atomically with backup support
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
await secureFs.writeFile(featureJsonPath, JSON.stringify(updatedFeature, null, 2), 'utf-8');
await atomicWriteJson(featureJsonPath, updatedFeature, { backupCount: DEFAULT_BACKUP_COUNT });

logger.info(`Updated feature ${featureId}`);
return updatedFeature;
@@ -460,4 +532,64 @@ export class FeatureLoader {
}
}
}

/**
* Sync a completed feature to the app_spec.txt implemented_features section
*
* When a feature is completed, this method adds it to the implemented_features
* section of the project's app_spec.txt file. This keeps the spec in sync
* with the actual state of the codebase.
*
* @param projectPath - Path to the project
* @param feature - The feature to sync (must have title or description)
* @param fileLocations - Optional array of file paths where the feature was implemented
* @returns True if the spec was updated, false if no spec exists or feature was skipped
*/
async syncFeatureToAppSpec(
projectPath: string,
feature: Feature,
fileLocations?: string[]
): Promise<boolean> {
try {
const appSpecPath = getAppSpecPath(projectPath);

// Read the current app_spec.txt
let specContent: string;
try {
specContent = (await secureFs.readFile(appSpecPath, 'utf-8')) as string;
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
logger.info(`No app_spec.txt found for project, skipping sync for feature ${feature.id}`);
return false;
}
throw error;
}

// Build the implemented feature entry
const featureName = feature.title || `Feature: ${feature.id}`;
const implementedFeature: ImplementedFeature = {
name: featureName,
description: feature.description,
...(fileLocations && fileLocations.length > 0 ? { file_locations: fileLocations } : {}),
};

// Add the feature to the implemented_features section
const updatedSpecContent = addImplementedFeature(specContent, implementedFeature);

// Check if the content actually changed (feature might already exist)
if (updatedSpecContent === specContent) {
logger.info(`Feature "${featureName}" already exists in app_spec.txt, skipping`);
return false;
}

// Write the updated spec back to the file
await secureFs.writeFile(appSpecPath, updatedSpecContent, 'utf-8');

logger.info(`Synced feature "${featureName}" to app_spec.txt`);
return true;
} catch (error) {
logger.error(`Failed to sync feature ${feature.id} to app_spec.txt:`, error);
throw error;
}
}
}

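An illustrative call site for syncFeatureToAppSpec, for example when a feature transitions to a completed status; the surrounding handler is hypothetical.

async function onFeatureCompleted(
  loader: FeatureLoader,
  projectPath: string,
  feature: Feature,
  changedFiles: string[]
): Promise<void> {
  const synced = await loader.syncFeatureToAppSpec(projectPath, feature, changedFiles);
  if (!synced) {
    // No app_spec.txt in the project, or the feature was already listed; nothing to do
    return;
  }
}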
@@ -41,6 +41,7 @@ import type { FeatureLoader } from './feature-loader.js';
|
||||
import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js';
|
||||
import { resolveModelString } from '@automaker/model-resolver';
|
||||
import { stripProviderPrefix } from '@automaker/types';
|
||||
import { getPromptCustomization } from '../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('IdeationService');
|
||||
|
||||
@@ -195,8 +196,12 @@ export class IdeationService {
|
||||
// Gather existing features and ideas to prevent duplicate suggestions
|
||||
const existingWorkContext = await this.gatherExistingWorkContext(projectPath);
|
||||
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(this.settingsService, '[IdeationService]');
|
||||
|
||||
// Build system prompt for ideation
|
||||
const systemPrompt = this.buildIdeationSystemPrompt(
|
||||
prompts.ideation.ideationSystemPrompt,
|
||||
contextResult.formattedPrompt,
|
||||
activeSession.session.promptCategory,
|
||||
existingWorkContext
|
||||
@@ -645,8 +650,12 @@ export class IdeationService {
|
||||
// Gather existing features and ideas to prevent duplicates
|
||||
const existingWorkContext = await this.gatherExistingWorkContext(projectPath);
|
||||
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(this.settingsService, '[IdeationService]');
|
||||
|
||||
// Build system prompt for structured suggestions
|
||||
const systemPrompt = this.buildSuggestionsSystemPrompt(
|
||||
prompts.ideation.suggestionsSystemPrompt,
|
||||
contextPrompt,
|
||||
category,
|
||||
count,
|
||||
@@ -721,8 +730,14 @@ export class IdeationService {
|
||||
|
||||
/**
|
||||
* Build system prompt for structured suggestion generation
|
||||
* @param basePrompt - The base system prompt from settings
|
||||
* @param contextFilesPrompt - Project context from loaded files
|
||||
* @param category - The idea category to focus on
|
||||
* @param count - Number of suggestions to generate
|
||||
* @param existingWorkContext - Context about existing features/ideas
|
||||
*/
|
||||
private buildSuggestionsSystemPrompt(
|
||||
basePrompt: string,
|
||||
contextFilesPrompt: string | undefined,
|
||||
category: IdeaCategory,
|
||||
count: number = 10,
|
||||
@@ -734,35 +749,18 @@ export class IdeationService {
|
||||
|
||||
const existingWorkSection = existingWorkContext ? `\n\n${existingWorkContext}` : '';
|
||||
|
||||
return `You are an AI product strategist helping brainstorm feature ideas for a software project.
|
||||
// Replace placeholder {{count}} if present, otherwise append count instruction
|
||||
let prompt = basePrompt;
|
||||
if (prompt.includes('{{count}}')) {
|
||||
prompt = prompt.replace(/\{\{count\}\}/g, String(count));
|
||||
} else {
|
||||
prompt += `\n\nGenerate exactly ${count} suggestions.`;
|
||||
}
|
||||
|
||||
IMPORTANT: You do NOT have access to any tools. You CANNOT read files, search code, or run commands.
|
||||
You must generate suggestions based ONLY on the project context provided below.
|
||||
Do NOT say "I'll analyze" or "Let me explore" - you cannot do those things.
|
||||
|
||||
Based on the project context and the user's prompt, generate exactly ${count} creative and actionable feature suggestions.
|
||||
|
||||
YOUR RESPONSE MUST BE ONLY A JSON ARRAY - nothing else. No explanation, no preamble, no markdown code fences.
|
||||
|
||||
Each suggestion must have this structure:
|
||||
{
|
||||
"title": "Short, actionable title (max 60 chars)",
|
||||
"description": "Clear description of what to build or improve (2-3 sentences)",
|
||||
"rationale": "Why this is valuable - the problem it solves or opportunity it creates",
|
||||
"priority": "high" | "medium" | "low"
|
||||
}
|
||||
return `${prompt}
|
||||
|
||||
Focus area: ${this.getCategoryDescription(category)}
|
||||
|
||||
Guidelines:
|
||||
- Generate exactly ${count} suggestions
|
||||
- Be specific and actionable - avoid vague ideas
|
||||
- Mix different priority levels (some high, some medium, some low)
|
||||
- Each suggestion should be independently implementable
|
||||
- Think creatively - include both obvious improvements and innovative ideas
|
||||
- Consider the project's domain and target users
|
||||
- IMPORTANT: Do NOT suggest features or ideas that already exist in the "Existing Features" or "Existing Ideas" sections below
|
||||
|
||||
${contextSection}${existingWorkSection}`;
|
||||
}
|
||||
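As a concrete example of the {{count}} handling above (the prompt text is made up):

const base = 'Suggest {{count}} improvements and return them as a JSON array.';
const substituted = base.replace(/\{\{count\}\}/g, String(5));
// -> 'Suggest 5 improvements and return them as a JSON array.'
// A base prompt without the placeholder instead gets "\n\nGenerate exactly 5 suggestions." appended.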
|
||||
@@ -1269,30 +1267,11 @@ ${contextSection}${existingWorkSection}`;
|
||||
// ============================================================================
|
||||
|
||||
private buildIdeationSystemPrompt(
|
||||
basePrompt: string,
|
||||
contextFilesPrompt: string | undefined,
|
||||
category?: IdeaCategory,
|
||||
existingWorkContext?: string
|
||||
): string {
|
||||
const basePrompt = `You are an AI product strategist and UX expert helping brainstorm ideas for improving a software project.
|
||||
|
||||
Your role is to:
|
||||
- Analyze the codebase structure and patterns
|
||||
- Identify opportunities for improvement
|
||||
- Suggest actionable ideas with clear rationale
|
||||
- Consider user experience, technical feasibility, and business value
|
||||
- Be specific and reference actual files/components when possible
|
||||
|
||||
When suggesting ideas:
|
||||
1. Provide a clear, concise title
|
||||
2. Explain the problem or opportunity
|
||||
3. Describe the proposed solution
|
||||
4. Highlight the expected benefit
|
||||
5. Note any dependencies or considerations
|
||||
|
||||
IMPORTANT: Do NOT suggest features or ideas that already exist in the project. Check the "Existing Features" and "Existing Ideas" sections below to avoid duplicates.
|
||||
|
||||
Focus on practical, implementable suggestions that would genuinely improve the product.`;
|
||||
|
||||
const categoryContext = category
|
||||
? `\n\nFocus area: ${this.getCategoryDescription(category)}`
|
||||
: '';
|
||||
|
||||
280
apps/server/src/services/notification-service.ts
Normal file
@@ -0,0 +1,280 @@
/**
* Notification Service - Handles reading/writing notifications to JSON files
*
* Provides persistent storage for project-level notifications in
* {projectPath}/.automaker/notifications.json
*
* Notifications alert users when:
* - Features reach specific statuses (waiting_approval, verified)
* - Long-running operations complete (spec generation)
*/

import { createLogger } from '@automaker/utils';
import * as secureFs from '../lib/secure-fs.js';
import { getNotificationsPath, ensureAutomakerDir } from '@automaker/platform';
import type { Notification, NotificationsFile, NotificationType } from '@automaker/types';
import { DEFAULT_NOTIFICATIONS_FILE } from '@automaker/types';
import type { EventEmitter } from '../lib/events.js';
import { randomUUID } from 'crypto';

const logger = createLogger('NotificationService');

/**
* Atomic file write - write to temp file then rename
*/
async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
const tempPath = `${filePath}.tmp.${Date.now()}`;
const content = JSON.stringify(data, null, 2);

try {
await secureFs.writeFile(tempPath, content, 'utf-8');
await secureFs.rename(tempPath, filePath);
} catch (error) {
// Clean up temp file if it exists
try {
await secureFs.unlink(tempPath);
} catch {
// Ignore cleanup errors
}
throw error;
}
}

/**
* Safely read JSON file with fallback to default
*/
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
try {
const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
return JSON.parse(content) as T;
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return defaultValue;
}
logger.error(`Error reading ${filePath}:`, error);
return defaultValue;
}
}

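A minimal usage sketch for the two helpers above; the path is hypothetical and the notification is assumed to be built elsewhere.

async function appendNotification(notification: Notification): Promise<void> {
  const examplePath = '/tmp/project/.automaker/notifications.json';
  const file = await readJsonFile<NotificationsFile>(examplePath, DEFAULT_NOTIFICATIONS_FILE);
  file.notifications.push(notification);
  // Temp-file-plus-rename means readers never observe a half-written JSON document
  await atomicWriteJson(examplePath, file);
}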
/**
|
||||
* Input for creating a new notification
|
||||
*/
|
||||
export interface CreateNotificationInput {
|
||||
type: NotificationType;
|
||||
title: string;
|
||||
message: string;
|
||||
featureId?: string;
|
||||
projectPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* NotificationService - Manages persistent storage of notifications
|
||||
*
|
||||
* Handles reading and writing notifications to JSON files with atomic operations
|
||||
* for reliability. Each project has its own notifications.json file.
|
||||
*/
|
||||
export class NotificationService {
|
||||
private events: EventEmitter | null = null;
|
||||
|
||||
/**
|
||||
* Set the event emitter for broadcasting notification events
|
||||
*/
|
||||
setEventEmitter(events: EventEmitter): void {
|
||||
this.events = events;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all notifications for a project
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Promise resolving to array of notifications
|
||||
*/
|
||||
async getNotifications(projectPath: string): Promise<Notification[]> {
|
||||
const notificationsPath = getNotificationsPath(projectPath);
|
||||
const file = await readJsonFile<NotificationsFile>(
|
||||
notificationsPath,
|
||||
DEFAULT_NOTIFICATIONS_FILE
|
||||
);
|
||||
// Filter out dismissed notifications and sort by date (newest first)
|
||||
return file.notifications
|
||||
.filter((n) => !n.dismissed)
|
||||
.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unread notification count for a project
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Promise resolving to unread count
|
||||
*/
|
||||
async getUnreadCount(projectPath: string): Promise<number> {
|
||||
const notifications = await this.getNotifications(projectPath);
|
||||
return notifications.filter((n) => !n.read).length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new notification
|
||||
*
|
||||
* @param input - Notification creation input
|
||||
* @returns Promise resolving to the created notification
|
||||
*/
|
||||
async createNotification(input: CreateNotificationInput): Promise<Notification> {
|
||||
const { projectPath, type, title, message, featureId } = input;
|
||||
|
||||
// Ensure automaker directory exists
|
||||
await ensureAutomakerDir(projectPath);
|
||||
|
||||
const notificationsPath = getNotificationsPath(projectPath);
|
||||
const file = await readJsonFile<NotificationsFile>(
|
||||
notificationsPath,
|
||||
DEFAULT_NOTIFICATIONS_FILE
|
||||
);
|
||||
|
||||
const notification: Notification = {
|
||||
id: randomUUID(),
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
createdAt: new Date().toISOString(),
|
||||
read: false,
|
||||
dismissed: false,
|
||||
featureId,
|
||||
projectPath,
|
||||
};
|
||||
|
||||
file.notifications.push(notification);
|
||||
await atomicWriteJson(notificationsPath, file);
|
||||
|
||||
logger.info(`Created notification: ${title} for project ${projectPath}`);
|
||||
|
||||
// Emit event for real-time updates
|
||||
if (this.events) {
|
||||
this.events.emit('notification:created', notification);
|
||||
}
|
||||
|
||||
return notification;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a notification as read
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param notificationId - ID of the notification to mark as read
|
||||
* @returns Promise resolving to the updated notification or null if not found
|
||||
*/
|
||||
async markAsRead(projectPath: string, notificationId: string): Promise<Notification | null> {
|
||||
const notificationsPath = getNotificationsPath(projectPath);
|
||||
const file = await readJsonFile<NotificationsFile>(
|
||||
notificationsPath,
|
||||
DEFAULT_NOTIFICATIONS_FILE
|
||||
);
|
||||
|
||||
const notification = file.notifications.find((n) => n.id === notificationId);
|
||||
if (!notification) {
|
||||
return null;
|
||||
}
|
||||
|
||||
notification.read = true;
|
||||
await atomicWriteJson(notificationsPath, file);
|
||||
|
||||
logger.info(`Marked notification ${notificationId} as read`);
|
||||
return notification;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark all notifications as read for a project
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Promise resolving to number of notifications marked as read
|
||||
*/
|
||||
async markAllAsRead(projectPath: string): Promise<number> {
|
||||
const notificationsPath = getNotificationsPath(projectPath);
|
||||
const file = await readJsonFile<NotificationsFile>(
|
||||
notificationsPath,
|
||||
DEFAULT_NOTIFICATIONS_FILE
|
||||
);
|
||||
|
||||
let count = 0;
|
||||
for (const notification of file.notifications) {
|
||||
if (!notification.read && !notification.dismissed) {
|
||||
notification.read = true;
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
if (count > 0) {
|
||||
await atomicWriteJson(notificationsPath, file);
|
||||
logger.info(`Marked ${count} notifications as read`);
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dismiss a notification
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @param notificationId - ID of the notification to dismiss
|
||||
* @returns Promise resolving to true if notification was dismissed
|
||||
*/
|
||||
async dismissNotification(projectPath: string, notificationId: string): Promise<boolean> {
|
||||
const notificationsPath = getNotificationsPath(projectPath);
|
||||
const file = await readJsonFile<NotificationsFile>(
|
||||
notificationsPath,
|
||||
DEFAULT_NOTIFICATIONS_FILE
|
||||
);
|
||||
|
||||
const notification = file.notifications.find((n) => n.id === notificationId);
|
||||
if (!notification) {
|
||||
return false;
|
||||
}
|
||||
|
||||
notification.dismissed = true;
|
||||
await atomicWriteJson(notificationsPath, file);
|
||||
|
||||
logger.info(`Dismissed notification ${notificationId}`);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dismiss all notifications for a project
|
||||
*
|
||||
* @param projectPath - Absolute path to project directory
|
||||
* @returns Promise resolving to number of notifications dismissed
|
||||
*/
|
||||
async dismissAll(projectPath: string): Promise<number> {
|
||||
const notificationsPath = getNotificationsPath(projectPath);
|
||||
const file = await readJsonFile<NotificationsFile>(
|
||||
notificationsPath,
|
||||
DEFAULT_NOTIFICATIONS_FILE
|
||||
);
|
||||
|
||||
let count = 0;
|
||||
for (const notification of file.notifications) {
|
||||
if (!notification.dismissed) {
|
||||
notification.dismissed = true;
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
if (count > 0) {
|
||||
await atomicWriteJson(notificationsPath, file);
|
||||
logger.info(`Dismissed ${count} notifications`);
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
let notificationServiceInstance: NotificationService | null = null;
|
||||
|
||||
/**
|
||||
* Get the singleton notification service instance
|
||||
*/
|
||||
export function getNotificationService(): NotificationService {
|
||||
if (!notificationServiceInstance) {
|
||||
notificationServiceInstance = new NotificationService();
|
||||
}
|
||||
return notificationServiceInstance;
|
||||
}
|
||||
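An illustrative end-to-end use of the service above; the notification type value is an assumption, so it is cast rather than relied on.

async function notifyAwaitingApproval(projectPath: string, featureId: string): Promise<void> {
  const notifications = getNotificationService();
  const created = await notifications.createNotification({
    type: 'feature_waiting_approval' as NotificationType, // assumed member of NotificationType
    title: 'Feature awaiting approval',
    message: `Feature ${featureId} is ready for review`,
    featureId,
    projectPath,
  });
  // Later, in response to user actions:
  await notifications.markAsRead(projectPath, created.id);
  await notifications.dismissNotification(projectPath, created.id);
}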
@@ -7,7 +7,7 @@
|
||||
* - Per-project settings ({projectPath}/.automaker/settings.json)
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
|
||||
import * as secureFs from '../lib/secure-fs.js';
|
||||
|
||||
import {
|
||||
@@ -42,28 +42,8 @@ import {
|
||||
const logger = createLogger('SettingsService');
|
||||
|
||||
/**
|
||||
* Atomic file write - write to temp file then rename
|
||||
*/
|
||||
async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
|
||||
const tempPath = `${filePath}.tmp.${Date.now()}`;
|
||||
const content = JSON.stringify(data, null, 2);
|
||||
|
||||
try {
|
||||
await secureFs.writeFile(tempPath, content, 'utf-8');
|
||||
await secureFs.rename(tempPath, filePath);
|
||||
} catch (error) {
|
||||
// Clean up temp file if it exists
|
||||
try {
|
||||
await secureFs.unlink(tempPath);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely read JSON file with fallback to default
|
||||
* Wrapper for readJsonFile from utils that uses the local secureFs
|
||||
* to maintain compatibility with the server's secure file system
|
||||
*/
|
||||
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
|
||||
try {
|
||||
@@ -90,6 +70,13 @@ async function fileExists(filePath: string): Promise<boolean> {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write settings atomically with backup support
|
||||
*/
|
||||
async function writeSettingsJson(filePath: string, data: unknown): Promise<void> {
|
||||
await atomicWriteJson(filePath, data, { backupCount: DEFAULT_BACKUP_COUNT });
|
||||
}
|
||||
|
||||
/**
|
||||
* SettingsService - Manages persistent storage of user settings and credentials
|
||||
*
|
||||
@@ -180,7 +167,7 @@ export class SettingsService {
|
||||
if (needsSave) {
|
||||
try {
|
||||
await ensureDataDir(this.dataDir);
|
||||
await atomicWriteJson(settingsPath, result);
|
||||
await writeSettingsJson(settingsPath, result);
|
||||
logger.info('Settings migration complete');
|
||||
} catch (error) {
|
||||
logger.error('Failed to save migrated settings:', error);
|
||||
@@ -340,7 +327,7 @@ export class SettingsService {
|
||||
};
|
||||
}
|
||||
|
||||
await atomicWriteJson(settingsPath, updated);
|
||||
await writeSettingsJson(settingsPath, updated);
|
||||
logger.info('Global settings updated');
|
||||
|
||||
return updated;
|
||||
@@ -414,7 +401,7 @@ export class SettingsService {
|
||||
};
|
||||
}
|
||||
|
||||
await atomicWriteJson(credentialsPath, updated);
|
||||
await writeSettingsJson(credentialsPath, updated);
|
||||
logger.info('Credentials updated');
|
||||
|
||||
return updated;
|
||||
@@ -525,7 +512,7 @@ export class SettingsService {
|
||||
};
|
||||
}
|
||||
|
||||
await atomicWriteJson(settingsPath, updated);
|
||||
await writeSettingsJson(settingsPath, updated);
|
||||
logger.info(`Project settings updated for ${projectPath}`);
|
||||
|
||||
return updated;
|
||||
|
||||
@@ -70,6 +70,29 @@ export class TerminalService extends EventEmitter {
|
||||
private sessions: Map<string, TerminalSession> = new Map();
|
||||
private dataCallbacks: Set<DataCallback> = new Set();
|
||||
private exitCallbacks: Set<ExitCallback> = new Set();
|
||||
private isWindows = os.platform() === 'win32';
|
||||
// On Windows, ConPTY requires AttachConsole which fails in Electron/service mode
|
||||
// Detect Electron by checking for electron-specific env vars or process properties
|
||||
private isElectron =
|
||||
!!(process.versions && (process.versions as Record<string, string>).electron) ||
|
||||
!!process.env.ELECTRON_RUN_AS_NODE;
|
||||
private useConptyFallback = false; // Track if we need to use winpty fallback on Windows
|
||||
|
||||
/**
|
||||
* Kill a PTY process with platform-specific handling.
|
||||
* Windows doesn't support Unix signals like SIGTERM/SIGKILL, so we call kill() without arguments.
|
||||
* On Unix-like systems (macOS, Linux), we can specify the signal.
|
||||
*
|
||||
* @param ptyProcess - The PTY process to kill
|
||||
* @param signal - The signal to send on Unix-like systems (default: 'SIGTERM')
|
||||
*/
|
||||
private killPtyProcess(ptyProcess: pty.IPty, signal: string = 'SIGTERM'): void {
|
||||
if (this.isWindows) {
|
||||
ptyProcess.kill();
|
||||
} else {
|
||||
ptyProcess.kill(signal);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect the best shell for the current platform
|
||||
@@ -322,13 +345,60 @@ export class TerminalService extends EventEmitter {
|
||||
|
||||
logger.info(`Creating session ${id} with shell: ${shell} in ${cwd}`);
|
||||
|
||||
const ptyProcess = pty.spawn(shell, shellArgs, {
|
||||
// Build PTY spawn options
|
||||
const ptyOptions: pty.IPtyForkOptions = {
|
||||
name: 'xterm-256color',
|
||||
cols: options.cols || 80,
|
||||
rows: options.rows || 24,
|
||||
cwd,
|
||||
env,
|
||||
});
|
||||
};
|
||||
|
||||
// On Windows, always use winpty instead of ConPTY
|
||||
// ConPTY requires AttachConsole which fails in many contexts:
|
||||
// - Electron apps without a console
|
||||
// - VS Code integrated terminal
|
||||
// - Spawned from other applications
|
||||
// The error happens in a subprocess so we can't catch it - must proactively disable
|
||||
if (this.isWindows) {
|
||||
(ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
|
||||
logger.info(
|
||||
`[createSession] Using winpty for session ${id} (ConPTY disabled for compatibility)`
|
||||
);
|
||||
}
|
||||
|
||||
let ptyProcess: pty.IPty;
|
||||
try {
|
||||
ptyProcess = pty.spawn(shell, shellArgs, ptyOptions);
|
||||
} catch (spawnError) {
|
||||
const errorMessage = spawnError instanceof Error ? spawnError.message : String(spawnError);
|
||||
|
||||
// Check for Windows ConPTY-specific errors
|
||||
if (this.isWindows && errorMessage.includes('AttachConsole failed')) {
|
||||
// ConPTY failed - try winpty fallback
|
||||
if (!this.useConptyFallback) {
|
||||
logger.warn(`[createSession] ConPTY AttachConsole failed, retrying with winpty fallback`);
|
||||
this.useConptyFallback = true;
|
||||
|
||||
try {
|
||||
(ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
|
||||
ptyProcess = pty.spawn(shell, shellArgs, ptyOptions);
|
||||
logger.info(`[createSession] Successfully spawned session ${id} with winpty fallback`);
|
||||
} catch (fallbackError) {
|
||||
const fallbackMessage =
|
||||
fallbackError instanceof Error ? fallbackError.message : String(fallbackError);
|
||||
logger.error(`[createSession] Winpty fallback also failed:`, fallbackMessage);
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
logger.error(`[createSession] PTY spawn failed (winpty):`, errorMessage);
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
logger.error(`[createSession] PTY spawn failed:`, errorMessage);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const session: TerminalSession = {
|
||||
id,
|
||||
@@ -392,7 +462,11 @@ export class TerminalService extends EventEmitter {
|
||||
|
||||
// Handle exit
|
||||
ptyProcess.onExit(({ exitCode }) => {
|
||||
logger.info(`Session ${id} exited with code ${exitCode}`);
|
||||
const exitMessage =
|
||||
exitCode === undefined || exitCode === null
|
||||
? 'Session terminated'
|
||||
: `Session exited with code ${exitCode}`;
|
||||
logger.info(`${exitMessage} (${id})`);
|
||||
this.sessions.delete(id);
|
||||
this.exitCallbacks.forEach((cb) => cb(id, exitCode));
|
||||
this.emit('exit', id, exitCode);
|
||||
@@ -477,8 +551,9 @@ export class TerminalService extends EventEmitter {
|
||||
}
|
||||
|
||||
// First try graceful SIGTERM to allow process cleanup
|
||||
// On Windows, killPtyProcess calls kill() without signal since Windows doesn't support Unix signals
|
||||
logger.info(`Session ${sessionId} sending SIGTERM`);
|
||||
session.pty.kill('SIGTERM');
|
||||
this.killPtyProcess(session.pty, 'SIGTERM');
|
||||
|
||||
// Schedule SIGKILL fallback if process doesn't exit gracefully
|
||||
// The onExit handler will remove session from map when it actually exits
|
||||
@@ -486,7 +561,7 @@ export class TerminalService extends EventEmitter {
|
||||
if (this.sessions.has(sessionId)) {
|
||||
logger.info(`Session ${sessionId} still alive after SIGTERM, sending SIGKILL`);
|
||||
try {
|
||||
session.pty.kill('SIGKILL');
|
||||
this.killPtyProcess(session.pty, 'SIGKILL');
|
||||
} catch {
|
||||
// Process may have already exited
|
||||
}
|
||||
@@ -588,7 +663,8 @@ export class TerminalService extends EventEmitter {
|
||||
if (session.flushTimeout) {
|
||||
clearTimeout(session.flushTimeout);
|
||||
}
|
||||
session.pty.kill();
|
||||
// Use platform-specific kill to ensure proper termination on Windows
|
||||
this.killPtyProcess(session.pty);
|
||||
} catch {
|
||||
// Ignore errors during cleanup
|
||||
}
|
||||
|
||||
1027
apps/server/tests/unit/lib/xml-extractor.test.ts
Normal file
File diff suppressed because it is too large
@@ -286,6 +286,7 @@ describe('claude-provider.ts', () => {
|
||||
|
||||
const generator = provider.executeQuery({
|
||||
prompt: 'Test',
|
||||
model: 'claude-opus-4-5-20251101',
|
||||
cwd: '/test',
|
||||
});
|
||||
|
||||
@@ -312,6 +313,7 @@ describe('claude-provider.ts', () => {
|
||||
|
||||
const generator = provider.executeQuery({
|
||||
prompt: 'Test',
|
||||
model: 'claude-opus-4-5-20251101',
|
||||
cwd: '/test',
|
||||
});
|
||||
|
||||
@@ -339,6 +341,7 @@ describe('claude-provider.ts', () => {
|
||||
|
||||
const generator = provider.executeQuery({
|
||||
prompt: 'Test',
|
||||
model: 'claude-opus-4-5-20251101',
|
||||
cwd: '/test',
|
||||
});
|
||||
|
||||
|
||||
@@ -11,6 +11,11 @@ import {
|
||||
getCodexConfigDir,
|
||||
getCodexAuthIndicators,
|
||||
} from '@automaker/platform';
|
||||
import {
|
||||
calculateReasoningTimeout,
|
||||
REASONING_TIMEOUT_MULTIPLIERS,
|
||||
DEFAULT_TIMEOUT_MS,
|
||||
} from '@automaker/types';
|
||||
|
||||
const OPENAI_API_KEY_ENV = 'OPENAI_API_KEY';
|
||||
const originalOpenAIKey = process.env[OPENAI_API_KEY_ENV];
|
||||
@@ -289,5 +294,121 @@ describe('codex-provider.ts', () => {
|
||||
expect(codexRunMock).not.toHaveBeenCalled();
|
||||
expect(spawnJSONLProcess).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('passes extended timeout for high reasoning effort', async () => {
|
||||
vi.mocked(spawnJSONLProcess).mockReturnValue((async function* () {})());
|
||||
|
||||
await collectAsyncGenerator(
|
||||
provider.executeQuery({
|
||||
prompt: 'Complex reasoning task',
|
||||
model: 'gpt-5.1-codex-max',
|
||||
cwd: '/tmp',
|
||||
reasoningEffort: 'high',
|
||||
})
|
||||
);
|
||||
|
||||
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
||||
// High reasoning effort should have 3x the default timeout (90000ms)
|
||||
expect(call.timeout).toBe(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.high);
|
||||
});
|
||||
|
||||
it('passes extended timeout for xhigh reasoning effort', async () => {
|
||||
vi.mocked(spawnJSONLProcess).mockReturnValue((async function* () {})());
|
||||
|
||||
await collectAsyncGenerator(
|
||||
provider.executeQuery({
|
||||
prompt: 'Very complex reasoning task',
|
||||
model: 'gpt-5.1-codex-max',
|
||||
cwd: '/tmp',
|
||||
reasoningEffort: 'xhigh',
|
||||
})
|
||||
);
|
||||
|
||||
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
||||
// xhigh reasoning effort should have 4x the default timeout (120000ms)
|
||||
expect(call.timeout).toBe(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.xhigh);
|
||||
});
|
||||
|
||||
it('uses default timeout when no reasoning effort is specified', async () => {
|
||||
vi.mocked(spawnJSONLProcess).mockReturnValue((async function* () {})());
|
||||
|
||||
await collectAsyncGenerator(
|
||||
provider.executeQuery({
|
||||
prompt: 'Simple task',
|
||||
model: 'gpt-5.2',
|
||||
cwd: '/tmp',
|
||||
})
|
||||
);
|
||||
|
||||
const call = vi.mocked(spawnJSONLProcess).mock.calls[0][0];
|
||||
// No reasoning effort should use the default timeout
|
||||
expect(call.timeout).toBe(DEFAULT_TIMEOUT_MS);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateReasoningTimeout', () => {
|
||||
it('returns default timeout when no reasoning effort is specified', () => {
|
||||
expect(calculateReasoningTimeout()).toBe(DEFAULT_TIMEOUT_MS);
|
||||
expect(calculateReasoningTimeout(undefined)).toBe(DEFAULT_TIMEOUT_MS);
|
||||
});
|
||||
|
||||
it('returns default timeout for none reasoning effort', () => {
|
||||
expect(calculateReasoningTimeout('none')).toBe(DEFAULT_TIMEOUT_MS);
|
||||
});
|
||||
|
||||
it('applies correct multiplier for minimal reasoning effort', () => {
|
||||
const expected = Math.round(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.minimal);
|
||||
expect(calculateReasoningTimeout('minimal')).toBe(expected);
|
||||
});
|
||||
|
||||
it('applies correct multiplier for low reasoning effort', () => {
|
||||
const expected = Math.round(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.low);
|
||||
expect(calculateReasoningTimeout('low')).toBe(expected);
|
||||
});
|
||||
|
||||
it('applies correct multiplier for medium reasoning effort', () => {
|
||||
const expected = Math.round(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.medium);
|
||||
expect(calculateReasoningTimeout('medium')).toBe(expected);
|
||||
});
|
||||
|
||||
it('applies correct multiplier for high reasoning effort', () => {
|
||||
const expected = Math.round(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.high);
|
||||
expect(calculateReasoningTimeout('high')).toBe(expected);
|
||||
});
|
||||
|
||||
it('applies correct multiplier for xhigh reasoning effort', () => {
|
||||
const expected = Math.round(DEFAULT_TIMEOUT_MS * REASONING_TIMEOUT_MULTIPLIERS.xhigh);
|
||||
expect(calculateReasoningTimeout('xhigh')).toBe(expected);
|
||||
});
|
||||
|
||||
it('uses custom base timeout when provided', () => {
|
||||
const customBase = 60000;
|
||||
expect(calculateReasoningTimeout('high', customBase)).toBe(
|
||||
Math.round(customBase * REASONING_TIMEOUT_MULTIPLIERS.high)
|
||||
);
|
||||
});
|
||||
|
||||
it('falls back to 1.0 multiplier for invalid reasoning effort', () => {
|
||||
// Test that invalid values fallback gracefully to default multiplier
|
||||
// This tests the defensive ?? 1.0 in calculateReasoningTimeout
|
||||
const invalidEffort = 'invalid_effort' as never;
|
||||
expect(calculateReasoningTimeout(invalidEffort)).toBe(DEFAULT_TIMEOUT_MS);
|
||||
});
|
||||
|
||||
it('produces expected absolute timeout values', () => {
|
||||
// Verify the actual timeout values that will be used:
|
||||
// none: 30000ms (30s)
|
||||
// minimal: 36000ms (36s)
|
||||
// low: 45000ms (45s)
|
||||
// medium: 60000ms (1m)
|
||||
// high: 90000ms (1m 30s)
|
||||
// xhigh: 120000ms (2m)
|
||||
expect(calculateReasoningTimeout('none')).toBe(30000);
|
||||
expect(calculateReasoningTimeout('minimal')).toBe(36000);
|
||||
expect(calculateReasoningTimeout('low')).toBe(45000);
|
||||
expect(calculateReasoningTimeout('medium')).toBe(60000);
|
||||
expect(calculateReasoningTimeout('high')).toBe(90000);
|
||||
expect(calculateReasoningTimeout('xhigh')).toBe(120000);
|
||||
});
|
||||
});
|
||||
});
|
||||
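For orientation, a minimal sketch consistent with the values these tests assert (a 30000 ms default and 1.2/1.5/2/3/4 multipliers); the real calculateReasoningTimeout lives in @automaker/types and may differ in detail.

type SketchReasoningEffort = 'none' | 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';

const SKETCH_DEFAULT_TIMEOUT_MS = 30_000;
const SKETCH_MULTIPLIERS: Record<SketchReasoningEffort, number> = {
  none: 1.0,
  minimal: 1.2,
  low: 1.5,
  medium: 2.0,
  high: 3.0,
  xhigh: 4.0,
};

function sketchCalculateReasoningTimeout(
  effort?: SketchReasoningEffort,
  baseTimeoutMs: number = SKETCH_DEFAULT_TIMEOUT_MS
): number {
  // Mirrors the defensive ?? 1.0 fallback the tests describe for invalid efforts
  const multiplier = effort ? (SKETCH_MULTIPLIERS[effort] ?? 1.0) : 1.0;
  return Math.round(baseTimeoutMs * multiplier);
}

// sketchCalculateReasoningTimeout('high') === 90000, matching the 3x assertion above.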
|
||||
@@ -202,8 +202,17 @@ describe('auto-mode-service.ts - Planning Mode', () => {
|
||||
});
|
||||
|
||||
describe('buildFeaturePrompt', () => {
|
||||
const buildFeaturePrompt = (svc: any, feature: any) => {
|
||||
return svc.buildFeaturePrompt(feature);
|
||||
const defaultTaskExecutionPrompts = {
|
||||
implementationInstructions: 'Test implementation instructions',
|
||||
playwrightVerificationInstructions: 'Test playwright instructions',
|
||||
};
|
||||
|
||||
const buildFeaturePrompt = (
|
||||
svc: any,
|
||||
feature: any,
|
||||
taskExecutionPrompts = defaultTaskExecutionPrompts
|
||||
) => {
|
||||
return svc.buildFeaturePrompt(feature, taskExecutionPrompts);
|
||||
};
|
||||
|
||||
it('should include feature ID and description', () => {
|
||||
@@ -242,14 +251,15 @@ describe('auto-mode-service.ts - Planning Mode', () => {
|
||||
expect(result).toContain('/tmp/image2.jpg');
|
||||
});
|
||||
|
||||
it('should include summary tags instruction', () => {
|
||||
it('should include implementation instructions', () => {
|
||||
const feature = {
|
||||
id: 'feat-123',
|
||||
description: 'Test feature',
|
||||
};
|
||||
const result = buildFeaturePrompt(service, feature);
|
||||
expect(result).toContain('<summary>');
|
||||
expect(result).toContain('</summary>');
|
||||
// The prompt should include the implementation instructions passed to it
|
||||
expect(result).toContain('Test implementation instructions');
|
||||
expect(result).toContain('Test playwright instructions');
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -91,7 +91,7 @@ describe('claude-usage-service.ts', () => {
|
||||
|
||||
it("should use 'where' command on Windows", async () => {
|
||||
vi.mocked(os.platform).mockReturnValue('win32');
|
||||
const windowsService = new ClaudeUsageService(); // Create new service after platform mock
|
||||
const ptyService = new ClaudeUsageService(); // Create new service after platform mock
|
||||
|
||||
mockSpawnProcess.on.mockImplementation((event: string, callback: Function) => {
|
||||
if (event === 'close') {
|
||||
@@ -100,7 +100,7 @@ describe('claude-usage-service.ts', () => {
|
||||
return mockSpawnProcess;
|
||||
});
|
||||
|
||||
await windowsService.isAvailable();
|
||||
await ptyService.isAvailable();
|
||||
|
||||
expect(spawn).toHaveBeenCalledWith('where', ['claude']);
|
||||
});
|
||||
@@ -403,120 +403,22 @@ Resets Jan 15, 3pm
|
||||
});
|
||||
});
|
||||
|
||||
describe('executeClaudeUsageCommandMac', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(os.platform).mockReturnValue('darwin');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ HOME: '/Users/testuser' });
|
||||
});
|
||||
|
||||
it('should execute expect script and return output', async () => {
|
||||
const mockOutput = `
|
||||
Current session
|
||||
65% left
|
||||
Resets in 2h
|
||||
`;
|
||||
|
||||
let stdoutCallback: Function;
|
||||
let closeCallback: Function;
|
||||
|
||||
mockSpawnProcess.stdout = {
|
||||
on: vi.fn((event: string, callback: Function) => {
|
||||
if (event === 'data') {
|
||||
stdoutCallback = callback;
|
||||
}
|
||||
}),
|
||||
};
|
||||
mockSpawnProcess.stderr = {
|
||||
on: vi.fn(),
|
||||
};
|
||||
mockSpawnProcess.on = vi.fn((event: string, callback: Function) => {
|
||||
if (event === 'close') {
|
||||
closeCallback = callback;
|
||||
}
|
||||
return mockSpawnProcess;
|
||||
});
|
||||
|
||||
const promise = service.fetchUsageData();
|
||||
|
||||
// Simulate stdout data
|
||||
stdoutCallback!(Buffer.from(mockOutput));
|
||||
|
||||
// Simulate successful close
|
||||
closeCallback!(0);
|
||||
|
||||
const result = await promise;
|
||||
|
||||
expect(result.sessionPercentage).toBe(35); // 100 - 65
|
||||
expect(spawn).toHaveBeenCalledWith(
|
||||
'expect',
|
||||
expect.arrayContaining(['-c']),
|
||||
expect.any(Object)
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle authentication errors', async () => {
|
||||
const mockOutput = 'token_expired';
|
||||
|
||||
let stdoutCallback: Function;
|
||||
let closeCallback: Function;
|
||||
|
||||
mockSpawnProcess.stdout = {
|
||||
on: vi.fn((event: string, callback: Function) => {
|
||||
if (event === 'data') {
|
||||
stdoutCallback = callback;
|
||||
}
|
||||
}),
|
||||
};
|
||||
mockSpawnProcess.stderr = {
|
||||
on: vi.fn(),
|
||||
};
|
||||
mockSpawnProcess.on = vi.fn((event: string, callback: Function) => {
|
||||
if (event === 'close') {
|
||||
closeCallback = callback;
|
||||
}
|
||||
return mockSpawnProcess;
|
||||
});
|
||||
|
||||
const promise = service.fetchUsageData();
|
||||
|
||||
stdoutCallback!(Buffer.from(mockOutput));
|
||||
closeCallback!(1);
|
||||
|
||||
await expect(promise).rejects.toThrow('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle timeout with no data', async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
mockSpawnProcess.stdout = {
|
||||
on: vi.fn(),
|
||||
};
|
||||
mockSpawnProcess.stderr = {
|
||||
on: vi.fn(),
|
||||
};
|
||||
mockSpawnProcess.on = vi.fn(() => mockSpawnProcess);
|
||||
mockSpawnProcess.kill = vi.fn();
|
||||
|
||||
const promise = service.fetchUsageData();
|
||||
|
||||
// Advance time past timeout (30 seconds)
|
||||
vi.advanceTimersByTime(31000);
|
||||
|
||||
await expect(promise).rejects.toThrow('Command timed out');
|
||||
|
||||
vi.useRealTimers();
|
||||
// Note: executeClaudeUsageCommandMac tests removed - the service now uses PTY for all platforms
|
||||
// The executeClaudeUsageCommandMac method exists but is dead code (never called)
|
||||
describe.skip('executeClaudeUsageCommandMac (deprecated - uses PTY now)', () => {
|
||||
it('should be skipped - service now uses PTY for all platforms', () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('executeClaudeUsageCommandWindows', () => {
|
||||
describe('executeClaudeUsageCommandPty', () => {
|
||||
// Note: The service now uses PTY for all platforms, using process.cwd() as the working directory
|
||||
beforeEach(() => {
|
||||
vi.mocked(os.platform).mockReturnValue('win32');
|
||||
vi.mocked(os.homedir).mockReturnValue('C:\\Users\\testuser');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ USERPROFILE: 'C:\\Users\\testuser' });
|
||||
});
|
||||
|
||||
it('should use node-pty on Windows and return output', async () => {
|
||||
const windowsService = new ClaudeUsageService(); // Create new service for Windows platform
|
||||
it('should use node-pty and return output', async () => {
|
||||
const ptyService = new ClaudeUsageService();
|
||||
const mockOutput = `
|
||||
Current session
|
||||
65% left
|
||||
@@ -538,7 +440,7 @@ Resets in 2h
|
||||
};
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPty as any);
|
||||
|
||||
const promise = windowsService.fetchUsageData();
|
||||
const promise = ptyService.fetchUsageData();
|
||||
|
||||
// Simulate data
|
||||
dataCallback!(mockOutput);
|
||||
@@ -549,16 +451,19 @@ Resets in 2h
|
||||
const result = await promise;
|
||||
|
||||
expect(result.sessionPercentage).toBe(35);
|
||||
// Service uses process.cwd() for --add-dir
|
||||
expect(pty.spawn).toHaveBeenCalledWith(
|
||||
'cmd.exe',
|
||||
['/c', 'claude', '--add-dir', 'C:\\Users\\testuser'],
|
||||
expect.any(Object)
|
||||
['/c', 'claude', '--add-dir', process.cwd()],
|
||||
expect.objectContaining({
|
||||
cwd: process.cwd(),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should send escape key after seeing usage data', async () => {
|
||||
vi.useFakeTimers();
|
||||
const windowsService = new ClaudeUsageService();
|
||||
const ptyService = new ClaudeUsageService();
|
||||
|
||||
const mockOutput = 'Current session\n65% left';
|
||||
|
||||
@@ -577,7 +482,7 @@ Resets in 2h
|
||||
};
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPty as any);
|
||||
|
||||
const promise = windowsService.fetchUsageData();
|
||||
const promise = ptyService.fetchUsageData();
|
||||
|
||||
// Simulate seeing usage data
|
||||
dataCallback!(mockOutput);
|
||||
@@ -594,8 +499,8 @@ Resets in 2h
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should handle authentication errors on Windows', async () => {
|
||||
const windowsService = new ClaudeUsageService();
|
||||
it('should handle authentication errors', async () => {
|
||||
const ptyService = new ClaudeUsageService();
|
||||
let dataCallback: Function | undefined;
|
||||
let exitCallback: Function | undefined;
|
||||
|
||||
@@ -611,18 +516,22 @@ Resets in 2h
|
||||
};
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPty as any);
|
||||
|
||||
const promise = windowsService.fetchUsageData();
|
||||
const promise = ptyService.fetchUsageData();
|
||||
|
||||
dataCallback!('authentication_error');
|
||||
// Send data containing the authentication error pattern the service looks for
|
||||
dataCallback!('"type":"authentication_error"');
|
||||
|
||||
// Trigger the exit handler which checks for auth errors
|
||||
exitCallback!({ exitCode: 1 });
|
||||
|
||||
await expect(promise).rejects.toThrow(
|
||||
"Claude CLI authentication issue. Please run 'claude logout' and then 'claude login' in your terminal to refresh permissions."
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle timeout with no data on Windows', async () => {
|
||||
it('should handle timeout with no data', async () => {
|
||||
vi.useFakeTimers();
|
||||
const windowsService = new ClaudeUsageService();
|
||||
const ptyService = new ClaudeUsageService();
|
||||
|
||||
const mockPty = {
|
||||
onData: vi.fn(),
|
||||
@@ -633,7 +542,7 @@ Resets in 2h
|
||||
};
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPty as any);
|
||||
|
||||
const promise = windowsService.fetchUsageData();
|
||||
const promise = ptyService.fetchUsageData();
|
||||
|
||||
// Advance time past timeout (45 seconds)
|
||||
vi.advanceTimersByTime(46000);
|
||||
@@ -648,7 +557,7 @@ Resets in 2h
|
||||
|
||||
it('should return data on timeout if data was captured', async () => {
|
||||
vi.useFakeTimers();
|
||||
const windowsService = new ClaudeUsageService();
|
||||
const ptyService = new ClaudeUsageService();
|
||||
|
||||
let dataCallback: Function | undefined;
|
||||
|
||||
@@ -663,7 +572,7 @@ Resets in 2h
|
||||
};
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPty as any);
|
||||
|
||||
const promise = windowsService.fetchUsageData();
|
||||
const promise = ptyService.fetchUsageData();
|
||||
|
||||
// Simulate receiving usage data
|
||||
dataCallback!('Current session\n65% left\nResets in 2h');
|
||||
@@ -681,7 +590,9 @@ Resets in 2h
|
||||
|
||||
it('should send SIGTERM after ESC if process does not exit', async () => {
|
||||
vi.useFakeTimers();
|
||||
const windowsService = new ClaudeUsageService();
|
||||
// Mock Unix platform to test SIGTERM behavior (Windows calls kill() without signal)
|
||||
vi.mocked(os.platform).mockReturnValue('darwin');
|
||||
const ptyService = new ClaudeUsageService();
|
||||
|
||||
let dataCallback: Function | undefined;
|
||||
|
||||
@@ -696,7 +607,7 @@ Resets in 2h
|
||||
};
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPty as any);
|
||||
|
||||
windowsService.fetchUsageData();
|
||||
ptyService.fetchUsageData();
|
||||
|
||||
// Simulate seeing usage data
|
||||
dataCallback!('Current session\n65% left');
|
||||
|
||||
@@ -190,9 +190,10 @@ describe('feature-loader.ts', () => {
|
||||
const result = await loader.getAll(testProjectPath);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
// With recovery-enabled reads, warnings come from AtomicWriter and FeatureLoader
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/WARN.*\[FeatureLoader\]/),
|
||||
expect.stringContaining('Failed to parse feature.json')
|
||||
expect.stringMatching(/WARN.*\[AtomicWriter\]/),
|
||||
expect.stringContaining('unavailable')
|
||||
);
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
@@ -260,10 +261,13 @@ describe('feature-loader.ts', () => {
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should throw on other errors', async () => {
|
||||
it('should return null on other errors (with recovery attempt)', async () => {
|
||||
// With recovery-enabled reads, get() returns null instead of throwing
|
||||
// because it attempts to recover from backups before giving up
|
||||
vi.mocked(fs.readFile).mockRejectedValue(new Error('Permission denied'));
|
||||
|
||||
await expect(loader.get(testProjectPath, 'feature-123')).rejects.toThrow('Permission denied');
|
||||
const result = await loader.get(testProjectPath, 'feature-123');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -442,4 +446,471 @@ describe('feature-loader.ts', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findByTitle', () => {
|
||||
it('should find feature by exact title match (case-insensitive)', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
{ name: 'feature-2', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
title: 'Login Feature',
|
||||
category: 'auth',
|
||||
description: 'Login implementation',
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-2000-def',
|
||||
title: 'Logout Feature',
|
||||
category: 'auth',
|
||||
description: 'Logout implementation',
|
||||
})
|
||||
);
|
||||
|
||||
const result = await loader.findByTitle(testProjectPath, 'LOGIN FEATURE');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.id).toBe('feature-1000-abc');
|
||||
expect(result?.title).toBe('Login Feature');
|
||||
});
|
||||
|
||||
it('should return null when title is not found', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
title: 'Login Feature',
|
||||
category: 'auth',
|
||||
description: 'Login implementation',
|
||||
})
|
||||
);
|
||||
|
||||
const result = await loader.findByTitle(testProjectPath, 'Nonexistent Feature');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for empty or whitespace title', async () => {
|
||||
const result1 = await loader.findByTitle(testProjectPath, '');
|
||||
const result2 = await loader.findByTitle(testProjectPath, ' ');
|
||||
|
||||
expect(result1).toBeNull();
|
||||
expect(result2).toBeNull();
|
||||
});
|
||||
|
||||
it('should skip features without titles', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
{ name: 'feature-2', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
// no title
|
||||
category: 'auth',
|
||||
description: 'Login implementation',
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-2000-def',
|
||||
title: 'Login Feature',
|
||||
category: 'auth',
|
||||
description: 'Another login',
|
||||
})
|
||||
);
|
||||
|
||||
const result = await loader.findByTitle(testProjectPath, 'Login Feature');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.id).toBe('feature-2000-def');
|
||||
});
|
||||
});
|
||||
|
||||
describe('findDuplicateTitle', () => {
|
||||
it('should find duplicate title', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
title: 'My Feature',
|
||||
category: 'ui',
|
||||
description: 'Feature description',
|
||||
})
|
||||
);
|
||||
|
||||
const result = await loader.findDuplicateTitle(testProjectPath, 'my feature');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.id).toBe('feature-1000-abc');
|
||||
});
|
||||
|
||||
it('should exclude specified feature ID from duplicate check', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
{ name: 'feature-2', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
title: 'My Feature',
|
||||
category: 'ui',
|
||||
description: 'Feature 1',
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-2000-def',
|
||||
title: 'Other Feature',
|
||||
category: 'ui',
|
||||
description: 'Feature 2',
|
||||
})
|
||||
);
|
||||
|
||||
// Should not find duplicate when excluding the feature that has the title
|
||||
const result = await loader.findDuplicateTitle(
|
||||
testProjectPath,
|
||||
'My Feature',
|
||||
'feature-1000-abc'
|
||||
);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should find duplicate when title exists on different feature', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
{ name: 'feature-2', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
title: 'My Feature',
|
||||
category: 'ui',
|
||||
description: 'Feature 1',
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-2000-def',
|
||||
title: 'Other Feature',
|
||||
category: 'ui',
|
||||
description: 'Feature 2',
|
||||
})
|
||||
);
|
||||
|
||||
// Should find duplicate because feature-1000-abc has the title and we're excluding feature-2000-def
|
||||
const result = await loader.findDuplicateTitle(
|
||||
testProjectPath,
|
||||
'My Feature',
|
||||
'feature-2000-def'
|
||||
);
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.id).toBe('feature-1000-abc');
|
||||
});
|
||||
|
||||
it('should return null for empty or whitespace title', async () => {
|
||||
const result1 = await loader.findDuplicateTitle(testProjectPath, '');
|
||||
const result2 = await loader.findDuplicateTitle(testProjectPath, ' ');
|
||||
|
||||
expect(result1).toBeNull();
|
||||
expect(result2).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle titles with leading/trailing whitespace', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.readdir).mockResolvedValue([
|
||||
{ name: 'feature-1', isDirectory: () => true } as any,
|
||||
]);
|
||||
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
id: 'feature-1000-abc',
|
||||
title: 'My Feature',
|
||||
category: 'ui',
|
||||
description: 'Feature description',
|
||||
})
|
||||
);
|
||||
|
||||
const result = await loader.findDuplicateTitle(testProjectPath, ' My Feature ');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.id).toBe('feature-1000-abc');
|
||||
});
|
||||
});
|
||||
|
||||
describe('syncFeatureToAppSpec', () => {
|
||||
const sampleAppSpec = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project_specification>
|
||||
<project_name>Test Project</project_name>
|
||||
<core_capabilities>
|
||||
<capability>Testing</capability>
|
||||
</core_capabilities>
|
||||
<implemented_features>
|
||||
<feature>
|
||||
<name>Existing Feature</name>
|
||||
<description>Already implemented</description>
|
||||
</feature>
|
||||
</implemented_features>
|
||||
</project_specification>`;
|
||||
|
||||
const appSpecWithoutFeatures = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project_specification>
|
||||
<project_name>Test Project</project_name>
|
||||
<core_capabilities>
|
||||
<capability>Testing</capability>
|
||||
</core_capabilities>
|
||||
</project_specification>`;
|
||||
|
||||
it('should add feature to app_spec.txt', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'New Feature',
|
||||
category: 'ui',
|
||||
description: 'A new feature description',
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringContaining('app_spec.txt'),
|
||||
expect.stringContaining('New Feature'),
|
||||
'utf-8'
|
||||
);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('A new feature description'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should add feature with file locations', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'Feature With Locations',
|
||||
category: 'backend',
|
||||
description: 'Feature with file locations',
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature, [
|
||||
'src/feature.ts',
|
||||
'src/utils/helper.ts',
|
||||
]);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('src/feature.ts'),
|
||||
'utf-8'
|
||||
);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('src/utils/helper.ts'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false when app_spec.txt does not exist', async () => {
|
||||
const error: any = new Error('File not found');
|
||||
error.code = 'ENOENT';
|
||||
vi.mocked(fs.readFile).mockRejectedValueOnce(error);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'New Feature',
|
||||
category: 'ui',
|
||||
description: 'A new feature description',
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(fs.writeFile).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return false when feature already exists (duplicate)', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-5678-xyz',
|
||||
title: 'Existing Feature', // Same name as existing feature
|
||||
category: 'ui',
|
||||
description: 'Different description',
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(fs.writeFile).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should use feature ID as fallback name when title is missing', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
category: 'ui',
|
||||
description: 'Feature without title',
|
||||
// No title property
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('Feature: feature-1234-abc'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle app_spec without implemented_features section', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(appSpecWithoutFeatures);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'First Feature',
|
||||
category: 'ui',
|
||||
description: 'First implemented feature',
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('<implemented_features>'),
|
||||
'utf-8'
|
||||
);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('First Feature'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw on non-ENOENT file read errors', async () => {
|
||||
const error = new Error('Permission denied');
|
||||
vi.mocked(fs.readFile).mockRejectedValueOnce(error);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'New Feature',
|
||||
category: 'ui',
|
||||
description: 'A new feature description',
|
||||
};
|
||||
|
||||
await expect(loader.syncFeatureToAppSpec(testProjectPath, feature)).rejects.toThrow(
|
||||
'Permission denied'
|
||||
);
|
||||
});
|
||||
|
||||
it('should preserve existing features when adding a new one', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'New Feature',
|
||||
category: 'ui',
|
||||
description: 'A new feature',
|
||||
};
|
||||
|
||||
await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
// Verify both old and new features are in the output
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('Existing Feature'),
|
||||
'utf-8'
|
||||
);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('New Feature'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should escape special characters in feature name and description', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'Feature with <special> & "chars"',
|
||||
category: 'ui',
|
||||
description: 'Description with <tags> & "quotes"',
|
||||
};
|
||||
|
||||
const result = await loader.syncFeatureToAppSpec(testProjectPath, feature);
|
||||
|
||||
expect(result).toBe(true);
|
||||
// The XML should have escaped characters
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('<special>'),
|
||||
'utf-8'
|
||||
);
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('&'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should not add empty file_locations array', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValueOnce(sampleAppSpec);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
const feature = {
|
||||
id: 'feature-1234-abc',
|
||||
title: 'Feature Without Locations',
|
||||
category: 'ui',
|
||||
description: 'No file locations',
|
||||
};
|
||||
|
||||
await loader.syncFeatureToAppSpec(testProjectPath, feature, []);
|
||||
|
||||
// File locations should not be included when array is empty
|
||||
const writeCall = vi.mocked(fs.writeFile).mock.calls[0];
|
||||
const writtenContent = writeCall[1] as string;
|
||||
|
||||
// Count occurrences of file_locations - should only have the one from Existing Feature if any
|
||||
// The new feature should not add file_locations
|
||||
expect(writtenContent).toContain('Feature Without Locations');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "@automaker/ui",
"version": "0.11.0",
"version": "0.12.0",
"description": "An autonomous AI development studio that helps you build software faster using AI-powered agents",
"homepage": "https://github.com/AutoMaker-Org/automaker",
"repository": {
@@ -48,6 +48,22 @@
"@dnd-kit/core": "6.3.1",
"@dnd-kit/sortable": "10.0.0",
"@dnd-kit/utilities": "3.2.2",
"@fontsource/cascadia-code": "^5.2.3",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/ibm-plex-mono": "^5.2.7",
"@fontsource/inconsolata": "^5.2.8",
"@fontsource/inter": "^5.2.8",
"@fontsource/iosevka": "^5.2.5",
"@fontsource/jetbrains-mono": "^5.2.8",
"@fontsource/lato": "^5.2.7",
"@fontsource/montserrat": "^5.2.8",
"@fontsource/open-sans": "^5.2.7",
"@fontsource/poppins": "^5.2.7",
"@fontsource/raleway": "^5.2.8",
"@fontsource/roboto": "^5.2.9",
"@fontsource/source-code-pro": "^5.2.7",
"@fontsource/source-sans-3": "^5.2.9",
"@fontsource/work-sans": "^5.2.8",
"@lezer/highlight": "1.2.3",
"@radix-ui/react-checkbox": "1.3.3",
"@radix-ui/react-collapsible": "1.1.12",
@@ -204,12 +220,34 @@
"arch": [
"x64"
]
},
{
"target": "rpm",
"arch": [
"x64"
]
}
],
"category": "Development",
"icon": "public/logo_larger.png",
"maintainer": "webdevcody@gmail.com",
"executableName": "automaker"
"executableName": "automaker",
"description": "An autonomous AI development studio that helps you build software faster using AI-powered agents",
"synopsis": "AI-powered autonomous development studio"
},
"rpm": {
"depends": [
"gtk3",
"libnotify",
"nss",
"libXScrnSaver",
"libXtst",
"xdg-utils",
"at-spi2-core",
"libuuid"
],
"compression": "xz",
"vendor": "AutoMaker Team"
},
"nsis": {
"oneClick": false,

@@ -8,6 +8,7 @@ import { useCursorStatusInit } from './hooks/use-cursor-status-init';
import { useProviderAuthInit } from './hooks/use-provider-auth-init';
import './styles/global.css';
import './styles/theme-imports';
import './styles/font-imports';

const logger = createLogger('App');

67
apps/ui/src/assets/fonts/zed/zed-fonts.css
Normal file
@@ -0,0 +1,67 @@
/* Zed Fonts - https://github.com/zed-industries/zed-fonts */

/* Zed Sans - UI Font */
@font-face {
font-family: 'Zed Sans';
font-style: normal;
font-weight: 400;
font-display: swap;
src: url('./zed-sans-extended.ttf') format('truetype');
}

@font-face {
font-family: 'Zed Sans';
font-style: italic;
font-weight: 400;
font-display: swap;
src: url('./zed-sans-extendeditalic.ttf') format('truetype');
}

@font-face {
font-family: 'Zed Sans';
font-style: normal;
font-weight: 700;
font-display: swap;
src: url('./zed-sans-extendedbold.ttf') format('truetype');
}

@font-face {
font-family: 'Zed Sans';
font-style: italic;
font-weight: 700;
font-display: swap;
src: url('./zed-sans-extendedbolditalic.ttf') format('truetype');
}

/* Zed Mono - Code Font */
@font-face {
font-family: 'Zed Mono';
font-style: normal;
font-weight: 400;
font-display: swap;
src: url('./zed-mono-extended.ttf') format('truetype');
}

@font-face {
font-family: 'Zed Mono';
font-style: italic;
font-weight: 400;
font-display: swap;
src: url('./zed-mono-extendeditalic.ttf') format('truetype');
}

@font-face {
font-family: 'Zed Mono';
font-style: normal;
font-weight: 700;
font-display: swap;
src: url('./zed-mono-extendedbold.ttf') format('truetype');
}

@font-face {
font-family: 'Zed Mono';
font-style: italic;
font-weight: 700;
font-display: swap;
src: url('./zed-mono-extendedbolditalic.ttf') format('truetype');
}
BIN apps/ui/src/assets/fonts/zed/zed-mono-extended.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-mono-extendedbold.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-mono-extendedbolditalic.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-mono-extendeditalic.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-sans-extended.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-sans-extendedbold.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-sans-extendedbolditalic.ttf Normal file (binary file not shown)
BIN apps/ui/src/assets/fonts/zed/zed-sans-extendeditalic.ttf Normal file (binary file not shown)
@@ -448,7 +448,9 @@ export function IconPicker({ selectedIcon, onSelectIcon }: IconPickerProps) {
);

const getIconComponent = (iconName: string) => {
return (LucideIcons as Record<string, React.ComponentType<{ className?: string }>>)[iconName];
return (LucideIcons as unknown as Record<string, React.ComponentType<{ className?: string }>>)[
iconName
];
};

return (

@@ -0,0 +1,207 @@
/**
* Notification Bell - Bell icon with unread count and popover
*/

import { useCallback } from 'react';
import { Bell, Check, Trash2, ExternalLink } from 'lucide-react';
import { useNavigate } from '@tanstack/react-router';
import { useNotificationsStore } from '@/store/notifications-store';
import { useLoadNotifications, useNotificationEvents } from '@/hooks/use-notification-events';
import { getHttpApiClient } from '@/lib/http-api-client';
import { Button } from '@/components/ui/button';
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';
import type { Notification } from '@automaker/types';
import { cn } from '@/lib/utils';

/**
* Format a date as relative time (e.g., "2 minutes ago", "3 hours ago")
*/
function formatRelativeTime(date: Date): string {
const now = new Date();
const diffMs = now.getTime() - date.getTime();
const diffSec = Math.floor(diffMs / 1000);
const diffMin = Math.floor(diffSec / 60);
const diffHour = Math.floor(diffMin / 60);
const diffDay = Math.floor(diffHour / 24);

if (diffSec < 60) return 'just now';
if (diffMin < 60) return `${diffMin} minute${diffMin === 1 ? '' : 's'} ago`;
if (diffHour < 24) return `${diffHour} hour${diffHour === 1 ? '' : 's'} ago`;
if (diffDay < 7) return `${diffDay} day${diffDay === 1 ? '' : 's'} ago`;
return date.toLocaleDateString();
}

interface NotificationBellProps {
projectPath: string | null;
}

export function NotificationBell({ projectPath }: NotificationBellProps) {
const navigate = useNavigate();
const {
notifications,
unreadCount,
isPopoverOpen,
setPopoverOpen,
markAsRead,
dismissNotification,
} = useNotificationsStore();

// Load notifications and subscribe to events
useLoadNotifications(projectPath);
useNotificationEvents(projectPath);

const handleMarkAsRead = useCallback(
async (notificationId: string) => {
if (!projectPath) return;

// Optimistic update
markAsRead(notificationId);

// Sync with server
const api = getHttpApiClient();
await api.notifications.markAsRead(projectPath, notificationId);
},
[projectPath, markAsRead]
);

const handleDismiss = useCallback(
async (notificationId: string) => {
if (!projectPath) return;

// Optimistic update
dismissNotification(notificationId);

// Sync with server
const api = getHttpApiClient();
await api.notifications.dismiss(projectPath, notificationId);
},
[projectPath, dismissNotification]
);

const handleNotificationClick = useCallback(
(notification: Notification) => {
// Mark as read
handleMarkAsRead(notification.id);
setPopoverOpen(false);

// Navigate to the relevant view based on notification type
if (notification.featureId) {
navigate({ to: '/board' });
}
},
[handleMarkAsRead, setPopoverOpen, navigate]
);

const handleViewAll = useCallback(() => {
setPopoverOpen(false);
navigate({ to: '/notifications' });
}, [setPopoverOpen, navigate]);

const getNotificationIcon = (type: string) => {
switch (type) {
case 'feature_waiting_approval':
return <Bell className="h-4 w-4 text-yellow-500" />;
case 'feature_verified':
return <Check className="h-4 w-4 text-green-500" />;
case 'spec_regeneration_complete':
return <Check className="h-4 w-4 text-blue-500" />;
default:
return <Bell className="h-4 w-4" />;
}
};

// Show recent 3 notifications in popover
const recentNotifications = notifications.slice(0, 3);

if (!projectPath) {
return null;
}

return (
<Popover open={isPopoverOpen} onOpenChange={setPopoverOpen}>
<PopoverTrigger asChild>
<button
className={cn(
'relative flex items-center justify-center w-8 h-8 rounded-md',
'hover:bg-accent transition-colors',
'focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2'
)}
title="Notifications"
>
<Bell className="h-4 w-4" />
{unreadCount > 0 && (
<span className="absolute -top-1 -right-1 flex h-4 min-w-4 items-center justify-center rounded-full bg-primary px-1 text-[10px] font-medium text-primary-foreground">
{unreadCount > 99 ? '99+' : unreadCount}
</span>
)}
</button>
</PopoverTrigger>
<PopoverContent className="w-80 p-0" align="start" side="right">
<div className="flex items-center justify-between px-4 py-3 border-b">
<h4 className="font-medium text-sm">Notifications</h4>
{unreadCount > 0 && (
<span className="text-xs text-muted-foreground">{unreadCount} unread</span>
)}
</div>

{recentNotifications.length === 0 ? (
<div className="flex flex-col items-center justify-center py-8 px-4">
<Bell className="h-8 w-8 text-muted-foreground/50 mb-2" />
<p className="text-sm text-muted-foreground">No notifications</p>
</div>
) : (
<div className="max-h-[300px] overflow-y-auto">
{recentNotifications.map((notification) => (
<div
key={notification.id}
className={cn(
'flex items-start gap-3 px-4 py-3 cursor-pointer hover:bg-accent/50 border-b last:border-b-0',
!notification.read && 'bg-primary/5'
)}
onClick={() => handleNotificationClick(notification)}
>
<div className="flex-shrink-0 mt-0.5">{getNotificationIcon(notification.type)}</div>
<div className="flex-1 min-w-0">
<div className="flex items-center gap-1.5">
<p className="text-sm font-medium truncate">{notification.title}</p>
{!notification.read && (
<span className="h-1.5 w-1.5 rounded-full bg-primary flex-shrink-0" />
)}
</div>
<p className="text-xs text-muted-foreground line-clamp-2 mt-0.5">
{notification.message}
</p>
<p className="text-[10px] text-muted-foreground mt-1">
{formatRelativeTime(new Date(notification.createdAt))}
</p>
</div>
<div className="flex-shrink-0 flex flex-col gap-1">
<Button
variant="ghost"
size="icon"
className="h-6 w-6"
onClick={(e) => {
e.stopPropagation();
handleDismiss(notification.id);
}}
title="Dismiss"
>
<Trash2 className="h-3 w-3" />
</Button>
</div>
</div>
))}
</div>
)}

{notifications.length > 0 && (
<div className="border-t px-4 py-2">
<Button variant="ghost" size="sm" className="w-full text-xs" onClick={handleViewAll}>
View all notifications
</Button>
</div>
)}
</PopoverContent>
</Popover>
);
}
@@ -1,6 +1,7 @@
|
||||
import { useEffect, useRef, useState, memo } from 'react';
|
||||
import { useEffect, useRef, useState, memo, useCallback } from 'react';
|
||||
import type { LucideIcon } from 'lucide-react';
|
||||
import { Edit2, Trash2, Palette, ChevronRight, Moon, Sun, Monitor } from 'lucide-react';
|
||||
import { toast } from 'sonner';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { type ThemeMode, useAppStore } from '@/store/app-store';
|
||||
import { ConfirmDialog } from '@/components/ui/confirm-dialog';
|
||||
@@ -8,6 +9,9 @@ import type { Project } from '@/lib/electron';
|
||||
import { PROJECT_DARK_THEMES, PROJECT_LIGHT_THEMES } from '@/components/layout/sidebar/constants';
|
||||
import { useThemePreview } from '@/components/layout/sidebar/hooks';
|
||||
|
||||
// Constant for "use global theme" option
|
||||
const USE_GLOBAL_THEME = '' as const;
|
||||
|
||||
// Constants for z-index values
|
||||
const Z_INDEX = {
|
||||
CONTEXT_MENU: 100,
|
||||
@@ -124,19 +128,26 @@ export function ProjectContextMenu({
|
||||
} = useAppStore();
|
||||
const [showRemoveDialog, setShowRemoveDialog] = useState(false);
|
||||
const [showThemeSubmenu, setShowThemeSubmenu] = useState(false);
|
||||
const [removeConfirmed, setRemoveConfirmed] = useState(false);
|
||||
const themeSubmenuRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const { handlePreviewEnter, handlePreviewLeave } = useThemePreview({ setPreviewTheme });
|
||||
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (menuRef.current && !menuRef.current.contains(event.target as Node)) {
|
||||
const handleClickOutside = (event: globalThis.MouseEvent) => {
|
||||
// Don't close if a confirmation dialog is open (dialog is in a portal)
|
||||
if (showRemoveDialog) return;
|
||||
|
||||
if (menuRef.current && !menuRef.current.contains(event.target as globalThis.Node)) {
|
||||
setPreviewTheme(null);
|
||||
onClose();
|
||||
}
|
||||
};
|
||||
|
||||
const handleEscape = (event: KeyboardEvent) => {
|
||||
const handleEscape = (event: globalThis.KeyboardEvent) => {
|
||||
// Don't close if a confirmation dialog is open (let the dialog handle escape)
|
||||
if (showRemoveDialog) return;
|
||||
|
||||
if (event.key === 'Escape') {
|
||||
setPreviewTheme(null);
|
||||
onClose();
|
||||
@@ -150,7 +161,7 @@ export function ProjectContextMenu({
|
||||
document.removeEventListener('mousedown', handleClickOutside);
|
||||
document.removeEventListener('keydown', handleEscape);
|
||||
};
|
||||
}, [onClose, setPreviewTheme]);
|
||||
}, [onClose, setPreviewTheme, showRemoveDialog]);
|
||||
|
||||
const handleEdit = () => {
|
||||
onEdit(project);
|
||||
@@ -160,166 +171,187 @@ export function ProjectContextMenu({
|
||||
setShowRemoveDialog(true);
|
||||
};
|
||||
|
||||
const handleThemeSelect = (value: ThemeMode | '') => {
|
||||
setPreviewTheme(null);
|
||||
if (value !== '') {
|
||||
setTheme(value);
|
||||
} else {
|
||||
setTheme(globalTheme);
|
||||
}
|
||||
setProjectTheme(project.id, value === '' ? null : value);
|
||||
setShowThemeSubmenu(false);
|
||||
};
|
||||
const handleThemeSelect = useCallback(
|
||||
(value: ThemeMode | typeof USE_GLOBAL_THEME) => {
|
||||
setPreviewTheme(null);
|
||||
const isUsingGlobal = value === USE_GLOBAL_THEME;
|
||||
setTheme(isUsingGlobal ? globalTheme : value);
|
||||
setProjectTheme(project.id, isUsingGlobal ? null : value);
|
||||
setShowThemeSubmenu(false);
|
||||
},
|
||||
[globalTheme, project.id, setPreviewTheme, setProjectTheme, setTheme]
|
||||
);
|
||||
|
||||
const handleConfirmRemove = () => {
|
||||
const handleConfirmRemove = useCallback(() => {
|
||||
moveProjectToTrash(project.id);
|
||||
onClose();
|
||||
};
|
||||
toast.success('Project removed', {
|
||||
description: `${project.name} has been removed from your projects list`,
|
||||
});
|
||||
setRemoveConfirmed(true);
|
||||
}, [moveProjectToTrash, project.id, project.name]);
|
||||
|
||||
const handleDialogClose = useCallback(
|
||||
(isOpen: boolean) => {
|
||||
setShowRemoveDialog(isOpen);
|
||||
// Close the context menu when dialog closes (whether confirmed or cancelled)
|
||||
// This prevents the context menu from reappearing after dialog interaction
|
||||
if (!isOpen) {
|
||||
// Reset confirmation state
|
||||
setRemoveConfirmed(false);
|
||||
// Always close the context menu when dialog closes
|
||||
onClose();
|
||||
}
|
||||
},
|
||||
[onClose]
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
ref={menuRef}
|
||||
className={cn(
|
||||
'fixed min-w-48 rounded-lg',
|
||||
'bg-popover text-popover-foreground',
|
||||
'border border-border shadow-lg',
|
||||
'animate-in fade-in zoom-in-95 duration-100'
|
||||
)}
|
||||
style={{
|
||||
top: position.y,
|
||||
left: position.x,
|
||||
zIndex: Z_INDEX.CONTEXT_MENU,
|
||||
}}
|
||||
data-testid="project-context-menu"
|
||||
>
|
||||
<div className="p-1">
|
||||
<button
|
||||
onClick={handleEdit}
|
||||
className={cn(
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'hover:bg-accent transition-colors',
|
||||
'focus:outline-none focus:bg-accent'
|
||||
)}
|
||||
data-testid="edit-project-button"
|
||||
>
|
||||
<Edit2 className="w-4 h-4" />
|
||||
<span>Edit Name & Icon</span>
|
||||
</button>
|
||||
|
||||
{/* Theme Submenu Trigger */}
|
||||
<div
|
||||
className="relative"
|
||||
onMouseEnter={() => setShowThemeSubmenu(true)}
|
||||
onMouseLeave={() => {
|
||||
setShowThemeSubmenu(false);
|
||||
setPreviewTheme(null);
|
||||
}}
|
||||
>
|
||||
{/* Hide context menu when confirm dialog is open */}
|
||||
{!showRemoveDialog && (
|
||||
<div
|
||||
ref={menuRef}
|
||||
className={cn(
|
||||
'fixed min-w-48 rounded-lg',
|
||||
'bg-popover text-popover-foreground',
|
||||
'border border-border shadow-lg',
|
||||
'animate-in fade-in zoom-in-95 duration-100'
|
||||
)}
|
||||
style={{
|
||||
top: position.y,
|
||||
left: position.x,
|
||||
zIndex: Z_INDEX.CONTEXT_MENU,
|
||||
}}
|
||||
data-testid="project-context-menu"
|
||||
>
|
||||
<div className="p-1">
|
||||
<button
|
||||
onClick={() => setShowThemeSubmenu(!showThemeSubmenu)}
|
||||
onClick={handleEdit}
|
||||
className={cn(
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'hover:bg-accent transition-colors',
|
||||
'focus:outline-none focus:bg-accent'
|
||||
)}
|
||||
data-testid="theme-project-button"
|
||||
data-testid="edit-project-button"
|
||||
>
|
||||
<Palette className="w-4 h-4" />
|
||||
<span className="flex-1">Project Theme</span>
|
||||
{project.theme && (
|
||||
<span className="text-[10px] text-muted-foreground capitalize">
|
||||
{project.theme}
|
||||
</span>
|
||||
)}
|
||||
<ChevronRight className="w-4 h-4 text-muted-foreground" />
|
||||
<Edit2 className="w-4 h-4" />
|
||||
<span>Edit Name & Icon</span>
|
||||
</button>
|
||||
|
||||
{/* Theme Submenu */}
|
||||
{showThemeSubmenu && (
|
||||
<div
|
||||
ref={themeSubmenuRef}
|
||||
{/* Theme Submenu Trigger */}
|
||||
<div
|
||||
className="relative"
|
||||
onMouseEnter={() => setShowThemeSubmenu(true)}
|
||||
onMouseLeave={() => {
|
||||
setShowThemeSubmenu(false);
|
||||
setPreviewTheme(null);
|
||||
}}
|
||||
>
|
||||
<button
|
||||
onClick={() => setShowThemeSubmenu(!showThemeSubmenu)}
|
||||
className={cn(
|
||||
'absolute left-full top-0 ml-1 min-w-[420px] rounded-lg',
|
||||
'bg-popover text-popover-foreground',
|
||||
'border border-border shadow-lg',
|
||||
'animate-in fade-in zoom-in-95 duration-100'
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'hover:bg-accent transition-colors',
|
||||
'focus:outline-none focus:bg-accent'
|
||||
)}
|
||||
style={{ zIndex: Z_INDEX.THEME_SUBMENU }}
|
||||
data-testid="project-theme-submenu"
|
||||
data-testid="theme-project-button"
|
||||
>
|
||||
<div className="p-2">
|
||||
{/* Use Global Option */}
|
||||
<button
|
||||
onPointerEnter={() => handlePreviewEnter(globalTheme)}
|
||||
onPointerLeave={handlePreviewLeave}
|
||||
onClick={() => handleThemeSelect('')}
|
||||
className={cn(
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'hover:bg-accent transition-colors',
|
||||
'focus:outline-none focus:bg-accent',
|
||||
!project.theme && 'bg-accent'
|
||||
)}
|
||||
data-testid="project-theme-global"
|
||||
>
|
||||
<Monitor className="w-4 h-4" />
|
||||
<span>Use Global</span>
|
||||
<span className="text-[10px] text-muted-foreground ml-1 capitalize">
|
||||
({globalTheme})
|
||||
</span>
|
||||
</button>
|
||||
<Palette className="w-4 h-4" />
|
||||
<span className="flex-1">Project Theme</span>
|
||||
{project.theme && (
|
||||
<span className="text-[10px] text-muted-foreground capitalize">
|
||||
{project.theme}
|
||||
</span>
|
||||
)}
|
||||
<ChevronRight className="w-4 h-4 text-muted-foreground" />
|
||||
</button>
|
||||
|
||||
<div className="h-px bg-border my-2" />
|
||||
{/* Theme Submenu */}
|
||||
{showThemeSubmenu && (
|
||||
<div
|
||||
ref={themeSubmenuRef}
|
||||
className={cn(
|
||||
'absolute left-full top-0 ml-1 min-w-[420px] rounded-lg',
|
||||
'bg-popover text-popover-foreground',
|
||||
'border border-border shadow-lg',
|
||||
'animate-in fade-in zoom-in-95 duration-100'
|
||||
)}
|
||||
style={{ zIndex: Z_INDEX.THEME_SUBMENU }}
|
||||
data-testid="project-theme-submenu"
|
||||
>
|
||||
<div className="p-2">
|
||||
{/* Use Global Option */}
|
||||
<button
|
||||
onPointerEnter={() => handlePreviewEnter(globalTheme)}
|
||||
onPointerLeave={handlePreviewLeave}
|
||||
onClick={() => handleThemeSelect(USE_GLOBAL_THEME)}
|
||||
className={cn(
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'hover:bg-accent transition-colors',
|
||||
'focus:outline-none focus:bg-accent',
|
||||
!project.theme && 'bg-accent'
|
||||
)}
|
||||
data-testid="project-theme-global"
|
||||
>
|
||||
<Monitor className="w-4 h-4" />
|
||||
<span>Use Global</span>
|
||||
<span className="text-[10px] text-muted-foreground ml-1 capitalize">
|
||||
({globalTheme})
|
||||
</span>
|
||||
</button>
|
||||
|
||||
{/* Two Column Layout - Using reusable ThemeColumn component */}
|
||||
<div className="flex gap-2">
|
||||
<ThemeColumn
|
||||
title="Dark"
|
||||
icon={Moon}
|
||||
themes={PROJECT_DARK_THEMES as ThemeOption[]}
|
||||
selectedTheme={project.theme as ThemeMode | null}
|
||||
onPreviewEnter={handlePreviewEnter}
|
||||
onPreviewLeave={handlePreviewLeave}
|
||||
onSelect={handleThemeSelect}
|
||||
/>
|
||||
<ThemeColumn
|
||||
title="Light"
|
||||
icon={Sun}
|
||||
themes={PROJECT_LIGHT_THEMES as ThemeOption[]}
|
||||
selectedTheme={project.theme as ThemeMode | null}
|
||||
onPreviewEnter={handlePreviewEnter}
|
||||
onPreviewLeave={handlePreviewLeave}
|
||||
onSelect={handleThemeSelect}
|
||||
/>
|
||||
<div className="h-px bg-border my-2" />
|
||||
|
||||
{/* Two Column Layout - Using reusable ThemeColumn component */}
|
||||
<div className="flex gap-2">
|
||||
<ThemeColumn
|
||||
title="Dark"
|
||||
icon={Moon}
|
||||
themes={PROJECT_DARK_THEMES as ThemeOption[]}
|
||||
selectedTheme={project.theme as ThemeMode | null}
|
||||
onPreviewEnter={handlePreviewEnter}
|
||||
onPreviewLeave={handlePreviewLeave}
|
||||
onSelect={handleThemeSelect}
|
||||
/>
|
||||
<ThemeColumn
|
||||
title="Light"
|
||||
icon={Sun}
|
||||
themes={PROJECT_LIGHT_THEMES as ThemeOption[]}
|
||||
selectedTheme={project.theme as ThemeMode | null}
|
||||
onPreviewEnter={handlePreviewEnter}
|
||||
onPreviewLeave={handlePreviewLeave}
|
||||
onSelect={handleThemeSelect}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<button
|
||||
onClick={handleRemove}
|
||||
className={cn(
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'text-destructive hover:bg-destructive/10',
|
||||
'transition-colors',
|
||||
'focus:outline-none focus:bg-destructive/10'
|
||||
)}
|
||||
data-testid="remove-project-button"
|
||||
>
|
||||
<Trash2 className="w-4 h-4" />
|
||||
<span>Remove Project</span>
|
||||
</button>
|
||||
<button
|
||||
onClick={handleRemove}
|
||||
className={cn(
|
||||
'w-full flex items-center gap-2 px-3 py-2 rounded-md',
|
||||
'text-sm font-medium text-left',
|
||||
'text-destructive hover:bg-destructive/10',
|
||||
'transition-colors',
|
||||
'focus:outline-none focus:bg-destructive/10'
|
||||
)}
|
||||
data-testid="remove-project-button"
|
||||
>
|
||||
<Trash2 className="w-4 h-4" />
|
||||
<span>Remove Project</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ConfirmDialog
|
||||
open={showRemoveDialog}
|
||||
onOpenChange={setShowRemoveDialog}
|
||||
onOpenChange={handleDialogClose}
|
||||
onConfirm={handleConfirmRemove}
|
||||
title="Remove Project"
|
||||
description={`Are you sure you want to remove "${project.name}" from the project list? This won't delete any files on disk.`}
|
||||
|
||||
@@ -29,7 +29,7 @@ export function ProjectSwitcherItem({
// Get the icon component from lucide-react
const getIconComponent = (): LucideIcon => {
if (project.icon && project.icon in LucideIcons) {
return (LucideIcons as Record<string, LucideIcon>)[project.icon];
return (LucideIcons as unknown as Record<string, LucideIcon>)[project.icon];
}
return Folder;
};

@@ -1,20 +1,23 @@
|
||||
import { useState, useCallback, useEffect } from 'react';
|
||||
import { Plus, Bug, FolderOpen } from 'lucide-react';
|
||||
import { useNavigate } from '@tanstack/react-router';
|
||||
import { Plus, Bug, FolderOpen, BookOpen } from 'lucide-react';
|
||||
import { useNavigate, useLocation } from '@tanstack/react-router';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { useAppStore, type ThemeMode } from '@/store/app-store';
|
||||
import { useOSDetection } from '@/hooks/use-os-detection';
|
||||
import { ProjectSwitcherItem } from './components/project-switcher-item';
|
||||
import { ProjectContextMenu } from './components/project-context-menu';
|
||||
import { EditProjectDialog } from './components/edit-project-dialog';
|
||||
import { NotificationBell } from './components/notification-bell';
|
||||
import { NewProjectModal } from '@/components/dialogs/new-project-modal';
|
||||
import { OnboardingDialog } from '@/components/layout/sidebar/dialogs';
|
||||
import { useProjectCreation, useProjectTheme } from '@/components/layout/sidebar/hooks';
|
||||
import { SIDEBAR_FEATURE_FLAGS } from '@/components/layout/sidebar/constants';
|
||||
import type { Project } from '@/lib/electron';
|
||||
import { getElectronAPI } from '@/lib/electron';
|
||||
import { initializeProject, hasAppSpec, hasAutomakerDir } from '@/lib/project-init';
|
||||
import { toast } from 'sonner';
|
||||
import { CreateSpecDialog } from '@/components/views/spec-view/dialogs';
|
||||
import type { FeatureCount } from '@/components/views/spec-view/types';
|
||||
|
||||
function getOSAbbreviation(os: string): string {
|
||||
switch (os) {
|
||||
@@ -31,6 +34,9 @@ function getOSAbbreviation(os: string): string {
|
||||
|
||||
export function ProjectSwitcher() {
|
||||
const navigate = useNavigate();
|
||||
const location = useLocation();
|
||||
const { hideWiki } = SIDEBAR_FEATURE_FLAGS;
|
||||
const isWikiActive = location.pathname === '/wiki';
|
||||
const {
|
||||
projects,
|
||||
currentProject,
|
||||
@@ -52,7 +58,7 @@ export function ProjectSwitcher() {
|
||||
const [projectOverview, setProjectOverview] = useState('');
|
||||
const [generateFeatures, setGenerateFeatures] = useState(true);
|
||||
const [analyzeProject, setAnalyzeProject] = useState(true);
|
||||
const [featureCount, setFeatureCount] = useState(5);
|
||||
const [featureCount, setFeatureCount] = useState<FeatureCount>(50);
|
||||
|
||||
// Derive isCreatingSpec from store state
|
||||
const isCreatingSpec = specCreatingForProject !== null;
|
||||
@@ -124,6 +130,10 @@ export function ProjectSwitcher() {
|
||||
api.openExternalLink('https://github.com/AutoMaker-Org/automaker/issues');
|
||||
}, []);
|
||||
|
||||
const handleWikiClick = useCallback(() => {
|
||||
navigate({ to: '/wiki' });
|
||||
}, [navigate]);
|
||||
|
||||
/**
|
||||
* Opens the system folder selection dialog and initializes the selected project.
|
||||
*/
|
||||
@@ -199,13 +209,18 @@ export function ProjectSwitcher() {
|
||||
|
||||
try {
|
||||
const api = getElectronAPI();
|
||||
await api.generateAppSpec({
|
||||
projectPath: setupProjectPath,
|
||||
if (!api.specRegeneration) {
|
||||
toast.error('Spec regeneration not available');
|
||||
setSpecCreatingForProject(null);
|
||||
return;
|
||||
}
|
||||
await api.specRegeneration.create(
|
||||
setupProjectPath,
|
||||
projectOverview,
|
||||
generateFeatures,
|
||||
analyzeProject,
|
||||
featureCount,
|
||||
});
|
||||
featureCount
|
||||
);
|
||||
} catch (error) {
|
||||
console.error('Failed to generate spec:', error);
|
||||
toast.error('Failed to generate spec', {
|
||||
@@ -319,6 +334,11 @@ export function ProjectSwitcher() {
|
||||
v{appVersion} {versionSuffix}
|
||||
</span>
|
||||
</button>
|
||||
|
||||
{/* Notification Bell */}
|
||||
<div className="flex justify-center mt-2">
|
||||
<NotificationBell projectPath={currentProject?.path ?? null} />
|
||||
</div>
|
||||
<div className="w-full h-px bg-border mt-3" />
|
||||
</div>
|
||||
|
||||
@@ -405,8 +425,37 @@ export function ProjectSwitcher() {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Bug Report Button at the very bottom */}
|
||||
<div className="p-2 border-t border-border/40">
|
||||
{/* Wiki and Bug Report Buttons at the very bottom */}
|
||||
<div className="p-2 border-t border-border/40 space-y-2">
|
||||
{/* Wiki Button */}
|
||||
{!hideWiki && (
|
||||
<button
|
||||
onClick={handleWikiClick}
|
||||
className={cn(
|
||||
'w-full aspect-square rounded-xl flex items-center justify-center',
|
||||
'transition-all duration-200 ease-out',
|
||||
isWikiActive
|
||||
? [
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground',
|
||||
'border border-brand-500/30',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 border border-transparent hover:border-border/40',
|
||||
'hover:shadow-sm hover:scale-105 active:scale-95',
|
||||
]
|
||||
)}
|
||||
title="Wiki"
|
||||
data-testid="wiki-button"
|
||||
>
|
||||
<BookOpen
|
||||
className={cn('w-5 h-5', isWikiActive && 'text-brand-500 drop-shadow-sm')}
|
||||
/>
|
||||
</button>
|
||||
)}
|
||||
{/* Bug Report Button */}
|
||||
<button
|
||||
onClick={handleBugReportClick}
|
||||
className={cn(
|
||||
|
||||
@@ -5,6 +5,7 @@ import { useNavigate, useLocation } from '@tanstack/react-router';
|
||||
const logger = createLogger('Sidebar');
|
||||
import { cn } from '@/lib/utils';
|
||||
import { useAppStore, type ThemeMode } from '@/store/app-store';
|
||||
import { useNotificationsStore } from '@/store/notifications-store';
|
||||
import { useKeyboardShortcuts, useKeyboardShortcutsConfig } from '@/hooks/use-keyboard-shortcuts';
|
||||
import { getElectronAPI } from '@/lib/electron';
|
||||
import { initializeProject, hasAppSpec, hasAutomakerDir } from '@/lib/project-init';
|
||||
@@ -19,7 +20,10 @@ import {
|
||||
SidebarHeader,
|
||||
SidebarNavigation,
|
||||
SidebarFooter,
|
||||
MobileSidebarToggle,
|
||||
} from './sidebar/components';
|
||||
import { useIsCompact } from '@/hooks/use-media-query';
|
||||
import { PanelLeftClose } from 'lucide-react';
|
||||
import { TrashDialog, OnboardingDialog } from './sidebar/dialogs';
|
||||
import { SIDEBAR_FEATURE_FLAGS } from './sidebar/constants';
|
||||
import {
|
||||
@@ -43,9 +47,11 @@ export function Sidebar() {
|
||||
trashedProjects,
|
||||
currentProject,
|
||||
sidebarOpen,
|
||||
mobileSidebarHidden,
|
||||
projectHistory,
|
||||
upsertAndSetCurrentProject,
|
||||
toggleSidebar,
|
||||
toggleMobileSidebarHidden,
|
||||
restoreTrashedProject,
|
||||
deleteTrashedProject,
|
||||
emptyTrash,
|
||||
@@ -56,13 +62,17 @@ export function Sidebar() {
|
||||
setSpecCreatingForProject,
|
||||
} = useAppStore();
|
||||
|
||||
const isCompact = useIsCompact();
|
||||
|
||||
// Environment variable flags for hiding sidebar items
|
||||
const { hideTerminal, hideWiki, hideRunningAgents, hideContext, hideSpecEditor } =
|
||||
SIDEBAR_FEATURE_FLAGS;
|
||||
const { hideTerminal, hideRunningAgents, hideContext, hideSpecEditor } = SIDEBAR_FEATURE_FLAGS;
|
||||
|
||||
// Get customizable keyboard shortcuts
|
||||
const shortcuts = useKeyboardShortcutsConfig();
|
||||
|
||||
// Get unread notifications count
|
||||
const unreadNotificationsCount = useNotificationsStore((s) => s.unreadCount);
|
||||
|
||||
// State for delete project confirmation dialog
|
||||
const [showDeleteProjectDialog, setShowDeleteProjectDialog] = useState(false);
|
||||
|
||||
@@ -239,6 +249,7 @@ export function Sidebar() {
|
||||
cyclePrevProject,
|
||||
cycleNextProject,
|
||||
unviewedValidationsCount,
|
||||
unreadNotificationsCount,
|
||||
isSpecGenerating: isCurrentProjectGeneratingSpec,
|
||||
});
|
||||
|
||||
@@ -251,10 +262,16 @@ export function Sidebar() {
|
||||
return location.pathname === routePath;
|
||||
};
|
||||
|
||||
// Check if sidebar should be completely hidden on mobile
|
||||
const shouldHideSidebar = isCompact && mobileSidebarHidden;
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Floating toggle to show sidebar on mobile when hidden */}
|
||||
<MobileSidebarToggle />
|
||||
|
||||
{/* Mobile backdrop overlay */}
|
||||
{sidebarOpen && (
|
||||
{sidebarOpen && !shouldHideSidebar && (
|
||||
<div
|
||||
className="fixed inset-0 bg-black/50 z-20 lg:hidden"
|
||||
onClick={toggleSidebar}
|
||||
@@ -270,8 +287,11 @@ export function Sidebar() {
|
||||
'border-r border-border/60 shadow-[1px_0_20px_-5px_rgba(0,0,0,0.1)]',
|
||||
// Smooth width transition
|
||||
'transition-all duration-300 ease-[cubic-bezier(0.4,0,0.2,1)]',
|
||||
// Mobile: completely hidden when mobileSidebarHidden is true
|
||||
shouldHideSidebar && 'hidden',
|
||||
// Mobile: overlay when open, collapsed when closed
|
||||
sidebarOpen ? 'fixed inset-y-0 left-0 w-72 lg:relative lg:w-72' : 'relative w-16'
|
||||
!shouldHideSidebar &&
|
||||
(sidebarOpen ? 'fixed inset-y-0 left-0 w-72 lg:relative lg:w-72' : 'relative w-16')
|
||||
)}
|
||||
data-testid="sidebar"
|
||||
>
|
||||
@@ -281,8 +301,33 @@ export function Sidebar() {
|
||||
shortcut={shortcuts.toggleSidebar}
|
||||
/>
|
||||
|
||||
{/* Floating hide button on right edge - only visible on compact screens when sidebar is collapsed */}
|
||||
{!sidebarOpen && isCompact && (
|
||||
<button
|
||||
onClick={toggleMobileSidebarHidden}
|
||||
className={cn(
|
||||
'absolute -right-6 top-1/2 -translate-y-1/2 z-40',
|
||||
'flex items-center justify-center w-6 h-10 rounded-r-lg',
|
||||
'bg-card/95 backdrop-blur-sm border border-l-0 border-border/80',
|
||||
'text-muted-foreground hover:text-brand-500 hover:bg-accent/80',
|
||||
'shadow-lg hover:shadow-xl hover:shadow-brand-500/10',
|
||||
'transition-all duration-200',
|
||||
'hover:w-8 active:scale-95'
|
||||
)}
|
||||
aria-label="Hide sidebar"
|
||||
data-testid="sidebar-mobile-hide"
|
||||
>
|
||||
<PanelLeftClose className="w-3.5 h-3.5" />
|
||||
</button>
|
||||
)}
|
||||
|
||||
<div className="flex-1 flex flex-col overflow-hidden">
|
||||
<SidebarHeader sidebarOpen={sidebarOpen} currentProject={currentProject} />
|
||||
<SidebarHeader
|
||||
sidebarOpen={sidebarOpen}
|
||||
currentProject={currentProject}
|
||||
onClose={toggleSidebar}
|
||||
onExpand={toggleSidebar}
|
||||
/>
|
||||
|
||||
<SidebarNavigation
|
||||
currentProject={currentProject}
|
||||
@@ -297,7 +342,6 @@ export function Sidebar() {
|
||||
sidebarOpen={sidebarOpen}
|
||||
isActiveRoute={isActiveRoute}
|
||||
navigate={navigate}
|
||||
hideWiki={hideWiki}
|
||||
hideRunningAgents={hideRunningAgents}
|
||||
runningAgentsCount={runningAgentsCount}
|
||||
shortcuts={{ settings: shortcuts.settings }}
|
||||
|
||||
@@ -1,6 +1,7 @@
import { PanelLeft, PanelLeftClose } from 'lucide-react';
import { cn } from '@/lib/utils';
import { formatShortcut } from '@/store/app-store';
import { useIsCompact } from '@/hooks/use-media-query';

interface CollapseToggleButtonProps {
sidebarOpen: boolean;
@@ -13,6 +14,13 @@ export function CollapseToggleButton({
toggleSidebar,
shortcut,
}: CollapseToggleButtonProps) {
const isCompact = useIsCompact();

// Hide when in compact mode (mobile menu is shown in board header)
if (isCompact) {
return null;
}

return (
<button
onClick={toggleSidebar}

@@ -8,3 +8,4 @@ export { ProjectActions } from './project-actions';
export { SidebarNavigation } from './sidebar-navigation';
export { ProjectSelectorWithOptions } from './project-selector-with-options';
export { SidebarFooter } from './sidebar-footer';
export { MobileSidebarToggle } from './mobile-sidebar-toggle';

@@ -0,0 +1,42 @@
import { PanelLeft } from 'lucide-react';
import { cn } from '@/lib/utils';
import { useAppStore } from '@/store/app-store';
import { useIsCompact } from '@/hooks/use-media-query';

/**
* Floating toggle button for mobile that completely hides/shows the sidebar.
* Positioned at the left-center of the screen.
* Only visible on compact/mobile screens when the sidebar is hidden.
*/
export function MobileSidebarToggle() {
const isCompact = useIsCompact();
const { mobileSidebarHidden, toggleMobileSidebarHidden } = useAppStore();

// Only show on compact screens when sidebar is hidden
if (!isCompact || !mobileSidebarHidden) {
return null;
}

return (
<button
onClick={toggleMobileSidebarHidden}
className={cn(
'fixed left-0 top-1/2 -translate-y-1/2 z-50',
'flex items-center justify-center',
'w-8 h-12 rounded-r-lg',
// Glass morphism background
'bg-card/95 backdrop-blur-sm border border-l-0 border-border/80',
// Shadow and hover effects
'shadow-lg shadow-black/10 hover:shadow-xl hover:shadow-brand-500/10',
'text-muted-foreground hover:text-brand-500 hover:bg-accent/80',
'hover:border-brand-500/30',
'transition-all duration-200 ease-out',
'hover:w-10 active:scale-95'
)}
aria-label="Show sidebar"
data-testid="mobile-sidebar-toggle"
>
<PanelLeft className="w-4 h-4 pointer-events-none" />
</button>
);
}
@@ -1,13 +1,12 @@
|
||||
import type { NavigateOptions } from '@tanstack/react-router';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { formatShortcut } from '@/store/app-store';
|
||||
import { BookOpen, Activity, Settings } from 'lucide-react';
|
||||
import { Activity, Settings } from 'lucide-react';
|
||||
|
||||
interface SidebarFooterProps {
|
||||
sidebarOpen: boolean;
|
||||
isActiveRoute: (id: string) => boolean;
|
||||
navigate: (opts: NavigateOptions) => void;
|
||||
hideWiki: boolean;
|
||||
hideRunningAgents: boolean;
|
||||
runningAgentsCount: number;
|
||||
shortcuts: {
|
||||
@@ -19,7 +18,6 @@ export function SidebarFooter({
|
||||
sidebarOpen,
|
||||
isActiveRoute,
|
||||
navigate,
|
||||
hideWiki,
|
||||
hideRunningAgents,
|
||||
runningAgentsCount,
|
||||
shortcuts,
|
||||
@@ -34,66 +32,6 @@ export function SidebarFooter({
|
||||
'bg-gradient-to-t from-background/10 via-sidebar/50 to-transparent'
|
||||
)}
|
||||
>
|
||||
{/* Wiki Link */}
|
||||
{!hideWiki && (
|
||||
<div className="p-2 pb-0">
|
||||
<button
|
||||
onClick={() => navigate({ to: '/wiki' })}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag',
|
||||
'transition-all duration-200 ease-out',
|
||||
isActiveRoute('wiki')
|
||||
? [
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground font-medium',
|
||||
'border border-brand-500/30',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50',
|
||||
'border border-transparent hover:border-border/40',
|
||||
'hover:shadow-sm',
|
||||
],
|
||||
sidebarOpen ? 'justify-start' : 'justify-center',
|
||||
'hover:scale-[1.02] active:scale-[0.97]'
|
||||
)}
|
||||
title={!sidebarOpen ? 'Wiki' : undefined}
|
||||
data-testid="wiki-link"
|
||||
>
|
||||
<BookOpen
|
||||
className={cn(
|
||||
'w-[18px] h-[18px] shrink-0 transition-all duration-200',
|
||||
isActiveRoute('wiki')
|
||||
? 'text-brand-500 drop-shadow-sm'
|
||||
: 'group-hover:text-brand-400 group-hover:scale-110'
|
||||
)}
|
||||
/>
|
||||
<span
|
||||
className={cn(
|
||||
'ml-3 font-medium text-sm flex-1 text-left',
|
||||
sidebarOpen ? 'block' : 'hidden'
|
||||
)}
|
||||
>
|
||||
Wiki
|
||||
</span>
|
||||
{!sidebarOpen && (
|
||||
<span
|
||||
className={cn(
|
||||
'absolute left-full ml-3 px-2.5 py-1.5 rounded-lg',
|
||||
'bg-popover text-popover-foreground text-xs font-medium',
|
||||
'border border-border shadow-lg',
|
||||
'opacity-0 group-hover:opacity-100',
|
||||
'transition-all duration-200 whitespace-nowrap z-50',
|
||||
'translate-x-1 group-hover:translate-x-0'
|
||||
)}
|
||||
>
|
||||
Wiki
|
||||
</span>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
{/* Running Agents Link */}
|
||||
{!hideRunningAgents && (
|
||||
<div className="p-2 pb-0">
|
||||
@@ -213,7 +151,7 @@ export function SidebarFooter({
|
||||
sidebarOpen ? 'justify-start' : 'justify-center',
|
||||
'hover:scale-[1.02] active:scale-[0.97]'
|
||||
)}
|
||||
title={!sidebarOpen ? 'Settings' : undefined}
|
||||
title={!sidebarOpen ? 'Global Settings' : undefined}
|
||||
data-testid="settings-button"
|
||||
>
|
||||
<Settings
|
||||
@@ -230,7 +168,7 @@ export function SidebarFooter({
|
||||
sidebarOpen ? 'block' : 'hidden'
|
||||
)}
|
||||
>
|
||||
Settings
|
||||
Global Settings
|
||||
</span>
|
||||
{sidebarOpen && (
|
||||
<span
|
||||
@@ -256,7 +194,7 @@ export function SidebarFooter({
|
||||
'translate-x-1 group-hover:translate-x-0'
|
||||
)}
|
||||
>
|
||||
Settings
|
||||
Global Settings
|
||||
<span className="ml-2 px-1.5 py-0.5 bg-muted rounded text-[10px] font-mono text-muted-foreground">
|
||||
{formatShortcut(shortcuts.settings, true)}
|
||||
</span>
|
||||
|
||||
@@ -1,19 +1,33 @@
|
||||
import { Folder, LucideIcon } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
import { Folder, LucideIcon, X, Menu, Check } from 'lucide-react';
|
||||
import * as LucideIcons from 'lucide-react';
|
||||
import { cn, isMac } from '@/lib/utils';
|
||||
import { getAuthenticatedImageUrl } from '@/lib/api-fetch';
|
||||
import { isElectron, type Project } from '@/lib/electron';
|
||||
import { useIsCompact } from '@/hooks/use-media-query';
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';
|
||||
import { useAppStore } from '@/store/app-store';
|
||||
|
||||
interface SidebarHeaderProps {
|
||||
sidebarOpen: boolean;
|
||||
currentProject: Project | null;
|
||||
onClose?: () => void;
|
||||
onExpand?: () => void;
|
||||
}
|
||||
|
||||
export function SidebarHeader({ sidebarOpen, currentProject }: SidebarHeaderProps) {
|
||||
export function SidebarHeader({
|
||||
sidebarOpen,
|
||||
currentProject,
|
||||
onClose,
|
||||
onExpand,
|
||||
}: SidebarHeaderProps) {
|
||||
const isCompact = useIsCompact();
|
||||
const [projectListOpen, setProjectListOpen] = useState(false);
|
||||
const { projects, setCurrentProject } = useAppStore();
|
||||
// Get the icon component from lucide-react
|
||||
const getIconComponent = (): LucideIcon => {
|
||||
if (currentProject?.icon && currentProject.icon in LucideIcons) {
|
||||
return (LucideIcons as Record<string, LucideIcon>)[currentProject.icon];
|
||||
return (LucideIcons as unknown as Record<string, LucideIcon>)[currentProject.icon];
}
return Folder;
};

@@ -24,43 +38,141 @@ export function SidebarHeader({ sidebarOpen, currentProject }: SidebarHeaderProp
return (
<div
className={cn(
'shrink-0 flex flex-col',
'shrink-0 flex flex-col relative',
// Add padding on macOS Electron for traffic light buttons
isMac && isElectron() && 'pt-[10px]'
)}
>
{/* Project name and icon display */}
{currentProject && (
<div
{/* Mobile close button - only visible on mobile when sidebar is open */}
{sidebarOpen && onClose && (
<button
onClick={onClose}
className={cn(
'flex items-center gap-3 px-4 pt-3 pb-1',
!sidebarOpen && 'justify-center px-2'
'lg:hidden absolute top-3 right-3 z-10',
'flex items-center justify-center w-8 h-8 rounded-lg',
'bg-muted/50 hover:bg-muted',
'text-muted-foreground hover:text-foreground',
'transition-colors duration-200'
)}
aria-label="Close navigation"
data-testid="sidebar-mobile-close"
>
{/* Project Icon */}
<div className="shrink-0">
{hasCustomIcon ? (
<img
src={getAuthenticatedImageUrl(currentProject.customIconPath!, currentProject.path)}
alt={currentProject.name}
className="w-8 h-8 rounded-lg object-cover ring-1 ring-border/50"
/>
) : (
<div className="w-8 h-8 rounded-lg bg-brand-500/10 border border-brand-500/20 flex items-center justify-center">
<IconComponent className="w-5 h-5 text-brand-500" />
</div>
)}
</div>

{/* Project Name - only show when sidebar is open */}
{sidebarOpen && (
<div className="flex-1 min-w-0">
<h2 className="text-sm font-semibold text-foreground truncate">
{currentProject.name}
</h2>
</div>
<X className="w-5 h-5" />
</button>
)}
{/* Mobile expand button - hamburger menu to expand sidebar when collapsed on mobile */}
{!sidebarOpen && isCompact && onExpand && (
<button
onClick={onExpand}
className={cn(
'flex items-center justify-center w-10 h-10 mx-auto mt-2 rounded-lg',
'bg-muted/50 hover:bg-muted',
'text-muted-foreground hover:text-foreground',
'transition-colors duration-200'
)}
</div>
aria-label="Expand navigation"
data-testid="sidebar-mobile-expand"
>
<Menu className="w-5 h-5" />
</button>
)}
{/* Project name and icon display - entire element clickable on mobile */}
{currentProject && (
<Popover open={projectListOpen} onOpenChange={setProjectListOpen}>
<PopoverTrigger asChild>
<button
className={cn(
'flex items-center gap-3 px-4 pt-3 pb-1 w-full text-left',
'rounded-lg transition-colors duration-150',
!sidebarOpen && 'justify-center px-2',
// Only enable click behavior on compact screens
isCompact && 'hover:bg-accent/50 cursor-pointer',
!isCompact && 'pointer-events-none'
)}
title={isCompact ? 'Switch project' : undefined}
>
{/* Project Icon */}
<div className="shrink-0">
{hasCustomIcon ? (
<img
src={getAuthenticatedImageUrl(
currentProject.customIconPath!,
currentProject.path
)}
alt={currentProject.name}
className="w-8 h-8 rounded-lg object-cover ring-1 ring-border/50"
/>
) : (
<div className="w-8 h-8 rounded-lg bg-brand-500/10 border border-brand-500/20 flex items-center justify-center">
<IconComponent className="w-5 h-5 text-brand-500" />
</div>
)}
</div>

{/* Project Name - only show when sidebar is open */}
{sidebarOpen && (
<div className="flex-1 min-w-0">
<h2 className="text-sm font-semibold text-foreground truncate">
{currentProject.name}
</h2>
</div>
)}
</button>
</PopoverTrigger>
<PopoverContent className="w-64 p-2" align="start" side="bottom" sideOffset={8}>
<div className="space-y-1">
<p className="text-xs font-medium text-muted-foreground px-2 py-1">Switch Project</p>
{projects.map((project) => {
const ProjectIcon =
project.icon && project.icon in LucideIcons
? (LucideIcons as unknown as Record<string, LucideIcon>)[project.icon]
: Folder;
const isActive = currentProject?.id === project.id;

return (
<button
key={project.id}
onClick={() => {
setCurrentProject(project);
setProjectListOpen(false);
}}
className={cn(
'w-full flex items-center gap-3 px-2 py-2 rounded-lg text-left',
'transition-colors duration-150',
isActive
? 'bg-brand-500/10 text-brand-500'
: 'hover:bg-accent text-foreground'
)}
>
{project.customIconPath ? (
<img
src={getAuthenticatedImageUrl(project.customIconPath, project.path)}
alt={project.name}
className="w-6 h-6 rounded object-cover ring-1 ring-border/50"
/>
) : (
<div
className={cn(
'w-6 h-6 rounded flex items-center justify-center',
isActive ? 'bg-brand-500/20' : 'bg-muted'
)}
>
<ProjectIcon
className={cn(
'w-4 h-4',
isActive ? 'text-brand-500' : 'text-muted-foreground'
)}
/>
</div>
)}
<span className="flex-1 text-sm truncate">{project.name}</span>
{isActive && <Check className="w-4 h-4 text-brand-500" />}
</button>
);
})}
</div>
</PopoverContent>
</Popover>
)}
</div>
);
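
The SidebarHeader hunks above resolve a Lucide icon component from a name stored on the project and fall back to a folder icon when the name is missing or unknown. A minimal standalone sketch of that lookup pattern, assuming a hypothetical `Project` shape with an optional `icon` string (the app's real project type lives elsewhere):

```ts
// Sketch only: resolve a Lucide icon component from a name stored as a string.
// The Project interface below is an assumption for illustration.
import * as LucideIcons from 'lucide-react';
import { Folder, type LucideIcon } from 'lucide-react';

interface Project {
  icon?: string; // e.g. 'Rocket' or 'Box', a Lucide component name persisted as text
}

function resolveProjectIcon(project: Project | null): LucideIcon {
  if (project?.icon && project.icon in LucideIcons) {
    // The namespace import is an object keyed by component name, so a plain
    // string index works once it is widened to Record<string, LucideIcon>.
    return (LucideIcons as unknown as Record<string, LucideIcon>)[project.icon];
  }
  return Folder; // safe default when no icon is stored or the name is unknown
}
```

The project-switcher popover in the diff applies the same lookup per entry in `projects` when rendering the list.
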
@@ -21,7 +21,12 @@ export function SidebarNavigation({
navigate,
}: SidebarNavigationProps) {
return (
<nav className={cn('flex-1 overflow-y-auto px-3 pb-2', sidebarOpen ? 'mt-1' : 'mt-1')}>
<nav
className={cn(
'flex-1 overflow-y-auto scrollbar-hide px-3 pb-2',
sidebarOpen ? 'mt-1' : 'mt-1'
)}
>
{!currentProject && sidebarOpen ? (
// Placeholder when no project is selected (only in expanded state)
<div className="flex items-center justify-center h-full px-4">

@@ -41,7 +46,13 @@ export function SidebarNavigation({
</span>
</div>
)}
{section.label && !sidebarOpen && <div className="h-px bg-border/30 mx-2 my-1.5"></div>}
{/* Separator for sections without label (visual separation) */}
{!section.label && sectionIdx > 0 && sidebarOpen && (
<div className="h-px bg-border/40 mx-3 mb-4"></div>
)}
{(section.label || sectionIdx > 0) && !sidebarOpen && (
<div className="h-px bg-border/30 mx-2 my-1.5"></div>
)}

{/* Nav Items */}
<div className="space-y-1.5">

@@ -11,6 +11,8 @@ import {
Lightbulb,
Brain,
Network,
Bell,
Settings,
} from 'lucide-react';
import type { NavSection, NavItem } from '../types';
import type { KeyboardShortcut } from '@/hooks/use-keyboard-shortcuts';

@@ -32,9 +34,11 @@ interface UseNavigationProps {
agent: string;
terminal: string;
settings: string;
projectSettings: string;
ideation: string;
githubIssues: string;
githubPrs: string;
notifications: string;
};
hideSpecEditor: boolean;
hideContext: boolean;

@@ -49,6 +53,8 @@ interface UseNavigationProps {
cycleNextProject: () => void;
/** Count of unviewed validations to show on GitHub Issues nav item */
unviewedValidationsCount?: number;
/** Count of unread notifications to show on Notifications nav item */
unreadNotificationsCount?: number;
/** Whether spec generation is currently running for the current project */
isSpecGenerating?: boolean;
}

@@ -67,6 +73,7 @@ export function useNavigation({
cyclePrevProject,
cycleNextProject,
unviewedValidationsCount,
unreadNotificationsCount,
isSpecGenerating,
}: UseNavigationProps) {
// Track if current project has a GitHub remote

@@ -199,6 +206,26 @@
});
}

// Add Notifications and Project Settings as a standalone section (no label for visual separation)
sections.push({
label: '',
items: [
{
id: 'notifications',
label: 'Notifications',
icon: Bell,
shortcut: shortcuts.notifications,
count: unreadNotificationsCount,
},
{
id: 'project-settings',
label: 'Project Settings',
icon: Settings,
shortcut: shortcuts.projectSettings,
},
],
});

return sections;
}, [
shortcuts,

@@ -207,6 +234,7 @@
hideTerminal,
hasGitHubRemote,
unviewedValidationsCount,
unreadNotificationsCount,
isSpecGenerating,
]);

@@ -257,11 +285,11 @@
});
});

// Add settings shortcut
// Add global settings shortcut
shortcutsList.push({
key: shortcuts.settings,
action: () => navigate({ to: '/settings' }),
description: 'Navigate to Settings',
description: 'Navigate to Global Settings',
});
}
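
The useNavigation hunks above append a standalone, unlabeled section holding Notifications and Project Settings, and rename the settings shortcut description to distinguish global settings from project settings. A small sketch of the section shape as it can be inferred from this diff; the real `NavSection` and `NavItem` types are imported from `'../types'` and may carry more fields:

```ts
// Inferred shapes only; the actual types come from '../types' in the diff.
import { Bell, Settings, type LucideIcon } from 'lucide-react';

interface NavItem {
  id: string;
  label: string;
  icon: LucideIcon;
  shortcut?: string; // keyboard shortcut key, e.g. shortcuts.notifications
  count?: number;    // optional badge such as the unread notifications count
}

interface NavSection {
  label: string; // '' renders as an unlabeled section set off by a divider
  items: NavItem[];
}

// Build the trailing utility section the way the diff does, assuming a
// shortcuts map with `notifications` and `projectSettings` keys.
function buildUtilitySection(
  shortcuts: { notifications: string; projectSettings: string },
  unreadNotificationsCount?: number
): NavSection {
  return {
    label: '',
    items: [
      {
        id: 'notifications',
        label: 'Notifications',
        icon: Bell,
        shortcut: shortcuts.notifications,
        count: unreadNotificationsCount,
      },
      {
        id: 'project-settings',
        label: 'Project Settings',
        icon: Settings,
        shortcut: shortcuts.projectSettings,
      },
    ],
  };
}
```
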
@@ -1,4 +1,4 @@
import { useState, useEffect, useCallback } from 'react';
import { useState, useEffect, useCallback, useRef } from 'react';
import { createLogger } from '@automaker/utils/logger';
import { getElectronAPI } from '@/lib/electron';

@@ -6,26 +6,48 @@ const logger = createLogger('RunningAgents');

export function useRunningAgents() {
const [runningAgentsCount, setRunningAgentsCount] = useState(0);
const fetchTimeoutRef = useRef<NodeJS.Timeout | null>(null);

// Fetch running agents count function - used for initial load and event-driven updates
const fetchRunningAgentsCount = useCallback(async () => {
try {
const api = getElectronAPI();
if (api.runningAgents) {
logger.debug('Fetching running agents count');
const result = await api.runningAgents.getAll();
if (result.success && result.runningAgents) {
logger.debug('Running agents count fetched', {
count: result.runningAgents.length,
});
setRunningAgentsCount(result.runningAgents.length);
} else {
logger.debug('Running agents count fetch returned empty/failed', {
success: result.success,
});
}
} else {
logger.debug('Running agents API not available');
}
} catch (error) {
logger.error('Error fetching running agents count:', error);
}
}, []);

// Debounced fetch to avoid excessive API calls from frequent events
const debouncedFetchRunningAgentsCount = useCallback(() => {
if (fetchTimeoutRef.current) {
clearTimeout(fetchTimeoutRef.current);
}
fetchTimeoutRef.current = setTimeout(() => {
fetchRunningAgentsCount();
}, 300);
}, [fetchRunningAgentsCount]);

// Subscribe to auto-mode events to update running agents count in real-time
useEffect(() => {
const api = getElectronAPI();
if (!api.autoMode) {
logger.debug('Auto mode API not available for running agents hook');
// If autoMode is not available, still fetch initial count
fetchRunningAgentsCount();
return;

@@ -35,6 +57,9 @@ export function useRunningAgents() {
fetchRunningAgentsCount();

const unsubscribe = api.autoMode.onEvent((event) => {
logger.debug('Auto mode event for running agents hook', {
type: event.type,
});
// When a feature starts, completes, or errors, refresh the count
if (
event.type === 'auto_mode_feature_complete' ||

@@ -50,6 +75,57 @@ export function useRunningAgents() {
};
}, [fetchRunningAgentsCount]);

// Subscribe to backlog plan events to update running agents count
useEffect(() => {
const api = getElectronAPI();
if (!api.backlogPlan) return;

fetchRunningAgentsCount();

const unsubscribe = api.backlogPlan.onEvent(() => {
fetchRunningAgentsCount();
});

return () => {
unsubscribe();
};
}, [fetchRunningAgentsCount]);

// Subscribe to spec regeneration events to update running agents count
useEffect(() => {
const api = getElectronAPI();
if (!api.specRegeneration) return;

fetchRunningAgentsCount();

const unsubscribe = api.specRegeneration.onEvent((event) => {
logger.debug('Spec regeneration event for running agents hook', {
type: event.type,
});
// When spec regeneration completes or errors, refresh immediately
if (event.type === 'spec_regeneration_complete' || event.type === 'spec_regeneration_error') {
fetchRunningAgentsCount();
}
// For progress events, use debounced fetch to avoid excessive calls
else if (event.type === 'spec_regeneration_progress') {
debouncedFetchRunningAgentsCount();
}
});

return () => {
unsubscribe();
};
}, [fetchRunningAgentsCount, debouncedFetchRunningAgentsCount]);

// Cleanup timeout on unmount
useEffect(() => {
return () => {
if (fetchTimeoutRef.current) {
clearTimeout(fetchTimeoutRef.current);
}
};
}, []);

return {
runningAgentsCount,
};
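
The useRunningAgents hunks above refetch the running-agent count whenever auto-mode, backlog-plan, or spec-regeneration events arrive, but throttle high-frequency progress events through a `setTimeout` stored in a ref. A generic sketch of that debounce-and-cleanup pattern, detached from the app's Electron API (the hook name and 300 ms delay are illustrative, not part of the codebase):

```ts
// Sketch of the debounced-refetch pattern: coalesce bursts of events into a
// single refetch after a short quiet period, and cancel any pending timer on
// unmount so the callback never fires against an unmounted component.
import { useCallback, useEffect, useRef } from 'react';

export function useDebouncedRefetch(refetch: () => void, delayMs = 300) {
  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  const schedule = useCallback(() => {
    if (timeoutRef.current) {
      clearTimeout(timeoutRef.current); // restart the quiet period
    }
    timeoutRef.current = setTimeout(refetch, delayMs);
  }, [refetch, delayMs]);

  useEffect(() => {
    return () => {
      if (timeoutRef.current) {
        clearTimeout(timeoutRef.current);
      }
    };
  }, []);

  return schedule;
}
```

Terminal events (complete, error) still call the refetch directly, as the diff does, so the displayed count never lags behind a state change that matters to the UI.
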
Some files were not shown because too many files have changed in this diff.