Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-30 14:22:02 +00:00)

Compare commits: v0.2.0 ... test-relea (42 commits)
Commit SHAs:
935316cb51, e608f46a49, 8de4056417, 9196a1afb4, eaef95c4a3, 3dd10aa8c7, 104f478f89,
b32af0c86b, c991d5f2f7, b3a4fd2be1, 28328d7d1e, 346c38d6da, ca4809ca06, 9afcc5fae6,
383dc66952, 3c466f0150, fe9b26c49e, 97d05148d4, 437063630c, 0dc2e72263, 5544031164,
9cf5fff0ad, 77918018fa, 1a4e9ccfcb, 5873e888a9, 28bbc3e0e1, be4aadb632, 62082fbaf5,
a4c5567768, 6c7adb140d, 18182bbc94, 0bb774375e, 4924cf1453, c079b3ef88, fade47afdc,
813ede2dde, b950f13e11, ba24753630, 8e65f0b338, 4b9bd2641f, 9978de0377, 0c776b173a
@@ -1,202 +0,0 @@
<project_specification>
<project_name>Automaker - Autonomous AI Development Studio</project_name>

<overview>
Automaker is a sophisticated desktop application that empowers developers to build software autonomously through AI-powered agents. Built with Electron and Next.js, it provides an intelligent GUI for project management, feature tracking via Kanban boards, and autonomous code generation. The application leverages multiple AI models (Claude, GPT) and supports complex workflows including git worktree isolation, testing automation, and multi-model agent execution. It acts as a complete development orchestrator, managing the entire lifecycle from specification to verified implementation.
</overview>

<technology_stack>
<frontend>
<framework>Next.js 16.0.7 (App Router)</framework>
<ui_library>shadcn/ui with Radix UI primitives</ui_library>
<styling>Tailwind CSS 4.0</styling>
<state_management>Zustand with persistence</state_management>
<drag_drop>@dnd-kit for Kanban board</drag_drop>
<icons>Lucide React</icons>
<query_client>TanStack Query for server state</query_client>
</frontend>
<desktop_shell>
<framework>Electron 39.2.6</framework>
<language>TypeScript 5.x</language>
<inter_process_communication>Electron IPC with security sandboxing</inter_process_communication>
<file_system>Node.js fs/promises with path validation</file_system>
</desktop_shell>
<ai_engine>
<primary_model>Claude 3.5 (Opus, Sonnet, Haiku) via Anthropic Claude Agent SDK</primary_model>
<secondary_model>GPT-5.1 Codex family via OpenAI CLI</secondary_model>
<orchestration>Custom Agent Service with streaming responses</orchestration>
<model_registry>Dynamic model provider system with CLI detection</model_registry>
</ai_engine>
<testing>
<framework>Playwright for E2E testing</framework>
<unit>Jest/Vitest compatible</unit>
<integration>Agent-driven test execution and verification</integration>
</testing>
<version_control>
<system>Git with worktree isolation support</system>
<branching>Feature branch management</branching>
<workflow>Automated commit and merge capabilities</workflow>
</version_control>
</technology_stack>

<core_capabilities>
<project_management>
- Open and manage multiple local projects
- Project-specific themes and configurations
- Session management with project context
- Recently used project cycling (Q/E shortcuts)
- Project search and type-ahead selection
- Trash and restore functionality for projects
</project_management>

<intelligent_analysis>
- Auto-generation and updating of app_spec.txt
- Feature extraction from existing codebases
- Technology stack detection and documentation
- Project structure analysis with file tree visualization
- "Project Ingestion": analyzes existing codebases to understand structure
- Auto-generation of `.automaker/app_spec.txt` based on codebase analysis
- Auto-generation of features in `.automaker/features/{id}/feature.json`:
  - Scans code for implemented features
  - Creates test cases for existing features
  - Marks existing features as "passes": true automatically
</intelligent_analysis>

<kanban_workflow>
- Visual representation of features from `.automaker/features/` folder
- Drag-and-drop interface to reprioritize tasks
- Direct editing of feature details (steps, description) from the card
- Visual Kanban board with drag-and-drop functionality
- Multiple status columns: Backlog, In Progress, Waiting Approval, Verified
- Feature cards with detailed information display (3 detail levels)
- Real-time status updates during agent execution
- Search and filtering capabilities
- Category management and autocomplete
- Image attachment support for feature descriptions
</kanban_workflow>

<autonomous_agent_engine>
- Multi-model agent system with profile-based execution
- Streaming agent output with real-time logs
- Git worktree isolation for safe feature development
- Automatic testing and verification workflows
- Context-aware prompt generation
- Agent memory and learning capabilities
- Concurrent feature processing with configurable limits
- Follow-up and resume capabilities
</autonomous_agent_engine>

<advanced_workflows>
- Git worktree management for isolated development
- Feature-specific branching and merging
- Automated commit generation with file tracking
- Test-driven development support
- Code review and approval workflows
- Revert and rollback capabilities
</advanced_workflows>

<user_interface>
- Dark/Light theme support with 12 custom themes
- Per-project theme configurations
- Comprehensive keyboard shortcut system
- Sidebar navigation with project switching
- Multi-view architecture (Board, Spec, Agent, Context, Settings)
- Setup wizard for first-time configuration
- CLI integration status monitoring
</user_interface>

<extensibility>
- AI Profile system for model/thinking level presets
- Keyboard shortcut customization
- Model provider plugin architecture
- Context file management for agent guidance
- Feature suggestion generation
- Spec regeneration workflows
</extensibility>
</core_capabilities>

<ui_layout>
<window_structure>
- Sidebar: Project List, Settings, Logs, Plugins
- Main Content:
  - **Spec View**: Split editor for `.automaker/app_spec.txt`
  - **Board View**: Kanban board for `.automaker/features/` folder
  - **Code View**: Read-only Monaco editor to see what the agent is writing
  - **Agent View**: Chat-like interface showing agent thought process and tool usage. Also used for the "New Project Interview".
</window_structure>
<theme>
- Dark/Light mode support (system sync)
- "Hacker" aesthetic option (terminal-like)
- Professional/Clean default
</theme>
</ui_layout>

<development_workflow>
<local_testing>
- "Browser Mode": Run the Next.js frontend in a standard browser with mocked Electron IPC for rapid UI iteration.
- "Electron Mode": Full desktop app testing.
- Hot Reloading for both Main and Renderer processes.
</local_testing>
</development_workflow>

<implemented_features>
- Complete Kanban board with drag-and-drop functionality
- Multi-model AI agent execution (Claude + GPT/Codex)
- Git worktree isolation for features
- Real-time agent output streaming and logging
- Project management with session persistence
- Theme system with 12 themes + per-project themes
- Comprehensive settings panel with all configurations
- Feature image attachment and context system
- Agent profiles with model/thinking level presets
- Keyboard shortcut system with customization
- CLI integration detection (Claude Code + Codex CLI)
- Auto mode for autonomous feature processing
- Feature suggestions generation
- Spec regeneration and project analysis
- Context file management
- Chat history and session management
- File diff viewing and git integration
- Search and filtering across all features
- Category management and autocomplete
- Test automation and verification workflows
</implemented_features>

<implementation_roadmap>
<phase_1_foundation>
- Enhanced error handling and recovery mechanisms
- Performance optimization for large projects
- Improved memory management for long-running sessions
- Advanced logging and debugging capabilities
</phase_1_foundation>

<phase_2_core_logic>
- Plugin system for custom model providers
- Advanced workflow customization engine
- Team collaboration features
- Cloud synchronization capabilities
- Advanced project templates and scaffolding
</phase_2_core_logic>

<phase_3_kanban_and_interaction>
- Build Kanban board with drag-and-drop
- Connect Kanban state to `.automaker/features/` filesystem
- Implement "Run Feature" capability
- Integrate standard prompts library
</phase_3_kanban_and_interaction>

<phase_3_polish>
- Enhanced accessibility features
- Advanced theme customization
- Performance monitoring and analytics
- Documentation generation automation
- Integration with external development tools
- Advanced security auditing and sandboxing
</phase_3_polish>

<phase_4_polish>
- Advanced terminal integration
- Settings & Extensibility
- UI refinement
</phase_4_polish>
</implementation_roadmap>
</project_specification>
@@ -1,9 +0,0 @@
[
  "Agent Runner",
  "Core",
  "Kanban",
  "Other",
  "Settings",
  "Uncategorized",
  "ka"
]
@@ -1,474 +0,0 @@
|
||||
# Clean Code Guidelines
|
||||
|
||||
## Overview
|
||||
|
||||
This document serves as a comprehensive guide for writing clean, maintainable, and extensible code. It outlines principles and practices that ensure code quality, reusability, and long-term maintainability. When writing or reviewing code, follow these guidelines to create software that is easy to understand, modify, and extend. This file is used by LLMs to understand and enforce coding standards throughout the codebase.
|
||||
|
||||
---
|
||||
|
||||
## Core Principles
|
||||
|
||||
### 1. DRY (Don't Repeat Yourself)
|
||||
|
||||
**Principle**: Every piece of knowledge should have a single, unambiguous representation within a system.
|
||||
|
||||
**Practices**:
|
||||
|
||||
- Extract repeated logic into reusable functions, classes, or modules
|
||||
- Use constants for repeated values
|
||||
- Create shared utilities for common operations
|
||||
- Avoid copy-pasting code blocks
|
||||
- When you find yourself writing similar code more than twice, refactor it
|
||||
|
||||
**Example - Bad**:
|
||||
|
||||
```typescript
|
||||
// Repeated validation logic
|
||||
if (email.includes("@") && email.length > 5) {
|
||||
// ...
|
||||
}
|
||||
if (email.includes("@") && email.length > 5) {
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
**Example - Good**:
|
||||
|
||||
```typescript
|
||||
function isValidEmail(email: string): boolean {
|
||||
return email.includes("@") && email.length > 5;
|
||||
}
|
||||
|
||||
if (isValidEmail(email)) {
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 2. Code Reusability
|
||||
|
||||
**Principle**: Write code that can be used in multiple contexts without modification or with minimal adaptation.
|
||||
|
||||
**Practices**:
|
||||
|
||||
- Create generic, parameterized functions instead of specific ones
|
||||
- Use composition over inheritance where appropriate
|
||||
- Design functions to be pure (no side effects) when possible
|
||||
- Create utility libraries for common operations
|
||||
- Use dependency injection to make components reusable
|
||||
- Design APIs that are flexible and configurable
|
||||
|
||||
**Example - Bad**:
|
||||
|
||||
```typescript
|
||||
function calculateUserTotal(userId: string) {
|
||||
const user = getUser(userId);
|
||||
return user.items.reduce((sum, item) => sum + item.price, 0);
|
||||
}
|
||||
```
|
||||
|
||||
**Example - Good**:
|
||||
|
||||
```typescript
|
||||
function calculateTotal<T extends { price: number }>(items: T[]): number {
|
||||
return items.reduce((sum, item) => sum + item.price, 0);
|
||||
}
|
||||
|
||||
function calculateUserTotal(userId: string) {
|
||||
const user = getUser(userId);
|
||||
return calculateTotal(user.items);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 3. Abstract Functions and Abstractions
|
||||
|
||||
**Principle**: Create abstractions that hide implementation details and provide clear, simple interfaces.
|
||||
|
||||
**Practices**:
|
||||
|
||||
- Use interfaces and abstract classes to define contracts
|
||||
- Create abstraction layers between different concerns
|
||||
- Hide complex implementation behind simple function signatures
|
||||
- Use dependency inversion - depend on abstractions, not concretions
|
||||
- Create factory functions/classes for object creation
|
||||
- Use strategy pattern for interchangeable algorithms
|
||||
|
||||
**Example - Bad**:
|
||||
|
||||
```typescript
|
||||
function processPayment(amount: number, cardNumber: string, cvv: string) {
|
||||
// Direct implementation tied to specific payment processor
|
||||
fetch("https://stripe.com/api/charge", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ amount, cardNumber, cvv }),
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
**Example - Good**:
|
||||
|
||||
```typescript
|
||||
interface PaymentProcessor {
|
||||
processPayment(
|
||||
amount: number,
|
||||
details: PaymentDetails
|
||||
): Promise<PaymentResult>;
|
||||
}
|
||||
|
||||
class StripeProcessor implements PaymentProcessor {
|
||||
async processPayment(
|
||||
amount: number,
|
||||
details: PaymentDetails
|
||||
): Promise<PaymentResult> {
|
||||
// Implementation
|
||||
}
|
||||
}
|
||||
|
||||
function processPayment(
|
||||
processor: PaymentProcessor,
|
||||
amount: number,
|
||||
details: PaymentDetails
|
||||
) {
|
||||
return processor.processPayment(amount, details);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 4. Extensibility
|
||||
|
||||
**Principle**: Design code that can be easily extended with new features without modifying existing code.
|
||||
|
||||
**Practices**:
|
||||
|
||||
- Follow the Open/Closed Principle: open for extension, closed for modification
|
||||
- Use plugin architectures and hooks for extensibility
|
||||
- Design with future requirements in mind (but don't over-engineer)
|
||||
- Use configuration over hardcoding
|
||||
- Create extension points through interfaces and callbacks
|
||||
- Use composition and dependency injection
|
||||
- Design APIs that can accommodate new parameters/options
|
||||
|
||||
**Example - Bad**:
|
||||
|
||||
```typescript
|
||||
function sendNotification(user: User, type: string) {
|
||||
if (type === "email") {
|
||||
sendEmail(user.email);
|
||||
} else if (type === "sms") {
|
||||
sendSMS(user.phone);
|
||||
}
|
||||
// Adding new notification types requires modifying this function
|
||||
}
|
||||
```
|
||||
|
||||
**Example - Good**:
|
||||
|
||||
```typescript
|
||||
interface NotificationChannel {
|
||||
send(user: User): Promise<void>;
|
||||
}
|
||||
|
||||
class EmailChannel implements NotificationChannel {
|
||||
async send(user: User): Promise<void> {
|
||||
// Implementation
|
||||
}
|
||||
}
|
||||
|
||||
class SMSChannel implements NotificationChannel {
|
||||
async send(user: User): Promise<void> {
|
||||
// Implementation
|
||||
}
|
||||
}
|
||||
|
||||
class NotificationService {
|
||||
constructor(private channels: NotificationChannel[]) {}
|
||||
|
||||
async send(user: User): Promise<void> {
|
||||
await Promise.all(this.channels.map((channel) => channel.send(user)));
|
||||
}
|
||||
}
|
||||
// New notification types can be added without modifying existing code
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 5. Avoid Magic Numbers and Strings
|
||||
|
||||
**Principle**: Use named constants instead of hardcoded values to improve readability and maintainability.
|
||||
|
||||
**Practices**:
|
||||
|
||||
- Extract all magic numbers into named constants
|
||||
- Use enums for related constants
|
||||
- Create configuration objects for settings
|
||||
- Use constants for API endpoints, timeouts, limits, etc.
|
||||
- Document why specific values are used
|
||||
|
||||
**Example - Bad**:
|
||||
|
||||
```typescript
|
||||
if (user.age >= 18) {
|
||||
// What does 18 mean?
|
||||
}
|
||||
|
||||
setTimeout(() => {
|
||||
// What does 3000 mean?
|
||||
}, 3000);
|
||||
|
||||
if (status === "active") {
|
||||
// What are the valid statuses?
|
||||
}
|
||||
```
|
||||
|
||||
**Example - Good**:
|
||||
|
||||
```typescript
|
||||
const MINIMUM_AGE_FOR_ADULTS = 18;
|
||||
const SESSION_TIMEOUT_MS = 3000;
|
||||
|
||||
enum UserStatus {
|
||||
ACTIVE = "active",
|
||||
INACTIVE = "inactive",
|
||||
SUSPENDED = "suspended",
|
||||
}
|
||||
|
||||
if (user.age >= MINIMUM_AGE_FOR_ADULTS) {
|
||||
// Clear intent
|
||||
}
|
||||
|
||||
setTimeout(() => {
|
||||
// Clear intent
|
||||
}, SESSION_TIMEOUT_MS);
|
||||
|
||||
if (status === UserStatus.ACTIVE) {
|
||||
// Type-safe and clear
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Additional Best Practices
|
||||
|
||||
### 6. Single Responsibility Principle
|
||||
|
||||
Each function, class, or module should have one reason to change.
|
||||
|
||||
**Example**:
|
||||
|
||||
```typescript
|
||||
// Bad: Multiple responsibilities
|
||||
class User {
|
||||
save() {
|
||||
/* database logic */
|
||||
}
|
||||
sendEmail() {
|
||||
/* email logic */
|
||||
}
|
||||
validate() {
|
||||
/* validation logic */
|
||||
}
|
||||
}
|
||||
|
||||
// Good: Single responsibility
|
||||
class User {
|
||||
validate() {
|
||||
/* validation only */
|
||||
}
|
||||
}
|
||||
|
||||
class UserRepository {
|
||||
save(user: User) {
|
||||
/* database logic */
|
||||
}
|
||||
}
|
||||
|
||||
class EmailService {
|
||||
sendToUser(user: User) {
|
||||
/* email logic */
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 7. Meaningful Names
|
||||
|
||||
- Use descriptive names that reveal intent
|
||||
- Avoid abbreviations unless they're widely understood
|
||||
- Use verbs for functions, nouns for classes
|
||||
- Be consistent with naming conventions
|
||||
|
||||
**Example**:
|
||||
|
||||
```typescript
|
||||
// Bad
|
||||
const d = new Date();
|
||||
const u = getUser();
|
||||
function calc(x, y) {}
|
||||
|
||||
// Good
|
||||
const currentDate = new Date();
|
||||
const currentUser = getUser();
|
||||
function calculateTotal(price: number, quantity: number): number {}
|
||||
```
|
||||
|
||||
### 8. Small Functions
|
||||
|
||||
- Functions should do one thing and do it well
|
||||
- Keep functions short (ideally under 20 lines)
|
||||
- Extract complex logic into separate functions (see the sketch after this list)
|
||||
- Use descriptive function names instead of comments
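**Example (sketch)**: a minimal illustration of extracting steps into small, named helpers. The names below are illustrative, not taken from any particular codebase.

```typescript
// One long function split into small, focused helpers.
interface OrderLine {
  sku: string;
  quantity: number;
  unitPrice: number;
}

function parseOrderLines(raw: string): OrderLine[] {
  return raw
    .split("\n")
    .filter((line) => line.trim().length > 0)
    .map((line) => {
      const [sku, quantity, unitPrice] = line.split(",");
      return { sku, quantity: Number(quantity), unitPrice: Number(unitPrice) };
    });
}

function assertValidLines(lines: OrderLine[]): void {
  const invalid = lines.find(
    (l) => !(l.quantity > 0) || Number.isNaN(l.unitPrice)
  );
  if (invalid) {
    throw new Error(`Invalid order line for SKU ${invalid.sku}`);
  }
}

function calculateOrderTotal(lines: OrderLine[]): number {
  return lines.reduce((sum, l) => sum + l.quantity * l.unitPrice, 0);
}

// The top-level function now reads like a description of the workflow.
function processOrder(raw: string): number {
  const lines = parseOrderLines(raw);
  assertValidLines(lines);
  return calculateOrderTotal(lines);
}
```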
|
||||
|
||||
### 9. Error Handling
|
||||
|
||||
- Handle errors explicitly
|
||||
- Use appropriate error types
|
||||
- Provide meaningful error messages
|
||||
- Don't swallow errors silently
|
||||
- Use try-catch appropriately
|
||||
|
||||
**Example**:
|
||||
|
||||
```typescript
|
||||
// Bad
|
||||
function divide(a: number, b: number) {
|
||||
return a / b; // Can throw division by zero
|
||||
}
|
||||
|
||||
// Good
|
||||
function divide(a: number, b: number): number {
|
||||
if (b === 0) {
|
||||
throw new Error("Division by zero is not allowed");
|
||||
}
|
||||
return a / b;
|
||||
}
|
||||
```
|
||||
|
||||
### 10. Comments and Documentation
|
||||
|
||||
- Write self-documenting code (code should explain itself)
|
||||
- Use comments to explain "why", not "what"
|
||||
- Document complex algorithms or business logic
|
||||
- Keep comments up-to-date with code changes
|
||||
- Use JSDoc/TSDoc for public APIs
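**Example (sketch)**: a short TSDoc comment on a public function, reusing `calculateTotal` from the reusability section above.

```typescript
/**
 * Calculates the total price of a list of items.
 *
 * @param items - Items that expose a numeric `price` field.
 * @returns The sum of all item prices, 0 for an empty list.
 */
export function calculateTotal<T extends { price: number }>(items: T[]): number {
  return items.reduce((sum, item) => sum + item.price, 0);
}
```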
|
||||
|
||||
### 11. Type Safety
|
||||
|
||||
- Use TypeScript types/interfaces effectively
|
||||
- Avoid `any` type unless absolutely necessary
|
||||
- Use union types and discriminated unions
|
||||
- Leverage type inference where appropriate
|
||||
- Create custom types for domain concepts
|
||||
|
||||
**Example**:
|
||||
|
||||
```typescript
|
||||
// Bad
|
||||
function processUser(data: any) {
|
||||
return data.name;
|
||||
}
|
||||
|
||||
// Good
|
||||
interface User {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
}
|
||||
|
||||
function processUser(user: User): string {
|
||||
return user.name;
|
||||
}
|
||||
```
|
||||
|
||||
### 12. Testing Considerations
|
||||
|
||||
- Write testable code (pure functions, dependency injection) - see the sketch after this list
|
||||
- Keep functions small and focused
|
||||
- Avoid hidden dependencies
|
||||
- Use mocks and stubs appropriately
|
||||
- Design for testability from the start
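**Example (sketch)**: injecting a dependency (here, a clock) so behavior is deterministic in tests. Names are illustrative.

```typescript
// A hidden dependency on Date.now() makes expiry logic hard to test.
// Injecting the clock makes the behavior deterministic.
type Clock = () => number;

function isExpired(expiresAt: number, now: Clock = Date.now): boolean {
  return now() > expiresAt;
}

// In a test, pass a fixed clock instead of relying on real time:
const fixedClock: Clock = () => 1_000_000;
console.assert(isExpired(999_999, fixedClock) === true);
console.assert(isExpired(1_000_001, fixedClock) === false);
```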
|
||||
|
||||
### 13. Performance vs. Readability
|
||||
|
||||
- Prefer readability over premature optimization
|
||||
- Profile before optimizing
|
||||
- Use clear algorithms first, optimize if needed
|
||||
- Document performance-critical sections
|
||||
- Balance between clean code and performance requirements
|
||||
|
||||
### 14. Code Organization
|
||||
|
||||
- Group related functionality together
|
||||
- Use modules/packages to organize code
|
||||
- Follow consistent file and folder structures
|
||||
- Separate concerns (UI, business logic, data access)
|
||||
- Use barrel exports (index files) appropriately
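**Example (sketch)**: a barrel file re-exporting a module's public surface, using the payment types from the abstraction section above. The file paths are hypothetical.

```typescript
// src/payments/index.ts - the only file other modules import from.
export { StripeProcessor } from "./stripe-processor";
export { processPayment } from "./process-payment";
export type { PaymentProcessor, PaymentDetails, PaymentResult } from "./types";

// Consumers depend on the package entry point, not its internal layout:
// import { processPayment } from "./payments";
```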
|
||||
|
||||
### 15. Configuration Management
|
||||
|
||||
- Externalize configuration values
|
||||
- Use environment variables for environment-specific settings
|
||||
- Create configuration objects/interfaces
|
||||
- Validate configuration at startup
|
||||
- Provide sensible defaults
|
||||
|
||||
**Example**:
|
||||
|
||||
```typescript
|
||||
// Bad
|
||||
const apiUrl = "https://api.example.com";
|
||||
const timeout = 5000;
|
||||
|
||||
// Good
|
||||
interface Config {
|
||||
apiUrl: string;
|
||||
timeout: number;
|
||||
maxRetries: number;
|
||||
}
|
||||
|
||||
const config: Config = {
|
||||
apiUrl: process.env.API_URL || "https://api.example.com",
|
||||
timeout: parseInt(process.env.TIMEOUT || "5000"),
|
||||
maxRetries: parseInt(process.env.MAX_RETRIES || "3"),
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Code Review Checklist
|
||||
|
||||
When reviewing code, check for:
|
||||
|
||||
- [ ] No code duplication (DRY principle)
|
||||
- [ ] Meaningful variable and function names
|
||||
- [ ] No magic numbers or strings
|
||||
- [ ] Functions are small and focused
|
||||
- [ ] Proper error handling
|
||||
- [ ] Type safety maintained
|
||||
- [ ] Code is testable
|
||||
- [ ] Documentation where needed
|
||||
- [ ] Consistent code style
|
||||
- [ ] Proper abstraction levels
|
||||
- [ ] Extensibility considered
|
||||
- [ ] Single responsibility principle followed
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
Clean code is:
|
||||
|
||||
- **Readable**: Easy to understand at a glance
|
||||
- **Maintainable**: Easy to modify and update
|
||||
- **Testable**: Easy to write tests for
|
||||
- **Extensible**: Easy to add new features
|
||||
- **Reusable**: Can be used in multiple contexts
|
||||
- **Well-documented**: Clear intent and purpose
|
||||
- **Type-safe**: Leverages type system effectively
|
||||
- **DRY**: No unnecessary repetition
|
||||
- **Abstracted**: Proper separation of concerns
|
||||
- **Configurable**: Uses constants and configuration over hardcoding
|
||||
|
||||
Remember: Code is read far more often than it is written. Write code for your future self and your teammates.
|
||||
@@ -1,70 +0,0 @@
You are a very strong reasoner and planner. Use these critical instructions to structure your plans, thoughts, and responses.

Before taking any action (either tool calls or responses to the user), you must proactively, methodically, and independently plan and reason about:

1. Logical dependencies and constraints:

Analyze the intended action against the following factors. Resolve conflicts in order of importance:

1.1) Policy-based rules, mandatory prerequisites, and constraints.
1.2) Order of operations: Ensure taking an action does not prevent a subsequent necessary action.
1.2.1) The user may request actions in a random order, but you may need to reorder operations to maximize successful completion of the task.
1.3) Other prerequisites (information and/or actions needed).
1.4) Explicit user constraints or preferences.

2. Risk assessment:

What are the consequences of taking the action? Will the new state cause any future issues?

2.1) For exploratory tasks (like searches), missing optional parameters is a LOW risk.
Prefer calling the tool with the available information over asking the user, unless your Rule 1 (Logical Dependencies) reasoning determines that optional information is required for a later step in your plan.

3. Abductive reasoning and hypothesis exploration:

At each step, identify the most logical and likely reason for any problem encountered.

3.1) Look beyond immediate or obvious causes. The most likely reason may not be the simplest and may require deeper inference.
3.2) Hypotheses may require additional research. Each hypothesis may take multiple steps to test.
3.3) Prioritize hypotheses based on likelihood, but do not discard less likely ones prematurely. A low-probability event may still be the root cause.

4. Outcome evaluation and adaptability:

Does the previous observation require any changes to your plan?

4.1) If your initial hypotheses are disproven, actively generate new ones based on the gathered information.

5. Information availability:

Incorporate all applicable and alternative sources of information, including:

5.1) Using available tools and their capabilities
5.2) All policies, rules, checklists, and constraints
5.3) Previous observations and conversation history
5.4) Information only available by asking the user

6. Precision and Grounding:

Ensure your reasoning is extremely precise and relevant to each exact ongoing situation.

6.1) Verify your claims by quoting the exact applicable information (including policies) when referring to them.

7. Completeness:

Ensure that all requirements, constraints, options, and preferences are exhaustively incorporated into your plan.

7.1) Resolve conflicts using the order of importance in #1.
7.2) Avoid premature conclusions: There may be multiple relevant options for a given situation.
7.2.1) To check for whether an option is relevant, reason about all information sources from #5.
7.2.2) You may need to consult the user to even know whether something is applicable. Do not assume it is not applicable without checking.
7.3) Review applicable sources of information from #5 to confirm which are relevant to the current state.

8. Persistence and patience:

Do not give up unless all the reasoning above is exhausted.

8.1) Don't be dissuaded by time taken or user frustration.
8.2) This persistence must be intelligent: On transient errors (e.g. please try again), you must retry unless an explicit retry limit (e.g., max x tries) has been reached. If such a limit is hit, you must stop. On other errors, you must change your strategy or arguments, not repeat the same failed call.

9. Inhibit your response:

Only take an action after all the above reasoning is completed. Once you've taken an action, you cannot take it back.
@@ -1,53 +0,0 @@
|
||||
📋 Planning implementation for: move the report bug request button to top in header next to the automaker logo
|
||||
⚡ Executing implementation for: move the report bug request button to top in header next to the automaker logo
|
||||
Starting code implementation...
|
||||
|
||||
🔧 Tool: TodoWrite
|
||||
Let me start by exploring the header component and understanding the current structure.
|
||||
🔧 Tool: Glob
|
||||
|
||||
🔧 Tool: Grep
|
||||
|
||||
🔧 Tool: Read
|
||||
|
||||
🔧 Tool: Glob
|
||||
|
||||
🔧 Tool: TodoWrite
|
||||
I found the sidebar component. The "Report Bug / Feature Request" button is currently in the bottom section of the sidebar (lines 1150-1180). The feature request is to move this button to the top header, next to the Automaker logo.
|
||||
|
||||
Looking at the current structure:
|
||||
1. The logo section is at lines 742-770
|
||||
2. The bug report button is in the bottom section at lines 1150-1180
|
||||
|
||||
I'll move the bug report button to be next to the logo in the header area. Let me make this change:
|
||||
🔧 Tool: Edit
|
||||
Now I need to remove the old bug report button from the bottom section:
|
||||
🔧 Tool: Edit
|
||||
The change is complete. Let me verify the build compiles correctly:
|
||||
🔧 Tool: Bash
|
||||
|
||||
🔧 Tool: Bash
|
||||
|
||||
🔧 Tool: Bash
|
||||
|
||||
🔧 Tool: Bash
|
||||
The TypeScript check passed with no errors. Let me mark the task as completed and update the feature status:
|
||||
🔧 Tool: TodoWrite
|
||||
|
||||
🔧 Tool: mcp__automaker-tools__UpdateFeatureStatus
|
||||
|
||||
🔧 Tool: TodoWrite
|
||||
I've completed the feature implementation. Here's a summary of what was done:
|
||||
|
||||
**Changes Made:**
|
||||
|
||||
1. **Modified `app/src/components/layout/sidebar.tsx`:**
|
||||
- Moved the "Report Bug / Feature Request" button from the bottom sidebar section (between Running Agents and Settings) to the header area, next to the AutoMaker logo
|
||||
- Changed the header container from `justify-center` to `justify-between` to accommodate the new button placement
|
||||
- The button is now a compact icon-only button with a tooltip on hover
|
||||
- Kept the same test ID (`bug-report-link`) for consistency
|
||||
- Removed the old button from the bottom section
|
||||
|
||||
The bug report button now appears as a small bug icon in the header, right next to the AutoMaker logo, making it more accessible and visible to users.

✅ Verifying implementation for: move the report bug request button to top in header next to the automaker logo
|
||||
Verifying implementation and checking test results...
|
||||
✓ Verification successful: All tests passed
|
||||
@@ -1,14 +0,0 @@
{
  "category": "Uncategorized",
  "description": "move the report bug request button to top in header next to the automaker logo",
  "steps": [],
  "status": "waiting_approval",
  "images": [],
  "imagePaths": [],
  "skipTests": true,
  "model": "opus",
  "thinkingLevel": "none",
  "id": "feature-1765426941179-om12nlapa",
  "startedAt": "2025-12-11T04:22:21.750Z",
  "summary": "Moved the Report Bug / Feature Request button from the bottom sidebar section to the header, next to the AutoMaker logo. Modified: app/src/components/layout/sidebar.tsx. The button now appears as a compact icon button in the header area."
}
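From the fields visible in this file, a rough TypeScript shape for a feature record can be sketched. This is inferred from the example above and from status names used elsewhere in this repository, not taken from the project's actual type definitions; optionality is assumed.

```typescript
// Sketch of the feature.json shape stored under .automaker/features/{id}/.
interface FeatureRecord {
  id: string;
  category: string;
  description: string;
  steps: string[];
  status: "backlog" | "in_progress" | "waiting_approval" | "verified";
  images: string[];
  imagePaths: string[];
  skipTests: boolean;
  model: string; // e.g. "opus"
  thinkingLevel: string; // e.g. "none"
  startedAt?: string; // ISO timestamp
  summary?: string;
}

import { readFile } from "fs/promises";

// Example: reading and narrowing a feature file in Node.js.
async function loadFeature(path: string): Promise<FeatureRecord> {
  const raw = await readFile(path, "utf8");
  return JSON.parse(raw) as FeatureRecord;
}
```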
@@ -1,11 +0,0 @@
{
  "timestamp": "2025-12-11T04:22:21.809Z",
  "modifiedFiles": [
    "app/src/components/views/analysis-view.tsx",
    "app/src/components/views/interview-view.tsx"
  ],
  "untrackedFiles": [
    ".automaker/features/feature-1765426941179-om12nlapa/feature.json",
    "marketing/index.html"
  ]
}
@@ -1,172 +0,0 @@
|
||||
# Agent Memory - Lessons Learned
|
||||
|
||||
This file documents issues encountered by previous agents and their solutions. Read this before starting work to avoid repeating mistakes.
|
||||
|
||||
## Testing Issues
|
||||
|
||||
### Issue: Mock project setup not navigating to board view
|
||||
|
||||
**Problem:** Setting `currentProject` in localStorage didn't automatically show the board view - app stayed on welcome view.
|
||||
**Fix:** The `currentView` state is not persisted in localStorage. Instead of trying to set it, have tests click on the recent project from the welcome view to trigger `setCurrentProject()` which handles the view transition properly.
|
||||
|
||||
```typescript
|
||||
// Don't do this:
|
||||
await setupMockProject(page); // Sets localStorage
|
||||
await page.goto("/");
|
||||
await waitForElement(page, "board-view"); // ❌ Fails - still on welcome view
|
||||
|
||||
// Do this instead:
|
||||
await setupMockProject(page);
|
||||
await page.goto("/");
|
||||
await waitForElement(page, "welcome-view");
|
||||
const recentProject = page.locator(
|
||||
'[data-testid="recent-project-test-project-1"]'
|
||||
);
|
||||
await recentProject.click(); // ✅ Triggers proper view transition
|
||||
await waitForElement(page, "board-view");
|
||||
```
|
||||
|
||||
### Issue: View output button test IDs are conditional
|
||||
|
||||
**Problem:** Tests failed looking for `view-output-inprogress-${featureId}` when the actual button had `view-output-${featureId}`.
|
||||
**Fix:** The button test ID depends on whether the feature is actively running:
|
||||
|
||||
- `view-output-${featureId}` - shown when feature is in `runningAutoTasks` (actively running)
|
||||
- `view-output-inprogress-${featureId}` - shown when status is "in_progress" but NOT actively running
|
||||
|
||||
After dragging a feature to in_progress, wait for the `auto_mode_feature_start` event to fire before looking for the button:
|
||||
|
||||
```typescript
|
||||
// Wait for feature to start running
|
||||
const viewOutputButton = page
|
||||
.locator(
|
||||
`[data-testid="view-output-${featureId}"], [data-testid="view-output-inprogress-${featureId}"]`
|
||||
)
|
||||
.first();
|
||||
await expect(viewOutputButton).toBeVisible({ timeout: 8000 });
|
||||
```
|
||||
|
||||
### Issue: Elements not appearing due to async event timing
|
||||
|
||||
**Problem:** Tests checked for UI elements before async events (like `auto_mode_feature_start`) had fired and updated the UI.
|
||||
**Fix:** Add appropriate timeouts when waiting for elements that depend on async events. The mock auto mode takes ~2.4 seconds to complete, so allow sufficient time:
|
||||
|
||||
```typescript
|
||||
// Mock auto mode timing: ~2.4s + 1.5s delay = ~4s total
|
||||
await waitForAgentOutputModalHidden(page, { timeout: 10000 });
|
||||
```
|
||||
|
||||
### Issue: Slider interaction testing
|
||||
|
||||
**Problem:** Clicking on slider track didn't reliably set specific values.
|
||||
**Fix:** Use the slider's keyboard interaction or calculate the exact click position on the track. For max value, click on the rightmost edge of the track.
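A hedged Playwright sketch of both approaches; the `data-testid` used here is hypothetical:

```typescript
import { test, expect } from "@playwright/test";

test("set slider to max", async ({ page }) => {
  const slider = page.locator('[data-testid="concurrency-slider"]'); // hypothetical test ID

  // Option 1: keyboard interaction - focus the slider and jump to the max value.
  await slider.focus();
  await slider.press("End");

  // Option 2: click at the rightmost edge of the track.
  const box = await slider.boundingBox();
  if (box) {
    await page.mouse.click(box.x + box.width - 1, box.y + box.height / 2);
  }

  await expect(slider).toHaveAttribute("aria-valuenow", /\d+/);
});
```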
|
||||
|
||||
### Issue: Port binding blocked in sandbox mode
|
||||
|
||||
**Problem:** Playwright tests couldn't bind to port in sandbox mode.
|
||||
**Fix:** Tests don't need sandbox disabled - the issue was TEST_REUSE_SERVER environment variable. Make sure to start the dev server separately or let Playwright's webServer config handle it.
|
||||
|
||||
## Code Architecture
|
||||
|
||||
### Issue: Understanding store state persistence
|
||||
|
||||
**Problem:** Not all store state is persisted to localStorage.
|
||||
**Fix:** Check the `partialize` function in `app-store.ts` to see which state is persisted:
|
||||
|
||||
```typescript
|
||||
partialize: (state) => ({
|
||||
projects: state.projects,
|
||||
currentProject: state.currentProject,
|
||||
theme: state.theme,
|
||||
sidebarOpen: state.sidebarOpen,
|
||||
apiKeys: state.apiKeys,
|
||||
chatSessions: state.chatSessions,
|
||||
chatHistoryOpen: state.chatHistoryOpen,
|
||||
maxConcurrency: state.maxConcurrency, // Added for concurrency feature
|
||||
});
|
||||
```
|
||||
|
||||
Note: `currentView` is NOT persisted - it's managed through actions.
|
||||
|
||||
### Issue: Auto mode task lifecycle
|
||||
|
||||
**Problem:** Confusion about when features are considered "running" vs "in_progress".
|
||||
**Fix:** Understand the task lifecycle (a code sketch follows the list):
|
||||
|
||||
1. Feature dragged to "in_progress" column → status becomes "in_progress"
|
||||
2. `auto_mode_feature_start` event fires → feature added to `runningAutoTasks`
|
||||
3. Agent works on feature → periodic events sent
|
||||
4. `auto_mode_feature_complete` event fires → feature removed from `runningAutoTasks`
|
||||
5. If `passes: true` → status becomes "verified", if `passes: false` → stays "in_progress"
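A compact sketch of that lifecycle as a pure function, using the event and status names from the list above; the real store logic differs:

```typescript
type FeatureStatus = "backlog" | "in_progress" | "waiting_approval" | "verified";

interface LifecycleState {
  status: FeatureStatus;
  running: boolean; // i.e. present in runningAutoTasks
}

type LifecycleEvent =
  | { type: "dragged_to_in_progress" }
  | { type: "auto_mode_feature_start" }
  | { type: "auto_mode_feature_complete"; passes: boolean };

function reduceLifecycle(state: LifecycleState, event: LifecycleEvent): LifecycleState {
  switch (event.type) {
    case "dragged_to_in_progress":
      return { ...state, status: "in_progress" };
    case "auto_mode_feature_start":
      return { ...state, running: true };
    case "auto_mode_feature_complete":
      return { status: event.passes ? "verified" : "in_progress", running: false };
  }
}
```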
|
||||
|
||||
### Issue: waiting_approval features not draggable when skipTests=true
|
||||
|
||||
**Problem:** Features in `waiting_approval` status couldn't be dragged to `verified` column, even though the code appeared to handle it.
|
||||
**Fix:** The order of condition checks in `handleDragEnd` matters. The `skipTests` check was catching `waiting_approval` features before the `waiting_approval` status check could handle them. Move the `waiting_approval` status check **before** the `skipTests` check in `board-view.tsx`:
|
||||
|
||||
```typescript
|
||||
// Correct order in handleDragEnd:
|
||||
if (draggedFeature.status === "backlog") {
|
||||
// ...
|
||||
} else if (draggedFeature.status === "waiting_approval") {
|
||||
// Handle waiting_approval BEFORE skipTests check
|
||||
// because waiting_approval features often have skipTests=true
|
||||
} else if (draggedFeature.skipTests) {
|
||||
// Handle other skipTests features
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices Discovered
|
||||
|
||||
### Testing utilities are critical
|
||||
|
||||
Create comprehensive testing utilities in `tests/utils.ts` to avoid repeating selector logic (a sketch of one helper follows this list):
|
||||
|
||||
- `waitForElement` - waits for elements to appear
|
||||
- `waitForElementHidden` - waits for elements to disappear
|
||||
- `setupMockProject` - sets up mock localStorage state
|
||||
- `navigateToBoard` - handles navigation from welcome to board view
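For example, `waitForElement` can be a thin wrapper over Playwright's own waiting. The signatures below are a guess at the helper shape, not the repository's actual `tests/utils.ts`:

```typescript
import { Page, Locator, expect } from "@playwright/test";

// Assumed shape: resolve an element by data-testid and wait for visibility.
export async function waitForElement(
  page: Page,
  testId: string,
  timeout = 10_000
): Promise<Locator> {
  const locator = page.locator(`[data-testid="${testId}"]`);
  await expect(locator).toBeVisible({ timeout });
  return locator;
}

export async function waitForElementHidden(
  page: Page,
  testId: string,
  timeout = 10_000
): Promise<void> {
  await expect(page.locator(`[data-testid="${testId}"]`)).toBeHidden({ timeout });
}
```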
|
||||
|
||||
### Always add data-testid attributes
|
||||
|
||||
When implementing features, immediately add `data-testid` attributes to key UI elements. This makes tests more reliable and easier to write.
|
||||
|
||||
### Test timeouts should be generous but not excessive
|
||||
|
||||
- Default timeout: 30s (set in playwright.config.ts)
|
||||
- Element waits: 5-15s for critical elements
|
||||
- Auto mode completion: 10s (accounts for ~4s mock duration)
|
||||
- Don't increase timeouts past 10s for individual operations
|
||||
|
||||
### Mock auto mode timing
|
||||
|
||||
The mock auto mode in `electron.ts` has predictable timing:
|
||||
|
||||
- Total duration: ~2.4 seconds (300+500+300+300+500+500ms)
|
||||
- Plus 1.5s delay before auto-closing modals
|
||||
- Total: ~4 seconds from start to completion
|
||||
|
||||
### Issue: HotkeyButton conflicting with useKeyboardShortcuts
|
||||
|
||||
**Problem:** Adding `HotkeyButton` with a simple key (like "N") to buttons that already had keyboard shortcuts registered via `useKeyboardShortcuts` caused the hotkey to stop working. Both registered duplicate listeners, and the HotkeyButton's `stopPropagation()` call could interfere.
|
||||
**Fix:** When a simple single-key hotkey is already handled by `useKeyboardShortcuts`, set `hotkeyActive={false}` on the `HotkeyButton` so it only displays the indicator badge without registering a duplicate listener:
|
||||
|
||||
```tsx
|
||||
// In views that already use useKeyboardShortcuts for the "N" key:
|
||||
<HotkeyButton
|
||||
onClick={() => setShowAddDialog(true)}
|
||||
hotkey={shortcuts.addFeature}
|
||||
hotkeyActive={false} // <-- Important! Prevents duplicate listener
|
||||
>
|
||||
Add Feature
|
||||
</HotkeyButton>
|
||||
|
||||
// HotkeyButton should only actively listen when it's the sole handler (e.g., Cmd+Enter in dialogs)
|
||||
<HotkeyButton
|
||||
onClick={handleSubmit}
|
||||
hotkey={{ key: "Enter", cmdCtrl: true }}
|
||||
hotkeyActive={isDialogOpen} // Active when dialog is open
|
||||
>
|
||||
Submit
|
||||
</HotkeyButton>
|
||||
```
|
||||
.github/scripts/upload-to-r2.js (new file, 224 lines, vendored)
@@ -0,0 +1,224 @@
|
||||
const {
|
||||
S3Client,
|
||||
PutObjectCommand,
|
||||
GetObjectCommand,
|
||||
} = require("@aws-sdk/client-s3");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const https = require("https");
|
||||
const { pipeline } = require("stream/promises");
|
||||
|
||||
const s3Client = new S3Client({
|
||||
region: "auto",
|
||||
endpoint: process.env.R2_ENDPOINT,
|
||||
credentials: {
|
||||
accessKeyId: process.env.R2_ACCESS_KEY_ID,
|
||||
secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
|
||||
},
|
||||
});
|
||||
|
||||
const BUCKET = process.env.R2_BUCKET_NAME;
|
||||
const PUBLIC_URL = process.env.R2_PUBLIC_URL;
|
||||
const VERSION = process.env.RELEASE_VERSION;
|
||||
const RELEASE_TAG = process.env.RELEASE_TAG || `v${VERSION}`;
|
||||
const GITHUB_REPO = process.env.GITHUB_REPOSITORY;
|
||||
|
||||
async function fetchExistingReleases() {
|
||||
try {
|
||||
const response = await s3Client.send(
|
||||
new GetObjectCommand({
|
||||
Bucket: BUCKET,
|
||||
Key: "releases.json",
|
||||
})
|
||||
);
|
||||
const body = await response.Body.transformToString();
|
||||
return JSON.parse(body);
|
||||
} catch (error) {
|
||||
if (error.name === "NoSuchKey" || error.$metadata?.httpStatusCode === 404) {
|
||||
console.log("No existing releases.json found, creating new one");
|
||||
return { latestVersion: null, releases: [] };
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function uploadFile(localPath, r2Key, contentType) {
|
||||
const fileBuffer = fs.readFileSync(localPath);
|
||||
const stats = fs.statSync(localPath);
|
||||
|
||||
await s3Client.send(
|
||||
new PutObjectCommand({
|
||||
Bucket: BUCKET,
|
||||
Key: r2Key,
|
||||
Body: fileBuffer,
|
||||
ContentType: contentType,
|
||||
})
|
||||
);
|
||||
|
||||
console.log(`Uploaded: ${r2Key} (${stats.size} bytes)`);
|
||||
return stats.size;
|
||||
}
|
||||
|
||||
function findArtifacts(dir, pattern) {
|
||||
if (!fs.existsSync(dir)) return [];
|
||||
const files = fs.readdirSync(dir);
|
||||
return files.filter((f) => pattern.test(f)).map((f) => path.join(dir, f));
|
||||
}
|
||||
|
||||
async function downloadFromGitHub(url, outputPath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
https
|
||||
.get(url, (response) => {
|
||||
if (response.statusCode === 302 || response.statusCode === 301) {
|
||||
// Follow redirect
|
||||
return downloadFromGitHub(response.headers.location, outputPath)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
}
|
||||
if (response.statusCode !== 200) {
|
||||
reject(
|
||||
new Error(
|
||||
`Failed to download ${url}: ${response.statusCode} ${response.statusMessage}`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
const fileStream = fs.createWriteStream(outputPath);
|
||||
response.pipe(fileStream);
|
||||
fileStream.on("finish", () => {
|
||||
fileStream.close();
|
||||
resolve();
|
||||
});
|
||||
fileStream.on("error", reject);
|
||||
})
|
||||
.on("error", reject);
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const artifactsDir = "artifacts";
|
||||
const tempDir = path.join(artifactsDir, "temp");
|
||||
|
||||
// Create temp directory for downloaded GitHub archives
|
||||
if (!fs.existsSync(tempDir)) {
|
||||
fs.mkdirSync(tempDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Download source archives from GitHub
|
||||
const githubZipUrl = `https://github.com/${GITHUB_REPO}/archive/refs/tags/${RELEASE_TAG}.zip`;
|
||||
const githubTarGzUrl = `https://github.com/${GITHUB_REPO}/archive/refs/tags/${RELEASE_TAG}.tar.gz`;
|
||||
|
||||
const sourceZipPath = path.join(tempDir, `automaker-${VERSION}.zip`);
|
||||
const sourceTarGzPath = path.join(tempDir, `automaker-${VERSION}.tar.gz`);
|
||||
|
||||
console.log(`Downloading source archives from GitHub...`);
|
||||
console.log(` ZIP: ${githubZipUrl}`);
|
||||
console.log(` TAR.GZ: ${githubTarGzUrl}`);
|
||||
|
||||
await downloadFromGitHub(githubZipUrl, sourceZipPath);
|
||||
await downloadFromGitHub(githubTarGzUrl, sourceTarGzPath);
|
||||
|
||||
console.log(`Downloaded source archives successfully`);
|
||||
|
||||
// Find all artifacts
|
||||
const artifacts = {
|
||||
windows: findArtifacts(path.join(artifactsDir, "windows-builds"), /\.exe$/),
|
||||
macos: findArtifacts(path.join(artifactsDir, "macos-builds"), /-x64\.dmg$/),
|
||||
macosArm: findArtifacts(
|
||||
path.join(artifactsDir, "macos-builds"),
|
||||
/-arm64\.dmg$/
|
||||
),
|
||||
linux: findArtifacts(
|
||||
path.join(artifactsDir, "linux-builds"),
|
||||
/\.AppImage$/
|
||||
),
|
||||
sourceZip: [sourceZipPath],
|
||||
sourceTarGz: [sourceTarGzPath],
|
||||
};
|
||||
|
||||
console.log("Found artifacts:");
|
||||
for (const [platform, files] of Object.entries(artifacts)) {
|
||||
console.log(
|
||||
` ${platform}: ${
|
||||
files.length > 0
|
||||
? files.map((f) => path.basename(f)).join(", ")
|
||||
: "none"
|
||||
}`
|
||||
);
|
||||
}
|
||||
|
||||
// Upload each artifact to R2
|
||||
const assets = {};
|
||||
const contentTypes = {
|
||||
windows: "application/x-msdownload",
|
||||
macos: "application/x-apple-diskimage",
|
||||
macosArm: "application/x-apple-diskimage",
|
||||
linux: "application/x-executable",
|
||||
sourceZip: "application/zip",
|
||||
sourceTarGz: "application/gzip",
|
||||
};
|
||||
|
||||
for (const [platform, files] of Object.entries(artifacts)) {
|
||||
if (files.length === 0) {
|
||||
console.warn(`Warning: No artifact found for ${platform}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Use the first matching file for each platform
|
||||
const localPath = files[0];
|
||||
const filename = path.basename(localPath);
|
||||
const r2Key = `releases/${VERSION}/${filename}`;
|
||||
const size = await uploadFile(localPath, r2Key, contentTypes[platform]);
|
||||
|
||||
assets[platform] = {
|
||||
url: `${PUBLIC_URL}/releases/${VERSION}/${filename}`,
|
||||
filename,
|
||||
size,
|
||||
arch:
|
||||
platform === "macosArm"
|
||||
? "arm64"
|
||||
: platform === "sourceZip" || platform === "sourceTarGz"
|
||||
? "source"
|
||||
: "x64",
|
||||
};
|
||||
}
|
||||
|
||||
// Fetch and update releases.json
|
||||
const releasesData = await fetchExistingReleases();
|
||||
|
||||
const newRelease = {
|
||||
version: VERSION,
|
||||
date: new Date().toISOString(),
|
||||
assets,
|
||||
githubReleaseUrl: `https://github.com/${GITHUB_REPO}/releases/tag/${RELEASE_TAG}`,
|
||||
};
|
||||
|
||||
// Remove existing entry for this version if re-running
|
||||
releasesData.releases = releasesData.releases.filter(
|
||||
(r) => r.version !== VERSION
|
||||
);
|
||||
|
||||
// Prepend new release
|
||||
releasesData.releases.unshift(newRelease);
|
||||
releasesData.latestVersion = VERSION;
|
||||
|
||||
// Upload updated releases.json
|
||||
await s3Client.send(
|
||||
new PutObjectCommand({
|
||||
Bucket: BUCKET,
|
||||
Key: "releases.json",
|
||||
Body: JSON.stringify(releasesData, null, 2),
|
||||
ContentType: "application/json",
|
||||
CacheControl: "public, max-age=60",
|
||||
})
|
||||
);
|
||||
|
||||
console.log("Successfully updated releases.json");
|
||||
console.log(`Latest version: ${VERSION}`);
|
||||
console.log(`Total releases: ${releasesData.releases.length}`);
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error("Failed to upload to R2:", err);
|
||||
process.exit(1);
|
||||
});
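For consumers of the manifest this script maintains, the shape of `releases.json` can be read off the code above. The following types are inferred from the script, not taken from the repository:

```typescript
// Shape of releases.json as written by upload-to-r2.js (inferred from the script).
interface ReleaseAsset {
  url: string; // `${R2_PUBLIC_URL}/releases/${version}/${filename}`
  filename: string;
  size: number; // bytes
  arch: "x64" | "arm64" | "source";
}

interface Release {
  version: string;
  date: string; // ISO timestamp
  assets: Partial<
    Record<
      "windows" | "macos" | "macosArm" | "linux" | "sourceZip" | "sourceTarGz",
      ReleaseAsset
    >
  >;
  githubReleaseUrl: string;
}

interface ReleasesManifest {
  latestVersion: string | null;
  releases: Release[]; // newest first; the script unshifts each new release
}
```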
|
||||
.github/workflows/release.yml (84 lines changed, vendored)
@@ -19,10 +19,13 @@ jobs:
|
||||
include:
|
||||
- os: macos-latest
|
||||
name: macOS
|
||||
artifact-name: macos-builds
|
||||
- os: windows-latest
|
||||
name: Windows
|
||||
artifact-name: windows-builds
|
||||
- os: ubuntu-latest
|
||||
name: Linux
|
||||
artifact-name: linux-builds
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@@ -45,6 +48,20 @@ jobs:
|
||||
# optional dependencies (e.g., @tailwindcss/oxide, lightningcss binaries)
|
||||
run: npm install
|
||||
|
||||
- name: Extract and set version
|
||||
id: version
|
||||
run: |
|
||||
VERSION_TAG="${{ github.event.inputs.version || github.ref_name }}"
|
||||
# Remove 'v' prefix if present (e.g., v1.0.0 -> 1.0.0)
|
||||
VERSION="${VERSION_TAG#v}"
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "Extracted version: $VERSION from tag: $VERSION_TAG"
|
||||
# Update the app's package.json version
|
||||
cd apps/app
|
||||
npm version $VERSION --no-git-tag-version
|
||||
cd ../..
|
||||
echo "Updated apps/app/package.json to version $VERSION"
|
||||
|
||||
- name: Build Electron App (macOS)
|
||||
if: matrix.os == 'macos-latest'
|
||||
env:
|
||||
@@ -78,3 +95,70 @@ jobs:
|
||||
prerelease: false
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload macOS artifacts for R2
|
||||
if: matrix.os == 'macos-latest'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact-name }}
|
||||
path: apps/app/dist/*.dmg
|
||||
retention-days: 1
|
||||
|
||||
- name: Upload Windows artifacts for R2
|
||||
if: matrix.os == 'windows-latest'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact-name }}
|
||||
path: apps/app/dist/*.exe
|
||||
retention-days: 1
|
||||
|
||||
- name: Upload Linux artifacts for R2
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact-name }}
|
||||
path: apps/app/dist/*.AppImage
|
||||
retention-days: 1
|
||||
|
||||
upload-to-r2:
|
||||
needs: build-and-release
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Install AWS SDK
|
||||
run: npm install @aws-sdk/client-s3
|
||||
|
||||
- name: Extract version
|
||||
id: version
|
||||
run: |
|
||||
VERSION_TAG="${{ github.event.inputs.version || github.ref_name }}"
|
||||
# Remove 'v' prefix if present (e.g., v1.0.0 -> 1.0.0)
|
||||
VERSION="${VERSION_TAG#v}"
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "version_tag=$VERSION_TAG" >> $GITHUB_OUTPUT
|
||||
echo "Extracted version: $VERSION from tag: $VERSION_TAG"
|
||||
|
||||
- name: Upload to R2 and update releases.json
|
||||
env:
|
||||
R2_ENDPOINT: ${{ secrets.R2_ENDPOINT }}
|
||||
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
R2_BUCKET_NAME: ${{ secrets.R2_BUCKET_NAME }}
|
||||
R2_PUBLIC_URL: ${{ secrets.R2_PUBLIC_URL }}
|
||||
RELEASE_VERSION: ${{ steps.version.outputs.version }}
|
||||
RELEASE_TAG: ${{ steps.version.outputs.version_tag }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
run: node .github/scripts/upload-to-r2.js
|
||||
|
||||
.gitignore (4 lines changed, vendored)

@@ -7,3 +7,7 @@ node_modules/
# Build outputs
dist/
.next/
.automaker/images/
.automaker/
/.automaker/*
/.automaker/
README.md (77 lines changed)

@@ -45,16 +45,81 @@ export CLAUDE_CODE_OAUTH_TOKEN="sk-ant-oat01-..."
npm run dev:electron
```

## How to Run

### Development Modes

Automaker can be run in several modes:

#### Electron Desktop App (Recommended)

```bash
# Standard development mode
npm run dev:electron

# With DevTools open automatically
npm run dev:electron:debug

# For WSL (Windows Subsystem for Linux)
npm run dev:electron:wsl

# For WSL with GPU acceleration
npm run dev:electron:wsl:gpu
```

#### Web Browser Mode

```bash
# Run in web browser (http://localhost:3007)
npm run dev:web
# or
npm run dev
```

### Building for Production

```bash
# Build Next.js app
npm run build

# Build Electron app for distribution
npm run build:electron
```

### Running Production Build

```bash
# Start production Next.js server
npm run start
```

### Testing

```bash
# Run tests headless
npm run test

# Run tests with browser visible
npm run test:headed
```

### Linting

```bash
# Run ESLint
npm run lint
```

### Authentication Options

Automaker supports multiple authentication methods (in order of priority):

| Method               | Environment Variable      | Description                                               |
| -------------------- | ------------------------- | --------------------------------------------------------- |
| OAuth Token (env)    | `CLAUDE_CODE_OAUTH_TOKEN` | From `claude setup-token` - uses your Claude subscription |
| OAuth Token (stored) | —                         | Stored in app credentials file                            |
| API Key (stored)     | —                         | Anthropic API key stored in app                           |
| API Key (env)        | `ANTHROPIC_API_KEY`       | Pay-per-use API key                                       |

**Recommended:** Use `CLAUDE_CODE_OAUTH_TOKEN` if you have a Claude subscription.
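A sketch of how that priority order could be applied when resolving credentials. This is illustrative only, not Automaker's actual implementation; the `CredentialStore` shape is assumed:

```typescript
type ClaudeAuth =
  | { kind: "oauth"; token: string; source: "env" | "stored" }
  | { kind: "api-key"; key: string; source: "env" | "stored" };

// Hypothetical view of the app's credential store.
interface CredentialStore {
  oauthToken?: string;
  apiKey?: string;
}

// Resolution follows the table above: OAuth env, OAuth stored, API key stored, API key env.
function resolveClaudeAuth(store: CredentialStore, env = process.env): ClaudeAuth | undefined {
  if (env.CLAUDE_CODE_OAUTH_TOKEN) {
    return { kind: "oauth", token: env.CLAUDE_CODE_OAUTH_TOKEN, source: "env" };
  }
  if (store.oauthToken) {
    return { kind: "oauth", token: store.oauthToken, source: "stored" };
  }
  if (store.apiKey) {
    return { kind: "api-key", key: store.apiKey, source: "stored" };
  }
  if (env.ANTHROPIC_API_KEY) {
    return { kind: "api-key", key: env.ANTHROPIC_API_KEY, source: "env" };
  }
  return undefined;
}
```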

File diff suppressed because it is too large
@@ -1,404 +1,37 @@
|
||||
/**
|
||||
* Simplified Electron preload script
|
||||
*
|
||||
* Only exposes native features (dialogs, shell) and server URL.
|
||||
* All other operations go through HTTP API.
|
||||
*/
|
||||
|
||||
const { contextBridge, ipcRenderer } = require("electron");
|
||||
|
||||
// Expose protected methods that allow the renderer process to use
|
||||
// the ipcRenderer without exposing the entire object
|
||||
// Expose minimal API for native features
|
||||
contextBridge.exposeInMainWorld("electronAPI", {
|
||||
// IPC test
|
||||
// Platform info
|
||||
platform: process.platform,
|
||||
isElectron: true,
|
||||
|
||||
// Connection check
|
||||
ping: () => ipcRenderer.invoke("ping"),
|
||||
|
||||
// Shell APIs
|
||||
openExternalLink: (url) => ipcRenderer.invoke("shell:openExternal", url),
|
||||
// Get server URL for HTTP client
|
||||
getServerUrl: () => ipcRenderer.invoke("server:getUrl"),
|
||||
|
||||
// Dialog APIs
|
||||
// Native dialogs - better UX than prompt()
|
||||
openDirectory: () => ipcRenderer.invoke("dialog:openDirectory"),
|
||||
openFile: (options) => ipcRenderer.invoke("dialog:openFile", options),
|
||||
saveFile: (options) => ipcRenderer.invoke("dialog:saveFile", options),
|
||||
|
||||
// File system APIs
|
||||
readFile: (filePath) => ipcRenderer.invoke("fs:readFile", filePath),
|
||||
writeFile: (filePath, content) =>
|
||||
ipcRenderer.invoke("fs:writeFile", filePath, content),
|
||||
mkdir: (dirPath) => ipcRenderer.invoke("fs:mkdir", dirPath),
|
||||
readdir: (dirPath) => ipcRenderer.invoke("fs:readdir", dirPath),
|
||||
exists: (filePath) => ipcRenderer.invoke("fs:exists", filePath),
|
||||
stat: (filePath) => ipcRenderer.invoke("fs:stat", filePath),
|
||||
deleteFile: (filePath) => ipcRenderer.invoke("fs:deleteFile", filePath),
|
||||
trashItem: (filePath) => ipcRenderer.invoke("fs:trashItem", filePath),
|
||||
// Shell operations
|
||||
openExternalLink: (url) => ipcRenderer.invoke("shell:openExternal", url),
|
||||
openPath: (filePath) => ipcRenderer.invoke("shell:openPath", filePath),
|
||||
|
||||
// App APIs
|
||||
// App info
|
||||
getPath: (name) => ipcRenderer.invoke("app:getPath", name),
|
||||
saveImageToTemp: (data, filename, mimeType, projectPath) =>
|
||||
ipcRenderer.invoke("app:saveImageToTemp", {
|
||||
data,
|
||||
filename,
|
||||
mimeType,
|
||||
projectPath,
|
||||
}),
|
||||
|
||||
// Agent APIs
|
||||
agent: {
|
||||
// Start or resume a conversation
|
||||
start: (sessionId, workingDirectory) =>
|
||||
ipcRenderer.invoke("agent:start", { sessionId, workingDirectory }),
|
||||
|
||||
// Send a message to the agent
|
||||
send: (sessionId, message, workingDirectory, imagePaths) =>
|
||||
ipcRenderer.invoke("agent:send", {
|
||||
sessionId,
|
||||
message,
|
||||
workingDirectory,
|
||||
imagePaths,
|
||||
}),
|
||||
|
||||
// Get conversation history
|
||||
getHistory: (sessionId) =>
|
||||
ipcRenderer.invoke("agent:getHistory", { sessionId }),
|
||||
|
||||
// Stop current execution
|
||||
stop: (sessionId) => ipcRenderer.invoke("agent:stop", { sessionId }),
|
||||
|
||||
// Clear conversation
|
||||
clear: (sessionId) => ipcRenderer.invoke("agent:clear", { sessionId }),
|
||||
|
||||
// Subscribe to streaming events
|
||||
onStream: (callback) => {
|
||||
const subscription = (_, data) => callback(data);
|
||||
ipcRenderer.on("agent:stream", subscription);
|
||||
// Return unsubscribe function
|
||||
return () => ipcRenderer.removeListener("agent:stream", subscription);
|
||||
},
|
||||
},
|
||||
|
||||
// Session Management APIs
|
||||
sessions: {
|
||||
// List all sessions
|
||||
list: (includeArchived) =>
|
||||
ipcRenderer.invoke("sessions:list", { includeArchived }),
|
||||
|
||||
// Create a new session
|
||||
create: (name, projectPath, workingDirectory) =>
|
||||
ipcRenderer.invoke("sessions:create", {
|
||||
name,
|
||||
projectPath,
|
||||
workingDirectory,
|
||||
}),
|
||||
|
||||
// Update session metadata
|
||||
update: (sessionId, name, tags) =>
|
||||
ipcRenderer.invoke("sessions:update", { sessionId, name, tags }),
|
||||
|
||||
// Archive a session
|
||||
archive: (sessionId) =>
|
||||
ipcRenderer.invoke("sessions:archive", { sessionId }),
|
||||
|
||||
// Unarchive a session
|
||||
unarchive: (sessionId) =>
|
||||
ipcRenderer.invoke("sessions:unarchive", { sessionId }),
|
||||
|
||||
// Delete a session permanently
|
||||
delete: (sessionId) => ipcRenderer.invoke("sessions:delete", { sessionId }),
|
||||
},
|
||||
|
||||
// Auto Mode API
|
||||
autoMode: {
|
||||
// Start auto mode for a specific project
|
||||
start: (projectPath, maxConcurrency) =>
|
||||
ipcRenderer.invoke("auto-mode:start", { projectPath, maxConcurrency }),
|
||||
|
||||
// Stop auto mode for a specific project
|
||||
stop: (projectPath) => ipcRenderer.invoke("auto-mode:stop", { projectPath }),
|
||||
|
||||
// Get auto mode status (optionally for a specific project)
|
||||
status: (projectPath) => ipcRenderer.invoke("auto-mode:status", { projectPath }),
|
||||
|
||||
// Run a specific feature
|
||||
runFeature: (projectPath, featureId, useWorktrees) =>
|
||||
ipcRenderer.invoke("auto-mode:run-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
useWorktrees,
|
||||
}),
|
||||
|
||||
// Verify a specific feature by running its tests
|
||||
verifyFeature: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("auto-mode:verify-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
}),
|
||||
|
||||
// Resume a specific feature with previous context
|
||||
resumeFeature: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("auto-mode:resume-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
}),
|
||||
|
||||
// Check if context file exists for a feature
|
||||
contextExists: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("auto-mode:context-exists", {
|
||||
projectPath,
|
||||
featureId,
|
||||
}),
|
||||
|
||||
// Analyze a new project - kicks off an agent to analyze codebase
|
||||
analyzeProject: (projectPath) =>
|
||||
ipcRenderer.invoke("auto-mode:analyze-project", { projectPath }),
|
||||
|
||||
// Stop a specific feature
|
||||
stopFeature: (featureId) =>
|
||||
ipcRenderer.invoke("auto-mode:stop-feature", { featureId }),
|
||||
|
||||
// Follow-up on a feature with additional prompt
|
||||
followUpFeature: (projectPath, featureId, prompt, imagePaths) =>
|
||||
ipcRenderer.invoke("auto-mode:follow-up-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
prompt,
|
||||
imagePaths,
|
||||
}),
|
||||
|
||||
// Commit changes for a feature
|
||||
commitFeature: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("auto-mode:commit-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
}),
|
||||
|
||||
// Listen for auto mode events
|
||||
onEvent: (callback) => {
|
||||
const subscription = (_, data) => callback(data);
|
||||
ipcRenderer.on("auto-mode:event", subscription);
|
||||
|
||||
// Return unsubscribe function
|
||||
return () => {
|
||||
ipcRenderer.removeListener("auto-mode:event", subscription);
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
// Claude CLI Detection API
|
||||
checkClaudeCli: () => ipcRenderer.invoke("claude:check-cli"),
|
||||
|
||||
// Codex CLI Detection API
|
||||
checkCodexCli: () => ipcRenderer.invoke("codex:check-cli"),
|
||||
|
||||
// Model Management APIs
|
||||
model: {
|
||||
// Get all available models from all providers
|
||||
getAvailable: () => ipcRenderer.invoke("model:get-available"),
|
||||
|
||||
// Check all provider installation status
|
||||
checkProviders: () => ipcRenderer.invoke("model:check-providers"),
|
||||
},
|
||||
|
||||
// OpenAI API
|
||||
testOpenAIConnection: (apiKey) =>
|
||||
ipcRenderer.invoke("openai:test-connection", { apiKey }),
|
||||
|
||||
// Worktree Management APIs
|
||||
worktree: {
|
||||
// Revert feature changes by removing the worktree
|
||||
revertFeature: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("worktree:revert-feature", { projectPath, featureId }),
|
||||
|
||||
// Merge feature worktree changes back to main branch
|
||||
mergeFeature: (projectPath, featureId, options) =>
|
||||
ipcRenderer.invoke("worktree:merge-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
options,
|
||||
}),
|
||||
|
||||
// Get worktree info for a feature
|
||||
getInfo: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("worktree:get-info", { projectPath, featureId }),
|
||||
|
||||
// Get worktree status (changed files, commits)
|
||||
getStatus: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("worktree:get-status", { projectPath, featureId }),
|
||||
|
||||
// List all feature worktrees
|
||||
list: (projectPath) => ipcRenderer.invoke("worktree:list", { projectPath }),
|
||||
|
||||
// Get file diffs for a feature worktree
|
||||
getDiffs: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("worktree:get-diffs", { projectPath, featureId }),
|
||||
|
||||
// Get diff for a specific file in a worktree
|
||||
getFileDiff: (projectPath, featureId, filePath) =>
|
||||
ipcRenderer.invoke("worktree:get-file-diff", {
|
||||
projectPath,
|
||||
featureId,
|
||||
filePath,
|
||||
}),
|
||||
},
|
||||
|
||||
// Git Operations APIs (for non-worktree operations)
|
||||
git: {
|
||||
// Get file diffs for the main project
|
||||
getDiffs: (projectPath) =>
|
||||
ipcRenderer.invoke("git:get-diffs", { projectPath }),
|
||||
|
||||
// Get diff for a specific file in the main project
|
||||
getFileDiff: (projectPath, filePath) =>
|
||||
ipcRenderer.invoke("git:get-file-diff", { projectPath, filePath }),
|
||||
},
|
||||
|
||||
// Feature Suggestions API
|
||||
suggestions: {
|
||||
// Generate feature suggestions
|
||||
// suggestionType can be: "features", "refactoring", "security", "performance"
|
||||
generate: (projectPath, suggestionType = "features") =>
|
||||
ipcRenderer.invoke("suggestions:generate", { projectPath, suggestionType }),
|
||||
|
||||
// Stop generating suggestions
|
||||
stop: () => ipcRenderer.invoke("suggestions:stop"),
|
||||
|
||||
// Get suggestions status
|
||||
status: () => ipcRenderer.invoke("suggestions:status"),
|
||||
|
||||
// Listen for suggestions events
|
||||
onEvent: (callback) => {
|
||||
const subscription = (_, data) => callback(data);
|
||||
ipcRenderer.on("suggestions:event", subscription);
|
||||
|
||||
// Return unsubscribe function
|
||||
return () => {
|
||||
ipcRenderer.removeListener("suggestions:event", subscription);
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
// Spec Regeneration API
|
||||
specRegeneration: {
|
||||
// Create initial app spec for a new project
|
||||
create: (projectPath, projectOverview, generateFeatures = true) =>
|
||||
ipcRenderer.invoke("spec-regeneration:create", {
|
||||
projectPath,
|
||||
projectOverview,
|
||||
generateFeatures,
|
||||
}),
|
||||
|
||||
// Regenerate the app spec
|
||||
generate: (projectPath, projectDefinition) =>
|
||||
ipcRenderer.invoke("spec-regeneration:generate", {
|
||||
projectPath,
|
||||
projectDefinition,
|
||||
}),
|
||||
|
||||
// Generate features from existing app_spec.txt
|
||||
generateFeatures: (projectPath) =>
|
||||
ipcRenderer.invoke("spec-regeneration:generate-features", {
|
||||
projectPath,
|
||||
}),
|
||||
|
||||
// Stop regenerating spec
|
||||
stop: () => ipcRenderer.invoke("spec-regeneration:stop"),
|
||||
|
||||
// Get regeneration status
|
||||
status: () => ipcRenderer.invoke("spec-regeneration:status"),
|
||||
|
||||
// Listen for regeneration events
|
||||
onEvent: (callback) => {
|
||||
const subscription = (_, data) => callback(data);
|
||||
ipcRenderer.on("spec-regeneration:event", subscription);
|
||||
|
||||
// Return unsubscribe function
|
||||
return () => {
|
||||
ipcRenderer.removeListener("spec-regeneration:event", subscription);
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
// Setup & CLI Management API
|
||||
setup: {
|
||||
// Get comprehensive Claude CLI status
|
||||
getClaudeStatus: () => ipcRenderer.invoke("setup:claude-status"),
|
||||
|
||||
// Get comprehensive Codex CLI status
|
||||
getCodexStatus: () => ipcRenderer.invoke("setup:codex-status"),
|
||||
|
||||
// Install Claude CLI
|
||||
installClaude: () => ipcRenderer.invoke("setup:install-claude"),
|
||||
|
||||
// Install Codex CLI
|
||||
installCodex: () => ipcRenderer.invoke("setup:install-codex"),
|
||||
|
||||
// Authenticate Claude CLI
|
||||
authClaude: () => ipcRenderer.invoke("setup:auth-claude"),
|
||||
|
||||
// Authenticate Codex CLI with optional API key
|
||||
authCodex: (apiKey) => ipcRenderer.invoke("setup:auth-codex", { apiKey }),
|
||||
|
||||
// Store API key securely
|
||||
storeApiKey: (provider, apiKey) =>
|
||||
ipcRenderer.invoke("setup:store-api-key", { provider, apiKey }),
|
||||
|
||||
// Get stored API keys status
|
||||
getApiKeys: () => ipcRenderer.invoke("setup:get-api-keys"),
|
||||
|
||||
// Configure Codex MCP server for a project
|
||||
configureCodexMcp: (projectPath) =>
|
||||
ipcRenderer.invoke("setup:configure-codex-mcp", { projectPath }),
|
||||
|
||||
// Get platform information
|
||||
getPlatform: () => ipcRenderer.invoke("setup:get-platform"),
|
||||
|
||||
// Listen for installation progress
|
||||
onInstallProgress: (callback) => {
|
||||
const subscription = (_, data) => callback(data);
|
||||
ipcRenderer.on("setup:install-progress", subscription);
|
||||
return () => {
|
||||
ipcRenderer.removeListener("setup:install-progress", subscription);
|
||||
};
|
||||
},
|
||||
|
||||
// Listen for auth progress
|
||||
onAuthProgress: (callback) => {
|
||||
const subscription = (_, data) => callback(data);
|
||||
ipcRenderer.on("setup:auth-progress", subscription);
|
||||
return () => {
|
||||
ipcRenderer.removeListener("setup:auth-progress", subscription);
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
// Features API
|
||||
features: {
|
||||
// Get all features for a project
|
||||
getAll: (projectPath) =>
|
||||
ipcRenderer.invoke("features:getAll", { projectPath }),
|
||||
|
||||
// Get a single feature by ID
|
||||
get: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("features:get", { projectPath, featureId }),
|
||||
|
||||
// Create a new feature
|
||||
create: (projectPath, feature) =>
|
||||
ipcRenderer.invoke("features:create", { projectPath, feature }),
|
||||
|
||||
// Update a feature (partial updates supported)
|
||||
update: (projectPath, featureId, updates) =>
|
||||
ipcRenderer.invoke("features:update", {
|
||||
projectPath,
|
||||
featureId,
|
||||
updates,
|
||||
}),
|
||||
|
||||
// Delete a feature and its folder
|
||||
delete: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("features:delete", { projectPath, featureId }),
|
||||
|
||||
// Get agent output for a feature
|
||||
getAgentOutput: (projectPath, featureId) =>
|
||||
ipcRenderer.invoke("features:getAgentOutput", { projectPath, featureId }),
|
||||
},
|
||||
|
||||
// Running Agents API
|
||||
runningAgents: {
|
||||
// Get all running agents across all projects
|
||||
getAll: () => ipcRenderer.invoke("running-agents:getAll"),
|
||||
},
|
||||
getVersion: () => ipcRenderer.invoke("app:getVersion"),
|
||||
isPackaged: () => ipcRenderer.invoke("app:isPackaged"),
|
||||
});
|
||||
|
||||
// Also expose a flag to detect if we're in Electron
|
||||
contextBridge.exposeInMainWorld("isElectron", true);
|
||||
console.log("[Preload] Electron API exposed (simplified mode)");
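
As a rough usage sketch (not part of the diff; names follow the bridge exposed above), renderer code could consume this API along these lines:

```js
// Renderer-side sketch: detect Electron, resolve the backend URL, and
// subscribe to agent streaming events via the exposed bridge.
if (window.isElectron) {
  window.electronAPI.getServerUrl().then((serverUrl) => {
    console.log("Automaker backend:", serverUrl);
  });

  const unsubscribe = window.electronAPI.agent.onStream((event) => {
    console.log("agent stream event:", event);
  });

  // Call the returned function to stop listening, e.g. on unmount:
  // unsubscribe();
}
```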
|
||||
|
||||
@@ -1,721 +0,0 @@
|
||||
const { execSync, spawn } = require("child_process");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const os = require("os");
|
||||
|
||||
let runPtyCommand = null;
|
||||
try {
|
||||
({ runPtyCommand } = require("./pty-runner"));
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
"[ClaudeCliDetector] node-pty unavailable, will fall back to external terminal:",
|
||||
error?.message || error
|
||||
);
|
||||
}
|
||||
|
||||
const ANSI_REGEX =
|
||||
// eslint-disable-next-line no-control-regex
|
||||
/\u001b\[[0-9;?]*[ -/]*[@-~]|\u001b[@-_]|\u001b\][^\u0007]*\u0007/g;
|
||||
|
||||
const stripAnsi = (text = "") => text.replace(ANSI_REGEX, "");
|
||||
|
||||
/**
|
||||
* Claude CLI Detector
|
||||
*
|
||||
* Authentication options:
|
||||
* 1. OAuth Token (Subscription): User runs `claude setup-token` and provides the token to the app
|
||||
* 2. API Key (Pay-per-use): User provides their Anthropic API key directly
|
||||
*/
|
||||
class ClaudeCliDetector {
|
||||
/**
|
||||
* Check if Claude Code CLI is installed and accessible
|
||||
* @returns {Object} { installed: boolean, path: string|null, version: string|null, method: 'cli'|'none' }
|
||||
*/
|
||||
/**
|
||||
* Try to get updated PATH from shell config files
|
||||
* This helps detect CLI installations that modify shell config but haven't updated the current process PATH
|
||||
*/
|
||||
static getUpdatedPathFromShellConfig() {
|
||||
const homeDir = os.homedir();
|
||||
const shell = process.env.SHELL || "/bin/bash";
|
||||
const shellName = path.basename(shell);
|
||||
|
||||
const configFiles = [];
|
||||
if (shellName.includes("zsh")) {
|
||||
configFiles.push(path.join(homeDir, ".zshrc"));
|
||||
configFiles.push(path.join(homeDir, ".zshenv"));
|
||||
configFiles.push(path.join(homeDir, ".zprofile"));
|
||||
} else if (shellName.includes("bash")) {
|
||||
configFiles.push(path.join(homeDir, ".bashrc"));
|
||||
configFiles.push(path.join(homeDir, ".bash_profile"));
|
||||
configFiles.push(path.join(homeDir, ".profile"));
|
||||
}
|
||||
|
||||
const commonPaths = [
|
||||
path.join(homeDir, ".local", "bin"),
|
||||
path.join(homeDir, ".cargo", "bin"),
|
||||
"/usr/local/bin",
|
||||
"/opt/homebrew/bin",
|
||||
path.join(homeDir, "bin"),
|
||||
];
|
||||
|
||||
for (const configFile of configFiles) {
|
||||
if (fs.existsSync(configFile)) {
|
||||
try {
|
||||
const content = fs.readFileSync(configFile, "utf-8");
|
||||
const pathMatches = content.match(
|
||||
/export\s+PATH=["']?([^"'\n]+)["']?/g
|
||||
);
|
||||
if (pathMatches) {
|
||||
for (const match of pathMatches) {
|
||||
const pathValue = match
|
||||
.replace(/export\s+PATH=["']?/, "")
|
||||
.replace(/["']?$/, "");
|
||||
const paths = pathValue
|
||||
.split(":")
|
||||
.filter((p) => p && !p.includes("$"));
|
||||
commonPaths.push(...paths);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore errors reading config files
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return [...new Set(commonPaths)];
|
||||
}
|
||||
|
||||
static detectClaudeInstallation() {
|
||||
try {
|
||||
// Check if 'claude' command is in PATH (Unix)
|
||||
if (process.platform !== "win32") {
|
||||
try {
|
||||
const claudePath = execSync("which claude 2>/dev/null", {
|
||||
encoding: "utf-8",
|
||||
}).trim();
|
||||
if (claudePath) {
|
||||
const version = this.getClaudeVersion(claudePath);
|
||||
return {
|
||||
installed: true,
|
||||
path: claudePath,
|
||||
version: version,
|
||||
method: "cli",
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
// CLI not in PATH
|
||||
}
|
||||
}
|
||||
|
||||
// Check Windows path
|
||||
if (process.platform === "win32") {
|
||||
try {
|
||||
const claudePath = execSync("where claude 2>nul", {
|
||||
encoding: "utf-8",
|
||||
})
|
||||
.trim()
|
||||
.split("\n")[0];
|
||||
if (claudePath) {
|
||||
const version = this.getClaudeVersion(claudePath);
|
||||
return {
|
||||
installed: true,
|
||||
path: claudePath,
|
||||
version: version,
|
||||
method: "cli",
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
// Not found on Windows
|
||||
}
|
||||
}
|
||||
|
||||
// Check for local installation
|
||||
const localClaudePath = path.join(
|
||||
os.homedir(),
|
||||
".claude",
|
||||
"local",
|
||||
"claude"
|
||||
);
|
||||
if (fs.existsSync(localClaudePath)) {
|
||||
const version = this.getClaudeVersion(localClaudePath);
|
||||
return {
|
||||
installed: true,
|
||||
path: localClaudePath,
|
||||
version: version,
|
||||
method: "cli-local",
|
||||
};
|
||||
}
|
||||
|
||||
// Check common installation locations
|
||||
const commonPaths = this.getUpdatedPathFromShellConfig();
|
||||
const binaryNames = ["claude", "claude-code"];
|
||||
|
||||
for (const basePath of commonPaths) {
|
||||
for (const binaryName of binaryNames) {
|
||||
const claudePath = path.join(basePath, binaryName);
|
||||
if (fs.existsSync(claudePath)) {
|
||||
try {
|
||||
const version = this.getClaudeVersion(claudePath);
|
||||
return {
|
||||
installed: true,
|
||||
path: claudePath,
|
||||
version: version,
|
||||
method: "cli",
|
||||
};
|
||||
} catch (error) {
|
||||
// File exists but can't get version
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try to source shell config and check PATH again (Unix)
|
||||
if (process.platform !== "win32") {
|
||||
try {
|
||||
const shell = process.env.SHELL || "/bin/bash";
|
||||
const shellName = path.basename(shell);
|
||||
const homeDir = os.homedir();
|
||||
|
||||
let sourceCmd = "";
|
||||
if (shellName.includes("zsh")) {
|
||||
sourceCmd = `source ${homeDir}/.zshrc 2>/dev/null && which claude`;
|
||||
} else if (shellName.includes("bash")) {
|
||||
sourceCmd = `source ${homeDir}/.bashrc 2>/dev/null && which claude`;
|
||||
}
|
||||
|
||||
if (sourceCmd) {
|
||||
const claudePath = execSync(`bash -c "${sourceCmd}"`, {
|
||||
encoding: "utf-8",
|
||||
timeout: 2000,
|
||||
}).trim();
|
||||
if (claudePath && claudePath.startsWith("/")) {
|
||||
const version = this.getClaudeVersion(claudePath);
|
||||
return {
|
||||
installed: true,
|
||||
path: claudePath,
|
||||
version: version,
|
||||
method: "cli",
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Failed to source shell config
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
installed: false,
|
||||
path: null,
|
||||
version: null,
|
||||
method: "none",
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
installed: false,
|
||||
path: null,
|
||||
version: null,
|
||||
method: "none",
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Claude CLI version
|
||||
* @param {string} claudePath Path to claude executable
|
||||
* @returns {string|null} Version string or null
|
||||
*/
|
||||
static getClaudeVersion(claudePath) {
|
||||
try {
|
||||
const version = execSync(`"${claudePath}" --version 2>/dev/null`, {
|
||||
encoding: "utf-8",
|
||||
timeout: 5000,
|
||||
}).trim();
|
||||
return version || null;
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authentication status
|
||||
* Checks for:
|
||||
* 1. OAuth token stored in app's credentials (from `claude setup-token`)
|
||||
* 2. API key stored in app's credentials
|
||||
* 3. API key in environment variable
|
||||
*
|
||||
* @param {string} appCredentialsPath Path to app's credentials.json
|
||||
* @returns {Object} Authentication status
|
||||
*/
|
||||
static getAuthStatus(appCredentialsPath) {
|
||||
const envApiKey = process.env.ANTHROPIC_API_KEY;
|
||||
const envOAuthToken = process.env.CLAUDE_CODE_OAUTH_TOKEN;
|
||||
|
||||
let storedOAuthToken = null;
|
||||
let storedApiKey = null;
|
||||
|
||||
if (appCredentialsPath && fs.existsSync(appCredentialsPath)) {
|
||||
try {
|
||||
const content = fs.readFileSync(appCredentialsPath, "utf-8");
|
||||
const credentials = JSON.parse(content);
|
||||
storedOAuthToken = credentials.anthropic_oauth_token || null;
|
||||
storedApiKey =
|
||||
credentials.anthropic || credentials.anthropic_api_key || null;
|
||||
} catch (error) {
|
||||
// Ignore credential read errors
|
||||
}
|
||||
}
|
||||
|
||||
// Authentication priority (highest to lowest):
|
||||
// 1. Environment OAuth Token (CLAUDE_CODE_OAUTH_TOKEN)
|
||||
// 2. Stored OAuth Token (from credentials file)
|
||||
// 3. Stored API Key (from credentials file)
|
||||
// 4. Environment API Key (ANTHROPIC_API_KEY)
|
||||
let authenticated = false;
|
||||
let method = "none";
|
||||
|
||||
if (envOAuthToken) {
|
||||
authenticated = true;
|
||||
method = "oauth_token_env";
|
||||
} else if (storedOAuthToken) {
|
||||
authenticated = true;
|
||||
method = "oauth_token";
|
||||
} else if (storedApiKey) {
|
||||
authenticated = true;
|
||||
method = "api_key";
|
||||
} else if (envApiKey) {
|
||||
authenticated = true;
|
||||
method = "api_key_env";
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated,
|
||||
method,
|
||||
hasStoredOAuthToken: !!storedOAuthToken,
|
||||
hasStoredApiKey: !!storedApiKey,
|
||||
hasEnvApiKey: !!envApiKey,
|
||||
hasEnvOAuthToken: !!envOAuthToken,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Get installation info (installation status only, no auth)
|
||||
* @returns {Object} Installation info with status property
|
||||
*/
|
||||
static getInstallationInfo() {
|
||||
const installation = this.detectClaudeInstallation();
|
||||
return {
|
||||
status: installation.installed ? "installed" : "not_installed",
|
||||
installed: installation.installed,
|
||||
path: installation.path,
|
||||
version: installation.version,
|
||||
method: installation.method,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get full status including installation and auth
|
||||
* @param {string} appCredentialsPath Path to app's credentials.json
|
||||
* @returns {Object} Full status
|
||||
*/
|
||||
static getFullStatus(appCredentialsPath) {
|
||||
const installation = this.detectClaudeInstallation();
|
||||
const auth = this.getAuthStatus(appCredentialsPath);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
status: installation.installed ? "installed" : "not_installed",
|
||||
installed: installation.installed,
|
||||
path: installation.path,
|
||||
version: installation.version,
|
||||
method: installation.method,
|
||||
auth,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get installation info and recommendations
|
||||
* @returns {Object} Installation status and recommendations
|
||||
*/
|
||||
static getInstallationInfo() {
|
||||
const detection = this.detectClaudeInstallation();
|
||||
|
||||
if (detection.installed) {
|
||||
return {
|
||||
status: 'installed',
|
||||
method: detection.method,
|
||||
version: detection.version,
|
||||
path: detection.path,
|
||||
recommendation: 'Claude Code CLI is ready for ultrathink'
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
status: 'not_installed',
|
||||
recommendation: 'Install Claude Code CLI for optimal ultrathink performance',
|
||||
installCommands: this.getInstallCommands()
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get installation commands for different platforms
|
||||
* @returns {Object} Installation commands
|
||||
*/
|
||||
static getInstallCommands() {
|
||||
return {
|
||||
macos: "curl -fsSL https://claude.ai/install.sh | bash",
|
||||
windows: "irm https://claude.ai/install.ps1 | iex",
|
||||
linux: "curl -fsSL https://claude.ai/install.sh | bash",
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Install Claude CLI using the official script
|
||||
* @param {Function} onProgress Callback for progress updates
|
||||
* @returns {Promise<Object>} Installation result
|
||||
*/
|
||||
static async installCli(onProgress) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const platform = process.platform;
|
||||
let command, args;
|
||||
|
||||
if (platform === "win32") {
|
||||
command = "powershell";
|
||||
args = ["-Command", "irm https://claude.ai/install.ps1 | iex"];
|
||||
} else {
|
||||
command = "bash";
|
||||
args = ["-c", "curl -fsSL https://claude.ai/install.sh | bash"];
|
||||
}
|
||||
|
||||
console.log("[ClaudeCliDetector] Installing Claude CLI...");
|
||||
|
||||
const proc = spawn(command, args, {
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
shell: false,
|
||||
});
|
||||
|
||||
let output = "";
|
||||
let errorOutput = "";
|
||||
|
||||
proc.stdout.on("data", (data) => {
|
||||
const text = data.toString();
|
||||
output += text;
|
||||
if (onProgress) {
|
||||
onProgress({ type: "stdout", data: text });
|
||||
}
|
||||
});
|
||||
|
||||
proc.stderr.on("data", (data) => {
|
||||
const text = data.toString();
|
||||
errorOutput += text;
|
||||
if (onProgress) {
|
||||
onProgress({ type: "stderr", data: text });
|
||||
}
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
if (code === 0) {
|
||||
console.log(
|
||||
"[ClaudeCliDetector] Installation completed successfully"
|
||||
);
|
||||
resolve({
|
||||
success: true,
|
||||
output,
|
||||
message: "Claude CLI installed successfully",
|
||||
});
|
||||
} else {
|
||||
console.error(
|
||||
"[ClaudeCliDetector] Installation failed with code:",
|
||||
code
|
||||
);
|
||||
reject({
|
||||
success: false,
|
||||
error: errorOutput || `Installation failed with code ${code}`,
|
||||
output,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
proc.on("error", (error) => {
|
||||
console.error("[ClaudeCliDetector] Installation error:", error);
|
||||
reject({
|
||||
success: false,
|
||||
error: error.message,
|
||||
output,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get instructions for setup-token command
|
||||
* @returns {Object} Setup token instructions
|
||||
*/
|
||||
static getSetupTokenInstructions() {
|
||||
const detection = this.detectClaudeInstallation();
|
||||
|
||||
if (!detection.installed) {
|
||||
return {
|
||||
success: false,
|
||||
error: "Claude CLI is not installed. Please install it first.",
|
||||
installCommands: this.getInstallCommands(),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
command: "claude setup-token",
|
||||
instructions: [
|
||||
"1. Open your terminal",
|
||||
"2. Run: claude setup-token",
|
||||
"3. Follow the prompts to authenticate",
|
||||
"4. Copy the token that is displayed",
|
||||
"5. Paste the token in the field below",
|
||||
],
|
||||
note: "This token is from your Claude subscription and allows you to use Claude without API charges.",
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract OAuth token from command output
|
||||
* Tries multiple patterns to find the token
|
||||
* @param {string} output The command output
|
||||
* @returns {string|null} Extracted token or null
|
||||
*/
|
||||
static extractTokenFromOutput(output) {
|
||||
// Pattern 1: CLAUDE_CODE_OAUTH_TOKEN=<token> or CLAUDE_CODE_OAUTH_TOKEN: <token>
|
||||
const envMatch = output.match(
|
||||
/CLAUDE_CODE_OAUTH_TOKEN[=:]\s*["']?([a-zA-Z0-9_\-\.]+)["']?/i
|
||||
);
|
||||
if (envMatch) return envMatch[1];
|
||||
|
||||
// Pattern 2: "Token: <token>" or "token: <token>"
|
||||
const tokenLabelMatch = output.match(
|
||||
/\btoken[:\s]+["']?([a-zA-Z0-9_\-\.]{40,})["']?/i
|
||||
);
|
||||
if (tokenLabelMatch) return tokenLabelMatch[1];
|
||||
|
||||
// Pattern 3: Look for token after success/authenticated message
|
||||
const successMatch = output.match(
|
||||
/(?:success|authenticated|generated|token is)[^\n]*\n\s*([a-zA-Z0-9_\-\.]{40,})/i
|
||||
);
|
||||
if (successMatch) return successMatch[1];
|
||||
|
||||
// Pattern 4: Standalone long alphanumeric string on its own line (last resort)
|
||||
// This catches tokens that are printed on their own line
|
||||
const lines = output.split("\n");
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
// Token should be 40+ chars, alphanumeric with possible hyphens/underscores/dots
|
||||
if (/^[a-zA-Z0-9_\-\.]{40,}$/.test(trimmed)) {
|
||||
return trimmed;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run claude setup-token command to generate OAuth token
|
||||
* Opens an external terminal window since Claude CLI requires TTY for its Ink-based UI
|
||||
* @param {Function} onProgress Callback for progress updates
|
||||
* @returns {Promise<Object>} Result indicating terminal was opened
|
||||
*/
|
||||
static async runSetupToken(onProgress) {
|
||||
const detection = this.detectClaudeInstallation();
|
||||
|
||||
if (!detection.installed) {
|
||||
throw {
|
||||
success: false,
|
||||
error: "Claude CLI is not installed. Please install it first.",
|
||||
requiresManualAuth: false,
|
||||
};
|
||||
}
|
||||
|
||||
const claudePath = detection.path;
|
||||
const platform = process.platform;
|
||||
const preferPty =
|
||||
(platform === "win32" ||
|
||||
platform === "darwin" ||
|
||||
process.env.CLAUDE_AUTH_FORCE_PTY === "1") &&
|
||||
process.env.CLAUDE_AUTH_DISABLE_PTY !== "1";
|
||||
|
||||
const send = (data) => {
|
||||
if (onProgress && data) {
|
||||
onProgress({ type: "stdout", data });
|
||||
}
|
||||
};
|
||||
|
||||
if (preferPty && runPtyCommand) {
|
||||
try {
|
||||
send("Starting in-app terminal session for Claude auth...\n");
|
||||
send("If your browser opens, complete sign-in and return here.\n\n");
|
||||
|
||||
const ptyResult = await runPtyCommand(claudePath, ["setup-token"], {
|
||||
cols: 120,
|
||||
rows: 30,
|
||||
onData: (chunk) => send(chunk),
|
||||
env: {
|
||||
FORCE_COLOR: "1",
|
||||
},
|
||||
});
|
||||
|
||||
const cleanedOutput = stripAnsi(ptyResult.output || "");
|
||||
const token = this.extractTokenFromOutput(cleanedOutput);
|
||||
|
||||
if (ptyResult.success && token) {
|
||||
send("\nCaptured token automatically.\n");
|
||||
return {
|
||||
success: true,
|
||||
token,
|
||||
requiresManualAuth: false,
|
||||
terminalOpened: false,
|
||||
};
|
||||
}
|
||||
|
||||
if (ptyResult.success && !token) {
|
||||
send(
|
||||
"\nCLI completed but token was not detected automatically. You can copy it above or retry.\n"
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
requiresManualAuth: true,
|
||||
terminalOpened: false,
|
||||
error: "Could not capture token automatically",
|
||||
output: cleanedOutput,
|
||||
};
|
||||
}
|
||||
|
||||
send(
|
||||
`\nClaude CLI exited with code ${ptyResult.exitCode}. Falling back to manual copy.\n`
|
||||
);
|
||||
return {
|
||||
success: false,
|
||||
error: `Claude CLI exited with code ${ptyResult.exitCode}`,
|
||||
requiresManualAuth: true,
|
||||
output: cleanedOutput,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("[ClaudeCliDetector] PTY auth failed, falling back:", error);
|
||||
send(
|
||||
`In-app terminal failed (${error?.message || "unknown error"}). Falling back to external terminal...\n`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: external terminal window
|
||||
if (preferPty && !runPtyCommand) {
|
||||
send("In-app terminal unavailable (node-pty not loaded).");
|
||||
} else if (!preferPty) {
|
||||
send("Using system terminal for authentication on this platform.");
|
||||
}
|
||||
send("Opening system terminal for authentication...\n");
|
||||
|
||||
// Helper function to check if a command exists asynchronously
|
||||
const commandExists = (cmd) => {
|
||||
return new Promise((resolve) => {
|
||||
require("child_process").exec(
|
||||
`which ${cmd}`,
|
||||
{ timeout: 1000 },
|
||||
(error) => {
|
||||
resolve(!error);
|
||||
}
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
// For Linux, find available terminal first (async)
|
||||
let linuxTerminal = null;
|
||||
if (platform !== "win32" && platform !== "darwin") {
|
||||
const terminals = [
|
||||
["gnome-terminal", ["--", claudePath, "setup-token"]],
|
||||
["konsole", ["-e", claudePath, "setup-token"]],
|
||||
["xterm", ["-e", claudePath, "setup-token"]],
|
||||
["x-terminal-emulator", ["-e", `${claudePath} setup-token`]],
|
||||
];
|
||||
|
||||
for (const [term, termArgs] of terminals) {
|
||||
const exists = await commandExists(term);
|
||||
if (exists) {
|
||||
linuxTerminal = { command: term, args: termArgs };
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// Open command in external terminal since Claude CLI requires TTY
|
||||
let command, args;
|
||||
|
||||
if (platform === "win32") {
|
||||
// Windows: Open new cmd window that stays open
|
||||
command = "cmd";
|
||||
args = ["/c", "start", "cmd", "/k", `"${claudePath}" setup-token`];
|
||||
} else if (platform === "darwin") {
|
||||
// macOS: Open Terminal.app
|
||||
command = "osascript";
|
||||
args = [
|
||||
"-e",
|
||||
`tell application "Terminal" to do script "${claudePath} setup-token"`,
|
||||
"-e",
|
||||
'tell application "Terminal" to activate',
|
||||
];
|
||||
} else {
|
||||
// Linux: Use the terminal we found earlier
|
||||
if (!linuxTerminal) {
|
||||
reject({
|
||||
success: false,
|
||||
error:
|
||||
"Could not find a terminal emulator. Please run 'claude setup-token' manually in your terminal.",
|
||||
requiresManualAuth: true,
|
||||
});
|
||||
return;
|
||||
}
|
||||
command = linuxTerminal.command;
|
||||
args = linuxTerminal.args;
|
||||
}
|
||||
|
||||
console.log(
|
||||
"[ClaudeCliDetector] Spawning terminal:",
|
||||
command,
|
||||
args.join(" ")
|
||||
);
|
||||
|
||||
const proc = spawn(command, args, {
|
||||
detached: true,
|
||||
stdio: "ignore",
|
||||
shell: platform === "win32",
|
||||
});
|
||||
|
||||
proc.unref();
|
||||
|
||||
proc.on("error", (error) => {
|
||||
console.error("[ClaudeCliDetector] Failed to open terminal:", error);
|
||||
reject({
|
||||
success: false,
|
||||
error: `Failed to open terminal: ${error.message}`,
|
||||
requiresManualAuth: true,
|
||||
});
|
||||
});
|
||||
|
||||
// Give the terminal a moment to open
|
||||
setTimeout(() => {
|
||||
send("Terminal window opened!\n\n");
|
||||
send("1. Complete the sign-in in your browser\n");
|
||||
send("2. Copy the token from the terminal\n");
|
||||
send("3. Paste it below\n");
|
||||
|
||||
// Resolve with manual auth required since we can't capture from external terminal
|
||||
resolve({
|
||||
success: true,
|
||||
requiresManualAuth: true,
|
||||
terminalOpened: true,
|
||||
message:
|
||||
"Terminal opened. Complete authentication and paste the token below.",
|
||||
});
|
||||
}, 500);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ClaudeCliDetector;
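
A rough illustration of how the main process might consume this detector (the require path and credentials location are assumptions, not part of the diff):

```js
// Sketch: check installation and authentication in one call.
const ClaudeCliDetector = require("./claude-cli-detector"); // hypothetical path

const credentialsPath = "/path/to/app/credentials.json"; // hypothetical location
const status = ClaudeCliDetector.getFullStatus(credentialsPath);

if (!status.installed) {
  console.log("Install Claude CLI:", ClaudeCliDetector.getInstallCommands());
} else if (!status.auth.authenticated) {
  console.log("Authenticate via:", ClaudeCliDetector.getSetupTokenInstructions());
}
```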
|
||||
@@ -1,566 +0,0 @@
|
||||
const { execSync, spawn } = require('child_process');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
/**
|
||||
* Codex CLI Detector - Checks if OpenAI Codex CLI is installed
|
||||
*
|
||||
* Codex CLI is OpenAI's agent CLI tool that allows users to use
|
||||
* GPT-5.1 Codex models (gpt-5.1-codex-max, gpt-5.1-codex, etc.)
|
||||
* for code generation and agentic tasks.
|
||||
*/
|
||||
class CodexCliDetector {
|
||||
/**
|
||||
* Get the path to Codex config directory
|
||||
* @returns {string} Path to .codex directory
|
||||
*/
|
||||
static getConfigDir() {
|
||||
return path.join(os.homedir(), '.codex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path to Codex auth file
|
||||
* @returns {string} Path to auth.json
|
||||
*/
|
||||
static getAuthPath() {
|
||||
return path.join(this.getConfigDir(), 'auth.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check Codex authentication status
|
||||
* @returns {Object} Authentication status
|
||||
*/
|
||||
static checkAuth() {
|
||||
try {
|
||||
const authPath = this.getAuthPath();
|
||||
const envApiKey = process.env.OPENAI_API_KEY;
|
||||
|
||||
// Try to verify authentication using codex CLI command if available
|
||||
try {
|
||||
const detection = this.detectCodexInstallation();
|
||||
if (detection.installed) {
|
||||
try {
|
||||
const statusOutput = execSync(`"${detection.path || 'codex'}" login status 2>/dev/null`, {
|
||||
encoding: 'utf-8',
|
||||
timeout: 5000
|
||||
});
|
||||
|
||||
if (statusOutput && (statusOutput.includes('Logged in') || statusOutput.includes('Authenticated'))) {
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'cli_verified',
|
||||
hasAuthFile: fs.existsSync(authPath),
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
} catch (statusError) {
|
||||
// status command failed, continue with file-based check
|
||||
}
|
||||
}
|
||||
} catch (verifyError) {
|
||||
// CLI verification failed, continue with file-based check
|
||||
}
|
||||
|
||||
// Check if auth file exists
|
||||
if (fs.existsSync(authPath)) {
|
||||
let auth = null;
|
||||
try {
|
||||
const content = fs.readFileSync(authPath, 'utf-8');
|
||||
auth = JSON.parse(content);
|
||||
|
||||
// Check for token object structure
|
||||
if (auth.token && typeof auth.token === 'object') {
|
||||
const token = auth.token;
|
||||
if (token.Id_token || token.access_token || token.refresh_token || token.id_token) {
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'cli_tokens',
|
||||
hasAuthFile: true,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check for tokens at root level
|
||||
if (auth.access_token || auth.refresh_token || auth.Id_token || auth.id_token) {
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'cli_tokens',
|
||||
hasAuthFile: true,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
|
||||
// Check for API key fields
|
||||
if (auth.api_key || auth.openai_api_key || auth.apiKey) {
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'auth_file',
|
||||
hasAuthFile: true,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
authenticated: false,
|
||||
method: 'none',
|
||||
hasAuthFile: false,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
|
||||
if (!auth) {
|
||||
return {
|
||||
authenticated: false,
|
||||
method: 'none',
|
||||
hasAuthFile: true,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
|
||||
const keys = Object.keys(auth);
|
||||
if (keys.length > 0) {
|
||||
const hasTokens = keys.some(key =>
|
||||
key.toLowerCase().includes('token') ||
|
||||
key.toLowerCase().includes('refresh') ||
|
||||
(auth[key] && typeof auth[key] === 'object' && (
|
||||
auth[key].access_token || auth[key].refresh_token || auth[key].Id_token || auth[key].id_token
|
||||
))
|
||||
);
|
||||
|
||||
if (hasTokens) {
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'cli_tokens',
|
||||
hasAuthFile: true,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
|
||||
// File exists and has content - check if it's tokens or API key
|
||||
const likelyTokens = keys.some(key => key.toLowerCase().includes('token') || key.toLowerCase().includes('refresh'));
|
||||
return {
|
||||
authenticated: true,
|
||||
method: likelyTokens ? 'cli_tokens' : 'auth_file',
|
||||
hasAuthFile: true,
|
||||
hasEnvKey: !!envApiKey,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check environment variable
|
||||
if (envApiKey) {
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'env_var',
|
||||
hasAuthFile: false,
|
||||
hasEnvKey: true,
|
||||
authPath
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated: false,
|
||||
method: 'none',
|
||||
hasAuthFile: false,
|
||||
hasEnvKey: false,
|
||||
authPath
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
authenticated: false,
|
||||
method: 'none',
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Check if Codex CLI is installed and accessible
|
||||
* @returns {Object} { installed: boolean, path: string|null, version: string|null, method: 'cli'|'npm'|'brew'|'none' }
|
||||
*/
|
||||
static detectCodexInstallation() {
|
||||
try {
|
||||
// Method 1: Check if 'codex' command is in PATH
|
||||
try {
|
||||
const codexPath = execSync('which codex 2>/dev/null', { encoding: 'utf-8' }).trim();
|
||||
if (codexPath) {
|
||||
const version = this.getCodexVersion(codexPath);
|
||||
return {
|
||||
installed: true,
|
||||
path: codexPath,
|
||||
version: version,
|
||||
method: 'cli'
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
// CLI not in PATH, continue checking other methods
|
||||
}
|
||||
|
||||
// Method 2: Check for npm global installation
|
||||
try {
|
||||
const npmListOutput = execSync('npm list -g @openai/codex --depth=0 2>/dev/null', { encoding: 'utf-8' });
|
||||
if (npmListOutput && npmListOutput.includes('@openai/codex')) {
|
||||
// Get the path from npm bin
|
||||
const npmBinPath = execSync('npm bin -g', { encoding: 'utf-8' }).trim();
|
||||
const codexPath = path.join(npmBinPath, 'codex');
|
||||
const version = this.getCodexVersion(codexPath);
|
||||
return {
|
||||
installed: true,
|
||||
path: codexPath,
|
||||
version: version,
|
||||
method: 'npm'
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
// npm global not found
|
||||
}
|
||||
|
||||
// Method 3: Check for Homebrew installation on macOS
|
||||
if (process.platform === 'darwin') {
|
||||
try {
|
||||
const brewList = execSync('brew list --formula 2>/dev/null', { encoding: 'utf-8' });
|
||||
if (brewList.includes('codex')) {
|
||||
const brewPrefixOutput = execSync('brew --prefix codex 2>/dev/null', { encoding: 'utf-8' }).trim();
|
||||
const codexPath = path.join(brewPrefixOutput, 'bin', 'codex');
|
||||
const version = this.getCodexVersion(codexPath);
|
||||
return {
|
||||
installed: true,
|
||||
path: codexPath,
|
||||
version: version,
|
||||
method: 'brew'
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
// Homebrew not found or codex not installed via brew
|
||||
}
|
||||
}
|
||||
|
||||
// Method 4: Check Windows path
|
||||
if (process.platform === 'win32') {
|
||||
try {
|
||||
const codexPath = execSync('where codex 2>nul', { encoding: 'utf-8' }).trim().split('\n')[0];
|
||||
if (codexPath) {
|
||||
const version = this.getCodexVersion(codexPath);
|
||||
return {
|
||||
installed: true,
|
||||
path: codexPath,
|
||||
version: version,
|
||||
method: 'cli'
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
// Not found on Windows
|
||||
}
|
||||
}
|
||||
|
||||
// Method 5: Check common installation paths
|
||||
const commonPaths = [
|
||||
path.join(os.homedir(), '.local', 'bin', 'codex'),
|
||||
path.join(os.homedir(), '.npm-global', 'bin', 'codex'),
|
||||
'/usr/local/bin/codex',
|
||||
'/opt/homebrew/bin/codex',
|
||||
];
|
||||
|
||||
for (const checkPath of commonPaths) {
|
||||
if (fs.existsSync(checkPath)) {
|
||||
const version = this.getCodexVersion(checkPath);
|
||||
return {
|
||||
installed: true,
|
||||
path: checkPath,
|
||||
version: version,
|
||||
method: 'cli'
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Method 6: Check if OPENAI_API_KEY is set (can use Codex API directly)
|
||||
if (process.env.OPENAI_API_KEY) {
|
||||
return {
|
||||
installed: false,
|
||||
path: null,
|
||||
version: null,
|
||||
method: 'api-key-only',
|
||||
hasApiKey: true
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
installed: false,
|
||||
path: null,
|
||||
version: null,
|
||||
method: 'none'
|
||||
};
|
||||
} catch (error) {
|
||||
// Error detecting Codex installation
|
||||
return {
|
||||
installed: false,
|
||||
path: null,
|
||||
version: null,
|
||||
method: 'none',
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Codex CLI version from executable path
|
||||
* @param {string} codexPath Path to codex executable
|
||||
* @returns {string|null} Version string or null
|
||||
*/
|
||||
static getCodexVersion(codexPath) {
|
||||
try {
|
||||
const version = execSync(`"${codexPath}" --version 2>/dev/null`, { encoding: 'utf-8' }).trim();
|
||||
return version || null;
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get installation info and recommendations
|
||||
* @returns {Object} Installation status and recommendations
|
||||
*/
|
||||
static getInstallationInfo() {
|
||||
const detection = this.detectCodexInstallation();
|
||||
|
||||
if (detection.installed) {
|
||||
return {
|
||||
status: 'installed',
|
||||
method: detection.method,
|
||||
version: detection.version,
|
||||
path: detection.path,
|
||||
recommendation: detection.method === 'cli'
|
||||
? 'Using Codex CLI - ready for GPT-5.1 Codex models'
|
||||
: `Using Codex CLI via ${detection.method} - ready for GPT-5.1 Codex models`
|
||||
};
|
||||
}
|
||||
|
||||
// Not installed but has API key
|
||||
if (detection.method === 'api-key-only') {
|
||||
return {
|
||||
status: 'api_key_only',
|
||||
method: 'api-key-only',
|
||||
recommendation: 'OPENAI_API_KEY detected but Codex CLI not installed. Install Codex CLI for full agentic capabilities.',
|
||||
installCommands: this.getInstallCommands()
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
status: 'not_installed',
|
||||
recommendation: 'Install OpenAI Codex CLI to use GPT-5.1 Codex models for agentic tasks',
|
||||
installCommands: this.getInstallCommands()
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get installation commands for different platforms
|
||||
* @returns {Object} Installation commands by platform
|
||||
*/
|
||||
static getInstallCommands() {
|
||||
return {
|
||||
npm: 'npm install -g @openai/codex@latest',
|
||||
macos: 'brew install codex',
|
||||
linux: 'npm install -g @openai/codex@latest',
|
||||
windows: 'npm install -g @openai/codex@latest'
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if Codex CLI supports a specific model
|
||||
* @param {string} model Model name to check
|
||||
* @returns {boolean} Whether the model is supported
|
||||
*/
|
||||
static isModelSupported(model) {
|
||||
const supportedModels = [
|
||||
'gpt-5.1-codex-max',
|
||||
'gpt-5.1-codex',
|
||||
'gpt-5.1-codex-mini',
|
||||
'gpt-5.1'
|
||||
];
|
||||
return supportedModels.includes(model);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default model for Codex CLI
|
||||
* @returns {string} Default model name
|
||||
*/
|
||||
static getDefaultModel() {
|
||||
return 'gpt-5.1-codex-max';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get comprehensive installation info including auth status
|
||||
* @returns {Object} Full status object
|
||||
*/
|
||||
static getFullStatus() {
|
||||
const installation = this.detectCodexInstallation();
|
||||
const auth = this.checkAuth();
|
||||
const info = this.getInstallationInfo();
|
||||
|
||||
return {
|
||||
...info,
|
||||
auth,
|
||||
installation
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Install Codex CLI using npm
|
||||
* @param {Function} onProgress Callback for progress updates
|
||||
* @returns {Promise<Object>} Installation result
|
||||
*/
|
||||
static async installCli(onProgress) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const command = 'npm';
|
||||
const args = ['install', '-g', '@openai/codex@latest'];
|
||||
|
||||
const proc = spawn(command, args, {
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
shell: true
|
||||
});
|
||||
|
||||
let output = '';
|
||||
let errorOutput = '';
|
||||
|
||||
proc.stdout.on('data', (data) => {
|
||||
const text = data.toString();
|
||||
output += text;
|
||||
if (onProgress) {
|
||||
onProgress({ type: 'stdout', data: text });
|
||||
}
|
||||
});
|
||||
|
||||
proc.stderr.on('data', (data) => {
|
||||
const text = data.toString();
|
||||
errorOutput += text;
|
||||
// npm often outputs progress to stderr
|
||||
if (onProgress) {
|
||||
onProgress({ type: 'stderr', data: text });
|
||||
}
|
||||
});
|
||||
|
||||
proc.on('close', (code) => {
|
||||
if (code === 0) {
|
||||
resolve({
|
||||
success: true,
|
||||
output,
|
||||
message: 'Codex CLI installed successfully'
|
||||
});
|
||||
} else {
|
||||
reject({
|
||||
success: false,
|
||||
error: errorOutput || `Installation failed with code ${code}`,
|
||||
output
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
proc.on('error', (error) => {
|
||||
reject({
|
||||
success: false,
|
||||
error: error.message,
|
||||
output
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate Codex CLI - opens browser for OAuth or stores API key
|
||||
* @param {string} apiKey Optional API key to store
|
||||
* @param {Function} onProgress Callback for progress updates
|
||||
* @returns {Promise<Object>} Authentication result
|
||||
*/
|
||||
static async authenticate(apiKey, onProgress) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const detection = this.detectCodexInstallation();
|
||||
|
||||
if (!detection.installed) {
|
||||
reject({
|
||||
success: false,
|
||||
error: 'Codex CLI is not installed'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const codexPath = detection.path || 'codex';
|
||||
|
||||
if (apiKey) {
|
||||
// Store API key directly using codex auth command
|
||||
const proc = spawn(codexPath, ['auth', 'login', '--api-key', apiKey], {
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
shell: false
|
||||
});
|
||||
|
||||
let output = '';
|
||||
let errorOutput = '';
|
||||
|
||||
proc.stdout.on('data', (data) => {
|
||||
const text = data.toString();
|
||||
output += text;
|
||||
if (onProgress) {
|
||||
onProgress({ type: 'stdout', data: text });
|
||||
}
|
||||
});
|
||||
|
||||
proc.stderr.on('data', (data) => {
|
||||
const text = data.toString();
|
||||
errorOutput += text;
|
||||
if (onProgress) {
|
||||
onProgress({ type: 'stderr', data: text });
|
||||
}
|
||||
});
|
||||
|
||||
proc.on('close', (code) => {
|
||||
if (code === 0) {
|
||||
resolve({
|
||||
success: true,
|
||||
output,
|
||||
message: 'Codex CLI authenticated successfully'
|
||||
});
|
||||
} else {
|
||||
reject({
|
||||
success: false,
|
||||
error: errorOutput || `Authentication failed with code ${code}`,
|
||||
output
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
proc.on('error', (error) => {
|
||||
reject({
|
||||
success: false,
|
||||
error: error.message,
|
||||
output
|
||||
});
|
||||
});
|
||||
} else {
|
||||
// Require manual authentication
|
||||
if (onProgress) {
|
||||
onProgress({
|
||||
type: 'info',
|
||||
data: 'Please run the following command in your terminal to authenticate:\n\ncodex auth login\n\nThen return here to continue setup.'
|
||||
});
|
||||
}
|
||||
|
||||
resolve({
|
||||
success: true,
|
||||
requiresManualAuth: true,
|
||||
command: `${codexPath} auth login`,
|
||||
message: 'Please authenticate Codex CLI manually'
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = CodexCliDetector;
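
A minimal sketch of how a caller might pick a model with the helpers above (the require path and the requested model are illustrative assumptions):

```js
// Sketch: fall back to the detector's default when a model is unsupported.
const CodexCliDetector = require("./codex-cli-detector"); // hypothetical path

const requested = "gpt-5.1-codex"; // e.g. a value from user settings
const model = CodexCliDetector.isModelSupported(requested)
  ? requested
  : CodexCliDetector.getDefaultModel(); // "gpt-5.1-codex-max"
```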
|
||||
@@ -1,353 +0,0 @@
|
||||
/**
|
||||
* Codex TOML Configuration Manager
|
||||
*
|
||||
* Manages Codex CLI's TOML configuration file to add/update MCP server settings.
|
||||
* Codex CLI looks for config at:
|
||||
* - ~/.codex/config.toml (user-level)
|
||||
* - .codex/config.toml (project-level, takes precedence)
|
||||
*/
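// Illustrative sketch of the MCP section this manager produces in config.toml
// (values are placeholders; see configureMcpServer below for the exact fields):
//
//   experimental_use_rmcp_client = true
//
//   [mcp_servers.automaker-tools]
//   command = "node"
//   args = ["/path/to/automaker-mcp-server.js"]
//   startup_timeout_sec = 10
//   tool_timeout_sec = 60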
|
||||
|
||||
const fs = require('fs/promises');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
class CodexConfigManager {
|
||||
constructor() {
|
||||
this.userConfigPath = path.join(os.homedir(), '.codex', 'config.toml');
|
||||
this.projectConfigPath = null; // Will be set per project
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the project path for project-level config
|
||||
*/
|
||||
setProjectPath(projectPath) {
|
||||
this.projectConfigPath = path.join(projectPath, '.codex', 'config.toml');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the effective config path (project-level if exists, otherwise user-level)
|
||||
*/
|
||||
async getConfigPath() {
|
||||
if (this.projectConfigPath) {
|
||||
try {
|
||||
await fs.access(this.projectConfigPath);
|
||||
return this.projectConfigPath;
|
||||
} catch (e) {
|
||||
// Project config doesn't exist, fall back to user config
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure user config directory exists
|
||||
const userConfigDir = path.dirname(this.userConfigPath);
|
||||
try {
|
||||
await fs.mkdir(userConfigDir, { recursive: true });
|
||||
} catch (e) {
|
||||
// Directory might already exist
|
||||
}
|
||||
|
||||
return this.userConfigPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read existing TOML config (simple parser for our needs)
|
||||
*/
|
||||
async readConfig(configPath) {
|
||||
try {
|
||||
const content = await fs.readFile(configPath, 'utf-8');
|
||||
return this.parseToml(content);
|
||||
} catch (e) {
|
||||
if (e.code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple TOML parser for our specific use case
|
||||
* This is a minimal parser that handles the MCP server config structure
|
||||
*/
|
||||
parseToml(content) {
|
||||
const config = {};
|
||||
let currentSection = null;
|
||||
let currentSubsection = null;
|
||||
|
||||
const lines = content.split('\n');
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
|
||||
// Skip comments and empty lines
|
||||
if (!trimmed || trimmed.startsWith('#')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Section header: [section]
|
||||
const sectionMatch = trimmed.match(/^\[([^\]]+)\]$/);
|
||||
if (sectionMatch) {
|
||||
const sectionName = sectionMatch[1];
|
||||
const parts = sectionName.split('.');
|
||||
|
||||
if (parts.length === 1) {
|
||||
currentSection = parts[0];
|
||||
currentSubsection = null;
|
||||
if (!config[currentSection]) {
|
||||
config[currentSection] = {};
|
||||
}
|
||||
} else if (parts.length === 2) {
|
||||
currentSection = parts[0];
|
||||
currentSubsection = parts[1];
|
||||
if (!config[currentSection]) {
|
||||
config[currentSection] = {};
|
||||
}
|
||||
if (!config[currentSection][currentSubsection]) {
|
||||
config[currentSection][currentSubsection] = {};
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Key-value pair: key = value
|
||||
const kvMatch = trimmed.match(/^([^=]+)=(.+)$/);
|
||||
if (kvMatch) {
|
||||
const key = kvMatch[1].trim();
|
||||
let value = kvMatch[2].trim();
|
||||
|
||||
// Remove quotes if present
|
||||
if ((value.startsWith('"') && value.endsWith('"')) ||
|
||||
(value.startsWith("'") && value.endsWith("'"))) {
|
||||
value = value.slice(1, -1);
|
||||
}
|
||||
|
||||
// Parse boolean
|
||||
if (value === 'true') value = true;
|
||||
else if (value === 'false') value = false;
|
||||
// Parse number
|
||||
else if (/^-?\d+$/.test(value)) value = parseInt(value, 10);
|
||||
else if (/^-?\d+\.\d+$/.test(value)) value = parseFloat(value);
|
||||
|
||||
if (currentSubsection) {
|
||||
if (!config[currentSection][currentSubsection]) {
|
||||
config[currentSection][currentSubsection] = {};
|
||||
}
|
||||
config[currentSection][currentSubsection][key] = value;
|
||||
} else if (currentSection) {
|
||||
if (!config[currentSection]) {
|
||||
config[currentSection] = {};
|
||||
}
|
||||
config[currentSection][key] = value;
|
||||
} else {
|
||||
config[key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert config object back to TOML format
|
||||
*/
|
||||
stringifyToml(config, indent = 0) {
|
||||
const indentStr = ' '.repeat(indent);
|
||||
let result = '';
|
||||
|
||||
for (const [key, value] of Object.entries(config)) {
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
// Section
|
||||
result += `${indentStr}[${key}]\n`;
|
||||
result += this.stringifyToml(value, indent);
|
||||
} else {
|
||||
// Key-value
|
||||
let valueStr = value;
|
||||
if (typeof value === 'string') {
|
||||
// Escape quotes and wrap in quotes if needed
|
||||
if (value.includes('"') || value.includes("'") || value.includes(' ')) {
|
||||
valueStr = `"${value.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
} else if (typeof value === 'boolean') {
|
||||
valueStr = value.toString();
|
||||
}
|
||||
result += `${indentStr}${key} = ${valueStr}\n`;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
  /**
   * Configure the automaker-tools MCP server
   */
  async configureMcpServer(projectPath, mcpServerScriptPath) {
    this.setProjectPath(projectPath);
    const configPath = await this.getConfigPath();

    // Read existing config
    const config = await this.readConfig(configPath);

    // Ensure mcp_servers section exists
    if (!config.mcp_servers) {
      config.mcp_servers = {};
    }

    // Configure automaker-tools server
    config.mcp_servers['automaker-tools'] = {
      command: 'node',
      args: [mcpServerScriptPath],
      env: {
        AUTOMAKER_PROJECT_PATH: projectPath
      },
      startup_timeout_sec: 10,
      tool_timeout_sec: 60,
      enabled_tools: ['UpdateFeatureStatus']
    };

    // Ensure experimental_use_rmcp_client is enabled (if needed)
    if (!config.experimental_use_rmcp_client) {
      config.experimental_use_rmcp_client = true;
    }

    // Write config back
    await this.writeConfig(configPath, config);

    console.log(`[CodexConfigManager] Configured automaker-tools MCP server in ${configPath}`);
    return configPath;
  }
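  // Illustrative sketch (not part of the original file): how a caller might invoke
  // configureMcpServer(); the paths below are placeholders, not real values.
  //
  //   const codexConfigManager = require('./codex-config-manager');
  //   await codexConfigManager.configureMcpServer(
  //     '/path/to/project',
  //     '/path/to/mcp-server-stdio.js'
  //   );
  //
  // writeConfig() then emits roughly this TOML for the server entry:
  //
  //   experimental_use_rmcp_client = true
  //
  //   [mcp_servers.automaker-tools]
  //   command = "node"
  //   args = ["/path/to/mcp-server-stdio.js"]
  //   startup_timeout_sec = 10
  //   tool_timeout_sec = 60
  //   enabled_tools = ["UpdateFeatureStatus"]
  //
  //   [mcp_servers.automaker-tools.env]
  //   AUTOMAKER_PROJECT_PATH = "/path/to/project"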
|
||||
|
||||
/**
|
||||
* Write config to TOML file
|
||||
*/
|
||||
async writeConfig(configPath, config) {
|
||||
let content = '';
|
||||
|
||||
// Write top-level keys first (preserve existing non-MCP config)
|
||||
for (const [key, value] of Object.entries(config)) {
|
||||
if (key === 'mcp_servers' || key === 'experimental_use_rmcp_client') {
|
||||
continue; // Handle these separately
|
||||
}
|
||||
if (typeof value !== 'object') {
|
||||
content += `${key} = ${this.formatValue(value)}\n`;
|
||||
}
|
||||
}
|
||||
|
||||
// Write experimental flag if enabled
|
||||
if (config.experimental_use_rmcp_client) {
|
||||
if (content && !content.endsWith('\n\n')) {
|
||||
content += '\n';
|
||||
}
|
||||
content += `experimental_use_rmcp_client = true\n`;
|
||||
}
|
||||
|
||||
// Write mcp_servers section
|
||||
if (config.mcp_servers && Object.keys(config.mcp_servers).length > 0) {
|
||||
if (content && !content.endsWith('\n\n')) {
|
||||
content += '\n';
|
||||
}
|
||||
|
||||
for (const [serverName, serverConfig] of Object.entries(config.mcp_servers)) {
|
||||
content += `\n[mcp_servers.${serverName}]\n`;
|
||||
|
||||
// Write command first
|
||||
if (serverConfig.command) {
|
||||
content += `command = "${this.escapeTomlString(serverConfig.command)}"\n`;
|
||||
}
|
||||
|
||||
// Write args
|
||||
if (serverConfig.args && Array.isArray(serverConfig.args)) {
|
||||
const argsStr = serverConfig.args.map(a => `"${this.escapeTomlString(a)}"`).join(', ');
|
||||
content += `args = [${argsStr}]\n`;
|
||||
}
|
||||
|
||||
// Write timeouts (must be before env subsection)
|
||||
if (serverConfig.startup_timeout_sec !== undefined) {
|
||||
content += `startup_timeout_sec = ${serverConfig.startup_timeout_sec}\n`;
|
||||
}
|
||||
|
||||
if (serverConfig.tool_timeout_sec !== undefined) {
|
||||
content += `tool_timeout_sec = ${serverConfig.tool_timeout_sec}\n`;
|
||||
}
|
||||
|
||||
// Write enabled_tools (must be before env subsection - at server level, not env level)
|
||||
if (serverConfig.enabled_tools && Array.isArray(serverConfig.enabled_tools)) {
|
||||
const toolsStr = serverConfig.enabled_tools.map(t => `"${this.escapeTomlString(t)}"`).join(', ');
|
||||
content += `enabled_tools = [${toolsStr}]\n`;
|
||||
}
|
||||
|
||||
// Write env section last (as a separate subsection)
|
||||
// IMPORTANT: In TOML, once we start [mcp_servers.server_name.env],
|
||||
// everything after belongs to that subsection until a new section starts
|
||||
if (serverConfig.env && typeof serverConfig.env === 'object' && Object.keys(serverConfig.env).length > 0) {
|
||||
content += `\n[mcp_servers.${serverName}.env]\n`;
|
||||
for (const [envKey, envValue] of Object.entries(serverConfig.env)) {
|
||||
content += `${envKey} = "${this.escapeTomlString(String(envValue))}"\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
const configDir = path.dirname(configPath);
|
||||
await fs.mkdir(configDir, { recursive: true });
|
||||
|
||||
// Write file
|
||||
await fs.writeFile(configPath, content, 'utf-8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape special characters in TOML strings
|
||||
*/
|
||||
escapeTomlString(str) {
|
||||
return str
|
||||
.replace(/\\/g, '\\\\')
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\n/g, '\\n')
|
||||
.replace(/\r/g, '\\r')
|
||||
.replace(/\t/g, '\\t');
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a value for TOML output
|
||||
*/
|
||||
formatValue(value) {
|
||||
if (typeof value === 'string') {
|
||||
// Escape quotes
|
||||
const escaped = value.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
|
||||
return `"${escaped}"`;
|
||||
} else if (typeof value === 'boolean') {
|
||||
return value.toString();
|
||||
} else if (typeof value === 'number') {
|
||||
return value.toString();
|
||||
}
|
||||
return `"${String(value)}"`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove automaker-tools MCP server configuration
|
||||
*/
|
||||
async removeMcpServer(projectPath) {
|
||||
this.setProjectPath(projectPath);
|
||||
const configPath = await this.getConfigPath();
|
||||
|
||||
try {
|
||||
const config = await this.readConfig(configPath);
|
||||
|
||||
if (config.mcp_servers && config.mcp_servers['automaker-tools']) {
|
||||
delete config.mcp_servers['automaker-tools'];
|
||||
|
||||
// If no more MCP servers, remove the section
|
||||
if (Object.keys(config.mcp_servers).length === 0) {
|
||||
delete config.mcp_servers;
|
||||
}
|
||||
|
||||
await this.writeConfig(configPath, config);
|
||||
console.log(`[CodexConfigManager] Removed automaker-tools MCP server from ${configPath}`);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[CodexConfigManager] Error removing MCP server config:`, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new CodexConfigManager();
|
||||
|
||||
|
||||
@@ -1,610 +0,0 @@
|
||||
/**
 * Codex CLI Execution Wrapper
 *
 * This module handles spawning and managing Codex CLI processes
 * for executing OpenAI model queries.
 */

const { spawn } = require('child_process');
const { EventEmitter } = require('events');
const readline = require('readline');
const path = require('path');
const CodexCliDetector = require('./codex-cli-detector');
const codexConfigManager = require('./codex-config-manager');

/**
 * Message types from Codex CLI JSON output
 */
const CODEX_EVENT_TYPES = {
  THREAD_STARTED: 'thread.started',
  ITEM_STARTED: 'item.started',
  ITEM_COMPLETED: 'item.completed',
  THREAD_COMPLETED: 'thread.completed',
  ERROR: 'error'
};
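// Illustrative sketch (not part of the original file): the JSONL events this
// executor expects on stdout, one JSON object per line. Field values here are
// invented examples.
//
//   {"type":"thread.started","thread_id":"thread_123"}
//   {"type":"item.completed","item":{"type":"agent_message","text":"Done."}}
//   {"type":"item.completed","item":{"type":"command_execution","command":"npm test","aggregated_output":"..."}}
//   {"type":"thread.completed","thread_id":"thread_123"}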
|
||||
|
||||
/**
|
||||
* Codex Executor - Manages Codex CLI process execution
|
||||
*/
|
||||
class CodexExecutor extends EventEmitter {
|
||||
constructor() {
|
||||
super();
|
||||
this.currentProcess = null;
|
||||
this.codexPath = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find and cache the Codex CLI path
|
||||
* @returns {string|null} Path to codex executable
|
||||
*/
|
||||
findCodexPath() {
|
||||
if (this.codexPath) {
|
||||
return this.codexPath;
|
||||
}
|
||||
|
||||
const installation = CodexCliDetector.detectCodexInstallation();
|
||||
if (installation.installed && installation.path) {
|
||||
this.codexPath = installation.path;
|
||||
return this.codexPath;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a Codex CLI query
|
||||
* @param {Object} options Execution options
|
||||
* @param {string} options.prompt The prompt to execute
|
||||
* @param {string} options.model Model to use (default: gpt-5.1-codex-max)
|
||||
* @param {string} options.cwd Working directory
|
||||
* @param {string} options.systemPrompt System prompt (optional, will be prepended to prompt)
|
||||
* @param {number} options.maxTurns Not used - Codex CLI doesn't support this parameter
|
||||
* @param {string[]} options.allowedTools Not used - Codex CLI doesn't support this parameter
|
||||
* @param {Object} options.env Environment variables
|
||||
* @param {Object} options.mcpServers MCP servers configuration (for configuring Codex TOML)
|
||||
* @returns {AsyncGenerator} Generator yielding messages
|
||||
*/
|
||||
async *execute(options) {
|
||||
const {
|
||||
prompt,
|
||||
model = 'gpt-5.1-codex-max',
|
||||
cwd = process.cwd(),
|
||||
systemPrompt,
|
||||
maxTurns, // Not used by Codex CLI
|
||||
allowedTools, // Not used by Codex CLI
|
||||
env = {},
|
||||
mcpServers = null
|
||||
} = options;
|
||||
|
||||
const codexPath = this.findCodexPath();
|
||||
if (!codexPath) {
|
||||
yield {
|
||||
type: 'error',
|
||||
error: 'Codex CLI not found. Please install it with: npm install -g @openai/codex@latest'
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
// Configure MCP server if provided
|
||||
if (mcpServers && mcpServers['automaker-tools']) {
|
||||
try {
|
||||
// Get the absolute path to the MCP server script
|
||||
const mcpServerScriptPath = path.resolve(__dirname, 'mcp-server-stdio.js');
|
||||
|
||||
// Verify the script exists
|
||||
const fs = require('fs');
|
||||
if (!fs.existsSync(mcpServerScriptPath)) {
|
||||
console.warn(`[CodexExecutor] MCP server script not found at ${mcpServerScriptPath}, skipping MCP configuration`);
|
||||
} else {
|
||||
// Configure Codex TOML to use the MCP server
|
||||
await codexConfigManager.configureMcpServer(cwd, mcpServerScriptPath);
|
||||
console.log('[CodexExecutor] Configured automaker-tools MCP server for Codex CLI');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[CodexExecutor] Failed to configure MCP server:', error);
|
||||
// Continue execution even if MCP config fails - Codex will work without MCP tools
|
||||
}
|
||||
}
|
||||
|
||||
// Combine system prompt with main prompt if provided
|
||||
// Codex CLI doesn't support --system-prompt argument, so we prepend it to the prompt
|
||||
let combinedPrompt = prompt;
|
||||
console.log('[CodexExecutor] Original prompt length:', prompt?.length || 0);
|
||||
if (systemPrompt) {
|
||||
combinedPrompt = `${systemPrompt}\n\n---\n\n${prompt}`;
|
||||
console.log('[CodexExecutor] System prompt prepended to main prompt');
|
||||
console.log('[CodexExecutor] System prompt length:', systemPrompt.length);
|
||||
console.log('[CodexExecutor] Combined prompt length:', combinedPrompt.length);
|
||||
}
|
||||
|
||||
// Build command arguments
|
||||
// Note: maxTurns and allowedTools are not supported by Codex CLI
|
||||
console.log('[CodexExecutor] Building command arguments...');
|
||||
const args = this.buildArgs({
|
||||
prompt: combinedPrompt,
|
||||
model
|
||||
});
|
||||
|
||||
console.log('[CodexExecutor] Executing command:', codexPath);
|
||||
console.log('[CodexExecutor] Number of args:', args.length);
|
||||
console.log('[CodexExecutor] Args (without prompt):', args.slice(0, -1).join(' '));
|
||||
console.log('[CodexExecutor] Prompt length in args:', args[args.length - 1]?.length || 0);
|
||||
console.log('[CodexExecutor] Prompt preview (first 200 chars):', args[args.length - 1]?.substring(0, 200));
|
||||
console.log('[CodexExecutor] Working directory:', cwd);
|
||||
|
||||
// Spawn the process
|
||||
const processEnv = {
|
||||
...process.env,
|
||||
...env,
|
||||
// Ensure OPENAI_API_KEY is available
|
||||
OPENAI_API_KEY: env.OPENAI_API_KEY || process.env.OPENAI_API_KEY
|
||||
};
|
||||
|
||||
// Log API key status (without exposing the key)
|
||||
if (processEnv.OPENAI_API_KEY) {
|
||||
console.log('[CodexExecutor] OPENAI_API_KEY is set (length:', processEnv.OPENAI_API_KEY.length, ')');
|
||||
} else {
|
||||
console.warn('[CodexExecutor] WARNING: OPENAI_API_KEY is not set!');
|
||||
}
|
||||
|
||||
console.log('[CodexExecutor] Spawning process...');
|
||||
const proc = spawn(codexPath, args, {
|
||||
cwd,
|
||||
env: processEnv,
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
this.currentProcess = proc;
|
||||
console.log('[CodexExecutor] Process spawned with PID:', proc.pid);
|
||||
|
||||
// Track process events
|
||||
proc.on('error', (error) => {
|
||||
console.error('[CodexExecutor] Process error:', error);
|
||||
});
|
||||
|
||||
proc.on('spawn', () => {
|
||||
console.log('[CodexExecutor] Process spawned successfully');
|
||||
});
|
||||
|
||||
// Collect stderr output as it comes in
|
||||
let stderr = '';
|
||||
let hasOutput = false;
|
||||
let stdoutChunks = [];
|
||||
let stderrChunks = [];
|
||||
|
||||
proc.stderr.on('data', (data) => {
|
||||
const errorText = data.toString();
|
||||
stderr += errorText;
|
||||
stderrChunks.push(errorText);
|
||||
hasOutput = true;
|
||||
console.error('[CodexExecutor] stderr chunk received (', data.length, 'bytes):', errorText.substring(0, 200));
|
||||
});
|
||||
|
||||
proc.stderr.on('end', () => {
|
||||
console.log('[CodexExecutor] stderr stream ended. Total chunks:', stderrChunks.length, 'Total length:', stderr.length);
|
||||
});
|
||||
|
||||
proc.stdout.on('data', (data) => {
|
||||
const text = data.toString();
|
||||
stdoutChunks.push(text);
|
||||
hasOutput = true;
|
||||
console.log('[CodexExecutor] stdout chunk received (', data.length, 'bytes):', text.substring(0, 200));
|
||||
});
|
||||
|
||||
proc.stdout.on('end', () => {
|
||||
console.log('[CodexExecutor] stdout stream ended. Total chunks:', stdoutChunks.length);
|
||||
});
|
||||
|
||||
// Create readline interface for parsing JSONL output
|
||||
console.log('[CodexExecutor] Creating readline interface...');
|
||||
const rl = readline.createInterface({
|
||||
input: proc.stdout,
|
||||
crlfDelay: Infinity
|
||||
});
|
||||
|
||||
// Track accumulated content for converting to Claude format
|
||||
let accumulatedText = '';
|
||||
let toolUses = [];
|
||||
let lastOutputTime = Date.now();
|
||||
const OUTPUT_TIMEOUT = 30000; // 30 seconds timeout for no output
|
||||
let lineCount = 0;
|
||||
let jsonParseErrors = 0;
|
||||
|
||||
// Set up timeout check
|
||||
const checkTimeout = setInterval(() => {
|
||||
const timeSinceLastOutput = Date.now() - lastOutputTime;
|
||||
if (timeSinceLastOutput > OUTPUT_TIMEOUT && !hasOutput) {
|
||||
console.warn('[CodexExecutor] No output received for', timeSinceLastOutput, 'ms. Process still alive:', !proc.killed);
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
console.log('[CodexExecutor] Starting to read lines from stdout...');
|
||||
|
||||
// Process stdout line by line (JSONL format)
|
||||
try {
|
||||
for await (const line of rl) {
|
||||
hasOutput = true;
|
||||
lastOutputTime = Date.now();
|
||||
lineCount++;
|
||||
|
||||
console.log('[CodexExecutor] Line', lineCount, 'received (length:', line.length, '):', line.substring(0, 100));
|
||||
|
||||
if (!line.trim()) {
|
||||
console.log('[CodexExecutor] Skipping empty line');
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const event = JSON.parse(line);
|
||||
console.log('[CodexExecutor] Successfully parsed JSON event. Type:', event.type, 'Keys:', Object.keys(event));
|
||||
|
||||
const convertedMsg = this.convertToClaudeFormat(event);
|
||||
console.log('[CodexExecutor] Converted message:', convertedMsg ? { type: convertedMsg.type } : 'null');
|
||||
|
||||
if (convertedMsg) {
|
||||
// Accumulate text content
|
||||
if (convertedMsg.type === 'assistant' && convertedMsg.message?.content) {
|
||||
for (const block of convertedMsg.message.content) {
|
||||
if (block.type === 'text') {
|
||||
accumulatedText += block.text;
|
||||
console.log('[CodexExecutor] Accumulated text block (total length:', accumulatedText.length, ')');
|
||||
} else if (block.type === 'tool_use') {
|
||||
toolUses.push(block);
|
||||
console.log('[CodexExecutor] Tool use detected:', block.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
console.log('[CodexExecutor] Yielding message of type:', convertedMsg.type);
|
||||
yield convertedMsg;
|
||||
} else {
|
||||
console.log('[CodexExecutor] Converted message is null, skipping');
|
||||
}
|
||||
} catch (parseError) {
|
||||
jsonParseErrors++;
|
||||
// Non-JSON output, yield as text
|
||||
console.log('[CodexExecutor] JSON parse error (', jsonParseErrors, 'total):', parseError.message);
|
||||
console.log('[CodexExecutor] Non-JSON line content:', line.substring(0, 200));
|
||||
yield {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{ type: 'text', text: line + '\n' }]
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
console.log('[CodexExecutor] Finished reading all lines. Total lines:', lineCount, 'JSON errors:', jsonParseErrors);
|
||||
} catch (readError) {
|
||||
console.error('[CodexExecutor] Error reading from readline:', readError);
|
||||
throw readError;
|
||||
} finally {
|
||||
clearInterval(checkTimeout);
|
||||
console.log('[CodexExecutor] Cleaned up timeout checker');
|
||||
}
|
||||
|
||||
// Handle process completion
|
||||
console.log('[CodexExecutor] Waiting for process to close...');
|
||||
const exitCode = await new Promise((resolve) => {
|
||||
proc.on('close', (code, signal) => {
|
||||
console.log('[CodexExecutor] Process closed with code:', code, 'signal:', signal);
|
||||
resolve(code);
|
||||
});
|
||||
});
|
||||
|
||||
this.currentProcess = null;
|
||||
console.log('[CodexExecutor] Process completed. Exit code:', exitCode, 'Has output:', hasOutput, 'Stderr length:', stderr.length);
|
||||
|
||||
// Wait a bit for any remaining stderr data to be collected
|
||||
console.log('[CodexExecutor] Waiting 200ms for any remaining stderr data...');
|
||||
await new Promise(resolve => setTimeout(resolve, 200));
|
||||
console.log('[CodexExecutor] Final stderr length:', stderr.length, 'Final stdout chunks:', stdoutChunks.length);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
const errorMessage = stderr.trim()
|
||||
? `Codex CLI exited with code ${exitCode}.\n\nError output:\n${stderr}`
|
||||
: `Codex CLI exited with code ${exitCode}. No error output captured.`;
|
||||
|
||||
console.error('[CodexExecutor] Process failed with exit code', exitCode);
|
||||
console.error('[CodexExecutor] Error message:', errorMessage);
|
||||
console.error('[CodexExecutor] Stderr chunks:', stderrChunks.length, 'Stdout chunks:', stdoutChunks.length);
|
||||
|
||||
yield {
|
||||
type: 'error',
|
||||
error: errorMessage
|
||||
};
|
||||
} else if (!hasOutput && !stderr) {
|
||||
// Process exited successfully but produced no output - might be API key issue
|
||||
const warningMessage = 'Codex CLI completed but produced no output. This might indicate:\n' +
|
||||
'- Missing or invalid OPENAI_API_KEY\n' +
|
||||
'- Codex CLI configuration issue\n' +
|
||||
'- The process completed without generating any response\n\n' +
|
||||
`Debug info: Exit code ${exitCode}, stdout chunks: ${stdoutChunks.length}, stderr chunks: ${stderrChunks.length}, lines read: ${lineCount}`;
|
||||
|
||||
console.warn('[CodexExecutor] No output detected:', warningMessage);
|
||||
console.warn('[CodexExecutor] Stdout chunks:', stdoutChunks);
|
||||
console.warn('[CodexExecutor] Stderr chunks:', stderrChunks);
|
||||
|
||||
yield {
|
||||
type: 'error',
|
||||
error: warningMessage
|
||||
};
|
||||
} else {
|
||||
console.log('[CodexExecutor] Process completed successfully. Exit code:', exitCode, 'Lines processed:', lineCount);
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Build command arguments for Codex CLI
   * Only includes supported arguments based on Codex CLI help:
   * - --model: Model to use
   * - --json: JSON output format
   * - --full-auto: Non-interactive automatic execution
   *
   * Note: Codex CLI does NOT support:
   * - --system-prompt (system prompt is prepended to main prompt)
   * - --max-turns (not available in CLI)
   * - --tools (not available in CLI)
   *
   * @param {Object} options Options
   * @returns {string[]} Command arguments
   */
  buildArgs(options) {
    const { prompt, model } = options;

    console.log('[CodexExecutor] buildArgs called with model:', model, 'prompt length:', prompt?.length || 0);

    const args = ['exec'];

    // Add model (required for most use cases)
    if (model) {
      args.push('--model', model);
      console.log('[CodexExecutor] Added model argument:', model);
    }

    // Add JSON output flag for structured parsing
    args.push('--json');
    console.log('[CodexExecutor] Added --json flag');

    // Add full-auto mode (non-interactive)
    // This enables automatic execution with workspace-write sandbox
    args.push('--full-auto');
    console.log('[CodexExecutor] Added --full-auto flag');

    // Add the prompt at the end
    args.push(prompt);
    console.log('[CodexExecutor] Added prompt (length:', prompt?.length || 0, ')');

    console.log('[CodexExecutor] Final args count:', args.length);
    return args;
  }
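  // Illustrative sketch (not part of the original file): with the default model,
  // buildArgs() yields ['exec', '--model', 'gpt-5.1-codex-max', '--json', '--full-auto', prompt],
  // i.e. roughly the equivalent of running:
  //
  //   codex exec --model gpt-5.1-codex-max --json --full-auto "<combined prompt>"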
|
||||
|
||||
/**
|
||||
* Map Claude tool names to Codex tool names
|
||||
* @param {string[]} tools Array of tool names
|
||||
* @returns {string[]} Mapped tool names
|
||||
*/
|
||||
mapToolsToCodex(tools) {
|
||||
const toolMap = {
|
||||
'Read': 'read',
|
||||
'Write': 'write',
|
||||
'Edit': 'edit',
|
||||
'Bash': 'bash',
|
||||
'Glob': 'glob',
|
||||
'Grep': 'grep',
|
||||
'WebSearch': 'web-search',
|
||||
'WebFetch': 'web-fetch'
|
||||
};
|
||||
|
||||
return tools
|
||||
.map(tool => toolMap[tool] || tool.toLowerCase())
|
||||
.filter(tool => tool); // Remove undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Codex JSONL event to Claude SDK message format
|
||||
* @param {Object} event Codex event object
|
||||
* @returns {Object|null} Claude-format message or null
|
||||
*/
|
||||
convertToClaudeFormat(event) {
|
||||
console.log('[CodexExecutor] Converting event:', JSON.stringify(event).substring(0, 200));
|
||||
const { type, data, item, thread_id } = event;
|
||||
|
||||
switch (type) {
|
||||
case CODEX_EVENT_TYPES.THREAD_STARTED:
|
||||
case 'thread.started':
|
||||
// Session initialization
|
||||
return {
|
||||
type: 'session_start',
|
||||
sessionId: thread_id || data?.thread_id || event.thread_id
|
||||
};
|
||||
|
||||
case CODEX_EVENT_TYPES.ITEM_COMPLETED:
|
||||
case 'item.completed':
|
||||
// Codex uses 'item' field, not 'data'
|
||||
return this.convertItemCompleted(item || data);
|
||||
|
||||
case CODEX_EVENT_TYPES.ITEM_STARTED:
|
||||
case 'item.started':
|
||||
// Convert item.started events - these indicate tool/command usage
|
||||
const startedItem = item || data;
|
||||
if (startedItem?.type === 'command_execution' && startedItem?.command) {
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'tool_use',
|
||||
name: 'bash',
|
||||
input: { command: startedItem.command }
|
||||
}]
|
||||
}
|
||||
};
|
||||
}
|
||||
// For other item.started types, return null (we'll show the completed version)
|
||||
return null;
|
||||
|
||||
case CODEX_EVENT_TYPES.THREAD_COMPLETED:
|
||||
case 'thread.completed':
|
||||
return {
|
||||
type: 'complete',
|
||||
sessionId: thread_id || data?.thread_id || event.thread_id
|
||||
};
|
||||
|
||||
case CODEX_EVENT_TYPES.ERROR:
|
||||
case 'error':
|
||||
return {
|
||||
type: 'error',
|
||||
error: data?.message || item?.message || event.message || 'Unknown error from Codex CLI'
|
||||
};
|
||||
|
||||
case 'turn.started':
|
||||
// Turn started - just a marker, no need to convert
|
||||
return null;
|
||||
|
||||
default:
|
||||
// Pass through other events
|
||||
console.log('[CodexExecutor] Unhandled event type:', type);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert item.completed event to Claude format
|
||||
* @param {Object} item Event item data
|
||||
* @returns {Object|null} Claude-format message
|
||||
*/
|
||||
convertItemCompleted(item) {
|
||||
if (!item) {
|
||||
console.log('[CodexExecutor] convertItemCompleted: item is null/undefined');
|
||||
return null;
|
||||
}
|
||||
|
||||
const itemType = item.type || item.item_type;
|
||||
console.log('[CodexExecutor] convertItemCompleted: itemType =', itemType, 'item keys:', Object.keys(item));
|
||||
|
||||
switch (itemType) {
|
||||
case 'reasoning':
|
||||
// Thinking/reasoning output - Codex uses 'text' field
|
||||
const reasoningText = item.text || item.content || '';
|
||||
console.log('[CodexExecutor] Converting reasoning, text length:', reasoningText.length);
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'thinking',
|
||||
thinking: reasoningText
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
case 'agent_message':
|
||||
case 'message':
|
||||
// Assistant text message
|
||||
const messageText = item.content || item.text || '';
|
||||
console.log('[CodexExecutor] Converting message, text length:', messageText.length);
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'text',
|
||||
text: messageText
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
case 'command_execution':
|
||||
// Command execution - show both the command and its output
|
||||
const command = item.command || '';
|
||||
const output = item.aggregated_output || item.output || '';
|
||||
console.log('[CodexExecutor] Converting command_execution, command:', command.substring(0, 50), 'output length:', output.length);
|
||||
|
||||
// Return as text message showing the command and output
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'text',
|
||||
text: `\`\`\`bash\n${command}\n\`\`\`\n\n${output}`
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
case 'tool_use':
|
||||
// Tool use
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'tool_use',
|
||||
name: item.tool || item.command || 'unknown',
|
||||
input: item.input || item.args || {}
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
case 'tool_result':
|
||||
// Tool result
|
||||
return {
|
||||
type: 'tool_result',
|
||||
tool_use_id: item.tool_use_id,
|
||||
content: item.output || item.result
|
||||
};
|
||||
|
||||
case 'todo_list':
|
||||
// Todo list - convert to text format
|
||||
const todos = item.items || [];
|
||||
const todoText = todos.map((t, i) => `${i + 1}. ${t.text || t}`).join('\n');
|
||||
console.log('[CodexExecutor] Converting todo_list, items:', todos.length);
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'text',
|
||||
text: `**Todo List:**\n${todoText}`
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
default:
|
||||
// Generic text output
|
||||
const text = item.text || item.content || item.aggregated_output;
|
||||
if (text) {
|
||||
console.log('[CodexExecutor] Converting default item type, text length:', text.length);
|
||||
return {
|
||||
type: 'assistant',
|
||||
message: {
|
||||
content: [{
|
||||
type: 'text',
|
||||
text: String(text)
|
||||
}]
|
||||
}
|
||||
};
|
||||
}
|
||||
console.log('[CodexExecutor] convertItemCompleted: No text content found, returning null');
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Abort current execution
|
||||
*/
|
||||
abort() {
|
||||
if (this.currentProcess) {
|
||||
console.log('[CodexExecutor] Aborting current process');
|
||||
this.currentProcess.kill('SIGTERM');
|
||||
this.currentProcess = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if execution is in progress
|
||||
* @returns {boolean} Whether execution is in progress
|
||||
*/
|
||||
isRunning() {
|
||||
return this.currentProcess !== null;
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
const codexExecutor = new CodexExecutor();
|
||||
|
||||
module.exports = codexExecutor;
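// Illustrative sketch (not part of the original file): how a caller could consume
// the execute() async generator. Option values are placeholders.
//
//   const codexExecutor = require('./codex-executor');
//   for await (const msg of codexExecutor.execute({
//     prompt: 'Summarize the repository structure',
//     cwd: '/path/to/project',
//   })) {
//     if (msg.type === 'error') console.error(msg.error);
//     else if (msg.type === 'assistant') renderContent(msg.message.content); // renderContent is hypothetical
//   }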
|
||||
@@ -1,452 +0,0 @@
|
||||
const path = require("path");
|
||||
const fs = require("fs/promises");
|
||||
|
||||
/**
|
||||
* Context Manager - Handles reading, writing, and deleting context files for features
|
||||
*/
|
||||
class ContextManager {
|
||||
/**
|
||||
* Write output to feature context file
|
||||
*/
|
||||
async writeToContextFile(projectPath, featureId, content) {
|
||||
if (!projectPath) return;
|
||||
|
||||
try {
|
||||
const featureDir = path.join(
|
||||
projectPath,
|
||||
".automaker",
|
||||
"features",
|
||||
featureId
|
||||
);
|
||||
|
||||
// Ensure feature directory exists
|
||||
try {
|
||||
await fs.access(featureDir);
|
||||
} catch {
|
||||
await fs.mkdir(featureDir, { recursive: true });
|
||||
}
|
||||
|
||||
const filePath = path.join(featureDir, "agent-output.md");
|
||||
|
||||
// Append to existing file or create new one
|
||||
try {
|
||||
const existing = await fs.readFile(filePath, "utf-8");
|
||||
await fs.writeFile(filePath, existing + content, "utf-8");
|
||||
} catch {
|
||||
await fs.writeFile(filePath, content, "utf-8");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("[ContextManager] Failed to write to context file:", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read context file for a feature
|
||||
*/
|
||||
async readContextFile(projectPath, featureId) {
|
||||
try {
|
||||
const contextPath = path.join(
|
||||
projectPath,
|
||||
".automaker",
|
||||
"features",
|
||||
featureId,
|
||||
"agent-output.md"
|
||||
);
|
||||
const content = await fs.readFile(contextPath, "utf-8");
|
||||
return content;
|
||||
} catch (error) {
|
||||
console.log(`[ContextManager] No context file found for ${featureId}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete agent context file for a feature
|
||||
*/
|
||||
async deleteContextFile(projectPath, featureId) {
|
||||
if (!projectPath) return;
|
||||
|
||||
try {
|
||||
const contextPath = path.join(
|
||||
projectPath,
|
||||
".automaker",
|
||||
"features",
|
||||
featureId,
|
||||
"agent-output.md"
|
||||
);
|
||||
await fs.unlink(contextPath);
|
||||
console.log(
|
||||
`[ContextManager] Deleted agent context for feature ${featureId}`
|
||||
);
|
||||
} catch (error) {
|
||||
// File might not exist, which is fine
|
||||
if (error.code !== "ENOENT") {
|
||||
console.error("[ContextManager] Failed to delete context file:", error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the memory.md file containing lessons learned and common issues
|
||||
* Returns formatted string to inject into prompts
|
||||
*/
|
||||
async getMemoryContent(projectPath) {
|
||||
if (!projectPath) return "";
|
||||
|
||||
try {
|
||||
const memoryPath = path.join(projectPath, ".automaker", "memory.md");
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(memoryPath);
|
||||
} catch {
|
||||
// File doesn't exist, return empty string
|
||||
return "";
|
||||
}
|
||||
|
||||
const content = await fs.readFile(memoryPath, "utf-8");
|
||||
|
||||
if (!content.trim()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return `
|
||||
**🧠 Agent Memory - Previous Lessons Learned:**
|
||||
|
||||
The following memory file contains lessons learned from previous agent runs, including common issues and their solutions. Review this carefully to avoid repeating past mistakes.
|
||||
|
||||
<agent-memory>
|
||||
${content}
|
||||
</agent-memory>
|
||||
|
||||
**IMPORTANT:** If you encounter a new issue that took significant debugging effort to resolve, add it to the memory file at \`.automaker/memory.md\` in a concise format:
|
||||
- Issue title
|
||||
- Problem description (1-2 sentences)
|
||||
- Solution/fix (with code example if helpful)
|
||||
|
||||
This helps future agent runs avoid the same pitfalls.
|
||||
`;
|
||||
} catch (error) {
|
||||
console.error("[ContextManager] Failed to read memory file:", error);
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List context files from .automaker/context/ directory and get previews
|
||||
* Returns a formatted string with file names and first 50 lines of each file
|
||||
*/
|
||||
async getContextFilesPreview(projectPath) {
|
||||
if (!projectPath) return "";
|
||||
|
||||
try {
|
||||
const contextDir = path.join(projectPath, ".automaker", "context");
|
||||
|
||||
// Check if directory exists
|
||||
try {
|
||||
await fs.access(contextDir);
|
||||
} catch {
|
||||
// Directory doesn't exist, return empty string
|
||||
return "";
|
||||
}
|
||||
|
||||
// Read directory contents
|
||||
const entries = await fs.readdir(contextDir, { withFileTypes: true });
|
||||
const files = entries
|
||||
.filter((entry) => entry.isFile())
|
||||
.map((entry) => entry.name)
|
||||
.sort();
|
||||
|
||||
if (files.length === 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
// Build preview string
|
||||
const previews = [];
|
||||
previews.push(`\n**📁 Context Files Available:**\n`);
|
||||
previews.push(
|
||||
`The following context files are available in \`.automaker/context/\` directory.`
|
||||
);
|
||||
previews.push(
|
||||
`These files contain additional context that may be relevant to your work.`
|
||||
);
|
||||
previews.push(
|
||||
`You can read them in full using the Read tool if needed.\n`
|
||||
);
|
||||
|
||||
for (const fileName of files) {
|
||||
try {
|
||||
const filePath = path.join(contextDir, fileName);
|
||||
const content = await fs.readFile(filePath, "utf-8");
|
||||
const lines = content.split("\n");
|
||||
const previewLines = lines.slice(0, 50);
|
||||
const preview = previewLines.join("\n");
|
||||
const hasMore = lines.length > 50;
|
||||
|
||||
previews.push(`\n**File: ${fileName}**`);
|
||||
if (hasMore) {
|
||||
previews.push(
|
||||
`(Showing first 50 of ${lines.length} lines - use Read tool to see full content)`
|
||||
);
|
||||
}
|
||||
previews.push(`\`\`\``);
|
||||
previews.push(preview);
|
||||
previews.push(`\`\`\`\n`);
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`[ContextManager] Failed to read context file ${fileName}:`,
|
||||
error
|
||||
);
|
||||
previews.push(`\n**File: ${fileName}** (Error reading file)\n`);
|
||||
}
|
||||
}
|
||||
|
||||
return previews.join("\n");
|
||||
} catch (error) {
|
||||
console.error("[ContextManager] Failed to list context files:", error);
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the initial git state before a feature starts executing
|
||||
* This captures all files that were already modified before the AI agent started
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {string} featureId - Feature ID
|
||||
* @returns {Promise<{modifiedFiles: string[], untrackedFiles: string[]}>}
|
||||
*/
|
||||
async saveInitialGitState(projectPath, featureId) {
|
||||
if (!projectPath) return { modifiedFiles: [], untrackedFiles: [] };
|
||||
|
||||
try {
|
||||
const { execSync } = require("child_process");
|
||||
const featureDir = path.join(
|
||||
projectPath,
|
||||
".automaker",
|
||||
"features",
|
||||
featureId
|
||||
);
|
||||
|
||||
// Ensure feature directory exists
|
||||
try {
|
||||
await fs.access(featureDir);
|
||||
} catch {
|
||||
await fs.mkdir(featureDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Get list of modified files (both staged and unstaged)
|
||||
let modifiedFiles = [];
|
||||
try {
|
||||
const modifiedOutput = execSync("git diff --name-only HEAD", {
|
||||
cwd: projectPath,
|
||||
encoding: "utf-8",
|
||||
}).trim();
|
||||
if (modifiedOutput) {
|
||||
modifiedFiles = modifiedOutput.split("\n").filter(Boolean);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(
|
||||
"[ContextManager] No modified files or git error:",
|
||||
error.message
|
||||
);
|
||||
}
|
||||
|
||||
// Get list of untracked files
|
||||
let untrackedFiles = [];
|
||||
try {
|
||||
const untrackedOutput = execSync(
|
||||
"git ls-files --others --exclude-standard",
|
||||
{
|
||||
cwd: projectPath,
|
||||
encoding: "utf-8",
|
||||
}
|
||||
).trim();
|
||||
if (untrackedOutput) {
|
||||
untrackedFiles = untrackedOutput.split("\n").filter(Boolean);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(
|
||||
"[ContextManager] Error getting untracked files:",
|
||||
error.message
|
||||
);
|
||||
}
|
||||
|
||||
// Save the initial state to a JSON file
|
||||
const stateFile = path.join(featureDir, "git-state.json");
|
||||
const state = {
|
||||
timestamp: new Date().toISOString(),
|
||||
modifiedFiles,
|
||||
untrackedFiles,
|
||||
};
|
||||
|
||||
await fs.writeFile(stateFile, JSON.stringify(state, null, 2), "utf-8");
|
||||
console.log(
|
||||
`[ContextManager] Saved initial git state for ${featureId}:`,
|
||||
{
|
||||
modifiedCount: modifiedFiles.length,
|
||||
untrackedCount: untrackedFiles.length,
|
||||
}
|
||||
);
|
||||
|
||||
return state;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
"[ContextManager] Failed to save initial git state:",
|
||||
error
|
||||
);
|
||||
return { modifiedFiles: [], untrackedFiles: [] };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the initial git state saved before a feature started executing
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {string} featureId - Feature ID
|
||||
* @returns {Promise<{modifiedFiles: string[], untrackedFiles: string[], timestamp: string} | null>}
|
||||
*/
|
||||
async getInitialGitState(projectPath, featureId) {
|
||||
if (!projectPath) return null;
|
||||
|
||||
try {
|
||||
const stateFile = path.join(
|
||||
projectPath,
|
||||
".automaker",
|
||||
"features",
|
||||
featureId,
|
||||
"git-state.json"
|
||||
);
|
||||
const content = await fs.readFile(stateFile, "utf-8");
|
||||
return JSON.parse(content);
|
||||
} catch (error) {
|
||||
console.log(
|
||||
`[ContextManager] No initial git state found for ${featureId}`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the git state file for a feature
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {string} featureId - Feature ID
|
||||
*/
|
||||
async deleteGitStateFile(projectPath, featureId) {
|
||||
if (!projectPath) return;
|
||||
|
||||
try {
|
||||
const stateFile = path.join(
|
||||
projectPath,
|
||||
".automaker",
|
||||
"features",
|
||||
featureId,
|
||||
"git-state.json"
|
||||
);
|
||||
await fs.unlink(stateFile);
|
||||
console.log(`[ContextManager] Deleted git state file for ${featureId}`);
|
||||
} catch (error) {
|
||||
// File might not exist, which is fine
|
||||
if (error.code !== "ENOENT") {
|
||||
console.error(
|
||||
"[ContextManager] Failed to delete git state file:",
|
||||
error
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate which files were changed during the AI session
|
||||
* by comparing current git state with the saved initial state
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {string} featureId - Feature ID
|
||||
* @returns {Promise<{newFiles: string[], modifiedFiles: string[]}>}
|
||||
*/
|
||||
async getFilesChangedDuringSession(projectPath, featureId) {
|
||||
if (!projectPath) return { newFiles: [], modifiedFiles: [] };
|
||||
|
||||
try {
|
||||
const { execSync } = require("child_process");
|
||||
|
||||
// Get initial state
|
||||
const initialState = await this.getInitialGitState(
|
||||
projectPath,
|
||||
featureId
|
||||
);
|
||||
|
||||
// Get current state
|
||||
let currentModified = [];
|
||||
try {
|
||||
const modifiedOutput = execSync("git diff --name-only HEAD", {
|
||||
cwd: projectPath,
|
||||
encoding: "utf-8",
|
||||
}).trim();
|
||||
if (modifiedOutput) {
|
||||
currentModified = modifiedOutput.split("\n").filter(Boolean);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("[ContextManager] No modified files or git error");
|
||||
}
|
||||
|
||||
let currentUntracked = [];
|
||||
try {
|
||||
const untrackedOutput = execSync(
|
||||
"git ls-files --others --exclude-standard",
|
||||
{
|
||||
cwd: projectPath,
|
||||
encoding: "utf-8",
|
||||
}
|
||||
).trim();
|
||||
if (untrackedOutput) {
|
||||
currentUntracked = untrackedOutput.split("\n").filter(Boolean);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("[ContextManager] Error getting untracked files");
|
||||
}
|
||||
|
||||
if (!initialState) {
|
||||
// No initial state - all current changes are considered from this session
|
||||
console.log(
|
||||
"[ContextManager] No initial state found, returning all current changes"
|
||||
);
|
||||
return {
|
||||
newFiles: currentUntracked,
|
||||
modifiedFiles: currentModified,
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate files that are new since the session started
|
||||
const initialModifiedSet = new Set(initialState.modifiedFiles || []);
|
||||
const initialUntrackedSet = new Set(initialState.untrackedFiles || []);
|
||||
|
||||
// New files = current untracked - initial untracked
|
||||
const newFiles = currentUntracked.filter(
|
||||
(f) => !initialUntrackedSet.has(f)
|
||||
);
|
||||
|
||||
// Modified files = current modified - initial modified
|
||||
const modifiedFiles = currentModified.filter(
|
||||
(f) => !initialModifiedSet.has(f)
|
||||
);
|
||||
|
||||
console.log(
|
||||
`[ContextManager] Files changed during session for ${featureId}:`,
|
||||
{
|
||||
newFilesCount: newFiles.length,
|
||||
modifiedFilesCount: modifiedFiles.length,
|
||||
newFiles,
|
||||
modifiedFiles,
|
||||
}
|
||||
);
|
||||
|
||||
return { newFiles, modifiedFiles };
|
||||
} catch (error) {
|
||||
console.error(
|
||||
"[ContextManager] Failed to calculate changed files:",
|
||||
error
|
||||
);
|
||||
return { newFiles: [], modifiedFiles: [] };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new ContextManager();
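// Illustrative sketch (not part of the original file): the intended snapshot/diff
// flow around an agent run. Paths and IDs are placeholders.
//
//   const contextManager = require('./context-manager');
//   await contextManager.saveInitialGitState('/path/to/project', 'feature-123');
//   // ... agent runs and edits files ...
//   const { newFiles, modifiedFiles } =
//     await contextManager.getFilesChangedDuringSession('/path/to/project', 'feature-123');
//   await contextManager.deleteGitStateFile('/path/to/project', 'feature-123');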
|
||||
File diff suppressed because it is too large
@@ -1,500 +0,0 @@
|
||||
const path = require("path");
|
||||
const fs = require("fs/promises");
|
||||
|
||||
/**
|
||||
* Feature Loader - Handles loading and managing features from individual feature folders
|
||||
* Each feature is stored in .automaker/features/{featureId}/feature.json
|
||||
*/
|
||||
class FeatureLoader {
|
||||
/**
|
||||
* Get the features directory path
|
||||
*/
|
||||
getFeaturesDir(projectPath) {
|
||||
return path.join(projectPath, ".automaker", "features");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path to a specific feature folder
|
||||
*/
|
||||
getFeatureDir(projectPath, featureId) {
|
||||
return path.join(this.getFeaturesDir(projectPath), featureId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path to a feature's feature.json file
|
||||
*/
|
||||
getFeatureJsonPath(projectPath, featureId) {
|
||||
return path.join(
|
||||
this.getFeatureDir(projectPath, featureId),
|
||||
"feature.json"
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path to a feature's agent-output.md file
|
||||
*/
|
||||
getAgentOutputPath(projectPath, featureId) {
|
||||
return path.join(
|
||||
this.getFeatureDir(projectPath, featureId),
|
||||
"agent-output.md"
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a new feature ID
|
||||
*/
|
||||
generateFeatureId() {
|
||||
return `feature-${Date.now()}-${Math.random()
|
||||
.toString(36)
|
||||
.substring(2, 11)}`;
|
||||
}
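  // Illustrative note (not part of the original file): generateFeatureId() produces
  // IDs shaped like "feature-<epoch millis>-<random base36 suffix>", for example
  // roughly "feature-1718000000000-4fzyo82mv"; getAll() later sorts by the embedded timestamp.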
|
||||
|
||||
/**
|
||||
* Ensure all image paths for a feature are stored within the feature directory
|
||||
*/
|
||||
async ensureFeatureImages(projectPath, featureId, feature) {
|
||||
if (
|
||||
!feature ||
|
||||
!Array.isArray(feature.imagePaths) ||
|
||||
feature.imagePaths.length === 0
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const featureDir = this.getFeatureDir(projectPath, featureId);
|
||||
const featureImagesDir = path.join(featureDir, "images");
|
||||
await fs.mkdir(featureImagesDir, { recursive: true });
|
||||
|
||||
const updatedImagePaths = [];
|
||||
|
||||
for (const entry of feature.imagePaths) {
|
||||
const isStringEntry = typeof entry === "string";
|
||||
const currentPathValue = isStringEntry ? entry : entry.path;
|
||||
|
||||
if (!currentPathValue) {
|
||||
updatedImagePaths.push(entry);
|
||||
continue;
|
||||
}
|
||||
|
||||
let resolvedCurrentPath = currentPathValue;
|
||||
if (!path.isAbsolute(resolvedCurrentPath)) {
|
||||
resolvedCurrentPath = path.join(projectPath, resolvedCurrentPath);
|
||||
}
|
||||
resolvedCurrentPath = path.normalize(resolvedCurrentPath);
|
||||
|
||||
// Skip if file doesn't exist
|
||||
try {
|
||||
await fs.access(resolvedCurrentPath);
|
||||
} catch {
|
||||
console.warn(
|
||||
`[FeatureLoader] Image file missing for ${featureId}: ${resolvedCurrentPath}`
|
||||
);
|
||||
updatedImagePaths.push(entry);
|
||||
continue;
|
||||
}
|
||||
|
||||
const relativeToFeatureImages = path.relative(
|
||||
featureImagesDir,
|
||||
resolvedCurrentPath
|
||||
);
|
||||
const alreadyInFeatureDir =
|
||||
relativeToFeatureImages === "" ||
|
||||
(!relativeToFeatureImages.startsWith("..") &&
|
||||
!path.isAbsolute(relativeToFeatureImages));
|
||||
|
||||
let finalPath = resolvedCurrentPath;
|
||||
|
||||
if (!alreadyInFeatureDir) {
|
||||
const originalName = path.basename(resolvedCurrentPath);
|
||||
let targetPath = path.join(featureImagesDir, originalName);
|
||||
|
||||
// Avoid overwriting files by appending a counter if needed
|
||||
let counter = 1;
|
||||
while (true) {
|
||||
try {
|
||||
await fs.access(targetPath);
|
||||
const parsed = path.parse(originalName);
|
||||
targetPath = path.join(
|
||||
featureImagesDir,
|
||||
`${parsed.name}-${counter}${parsed.ext}`
|
||||
);
|
||||
counter += 1;
|
||||
} catch {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.rename(resolvedCurrentPath, targetPath);
|
||||
finalPath = targetPath;
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
`[FeatureLoader] Failed to move image ${resolvedCurrentPath}: ${error.message}`
|
||||
);
|
||||
updatedImagePaths.push(entry);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
updatedImagePaths.push(
|
||||
isStringEntry ? finalPath : { ...entry, path: finalPath }
|
||||
);
|
||||
}
|
||||
|
||||
feature.imagePaths = updatedImagePaths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all features for a project
|
||||
*/
|
||||
async getAll(projectPath) {
|
||||
try {
|
||||
const featuresDir = this.getFeaturesDir(projectPath);
|
||||
|
||||
// Check if features directory exists
|
||||
try {
|
||||
await fs.access(featuresDir);
|
||||
} catch {
|
||||
// Directory doesn't exist, return empty array
|
||||
return [];
|
||||
}
|
||||
|
||||
// Read all feature directories
|
||||
const entries = await fs.readdir(featuresDir, { withFileTypes: true });
|
||||
const featureDirs = entries.filter((entry) => entry.isDirectory());
|
||||
|
||||
// Load each feature
|
||||
const features = [];
|
||||
for (const dir of featureDirs) {
|
||||
const featureId = dir.name;
|
||||
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
|
||||
|
||||
try {
|
||||
// Read feature.json directly - handle ENOENT in catch block
|
||||
// This avoids TOCTOU race condition from checking with fs.access first
|
||||
const content = await fs.readFile(featureJsonPath, "utf-8");
|
||||
const feature = JSON.parse(content);
|
||||
|
||||
// Validate that the feature has required fields
|
||||
if (!feature.id) {
|
||||
console.warn(
|
||||
`[FeatureLoader] Feature ${featureId} missing required 'id' field, skipping`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
features.push(feature);
|
||||
} catch (error) {
|
||||
// Handle different error types appropriately
|
||||
if (error.code === "ENOENT") {
|
||||
// File doesn't exist - this is expected for incomplete feature directories
|
||||
// Skip silently (feature.json not yet created or was removed)
|
||||
continue;
|
||||
} else if (error instanceof SyntaxError) {
|
||||
// JSON parse error - log as warning since file exists but is malformed
|
||||
console.warn(
|
||||
`[FeatureLoader] Failed to parse feature.json for ${featureId}: ${error.message}`
|
||||
);
|
||||
} else {
|
||||
// Other errors - log as error
|
||||
console.error(
|
||||
`[FeatureLoader] Failed to load feature ${featureId}:`,
|
||||
error.message || error
|
||||
);
|
||||
}
|
||||
// Continue loading other features
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by creation order (feature IDs contain timestamp)
|
||||
features.sort((a, b) => {
|
||||
const aTime = a.id ? parseInt(a.id.split("-")[1] || "0") : 0;
|
||||
const bTime = b.id ? parseInt(b.id.split("-")[1] || "0") : 0;
|
||||
return aTime - bTime;
|
||||
});
|
||||
|
||||
return features;
|
||||
} catch (error) {
|
||||
console.error("[FeatureLoader] Failed to get all features:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single feature by ID
|
||||
*/
|
||||
async get(projectPath, featureId) {
|
||||
try {
|
||||
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
|
||||
const content = await fs.readFile(featureJsonPath, "utf-8");
|
||||
return JSON.parse(content);
|
||||
} catch (error) {
|
||||
if (error.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
console.error(
|
||||
`[FeatureLoader] Failed to get feature ${featureId}:`,
|
||||
error
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new feature
|
||||
*/
|
||||
async create(projectPath, featureData) {
|
||||
const featureId = featureData.id || this.generateFeatureId();
|
||||
const featureDir = this.getFeatureDir(projectPath, featureId);
|
||||
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
|
||||
|
||||
// Ensure features directory exists
|
||||
const featuresDir = this.getFeaturesDir(projectPath);
|
||||
await fs.mkdir(featuresDir, { recursive: true });
|
||||
|
||||
// Create feature directory
|
||||
await fs.mkdir(featureDir, { recursive: true });
|
||||
|
||||
// Ensure feature has an ID
|
||||
const feature = { ...featureData, id: featureId };
|
||||
|
||||
// Move any uploaded images into the feature directory
|
||||
await this.ensureFeatureImages(projectPath, featureId, feature);
|
||||
|
||||
// Write feature.json
|
||||
await fs.writeFile(
|
||||
featureJsonPath,
|
||||
JSON.stringify(feature, null, 2),
|
||||
"utf-8"
|
||||
);
|
||||
|
||||
console.log(`[FeatureLoader] Created feature ${featureId}`);
|
||||
return feature;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a feature (partial updates supported)
|
||||
*/
|
||||
async update(projectPath, featureId, updates) {
|
||||
try {
|
||||
const feature = await this.get(projectPath, featureId);
|
||||
if (!feature) {
|
||||
throw new Error(`Feature ${featureId} not found`);
|
||||
}
|
||||
|
||||
// Merge updates
|
||||
const updatedFeature = { ...feature, ...updates };
|
||||
|
||||
// Move any new images into the feature directory
|
||||
await this.ensureFeatureImages(projectPath, featureId, updatedFeature);
|
||||
|
||||
// Write back to file
|
||||
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
|
||||
await fs.writeFile(
|
||||
featureJsonPath,
|
||||
JSON.stringify(updatedFeature, null, 2),
|
||||
"utf-8"
|
||||
);
|
||||
|
||||
console.log(`[FeatureLoader] Updated feature ${featureId}`);
|
||||
return updatedFeature;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`[FeatureLoader] Failed to update feature ${featureId}:`,
|
||||
error
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a feature and its entire folder
|
||||
*/
|
||||
async delete(projectPath, featureId) {
|
||||
try {
|
||||
const featureDir = this.getFeatureDir(projectPath, featureId);
|
||||
await fs.rm(featureDir, { recursive: true, force: true });
|
||||
console.log(`[FeatureLoader] Deleted feature ${featureId}`);
|
||||
} catch (error) {
|
||||
if (error.code === "ENOENT") {
|
||||
// Feature doesn't exist, that's fine
|
||||
return;
|
||||
}
|
||||
console.error(
|
||||
`[FeatureLoader] Failed to delete feature ${featureId}:`,
|
||||
error
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get agent output for a feature
|
||||
*/
|
||||
async getAgentOutput(projectPath, featureId) {
|
||||
try {
|
||||
const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
|
||||
const content = await fs.readFile(agentOutputPath, "utf-8");
|
||||
return content;
|
||||
} catch (error) {
|
||||
if (error.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
console.error(
|
||||
`[FeatureLoader] Failed to get agent output for ${featureId}:`,
|
||||
error
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Legacy methods for backward compatibility (used by backend services)
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Load all features for a project (legacy API)
|
||||
* Features are stored in .automaker/features/{id}/feature.json
|
||||
*/
|
||||
async loadFeatures(projectPath) {
|
||||
return await this.getAll(projectPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update feature status (legacy API)
|
||||
* Features are stored in .automaker/features/{id}/feature.json
|
||||
* Creates the feature if it doesn't exist.
|
||||
* @param {string} featureId - The ID of the feature to update
|
||||
* @param {string} status - The new status
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {Object} options - Options object for optional parameters
|
||||
* @param {string} [options.summary] - Optional summary of what was done
|
||||
* @param {string} [options.error] - Optional error message if feature errored
|
||||
* @param {string} [options.description] - Optional detailed description
|
||||
* @param {string} [options.category] - Optional category/phase
|
||||
* @param {string[]} [options.steps] - Optional array of implementation steps
|
||||
*/
|
||||
async updateFeatureStatus(featureId, status, projectPath, options = {}) {
|
||||
const { summary, error, description, category, steps } = options;
|
||||
// Check if feature exists
|
||||
const existingFeature = await this.get(projectPath, featureId);
|
||||
|
||||
if (!existingFeature) {
|
||||
// Feature doesn't exist - create it with all required fields
|
||||
console.log(`[FeatureLoader] Feature ${featureId} not found - creating new feature`);
|
||||
const newFeature = {
|
||||
id: featureId,
|
||||
title: featureId.split('-').map(word => word.charAt(0).toUpperCase() + word.slice(1)).join(' '),
|
||||
description: description || summary || '', // Use provided description, fall back to summary
|
||||
category: category || "Uncategorized",
|
||||
steps: steps || [],
|
||||
status: status,
|
||||
images: [],
|
||||
imagePaths: [],
|
||||
skipTests: false, // Auto-generated features should run tests by default
|
||||
model: "sonnet",
|
||||
thinkingLevel: "none",
|
||||
summary: summary || description || '',
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
if (error !== undefined) {
|
||||
newFeature.error = error;
|
||||
}
|
||||
await this.create(projectPath, newFeature);
|
||||
console.log(
|
||||
`[FeatureLoader] Created feature ${featureId}: status=${status}, category=${category || "Uncategorized"}, steps=${steps?.length || 0}${
|
||||
summary ? `, summary="${summary}"` : ""
|
||||
}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Feature exists - update it
|
||||
const updates = { status };
|
||||
if (summary !== undefined) {
|
||||
updates.summary = summary;
|
||||
// Also update description if it's empty or not set
|
||||
if (!existingFeature.description) {
|
||||
updates.description = summary;
|
||||
}
|
||||
}
|
||||
if (description !== undefined) {
|
||||
updates.description = description;
|
||||
}
|
||||
if (category !== undefined) {
|
||||
updates.category = category;
|
||||
}
|
||||
if (steps !== undefined && Array.isArray(steps)) {
|
||||
updates.steps = steps;
|
||||
}
|
||||
if (error !== undefined) {
|
||||
updates.error = error;
|
||||
} else {
|
||||
// Clear error if not provided
|
||||
if (existingFeature.error) {
|
||||
updates.error = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure required fields exist (for features created before this fix)
|
||||
if (!existingFeature.category && !updates.category) updates.category = "Uncategorized";
|
||||
if (!existingFeature.steps && !updates.steps) updates.steps = [];
|
||||
if (!existingFeature.images) updates.images = [];
|
||||
if (!existingFeature.imagePaths) updates.imagePaths = [];
|
||||
if (existingFeature.skipTests === undefined) updates.skipTests = false;
|
||||
if (!existingFeature.model) updates.model = "sonnet";
|
||||
if (!existingFeature.thinkingLevel) updates.thinkingLevel = "none";
|
||||
|
||||
await this.update(projectPath, featureId, updates);
|
||||
console.log(
|
||||
`[FeatureLoader] Updated feature ${featureId}: status=${status}${
|
||||
category ? `, category="${category}"` : ""
|
||||
}${steps ? `, steps=${steps.length}` : ""}${
|
||||
summary ? `, summary="${summary}"` : ""
|
||||
}`
|
||||
);
|
||||
}
|
||||
|
||||
/**
* Select the next feature to implement.
* Prioritizes the earliest feature in the list that is not verified or waiting_approval.
*/
selectNextFeature(features) {
|
||||
// Find first feature that is in backlog or in_progress status
|
||||
// Skip verified and waiting_approval (which needs user input)
|
||||
return features.find(
|
||||
(f) => f.status !== "verified" && f.status !== "waiting_approval"
|
||||
);
|
||||
}
|
||||
|
||||
/**
* Update worktree info for a feature (legacy API)
* Features are stored in .automaker/features/{id}/feature.json
* @param {string} featureId - The ID of the feature to update
* @param {string} projectPath - Path to the project
* @param {string|null} worktreePath - Path to the worktree (null to clear)
* @param {string|null} branchName - Name of the feature branch (null to clear)
*/
async updateFeatureWorktree(
|
||||
featureId,
|
||||
projectPath,
|
||||
worktreePath,
|
||||
branchName
|
||||
) {
|
||||
const updates = {};
|
||||
if (worktreePath) {
|
||||
updates.worktreePath = worktreePath;
|
||||
updates.branchName = branchName;
|
||||
} else {
|
||||
updates.worktreePath = null;
|
||||
updates.branchName = null;
|
||||
}
|
||||
|
||||
await this.update(projectPath, featureId, updates);
|
||||
console.log(
|
||||
`[FeatureLoader] Updated feature ${featureId}: worktreePath=${worktreePath}, branchName=${branchName}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new FeatureLoader();
|
||||
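For reference, a minimal usage sketch of the legacy updateFeatureStatus API above; the feature id, summary, and steps are hypothetical, and the call shape follows the JSDoc:

const featureLoader = require('./feature-loader');

async function markFeatureVerified(projectPath) {
  // Creates the feature if it does not exist yet, otherwise updates it in place.
  await featureLoader.updateFeatureStatus('dark-mode-toggle', 'verified', projectPath, {
    summary: 'Added dark mode toggle',
    category: 'UI/UX',
    steps: ['Add theme context', 'Wire toggle into settings page'],
  });
}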
@@ -1,379 +0,0 @@
|
||||
const { query, AbortError } = require("@anthropic-ai/claude-agent-sdk");
|
||||
const promptBuilder = require("./prompt-builder");
|
||||
|
||||
/**
* Feature Suggestions Service - Analyzes project and generates feature suggestions
*/
class FeatureSuggestionsService {
|
||||
constructor() {
|
||||
this.runningAnalysis = null;
|
||||
}
|
||||
|
||||
/**
* Generate feature suggestions by analyzing the project
* @param {string} projectPath - Path to the project
* @param {Function} sendToRenderer - Function to send events to renderer
* @param {Object} execution - Execution context with abort controller
* @param {string} suggestionType - Type of suggestions: "features", "refactoring", "security", "performance"
*/
async generateSuggestions(projectPath, sendToRenderer, execution, suggestionType = "features") {
|
||||
console.log(
|
||||
`[FeatureSuggestions] Generating ${suggestionType} suggestions for: ${projectPath}`
|
||||
);
|
||||
|
||||
try {
|
||||
const abortController = new AbortController();
|
||||
execution.abortController = abortController;
|
||||
|
||||
const options = {
|
||||
model: "claude-sonnet-4-20250514",
|
||||
systemPrompt: this.getSystemPrompt(suggestionType),
|
||||
maxTurns: 50,
|
||||
cwd: projectPath,
|
||||
allowedTools: ["Read", "Glob", "Grep", "Bash"],
|
||||
permissionMode: "acceptEdits",
|
||||
sandbox: {
|
||||
enabled: true,
|
||||
autoAllowBashIfSandboxed: true,
|
||||
},
|
||||
abortController: abortController,
|
||||
};
|
||||
|
||||
const prompt = this.buildAnalysisPrompt(suggestionType);
|
||||
|
||||
sendToRenderer({
|
||||
type: "suggestions_progress",
|
||||
content: "Starting project analysis...\n",
|
||||
});
|
||||
|
||||
const currentQuery = query({ prompt, options });
|
||||
execution.query = currentQuery;
|
||||
|
||||
let fullResponse = "";
|
||||
for await (const msg of currentQuery) {
|
||||
if (!execution.isActive()) break;
|
||||
|
||||
if (msg.type === "assistant" && msg.message?.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === "text") {
|
||||
fullResponse += block.text;
|
||||
sendToRenderer({
|
||||
type: "suggestions_progress",
|
||||
content: block.text,
|
||||
});
|
||||
} else if (block.type === "tool_use") {
|
||||
sendToRenderer({
|
||||
type: "suggestions_tool",
|
||||
tool: block.name,
|
||||
input: block.input,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
execution.query = null;
|
||||
execution.abortController = null;
|
||||
|
||||
// Parse the suggestions from the response
|
||||
const suggestions = this.parseSuggestions(fullResponse);
|
||||
|
||||
sendToRenderer({
|
||||
type: "suggestions_complete",
|
||||
suggestions: suggestions,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
suggestions: suggestions,
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof AbortError || error?.name === "AbortError") {
|
||||
console.log("[FeatureSuggestions] Analysis aborted");
|
||||
if (execution) {
|
||||
execution.abortController = null;
|
||||
execution.query = null;
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
message: "Analysis aborted",
|
||||
suggestions: [],
|
||||
};
|
||||
}
|
||||
|
||||
console.error(
|
||||
"[FeatureSuggestions] Error generating suggestions:",
|
||||
error
|
||||
);
|
||||
if (execution) {
|
||||
execution.abortController = null;
|
||||
execution.query = null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse suggestions from the LLM response
|
||||
* Looks for JSON array in the response
|
||||
*/
|
||||
parseSuggestions(response) {
|
||||
try {
|
||||
// Try to find JSON array in the response
|
||||
// Look for ```json ... ``` blocks first
|
||||
const jsonBlockMatch = response.match(/```json\s*([\s\S]*?)```/);
|
||||
if (jsonBlockMatch) {
|
||||
const parsed = JSON.parse(jsonBlockMatch[1].trim());
|
||||
if (Array.isArray(parsed)) {
|
||||
return this.validateSuggestions(parsed);
|
||||
}
|
||||
}
|
||||
|
||||
// Try to find a raw JSON array
|
||||
const jsonArrayMatch = response.match(/\[\s*\{[\s\S]*\}\s*\]/);
|
||||
if (jsonArrayMatch) {
|
||||
const parsed = JSON.parse(jsonArrayMatch[0]);
|
||||
if (Array.isArray(parsed)) {
|
||||
return this.validateSuggestions(parsed);
|
||||
}
|
||||
}
|
||||
|
||||
console.warn(
|
||||
"[FeatureSuggestions] Could not parse suggestions from response"
|
||||
);
|
||||
return [];
|
||||
} catch (error) {
|
||||
console.error("[FeatureSuggestions] Error parsing suggestions:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate and normalize suggestions
|
||||
*/
|
||||
validateSuggestions(suggestions) {
|
||||
return suggestions
|
||||
.filter((s) => s && typeof s === "object")
|
||||
.map((s, index) => ({
|
||||
id: `suggestion-${Date.now()}-${index}`,
|
||||
category: s.category || "Uncategorized",
|
||||
description: s.description || s.title || "No description",
|
||||
steps: Array.isArray(s.steps) ? s.steps : [],
|
||||
priority: typeof s.priority === "number" ? s.priority : index + 1,
|
||||
reasoning: s.reasoning || "",
|
||||
}))
|
||||
.sort((a, b) => a.priority - b.priority);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the system prompt for feature suggestion analysis
|
||||
* @param {string} suggestionType - Type of suggestions: "features", "refactoring", "security", "performance"
|
||||
*/
|
||||
getSystemPrompt(suggestionType = "features") {
|
||||
const basePrompt = `You are an expert software architect. Your job is to analyze a codebase and provide actionable suggestions.
|
||||
|
||||
You have access to file reading and search tools. Use them to understand the codebase.
|
||||
|
||||
When analyzing, look at:
|
||||
- README files and documentation
|
||||
- package.json, Cargo.toml, or similar config files for the tech stack
|
||||
- Source code structure and organization
|
||||
- Existing code patterns and implementation styles`;
|
||||
|
||||
switch (suggestionType) {
|
||||
case "refactoring":
|
||||
return `${basePrompt}
|
||||
|
||||
Your specific focus is on **refactoring suggestions**. You should:
|
||||
1. Identify code smells and areas that need cleanup
|
||||
2. Find duplicated code that could be consolidated
|
||||
3. Spot overly complex functions or classes that should be broken down
|
||||
4. Look for inconsistent naming conventions or coding patterns
|
||||
5. Find opportunities to improve code organization and modularity
|
||||
6. Identify violations of SOLID principles or common design patterns
|
||||
7. Look for dead code or unused dependencies
|
||||
|
||||
Prioritize suggestions by:
|
||||
- Impact on maintainability
|
||||
- Risk level (lower risk refactorings first)
|
||||
- Complexity of the refactoring`;
|
||||
|
||||
case "security":
|
||||
return `${basePrompt}
|
||||
|
||||
Your specific focus is on **security vulnerabilities and improvements**. You should:
|
||||
1. Identify potential security vulnerabilities (OWASP Top 10)
|
||||
2. Look for hardcoded secrets, API keys, or credentials
|
||||
3. Check for proper input validation and sanitization
|
||||
4. Identify SQL injection, XSS, or command injection risks
|
||||
5. Review authentication and authorization patterns
|
||||
6. Check for secure communication (HTTPS, encryption)
|
||||
7. Look for insecure dependencies or outdated packages
|
||||
8. Review error handling that might leak sensitive information
|
||||
9. Check for proper session management
|
||||
10. Identify insecure file handling or path traversal risks
|
||||
|
||||
Prioritize by severity:
|
||||
- Critical: Exploitable vulnerabilities with high impact
|
||||
- High: Security issues that could lead to data exposure
|
||||
- Medium: Best practice violations that weaken security
|
||||
- Low: Minor improvements to security posture`;
|
||||
|
||||
case "performance":
|
||||
return `${basePrompt}
|
||||
|
||||
Your specific focus is on **performance issues and optimizations**. You should:
|
||||
1. Identify N+1 query problems or inefficient database access
|
||||
2. Look for unnecessary re-renders in React/frontend code
|
||||
3. Find opportunities for caching or memoization
|
||||
4. Identify large bundle sizes or unoptimized imports
|
||||
5. Look for blocking operations that could be async
|
||||
6. Find memory leaks or inefficient memory usage
|
||||
7. Identify slow algorithms or data structure choices
|
||||
8. Look for missing indexes in database schemas
|
||||
9. Find opportunities for lazy loading or code splitting
|
||||
10. Identify unnecessary network requests or API calls
|
||||
|
||||
Prioritize by:
|
||||
- Impact on user experience
|
||||
- Frequency of the slow path
|
||||
- Ease of implementation`;
|
||||
|
||||
default: // "features"
|
||||
return `${basePrompt}
|
||||
|
||||
Your specific focus is on **missing features and improvements**. You should:
|
||||
1. Identify what the application does and what features it currently has
|
||||
2. Look at the .automaker/app_spec.txt file if it exists
|
||||
3. Generate a comprehensive list of missing features that would be valuable to users
|
||||
4. Consider user experience improvements
|
||||
5. Consider developer experience improvements
|
||||
6. Look at common patterns in similar applications
|
||||
|
||||
Prioritize features by:
|
||||
- Impact on users
|
||||
- Alignment with project goals
|
||||
- Complexity of implementation`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the prompt for analyzing the project
|
||||
* @param {string} suggestionType - Type of suggestions: "features", "refactoring", "security", "performance"
|
||||
*/
|
||||
buildAnalysisPrompt(suggestionType = "features") {
|
||||
const commonIntro = `Analyze this project and generate a list of actionable suggestions.
|
||||
|
||||
**Your Task:**
|
||||
|
||||
1. First, explore the project structure:
|
||||
- Read README.md, package.json, or similar config files
|
||||
- Scan the source code directory structure
|
||||
- Identify the tech stack and frameworks used
|
||||
- Look at existing code and how it's implemented
|
||||
|
||||
2. Identify what the application does:
|
||||
- What is the main purpose?
|
||||
- What patterns and conventions are used?
|
||||
`;
|
||||
|
||||
const commonOutput = `
|
||||
**CRITICAL: Output your suggestions as a JSON array** at the end of your response, formatted like this:
|
||||
|
||||
\`\`\`json
|
||||
[
|
||||
{
|
||||
"category": "Category Name",
|
||||
"description": "Clear description of the suggestion",
|
||||
"steps": [
|
||||
"Step 1 to implement",
|
||||
"Step 2 to implement",
|
||||
"Step 3 to implement"
|
||||
],
|
||||
"priority": 1,
|
||||
"reasoning": "Why this is important"
|
||||
}
|
||||
]
|
||||
\`\`\`
|
||||
|
||||
**Important Guidelines:**
|
||||
- Generate at least 10-15 suggestions
|
||||
- Order them by priority (1 = highest priority)
|
||||
- Each suggestion should have clear, actionable steps
|
||||
- Be specific about what files might need to be modified
|
||||
- Consider the existing tech stack and patterns
|
||||
|
||||
Begin by exploring the project structure.`;
|
||||
|
||||
switch (suggestionType) {
|
||||
case "refactoring":
|
||||
return `${commonIntro}
|
||||
3. Look for refactoring opportunities:
|
||||
- Find code duplication across the codebase
|
||||
- Identify functions or classes that are too long or complex
|
||||
- Look for inconsistent patterns or naming conventions
|
||||
- Find tightly coupled code that should be decoupled
|
||||
- Identify opportunities to extract reusable utilities
|
||||
- Look for dead code or unused exports
|
||||
- Check for proper separation of concerns
|
||||
|
||||
Categories to use: "Code Smell", "Duplication", "Complexity", "Architecture", "Naming", "Dead Code", "Coupling", "Testing"
|
||||
${commonOutput}`;
|
||||
|
||||
case "security":
|
||||
return `${commonIntro}
|
||||
3. Look for security issues:
|
||||
- Check for hardcoded secrets or API keys
|
||||
- Look for potential injection vulnerabilities (SQL, XSS, command)
|
||||
- Review authentication and authorization code
|
||||
- Check input validation and sanitization
|
||||
- Look for insecure dependencies
|
||||
- Review error handling for information leakage
|
||||
- Check for proper HTTPS/TLS usage
|
||||
- Look for insecure file operations
|
||||
|
||||
Categories to use: "Critical", "High", "Medium", "Low" (based on severity)
|
||||
${commonOutput}`;
|
||||
|
||||
case "performance":
|
||||
return `${commonIntro}
|
||||
3. Look for performance issues:
|
||||
- Find N+1 queries or inefficient database access patterns
|
||||
- Look for unnecessary re-renders in React components
|
||||
- Identify missing memoization opportunities
|
||||
- Check bundle size and import patterns
|
||||
- Look for synchronous operations that could be async
|
||||
- Find potential memory leaks
|
||||
- Identify slow algorithms or data structures
|
||||
- Look for missing caching opportunities
|
||||
- Check for unnecessary network requests
|
||||
|
||||
Categories to use: "Database", "Rendering", "Memory", "Bundle Size", "Caching", "Algorithm", "Network"
|
||||
${commonOutput}`;
|
||||
|
||||
default: // "features"
|
||||
return `${commonIntro}
|
||||
3. Generate feature suggestions:
|
||||
- Think about what's missing compared to similar applications
|
||||
- Consider user experience improvements
|
||||
- Consider developer experience improvements
|
||||
- Think about performance, security, and reliability
|
||||
- Consider testing and documentation improvements
|
||||
|
||||
Categories to use: "User Experience", "Performance", "Security", "Testing", "Documentation", "Developer Experience", "Accessibility", etc.
|
||||
${commonOutput}`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the current analysis
|
||||
*/
|
||||
stop() {
|
||||
if (this.runningAnalysis && this.runningAnalysis.abortController) {
|
||||
this.runningAnalysis.abortController.abort();
|
||||
}
|
||||
this.runningAnalysis = null;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new FeatureSuggestionsService();
|
||||
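A sketch of the normalized shape each entry takes after validateSuggestions runs; the values are hypothetical, while the keys mirror the mapping above:

// One normalized suggestion as produced by validateSuggestions()
const exampleSuggestion = {
  id: 'suggestion-1733000000000-0', // `suggestion-${Date.now()}-${index}`
  category: 'User Experience',
  description: 'Add keyboard shortcuts for the Kanban board',
  steps: ['Define a shortcut map', 'Add event listeners', 'Document the shortcuts'],
  priority: 1,
  reasoning: 'Lets power users move cards without the mouse',
};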
@@ -1,185 +0,0 @@
|
||||
const { query, AbortError } = require("@anthropic-ai/claude-agent-sdk");
|
||||
const promptBuilder = require("./prompt-builder");
|
||||
const contextManager = require("./context-manager");
|
||||
const featureLoader = require("./feature-loader");
|
||||
const mcpServerFactory = require("./mcp-server-factory");
|
||||
|
||||
/**
* Feature Verifier - Handles feature verification by running tests
*/
class FeatureVerifier {
|
||||
/**
|
||||
* Verify feature tests (runs tests and checks if they pass)
|
||||
*/
|
||||
async verifyFeatureTests(feature, projectPath, sendToRenderer, execution) {
|
||||
console.log(
|
||||
`[FeatureVerifier] Verifying tests for: ${feature.description}`
|
||||
);
|
||||
|
||||
try {
|
||||
const verifyMsg = `\n✅ Verifying tests for: ${feature.description}\n`;
|
||||
await contextManager.writeToContextFile(
|
||||
projectPath,
|
||||
feature.id,
|
||||
verifyMsg
|
||||
);
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_phase",
|
||||
featureId: feature.id,
|
||||
phase: "verification",
|
||||
message: `Verifying tests for: ${feature.description}`,
|
||||
});
|
||||
|
||||
const abortController = new AbortController();
|
||||
execution.abortController = abortController;
|
||||
|
||||
// Create custom MCP server with UpdateFeatureStatus tool
|
||||
const featureToolsServer = mcpServerFactory.createFeatureToolsServer(
|
||||
featureLoader.updateFeatureStatus.bind(featureLoader),
|
||||
projectPath
|
||||
);
|
||||
|
||||
const options = {
|
||||
model: "claude-opus-4-5-20251101",
|
||||
systemPrompt: await promptBuilder.getVerificationPrompt(projectPath),
|
||||
maxTurns: 1000,
|
||||
cwd: projectPath,
|
||||
mcpServers: {
|
||||
"automaker-tools": featureToolsServer,
|
||||
},
|
||||
allowedTools: [
|
||||
"Read",
|
||||
"Write",
|
||||
"Edit",
|
||||
"Glob",
|
||||
"Grep",
|
||||
"Bash",
|
||||
"mcp__automaker-tools__UpdateFeatureStatus",
|
||||
],
|
||||
permissionMode: "acceptEdits",
|
||||
sandbox: {
|
||||
enabled: true,
|
||||
autoAllowBashIfSandboxed: true,
|
||||
},
|
||||
abortController: abortController,
|
||||
};
|
||||
|
||||
const prompt = await promptBuilder.buildVerificationPrompt(
|
||||
feature,
|
||||
projectPath
|
||||
);
|
||||
|
||||
const runningTestsMsg =
|
||||
"Running Playwright tests to verify feature implementation...\n";
|
||||
await contextManager.writeToContextFile(
|
||||
projectPath,
|
||||
feature.id,
|
||||
runningTestsMsg
|
||||
);
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_progress",
|
||||
featureId: feature.id,
|
||||
content: runningTestsMsg,
|
||||
});
|
||||
|
||||
const currentQuery = query({ prompt, options });
|
||||
execution.query = currentQuery;
|
||||
|
||||
let responseText = "";
|
||||
for await (const msg of currentQuery) {
|
||||
// Check if this specific feature was aborted
|
||||
if (!execution.isActive()) break;
|
||||
|
||||
if (msg.type === "assistant" && msg.message?.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === "text") {
|
||||
responseText += block.text;
|
||||
|
||||
await contextManager.writeToContextFile(
|
||||
projectPath,
|
||||
feature.id,
|
||||
block.text
|
||||
);
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_progress",
|
||||
featureId: feature.id,
|
||||
content: block.text,
|
||||
});
|
||||
} else if (block.type === "tool_use") {
|
||||
const toolMsg = `\n🔧 Tool: ${block.name}\n`;
|
||||
await contextManager.writeToContextFile(
|
||||
projectPath,
|
||||
feature.id,
|
||||
toolMsg
|
||||
);
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_tool",
|
||||
featureId: feature.id,
|
||||
tool: block.name,
|
||||
input: block.input,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
execution.query = null;
|
||||
execution.abortController = null;
|
||||
|
||||
// Re-load features to check if it was marked as verified or waiting_approval (for skipTests)
|
||||
const updatedFeatures = await featureLoader.loadFeatures(projectPath);
|
||||
const updatedFeature = updatedFeatures.find((f) => f.id === feature.id);
|
||||
// For skipTests features, waiting_approval is also considered a success
|
||||
const passes =
|
||||
updatedFeature?.status === "verified" ||
|
||||
(updatedFeature?.skipTests &&
|
||||
updatedFeature?.status === "waiting_approval");
|
||||
|
||||
const finalMsg = passes
|
||||
? "✓ Verification successful: All tests passed\n"
|
||||
: "✗ Tests failed or not all passing - feature remains in progress\n";
|
||||
|
||||
await contextManager.writeToContextFile(
|
||||
projectPath,
|
||||
feature.id,
|
||||
finalMsg
|
||||
);
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_progress",
|
||||
featureId: feature.id,
|
||||
content: finalMsg,
|
||||
});
|
||||
|
||||
return {
|
||||
passes,
|
||||
message: responseText.substring(0, 500),
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof AbortError || error?.name === "AbortError") {
|
||||
console.log("[FeatureVerifier] Verification aborted");
|
||||
if (execution) {
|
||||
execution.abortController = null;
|
||||
execution.query = null;
|
||||
}
|
||||
return {
|
||||
passes: false,
|
||||
message: "Verification aborted",
|
||||
};
|
||||
}
|
||||
|
||||
console.error("[FeatureVerifier] Error verifying feature:", error);
|
||||
if (execution) {
|
||||
execution.abortController = null;
|
||||
execution.query = null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new FeatureVerifier();
|
||||
@@ -1,98 +0,0 @@
|
||||
const { createSdkMcpServer, tool } = require("@anthropic-ai/claude-agent-sdk");
|
||||
const { z } = require("zod");
|
||||
const featureLoader = require("./feature-loader");
|
||||
|
||||
/**
* MCP Server Factory - Creates custom MCP servers with tools
*/
class McpServerFactory {
/**
* Create a custom MCP server with the UpdateFeatureStatus tool
* This tool allows Claude Code to safely update feature status without
* directly modifying feature files, preventing race conditions
* and accidental state corruption.
*/
createFeatureToolsServer(updateFeatureStatusCallback, projectPath) {
|
||||
return createSdkMcpServer({
|
||||
name: "automaker-tools",
|
||||
version: "1.0.0",
|
||||
tools: [
|
||||
tool(
|
||||
"UpdateFeatureStatus",
|
||||
"Create or update a feature. Use this tool to create new features with detailed information or update existing feature status. When creating features, provide comprehensive description, category, and implementation steps.",
|
||||
{
|
||||
featureId: z.string().describe("The ID of the feature (lowercase, hyphens for spaces). Example: 'user-authentication', 'budget-tracking'"),
|
||||
status: z.enum(["backlog", "todo", "in_progress", "verified"]).describe("The status for the feature. Use 'backlog' or 'todo' for new features."),
|
||||
summary: z.string().optional().describe("A brief summary of what was implemented/changed or what the feature does."),
|
||||
description: z.string().optional().describe("A detailed description of the feature. Be comprehensive - explain what the feature does, its purpose, and key functionality."),
|
||||
category: z.string().optional().describe("The category/phase for this feature. Example: 'Phase 1: Foundation', 'Phase 2: Core Logic', 'Phase 3: Polish', 'Authentication', 'UI/UX'"),
|
||||
steps: z.array(z.string()).optional().describe("Array of implementation steps. Each step should be a clear, actionable task. Example: ['Set up database schema', 'Create API endpoints', 'Build UI components', 'Add validation']")
|
||||
},
|
||||
async (args) => {
|
||||
try {
|
||||
console.log(`[McpServerFactory] UpdateFeatureStatus tool called: featureId=${args.featureId}, status=${args.status}, summary=${args.summary || "(none)"}, category=${args.category || "(none)"}, steps=${args.steps?.length || 0}`);
|
||||
console.log(`[Feature Creation] Creating/updating feature "${args.featureId}" with status "${args.status}"`);
|
||||
|
||||
// Load the feature to check skipTests flag
|
||||
const features = await featureLoader.loadFeatures(projectPath);
|
||||
const feature = features.find((f) => f.id === args.featureId);
|
||||
|
||||
if (!feature) {
|
||||
console.log(`[Feature Creation] Feature ${args.featureId} not found - this might be a new feature being created`);
|
||||
// This might be a new feature - try to proceed anyway
|
||||
}
|
||||
|
||||
// If agent tries to mark as verified but feature has skipTests=true, convert to waiting_approval
|
||||
let finalStatus = args.status;
|
||||
// Convert 'todo' to 'backlog' for consistency, but only for new features
|
||||
if (!feature && finalStatus === "todo") {
|
||||
finalStatus = "backlog";
|
||||
}
|
||||
if (feature && args.status === "verified" && feature.skipTests === true) {
|
||||
console.log(`[McpServerFactory] Feature ${args.featureId} has skipTests=true, converting verified -> waiting_approval`);
|
||||
finalStatus = "waiting_approval";
|
||||
}
|
||||
|
||||
// Call the provided callback to update feature status
|
||||
await updateFeatureStatusCallback(
|
||||
args.featureId,
|
||||
finalStatus,
|
||||
projectPath,
|
||||
{
|
||||
summary: args.summary,
|
||||
description: args.description,
|
||||
category: args.category,
|
||||
steps: args.steps,
|
||||
}
|
||||
);
|
||||
|
||||
const statusMessage = finalStatus !== args.status
|
||||
? `Successfully created/updated feature ${args.featureId} to status "${finalStatus}" (converted from "${args.status}")${args.summary ? ` - ${args.summary}` : ""}`
|
||||
: `Successfully created/updated feature ${args.featureId} to status "${finalStatus}"${args.summary ? ` - ${args.summary}` : ""}`;
|
||||
|
||||
console.log(`[Feature Creation] ✓ ${statusMessage}`);
|
||||
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: statusMessage
|
||||
}]
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("[McpServerFactory] UpdateFeatureStatus tool error:", error);
|
||||
console.error(`[Feature Creation] ✗ Failed to create/update feature ${args.featureId}: ${error.message}`);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Failed to update feature status: ${error.message}`
|
||||
}]
|
||||
};
|
||||
}
|
||||
}
|
||||
)
|
||||
]
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new McpServerFactory();
|
||||
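For illustration, the argument shape an agent might send to the UpdateFeatureStatus tool defined above; the values are hypothetical and the keys follow the zod schema:

const exampleToolArgs = {
  featureId: 'user-authentication',
  status: 'in_progress',
  summary: 'Scaffolded login form and session handling',
  description: 'Email/password login with persisted sessions',
  category: 'Phase 1: Foundation',
  steps: ['Set up database schema', 'Create API endpoints', 'Build UI components'],
};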
@@ -1,349 +0,0 @@
|
||||
#!/usr/bin/env node
/**
* Standalone STDIO MCP Server for Automaker Tools
*
* This script runs as a standalone process and communicates via JSON-RPC 2.0
* over stdin/stdout. It implements the MCP protocol to expose the UpdateFeatureStatus
* tool to Codex CLI.
*
* Environment variables:
* - AUTOMAKER_PROJECT_PATH: Path to the project directory
* - AUTOMAKER_IPC_CHANNEL: IPC channel name for callback communication (optional, uses default)
*/
|
||||
const readline = require('readline');
|
||||
const path = require('path');
|
||||
|
||||
// Redirect all console.log output to stderr to avoid polluting MCP stdout
|
||||
const originalConsoleLog = console.log;
|
||||
console.log = (...args) => {
|
||||
console.error(...args);
|
||||
};
|
||||
|
||||
// Set up readline interface for line-by-line JSON-RPC input
|
||||
// IMPORTANT: Use a separate output stream for readline to avoid interfering with JSON-RPC stdout
|
||||
// We'll write JSON-RPC responses directly to stdout, not through readline
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: null, // Don't use stdout for readline output
|
||||
terminal: false
|
||||
});
|
||||
|
||||
let initialized = false;
|
||||
let projectPath = null;
|
||||
let ipcChannel = null;
|
||||
|
||||
// Get configuration from environment
|
||||
projectPath = process.env.AUTOMAKER_PROJECT_PATH || process.cwd();
|
||||
ipcChannel = process.env.AUTOMAKER_IPC_CHANNEL || 'mcp:update-feature-status';
|
||||
|
||||
// Load dependencies (these will be available in the Electron app context)
|
||||
let featureLoader;
|
||||
let electron;
|
||||
|
||||
// Try to load Electron IPC if available (when running from Electron app)
|
||||
try {
|
||||
// In Electron, we can use IPC directly
|
||||
if (typeof require !== 'undefined') {
|
||||
// Check if we're in Electron context
|
||||
const electronModule = require('electron');
|
||||
if (electronModule && electronModule.ipcMain) {
|
||||
electron = electronModule;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// Not in Electron context, will use alternative method
|
||||
}
|
||||
|
||||
// Load feature loader
|
||||
// Try multiple paths since this script might be run from different contexts
|
||||
try {
|
||||
// First try relative path (when run from electron/services/)
|
||||
featureLoader = require('./feature-loader');
|
||||
} catch (e) {
|
||||
try {
|
||||
// Try absolute path resolution
|
||||
const featureLoaderPath = path.resolve(__dirname, 'feature-loader.js');
|
||||
delete require.cache[require.resolve(featureLoaderPath)];
|
||||
featureLoader = require(featureLoaderPath);
|
||||
} catch (e2) {
|
||||
// If still fails, try from parent directory
|
||||
try {
|
||||
featureLoader = require(path.join(__dirname, '..', 'services', 'feature-loader'));
|
||||
} catch (e3) {
|
||||
console.error('[McpServerStdio] Error loading feature-loader:', e3.message);
|
||||
console.error('[McpServerStdio] Tried paths:', [
|
||||
'./feature-loader',
|
||||
path.resolve(__dirname, 'feature-loader.js'),
|
||||
path.join(__dirname, '..', 'services', 'feature-loader')
|
||||
]);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Send JSON-RPC response
* CRITICAL: Must write directly to stdout, not via console.log
* MCP protocol requires ONLY JSON-RPC messages on stdout
*/
function sendResponse(id, result, error = null) {
|
||||
const response = {
|
||||
jsonrpc: '2.0',
|
||||
id
|
||||
};
|
||||
|
||||
if (error) {
|
||||
response.error = error;
|
||||
} else {
|
||||
response.result = result;
|
||||
}
|
||||
|
||||
// Write directly to stdout with newline (MCP uses line-delimited JSON)
|
||||
process.stdout.write(JSON.stringify(response) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Send JSON-RPC notification
|
||||
* CRITICAL: Must write directly to stdout, not via console.log
|
||||
*/
|
||||
function sendNotification(method, params) {
|
||||
const notification = {
|
||||
jsonrpc: '2.0',
|
||||
method,
|
||||
params
|
||||
};
|
||||
|
||||
// Write directly to stdout with newline (MCP uses line-delimited JSON)
|
||||
process.stdout.write(JSON.stringify(notification) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle MCP initialize request
|
||||
*/
|
||||
async function handleInitialize(params, id) {
|
||||
initialized = true;
|
||||
|
||||
sendResponse(id, {
|
||||
protocolVersion: '2024-11-05',
|
||||
capabilities: {
|
||||
tools: {}
|
||||
},
|
||||
serverInfo: {
|
||||
name: 'automaker-tools',
|
||||
version: '1.0.0'
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle tools/list request
|
||||
*/
|
||||
async function handleToolsList(params, id) {
|
||||
sendResponse(id, {
|
||||
tools: [
|
||||
{
|
||||
name: 'UpdateFeatureStatus',
|
||||
description: 'Update the status of a feature. Use this tool instead of directly modifying feature files to safely update feature status. IMPORTANT: If the feature has skipTests=true, you should NOT mark it as verified - instead it will automatically go to waiting_approval status for manual review. Always include a summary of what was done.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
featureId: {
|
||||
type: 'string',
|
||||
description: 'The ID of the feature to update'
|
||||
},
|
||||
status: {
|
||||
type: 'string',
|
||||
enum: ['backlog', 'in_progress', 'verified'],
|
||||
description: 'The new status for the feature. Note: If skipTests=true, verified will be converted to waiting_approval automatically.'
|
||||
},
|
||||
summary: {
|
||||
type: 'string',
|
||||
description: 'A brief summary of what was implemented/changed. This will be displayed on the Kanban card. Example: "Added dark mode toggle. Modified: settings.tsx, theme-provider.tsx"'
|
||||
}
|
||||
},
|
||||
required: ['featureId', 'status']
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle tools/call request
|
||||
*/
|
||||
async function handleToolsCall(params, id) {
|
||||
const { name, arguments: args } = params;
|
||||
|
||||
if (name !== 'UpdateFeatureStatus') {
|
||||
sendResponse(id, null, {
|
||||
code: -32601,
|
||||
message: `Unknown tool: ${name}`
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { featureId, status, summary } = args;
|
||||
|
||||
if (!featureId || !status) {
|
||||
sendResponse(id, null, {
|
||||
code: -32602,
|
||||
message: 'Missing required parameters: featureId and status are required'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Load the feature to check skipTests flag
|
||||
const features = await featureLoader.loadFeatures(projectPath);
|
||||
const feature = features.find((f) => f.id === featureId);
|
||||
|
||||
if (!feature) {
|
||||
sendResponse(id, null, {
|
||||
code: -32602,
|
||||
message: `Feature ${featureId} not found`
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// If agent tries to mark as verified but feature has skipTests=true, convert to waiting_approval
|
||||
let finalStatus = status;
|
||||
if (status === 'verified' && feature.skipTests === true) {
|
||||
finalStatus = 'waiting_approval';
|
||||
}
|
||||
|
||||
// Call the update callback via IPC or direct call
|
||||
// Since we're in a separate process, we need to use IPC to communicate back
|
||||
// For now, we'll call the feature loader directly since it has the update method
|
||||
await featureLoader.updateFeatureStatus(featureId, finalStatus, projectPath, { summary });
|
||||
|
||||
const statusMessage = finalStatus !== status
|
||||
? `Successfully updated feature ${featureId} to status "${finalStatus}" (converted from "${status}" because skipTests=true)${summary ? ` with summary: "${summary}"` : ''}`
|
||||
: `Successfully updated feature ${featureId} to status "${finalStatus}"${summary ? ` with summary: "${summary}"` : ''}`;
|
||||
|
||||
sendResponse(id, {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: statusMessage
|
||||
}
|
||||
]
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[McpServerStdio] UpdateFeatureStatus error:', error);
|
||||
sendResponse(id, null, {
|
||||
code: -32603,
|
||||
message: `Failed to update feature status: ${error.message}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle JSON-RPC request
|
||||
*/
|
||||
async function handleRequest(line) {
|
||||
let request;
|
||||
|
||||
try {
|
||||
request = JSON.parse(line);
|
||||
} catch (e) {
|
||||
sendResponse(null, null, {
|
||||
code: -32700,
|
||||
message: 'Parse error'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate JSON-RPC 2.0 structure
|
||||
if (request.jsonrpc !== '2.0') {
|
||||
sendResponse(request.id || null, null, {
|
||||
code: -32600,
|
||||
message: 'Invalid Request'
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const { method, params, id } = request;
|
||||
|
||||
// Handle notifications (no id)
|
||||
if (id === undefined) {
|
||||
// Handle notifications if needed
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle requests
|
||||
try {
|
||||
switch (method) {
|
||||
case 'initialize':
|
||||
await handleInitialize(params, id);
|
||||
break;
|
||||
|
||||
case 'tools/list':
|
||||
if (!initialized) {
|
||||
sendResponse(id, null, {
|
||||
code: -32002,
|
||||
message: 'Server not initialized'
|
||||
});
|
||||
return;
|
||||
}
|
||||
await handleToolsList(params, id);
|
||||
break;
|
||||
|
||||
case 'tools/call':
|
||||
if (!initialized) {
|
||||
sendResponse(id, null, {
|
||||
code: -32002,
|
||||
message: 'Server not initialized'
|
||||
});
|
||||
return;
|
||||
}
|
||||
await handleToolsCall(params, id);
|
||||
break;
|
||||
|
||||
default:
|
||||
sendResponse(id, null, {
|
||||
code: -32601,
|
||||
message: `Method not found: ${method}`
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[McpServerStdio] Error handling request:', error);
|
||||
sendResponse(id, null, {
|
||||
code: -32603,
|
||||
message: `Internal error: ${error.message}`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Process stdin line by line
|
||||
rl.on('line', async (line) => {
|
||||
if (!line.trim()) {
|
||||
return;
|
||||
}
|
||||
|
||||
await handleRequest(line);
|
||||
});
|
||||
|
||||
// Handle errors
|
||||
rl.on('error', (error) => {
|
||||
console.error('[McpServerStdio] Readline error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Handle process termination
|
||||
process.on('SIGTERM', () => {
|
||||
rl.close();
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
process.on('SIGINT', () => {
|
||||
rl.close();
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
// Log startup
|
||||
console.error('[McpServerStdio] Starting MCP server for automaker-tools');
|
||||
console.error(`[McpServerStdio] Project path: ${projectPath}`);
|
||||
console.error(`[McpServerStdio] IPC channel: ${ipcChannel}`);
|
||||
|
||||
|
||||
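A sketch of the line-delimited JSON-RPC traffic the stdio server above expects, based on the handlers it implements; the ids and argument values are illustrative:

// stdin  -> {"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}
// stdout <- {"jsonrpc":"2.0","id":1,"result":{"protocolVersion":"2024-11-05","capabilities":{"tools":{}},"serverInfo":{"name":"automaker-tools","version":"1.0.0"}}}
// stdin  -> {"jsonrpc":"2.0","id":2,"method":"tools/list","params":{}}
// stdin  -> {"jsonrpc":"2.0","id":3,"method":"tools/call","params":{"name":"UpdateFeatureStatus","arguments":{"featureId":"dark-mode-toggle","status":"verified","summary":"Added dark mode toggle"}}}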
@@ -1,524 +0,0 @@
|
||||
/**
* Model Provider Abstraction Layer
*
* This module provides an abstract interface for model providers (Claude, Codex, etc.)
* allowing the application to use different AI models through a unified API.
*/
|
||||
/**
|
||||
* Base class for model providers
|
||||
* Concrete implementations should extend this class
|
||||
*/
|
||||
class ModelProvider {
|
||||
constructor(config = {}) {
|
||||
this.config = config;
|
||||
this.name = 'base';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get provider name
|
||||
* @returns {string} Provider name
|
||||
*/
|
||||
getName() {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a query with the model provider
|
||||
* @param {Object} options Query options
|
||||
* @param {string} options.prompt The prompt to send
|
||||
* @param {string} options.model The model to use
|
||||
* @param {string} options.systemPrompt System prompt
|
||||
* @param {string} options.cwd Working directory
|
||||
* @param {number} options.maxTurns Maximum turns
|
||||
* @param {string[]} options.allowedTools Allowed tools
|
||||
* @param {Object} options.mcpServers MCP servers configuration
|
||||
* @param {AbortController} options.abortController Abort controller
|
||||
* @param {Object} options.thinking Thinking configuration
|
||||
* @returns {AsyncGenerator} Async generator yielding messages
|
||||
*/
|
||||
async *executeQuery(options) {
|
||||
throw new Error('executeQuery must be implemented by subclass');
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect if this provider's CLI/SDK is installed
|
||||
* @returns {Promise<Object>} Installation status
|
||||
*/
|
||||
async detectInstallation() {
|
||||
throw new Error('detectInstallation must be implemented by subclass');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of available models for this provider
|
||||
* @returns {Array<Object>} Array of model definitions
|
||||
*/
|
||||
getAvailableModels() {
|
||||
throw new Error('getAvailableModels must be implemented by subclass');
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate provider configuration
|
||||
* @returns {Object} Validation result { valid: boolean, errors: string[] }
|
||||
*/
|
||||
validateConfig() {
|
||||
throw new Error('validateConfig must be implemented by subclass');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the full model string for a model key
|
||||
* @param {string} modelKey Short model key (e.g., 'opus', 'gpt-5.1-codex')
|
||||
* @returns {string} Full model string
|
||||
*/
|
||||
getModelString(modelKey) {
|
||||
throw new Error('getModelString must be implemented by subclass');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if provider supports a specific feature
|
||||
* @param {string} feature Feature name (e.g., 'thinking', 'tools', 'streaming')
|
||||
* @returns {boolean} Whether the feature is supported
|
||||
*/
|
||||
supportsFeature(feature) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Claude Provider - Uses Anthropic Claude Agent SDK
|
||||
*/
|
||||
class ClaudeProvider extends ModelProvider {
|
||||
constructor(config = {}) {
|
||||
super(config);
|
||||
this.name = 'claude';
|
||||
this.sdk = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to load credentials from the app's own credentials.json file.
|
||||
* This is where we store OAuth tokens and API keys that users enter in the setup wizard.
|
||||
* Returns { oauthToken, apiKey } or null values if not found.
|
||||
*/
|
||||
loadTokenFromAppCredentials() {
|
||||
try {
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { app } = require('electron');
|
||||
const credentialsPath = path.join(app.getPath('userData'), 'credentials.json');
|
||||
|
||||
if (!fs.existsSync(credentialsPath)) {
|
||||
console.log('[ClaudeProvider] App credentials file does not exist:', credentialsPath);
|
||||
return { oauthToken: null, apiKey: null };
|
||||
}
|
||||
|
||||
const raw = fs.readFileSync(credentialsPath, 'utf-8');
|
||||
const parsed = JSON.parse(raw);
|
||||
|
||||
// Check for OAuth token first (from claude setup-token), then API key
|
||||
const oauthToken = parsed.anthropic_oauth_token || null;
|
||||
const apiKey = parsed.anthropic || parsed.anthropic_api_key || null;
|
||||
|
||||
console.log('[ClaudeProvider] App credentials check - OAuth token:', !!oauthToken, ', API key:', !!apiKey);
|
||||
return { oauthToken, apiKey };
|
||||
} catch (err) {
|
||||
console.warn('[ClaudeProvider] Failed to read app credentials:', err?.message);
|
||||
return { oauthToken: null, apiKey: null };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to load a Claude OAuth token from the local CLI config (~/.claude/config.json).
|
||||
* Returns the token string or null if not found.
|
||||
* NOTE: Claude's credentials.json is encrypted, so we only try config.json
|
||||
*/
|
||||
loadTokenFromCliConfig() {
|
||||
try {
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const configPath = path.join(require('os').homedir(), '.claude', 'config.json');
|
||||
if (!fs.existsSync(configPath)) {
|
||||
return null;
|
||||
}
|
||||
const raw = fs.readFileSync(configPath, 'utf-8');
|
||||
const parsed = JSON.parse(raw);
|
||||
// CLI config stores token as oauth_token (newer) or token (older)
|
||||
return parsed.oauth_token || parsed.token || null;
|
||||
} catch (err) {
|
||||
console.warn('[ClaudeProvider] Failed to read CLI config token:', err?.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
ensureAuthEnv() {
|
||||
// If an API key or OAuth token is already present in the environment, keep it as-is.
|
||||
if (process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_CODE_OAUTH_TOKEN) {
|
||||
console.log('[ClaudeProvider] Auth already present in environment');
|
||||
return true;
|
||||
}
|
||||
|
||||
// Priority 1: Try to load from app's own credentials (setup wizard)
|
||||
const appCredentials = this.loadTokenFromAppCredentials();
|
||||
if (appCredentials.oauthToken) {
|
||||
process.env.CLAUDE_CODE_OAUTH_TOKEN = appCredentials.oauthToken;
|
||||
console.log('[ClaudeProvider] Loaded CLAUDE_CODE_OAUTH_TOKEN from app credentials');
|
||||
return true;
|
||||
}
|
||||
if (appCredentials.apiKey) {
|
||||
process.env.ANTHROPIC_API_KEY = appCredentials.apiKey;
|
||||
console.log('[ClaudeProvider] Loaded ANTHROPIC_API_KEY from app credentials');
|
||||
return true;
|
||||
}
|
||||
|
||||
// Priority 2: Try to hydrate from CLI login config (legacy)
|
||||
const token = this.loadTokenFromCliConfig();
|
||||
if (token) {
|
||||
process.env.CLAUDE_CODE_OAUTH_TOKEN = token;
|
||||
console.log('[ClaudeProvider] Loaded CLAUDE_CODE_OAUTH_TOKEN from ~/.claude/config.json');
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if CLI is installed but not logged in
|
||||
try {
|
||||
const claudeCliDetector = require('./claude-cli-detector');
|
||||
const detection = claudeCliDetector.detectClaudeInstallation();
|
||||
if (detection.installed && detection.method === 'cli') {
|
||||
console.error('[ClaudeProvider] Claude CLI is installed but not authenticated. Use the setup wizard or set ANTHROPIC_API_KEY or CLAUDE_CODE_OAUTH_TOKEN environment variable.');
|
||||
} else {
|
||||
console.error('[ClaudeProvider] No Anthropic auth found. Use the setup wizard or set ANTHROPIC_API_KEY or CLAUDE_CODE_OAUTH_TOKEN.');
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[ClaudeProvider] No Anthropic auth found. Use the setup wizard or set ANTHROPIC_API_KEY or CLAUDE_CODE_OAUTH_TOKEN.');
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Lazily load the Claude SDK
|
||||
*/
|
||||
loadSdk() {
|
||||
if (!this.sdk) {
|
||||
this.sdk = require('@anthropic-ai/claude-agent-sdk');
|
||||
}
|
||||
return this.sdk;
|
||||
}
|
||||
|
||||
async *executeQuery(options) {
|
||||
// Ensure we have auth; fall back to app credentials or CLI login token if available.
|
||||
if (!this.ensureAuthEnv()) {
|
||||
// Check if CLI is installed to provide better error message
|
||||
let msg = 'Missing Anthropic auth. Go to Settings > Setup to configure your Claude authentication.';
|
||||
try {
|
||||
const claudeCliDetector = require('./claude-cli-detector');
|
||||
const detection = claudeCliDetector.detectClaudeInstallation();
|
||||
if (detection.installed && detection.method === 'cli') {
|
||||
msg = 'Claude CLI is installed but not authenticated. Go to Settings > Setup to provide your subscription token (from `claude setup-token`) or API key.';
|
||||
} else {
|
||||
msg = 'Missing Anthropic auth. Go to Settings > Setup to configure your Claude authentication, or set ANTHROPIC_API_KEY environment variable.';
|
||||
}
|
||||
} catch (err) {
|
||||
// Fallback to default message
|
||||
}
|
||||
console.error(`[ClaudeProvider] ${msg}`);
|
||||
yield { type: 'error', error: msg };
|
||||
return;
|
||||
}
|
||||
|
||||
const { query } = this.loadSdk();
|
||||
|
||||
const sdkOptions = {
|
||||
model: options.model,
|
||||
systemPrompt: options.systemPrompt,
|
||||
maxTurns: options.maxTurns || 1000,
|
||||
cwd: options.cwd,
|
||||
mcpServers: options.mcpServers,
|
||||
allowedTools: options.allowedTools,
|
||||
permissionMode: options.permissionMode || 'acceptEdits',
|
||||
sandbox: options.sandbox,
|
||||
abortController: options.abortController,
|
||||
};
|
||||
|
||||
// Add thinking configuration if enabled
|
||||
if (options.thinking) {
|
||||
sdkOptions.thinking = options.thinking;
|
||||
}
|
||||
|
||||
const currentQuery = query({ prompt: options.prompt, options: sdkOptions });
|
||||
|
||||
for await (const msg of currentQuery) {
|
||||
yield msg;
|
||||
}
|
||||
}
|
||||
|
||||
async detectInstallation() {
|
||||
const claudeCliDetector = require('./claude-cli-detector');
|
||||
return claudeCliDetector.getFullStatus();
|
||||
}
|
||||
|
||||
getAvailableModels() {
|
||||
return [
|
||||
{
|
||||
id: 'haiku',
|
||||
name: 'Claude Haiku',
|
||||
modelString: 'claude-haiku-4-5',
|
||||
provider: 'claude',
|
||||
description: 'Fast and efficient for simple tasks',
|
||||
tier: 'basic'
|
||||
},
|
||||
{
|
||||
id: 'sonnet',
|
||||
name: 'Claude Sonnet',
|
||||
modelString: 'claude-sonnet-4-20250514',
|
||||
provider: 'claude',
|
||||
description: 'Balanced performance and capabilities',
|
||||
tier: 'standard'
|
||||
},
|
||||
{
|
||||
id: 'opus',
|
||||
name: 'Claude Opus 4.5',
|
||||
modelString: 'claude-opus-4-5-20251101',
|
||||
provider: 'claude',
|
||||
description: 'Most capable model for complex tasks',
|
||||
tier: 'premium'
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
validateConfig() {
|
||||
const errors = [];
|
||||
|
||||
// Ensure auth is available (try to auto-load from app credentials or CLI config)
|
||||
this.ensureAuthEnv();
|
||||
|
||||
if (!process.env.CLAUDE_CODE_OAUTH_TOKEN && !process.env.ANTHROPIC_API_KEY) {
|
||||
errors.push('No Claude authentication found. Go to Settings > Setup to configure your subscription token or API key.');
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors
|
||||
};
|
||||
}
|
||||
|
||||
getModelString(modelKey) {
|
||||
const modelMap = {
|
||||
haiku: 'claude-haiku-4-5',
|
||||
sonnet: 'claude-sonnet-4-20250514',
|
||||
opus: 'claude-opus-4-5-20251101'
|
||||
};
|
||||
return modelMap[modelKey] || modelMap.opus;
|
||||
}
|
||||
|
||||
supportsFeature(feature) {
|
||||
const supportedFeatures = ['thinking', 'tools', 'streaming', 'mcp'];
|
||||
return supportedFeatures.includes(feature);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Codex Provider - Uses OpenAI Codex CLI
|
||||
*/
|
||||
class CodexProvider extends ModelProvider {
|
||||
constructor(config = {}) {
|
||||
super(config);
|
||||
this.name = 'codex';
|
||||
}
|
||||
|
||||
async *executeQuery(options) {
|
||||
const codexExecutor = require('./codex-executor');
|
||||
|
||||
// Validate that we're not receiving a Claude model string
|
||||
if (options.model && options.model.startsWith('claude-')) {
|
||||
const errorMsg = `Codex provider cannot use Claude model '${options.model}'. Codex only supports OpenAI models (gpt-5.1-codex-max, gpt-5.1-codex, gpt-5.1-codex-mini, gpt-5.1).`;
|
||||
console.error(`[CodexProvider] ${errorMsg}`);
|
||||
yield {
|
||||
type: 'error',
|
||||
error: errorMsg
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
const executeOptions = {
|
||||
prompt: options.prompt,
|
||||
model: options.model,
|
||||
cwd: options.cwd,
|
||||
systemPrompt: options.systemPrompt,
|
||||
maxTurns: options.maxTurns || 20,
|
||||
allowedTools: options.allowedTools,
|
||||
mcpServers: options.mcpServers, // Pass MCP servers config to executor
|
||||
env: {
|
||||
...process.env,
|
||||
OPENAI_API_KEY: process.env.OPENAI_API_KEY
|
||||
}
|
||||
};
|
||||
|
||||
// Execute and yield results
|
||||
const generator = codexExecutor.execute(executeOptions);
|
||||
for await (const msg of generator) {
|
||||
yield msg;
|
||||
}
|
||||
}
|
||||
|
||||
async detectInstallation() {
|
||||
const codexCliDetector = require('./codex-cli-detector');
|
||||
return codexCliDetector.getInstallationInfo();
|
||||
}
|
||||
|
||||
getAvailableModels() {
|
||||
return [
|
||||
{
|
||||
id: 'gpt-5.1-codex-max',
|
||||
name: 'GPT-5.1 Codex Max',
|
||||
modelString: 'gpt-5.1-codex-max',
|
||||
provider: 'codex',
|
||||
description: 'Latest flagship - deep and fast reasoning for coding',
|
||||
tier: 'premium',
|
||||
default: true
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.1-codex',
|
||||
name: 'GPT-5.1 Codex',
|
||||
modelString: 'gpt-5.1-codex',
|
||||
provider: 'codex',
|
||||
description: 'Optimized for code generation',
|
||||
tier: 'standard'
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.1-codex-mini',
|
||||
name: 'GPT-5.1 Codex Mini',
|
||||
modelString: 'gpt-5.1-codex-mini',
|
||||
provider: 'codex',
|
||||
description: 'Faster and cheaper option',
|
||||
tier: 'basic'
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.1',
|
||||
name: 'GPT-5.1',
|
||||
modelString: 'gpt-5.1',
|
||||
provider: 'codex',
|
||||
description: 'Broad world knowledge with strong reasoning',
|
||||
tier: 'standard'
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
validateConfig() {
|
||||
const errors = [];
|
||||
const codexCliDetector = require('./codex-cli-detector');
|
||||
const installation = codexCliDetector.detectCodexInstallation();
|
||||
|
||||
if (!installation.installed && !process.env.OPENAI_API_KEY) {
|
||||
errors.push('Codex CLI not installed and no OPENAI_API_KEY found.');
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors
|
||||
};
|
||||
}
|
||||
|
||||
getModelString(modelKey) {
|
||||
// Codex models use the key directly as the model string
|
||||
const modelMap = {
|
||||
'gpt-5.1-codex-max': 'gpt-5.1-codex-max',
|
||||
'gpt-5.1-codex': 'gpt-5.1-codex',
|
||||
'gpt-5.1-codex-mini': 'gpt-5.1-codex-mini',
|
||||
'gpt-5.1': 'gpt-5.1'
|
||||
};
|
||||
return modelMap[modelKey] || 'gpt-5.1-codex-max';
|
||||
}
|
||||
|
||||
supportsFeature(feature) {
|
||||
const supportedFeatures = ['tools', 'streaming'];
|
||||
return supportedFeatures.includes(feature);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Model Provider Factory
|
||||
* Creates the appropriate provider based on model or provider name
|
||||
*/
|
||||
class ModelProviderFactory {
|
||||
static providers = {
|
||||
claude: ClaudeProvider,
|
||||
codex: CodexProvider
|
||||
};
|
||||
|
||||
/**
|
||||
* Get provider for a specific model
|
||||
* @param {string} modelId Model ID (e.g., 'opus', 'gpt-5.1-codex')
|
||||
* @returns {ModelProvider} Provider instance
|
||||
*/
|
||||
static getProviderForModel(modelId) {
|
||||
// Check if it's a Claude model
|
||||
const claudeModels = ['haiku', 'sonnet', 'opus'];
|
||||
if (claudeModels.includes(modelId)) {
|
||||
return new ClaudeProvider();
|
||||
}
|
||||
|
||||
// Check if it's a Codex/OpenAI model
|
||||
const codexModels = [
|
||||
'gpt-5.1-codex-max', 'gpt-5.1-codex', 'gpt-5.1-codex-mini', 'gpt-5.1'
|
||||
];
|
||||
if (codexModels.includes(modelId)) {
|
||||
return new CodexProvider();
|
||||
}
|
||||
|
||||
// Default to Claude
|
||||
return new ClaudeProvider();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get provider by name
|
||||
* @param {string} providerName Provider name ('claude' or 'codex')
|
||||
* @returns {ModelProvider} Provider instance
|
||||
*/
|
||||
static getProvider(providerName) {
|
||||
const ProviderClass = this.providers[providerName];
|
||||
if (!ProviderClass) {
|
||||
throw new Error(`Unknown provider: ${providerName}`);
|
||||
}
|
||||
return new ProviderClass();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available providers
|
||||
* @returns {string[]} List of provider names
|
||||
*/
|
||||
static getAvailableProviders() {
|
||||
return Object.keys(this.providers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available models across all providers
|
||||
* @returns {Array<Object>} All available models
|
||||
*/
|
||||
static getAllModels() {
|
||||
const allModels = [];
|
||||
for (const providerName of this.getAvailableProviders()) {
|
||||
const provider = this.getProvider(providerName);
|
||||
const models = provider.getAvailableModels();
|
||||
allModels.push(...models);
|
||||
}
|
||||
return allModels;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check installation status for all providers
|
||||
* @returns {Promise<Object>} Installation status for each provider
|
||||
*/
|
||||
static async checkAllProviders() {
|
||||
const status = {};
|
||||
for (const providerName of this.getAvailableProviders()) {
|
||||
const provider = this.getProvider(providerName);
|
||||
status[providerName] = await provider.detectInstallation();
|
||||
}
|
||||
return status;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
ModelProvider,
|
||||
ClaudeProvider,
|
||||
CodexProvider,
|
||||
ModelProviderFactory
|
||||
};
|
||||
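A minimal usage sketch of the factory exported above, assuming it is required from a sibling service; the project path and prompt are placeholders:

const { ModelProviderFactory } = require('./model-provider');

async function runQuery(projectPath) {
  const provider = ModelProviderFactory.getProviderForModel('opus'); // -> ClaudeProvider
  const options = {
    prompt: 'Summarize the project structure',
    model: provider.getModelString('opus'), // 'claude-opus-4-5-20251101'
    cwd: projectPath,
    maxTurns: 10,
    allowedTools: ['Read', 'Glob', 'Grep'],
    abortController: new AbortController(),
  };
  // Stream messages from the provider; surface provider-level errors.
  for await (const msg of provider.executeQuery(options)) {
    if (msg.type === 'error') throw new Error(msg.error);
  }
}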
@@ -1,320 +0,0 @@
|
||||
/**
* Model Registry - Centralized model definitions and metadata
*
* This module provides a central registry of all available models
* across different providers (Claude, Codex/OpenAI).
*/
|
||||
/**
|
||||
* Model Categories
|
||||
*/
|
||||
const MODEL_CATEGORIES = {
|
||||
CLAUDE: 'claude',
|
||||
OPENAI: 'openai',
|
||||
CODEX: 'codex'
|
||||
};
|
||||
|
||||
/**
|
||||
* Model Tiers (capability levels)
|
||||
*/
|
||||
const MODEL_TIERS = {
|
||||
BASIC: 'basic', // Fast, cheap, simple tasks
|
||||
STANDARD: 'standard', // Balanced performance
|
||||
PREMIUM: 'premium' // Most capable, complex tasks
|
||||
};
|
||||
|
||||
const CODEX_MODEL_IDS = [
|
||||
'gpt-5.1-codex-max',
|
||||
'gpt-5.1-codex',
|
||||
'gpt-5.1-codex-mini',
|
||||
'gpt-5.1'
|
||||
];
|
||||
|
||||
/**
|
||||
* All available models with full metadata
|
||||
*/
|
||||
const MODELS = {
|
||||
// Claude Models
|
||||
haiku: {
|
||||
id: 'haiku',
|
||||
name: 'Claude Haiku',
|
||||
modelString: 'claude-haiku-4-5',
|
||||
provider: 'claude',
|
||||
category: MODEL_CATEGORIES.CLAUDE,
|
||||
tier: MODEL_TIERS.BASIC,
|
||||
description: 'Fast and efficient for simple tasks',
|
||||
capabilities: ['code', 'text', 'tools'],
|
||||
maxTokens: 8192,
|
||||
contextWindow: 200000,
|
||||
supportsThinking: true,
|
||||
requiresAuth: 'CLAUDE_CODE_OAUTH_TOKEN'
|
||||
},
|
||||
sonnet: {
|
||||
id: 'sonnet',
|
||||
name: 'Claude Sonnet',
|
||||
modelString: 'claude-sonnet-4-20250514',
|
||||
provider: 'claude',
|
||||
category: MODEL_CATEGORIES.CLAUDE,
|
||||
tier: MODEL_TIERS.STANDARD,
|
||||
description: 'Balanced performance and capabilities',
|
||||
capabilities: ['code', 'text', 'tools', 'analysis'],
|
||||
maxTokens: 8192,
|
||||
contextWindow: 200000,
|
||||
supportsThinking: true,
|
||||
requiresAuth: 'CLAUDE_CODE_OAUTH_TOKEN'
|
||||
},
|
||||
opus: {
|
||||
id: 'opus',
|
||||
name: 'Claude Opus 4.5',
|
||||
modelString: 'claude-opus-4-5-20251101',
|
||||
provider: 'claude',
|
||||
category: MODEL_CATEGORIES.CLAUDE,
|
||||
tier: MODEL_TIERS.PREMIUM,
|
||||
description: 'Most capable model for complex tasks',
|
||||
capabilities: ['code', 'text', 'tools', 'analysis', 'reasoning'],
|
||||
maxTokens: 8192,
|
||||
contextWindow: 200000,
|
||||
supportsThinking: true,
|
||||
requiresAuth: 'CLAUDE_CODE_OAUTH_TOKEN',
|
||||
default: true
|
||||
},
|
||||
|
||||
// OpenAI GPT-5.1 Codex Models
|
||||
'gpt-5.1-codex-max': {
|
||||
id: 'gpt-5.1-codex-max',
|
||||
name: 'GPT-5.1 Codex Max',
|
||||
modelString: 'gpt-5.1-codex-max',
|
||||
provider: 'codex',
|
||||
category: MODEL_CATEGORIES.OPENAI,
|
||||
tier: MODEL_TIERS.PREMIUM,
|
||||
description: 'Latest flagship - deep and fast reasoning for coding',
|
||||
capabilities: ['code', 'text', 'tools', 'reasoning'],
|
||||
maxTokens: 32768,
|
||||
contextWindow: 128000,
|
||||
supportsThinking: false,
|
||||
requiresAuth: 'OPENAI_API_KEY',
|
||||
codexDefault: true
|
||||
},
|
||||
'gpt-5.1-codex': {
|
||||
id: 'gpt-5.1-codex',
|
||||
name: 'GPT-5.1 Codex',
|
||||
modelString: 'gpt-5.1-codex',
|
||||
provider: 'codex',
|
||||
category: MODEL_CATEGORIES.OPENAI,
|
||||
tier: MODEL_TIERS.STANDARD,
|
||||
description: 'Optimized for code generation',
|
||||
capabilities: ['code', 'text', 'tools'],
|
||||
maxTokens: 32768,
|
||||
contextWindow: 128000,
|
||||
supportsThinking: false,
|
||||
requiresAuth: 'OPENAI_API_KEY'
|
||||
},
|
||||
'gpt-5.1-codex-mini': {
|
||||
id: 'gpt-5.1-codex-mini',
|
||||
name: 'GPT-5.1 Codex Mini',
|
||||
modelString: 'gpt-5.1-codex-mini',
|
||||
provider: 'codex',
|
||||
category: MODEL_CATEGORIES.OPENAI,
|
||||
tier: MODEL_TIERS.BASIC,
|
||||
description: 'Faster and cheaper option',
|
||||
capabilities: ['code', 'text'],
|
||||
maxTokens: 16384,
|
||||
contextWindow: 128000,
|
||||
supportsThinking: false,
|
||||
requiresAuth: 'OPENAI_API_KEY'
|
||||
},
|
||||
'gpt-5.1': {
|
||||
id: 'gpt-5.1',
|
||||
name: 'GPT-5.1',
|
||||
modelString: 'gpt-5.1',
|
||||
provider: 'codex',
|
||||
category: MODEL_CATEGORIES.OPENAI,
|
||||
tier: MODEL_TIERS.STANDARD,
|
||||
description: 'Broad world knowledge with strong reasoning',
|
||||
capabilities: ['code', 'text', 'reasoning'],
|
||||
maxTokens: 32768,
|
||||
contextWindow: 128000,
|
||||
supportsThinking: false,
|
||||
requiresAuth: 'OPENAI_API_KEY'
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Model Registry class for querying and managing models
|
||||
*/
|
||||
class ModelRegistry {
|
||||
/**
|
||||
* Get all registered models
|
||||
* @returns {Object} All models
|
||||
*/
|
||||
static getAllModels() {
|
||||
return MODELS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get model by ID
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {Object|null} Model definition or null
|
||||
*/
|
||||
static getModel(modelId) {
|
||||
return MODELS[modelId] || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get models by provider
|
||||
* @param {string} provider Provider name ('claude' or 'codex')
|
||||
* @returns {Object[]} Array of models for the provider
|
||||
*/
|
||||
static getModelsByProvider(provider) {
|
||||
return Object.values(MODELS).filter(m => m.provider === provider);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get models by category
|
||||
* @param {string} category Category name
|
||||
* @returns {Object[]} Array of models in the category
|
||||
*/
|
||||
static getModelsByCategory(category) {
|
||||
return Object.values(MODELS).filter(m => m.category === category);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get models by tier
|
||||
* @param {string} tier Tier name
|
||||
* @returns {Object[]} Array of models in the tier
|
||||
*/
|
||||
static getModelsByTier(tier) {
|
||||
return Object.values(MODELS).filter(m => m.tier === tier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default model for a provider
|
||||
* @param {string} provider Provider name
|
||||
* @returns {Object|null} Default model or null
|
||||
*/
|
||||
static getDefaultModel(provider = 'claude') {
|
||||
const models = this.getModelsByProvider(provider);
|
||||
if (provider === 'claude') {
|
||||
return models.find(m => m.default) || models[0];
|
||||
}
|
||||
if (provider === 'codex') {
|
||||
return models.find(m => m.codexDefault) || models[0];
|
||||
}
|
||||
return models[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get model string (full model name) for a model ID
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {string} Full model string
|
||||
*/
|
||||
static getModelString(modelId) {
|
||||
const model = this.getModel(modelId);
|
||||
return model ? model.modelString : modelId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine provider for a model ID
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {string} Provider name ('claude' or 'codex')
|
||||
*/
|
||||
static getProviderForModel(modelId) {
|
||||
const model = this.getModel(modelId);
|
||||
if (model) {
|
||||
return model.provider;
|
||||
}
|
||||
|
||||
// Fallback detection for models not explicitly registered (keeps legacy Codex IDs working)
|
||||
if (CODEX_MODEL_IDS.includes(modelId)) {
|
||||
return 'codex';
|
||||
}
|
||||
|
||||
return 'claude';
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a model is a Claude model
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {boolean} Whether it's a Claude model
|
||||
*/
|
||||
static isClaudeModel(modelId) {
|
||||
return this.getProviderForModel(modelId) === 'claude';
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a model is a Codex/OpenAI model
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {boolean} Whether it's a Codex model
|
||||
*/
|
||||
static isCodexModel(modelId) {
|
||||
return this.getProviderForModel(modelId) === 'codex';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get models grouped by provider for UI display
|
||||
* @returns {Object} Models grouped by provider
|
||||
*/
|
||||
static getModelsGroupedByProvider() {
|
||||
return {
|
||||
claude: this.getModelsByProvider('claude'),
|
||||
codex: this.getModelsByProvider('codex')
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all model IDs as an array
|
||||
* @returns {string[]} Array of model IDs
|
||||
*/
|
||||
static getAllModelIds() {
|
||||
return Object.keys(MODELS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if model supports a specific capability
|
||||
* @param {string} modelId Model ID
|
||||
* @param {string} capability Capability name
|
||||
* @returns {boolean} Whether the model supports the capability
|
||||
*/
|
||||
static modelSupportsCapability(modelId, capability) {
|
||||
const model = this.getModel(modelId);
|
||||
return model ? model.capabilities.includes(capability) : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if model supports extended thinking
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {boolean} Whether the model supports thinking
|
||||
*/
|
||||
static modelSupportsThinking(modelId) {
|
||||
const model = this.getModel(modelId);
|
||||
return model ? model.supportsThinking : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get required authentication for a model
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {string|null} Required auth env variable name
|
||||
*/
|
||||
static getRequiredAuth(modelId) {
|
||||
const model = this.getModel(modelId);
|
||||
return model ? model.requiresAuth : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if authentication is available for a model
|
||||
* @param {string} modelId Model ID
|
||||
* @returns {boolean} Whether auth is available
|
||||
*/
|
||||
static hasAuthForModel(modelId) {
|
||||
const authVar = this.getRequiredAuth(modelId);
|
||||
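// No registered auth requirement (or an unknown model) counts as "no auth available".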
if (!authVar) return false;
|
||||
return !!process.env[authVar];
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MODEL_CATEGORIES,
|
||||
MODEL_TIERS,
|
||||
MODELS,
|
||||
ModelRegistry
|
||||
};
|
||||
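A short usage sketch for the registry above. Every method called here appears in the class; only the require path is an assumption.

```js
const { ModelRegistry } = require("./model-registry"); // path is an assumption

function resolveModel(modelId) {
  return {
    provider: ModelRegistry.getProviderForModel(modelId), // 'claude' or 'codex'
    modelString: ModelRegistry.getModelString(modelId),   // e.g. 'opus' -> 'claude-opus-4-5-20251101'
    supportsThinking: ModelRegistry.modelSupportsThinking(modelId),
    hasAuth: ModelRegistry.hasAuthForModel(modelId),      // checks the env var named in requiresAuth
  };
}

console.log(resolveModel("opus"));
console.log(resolveModel("gpt-5.1-codex-mini"));
```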
@@ -1,112 +0,0 @@
|
||||
const { query, AbortError } = require("@anthropic-ai/claude-agent-sdk");
|
||||
const promptBuilder = require("./prompt-builder");
|
||||
|
||||
/**
|
||||
* Project Analyzer - Scans codebase and updates app_spec.txt
|
||||
*/
|
||||
class ProjectAnalyzer {
|
||||
/**
|
||||
* Run the project analysis using Claude Agent SDK
|
||||
*/
|
||||
async runProjectAnalysis(projectPath, analysisId, sendToRenderer, execution) {
|
||||
console.log(`[ProjectAnalyzer] Running project analysis for: ${projectPath}`);
|
||||
|
||||
try {
|
||||
sendToRenderer({
|
||||
type: "auto_mode_phase",
|
||||
featureId: analysisId,
|
||||
phase: "planning",
|
||||
message: "Scanning project structure...",
|
||||
});
|
||||
|
||||
const abortController = new AbortController();
|
||||
execution.abortController = abortController;
|
||||
|
||||
const options = {
|
||||
model: "claude-sonnet-4-20250514",
|
||||
systemPrompt: promptBuilder.getProjectAnalysisSystemPrompt(),
|
||||
maxTurns: 50,
|
||||
cwd: projectPath,
|
||||
allowedTools: ["Read", "Write", "Edit", "Glob", "Grep", "Bash"],
|
||||
permissionMode: "acceptEdits",
|
||||
sandbox: {
|
||||
enabled: true,
|
||||
autoAllowBashIfSandboxed: true,
|
||||
},
|
||||
abortController: abortController,
|
||||
};
|
||||
|
||||
const prompt = promptBuilder.buildProjectAnalysisPrompt(projectPath);
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_progress",
|
||||
featureId: analysisId,
|
||||
content: "Starting project analysis...\n",
|
||||
});
|
||||
|
||||
const currentQuery = query({ prompt, options });
|
||||
execution.query = currentQuery;
|
||||
|
||||
let responseText = "";
|
||||
for await (const msg of currentQuery) {
|
||||
if (!execution.isActive()) break;
|
||||
|
||||
if (msg.type === "assistant" && msg.message?.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === "text") {
|
||||
responseText += block.text;
|
||||
sendToRenderer({
|
||||
type: "auto_mode_progress",
|
||||
featureId: analysisId,
|
||||
content: block.text,
|
||||
});
|
||||
} else if (block.type === "tool_use") {
|
||||
sendToRenderer({
|
||||
type: "auto_mode_tool",
|
||||
featureId: analysisId,
|
||||
tool: block.name,
|
||||
input: block.input,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
execution.query = null;
|
||||
execution.abortController = null;
|
||||
|
||||
sendToRenderer({
|
||||
type: "auto_mode_phase",
|
||||
featureId: analysisId,
|
||||
phase: "verification",
|
||||
message: "Project analysis complete",
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: "Project analyzed successfully",
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof AbortError || error?.name === "AbortError") {
|
||||
console.log("[ProjectAnalyzer] Project analysis aborted");
|
||||
if (execution) {
|
||||
execution.abortController = null;
|
||||
execution.query = null;
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
message: "Analysis aborted",
|
||||
};
|
||||
}
|
||||
|
||||
console.error("[ProjectAnalyzer] Error in project analysis:", error);
|
||||
if (execution) {
|
||||
execution.abortController = null;
|
||||
execution.query = null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new ProjectAnalyzer();
|
||||
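The analyzer above only needs a `sendToRenderer` callback and an `execution` object exposing `isActive()` plus writable `abortController`/`query` slots. The sketch below shows one way a caller could satisfy that contract; the require path, the IPC channel name, and the analysis-ID format are assumptions.

```js
const projectAnalyzer = require("./project-analyzer"); // path is an assumption

async function analyzeProject(projectPath, webContents) {
  const execution = {
    active: true,
    abortController: null,
    query: null,
    isActive() {
      return this.active;
    },
  };

  // Forward streaming events to the renderer (channel name is an assumption).
  const sendToRenderer = (payload) => webContents.send("auto-mode-event", payload);

  return projectAnalyzer.runProjectAnalysis(
    projectPath,
    `analysis-${Date.now()}`, // illustrative analysis ID
    sendToRenderer,
    execution
  );
}
```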
@@ -1,787 +0,0 @@
|
||||
const contextManager = require("./context-manager");
|
||||
|
||||
/**
|
||||
* Prompt Builder - Generates prompts for different agent tasks
|
||||
*/
|
||||
class PromptBuilder {
|
||||
/**
|
||||
* Build the prompt for implementing a specific feature
|
||||
*/
|
||||
async buildFeaturePrompt(feature, projectPath) {
|
||||
const skipTestsNote = feature.skipTests
|
||||
? `\n**⚠️ IMPORTANT - Manual Testing Mode:**\nThis feature has skipTests=true, which means:\n- DO NOT commit changes automatically\n- The feature will not stay "verified" - its status is automatically converted to "waiting_approval"\n- The user will manually review and commit the changes\n- Just implement the feature and mark it as verified (it will be converted to waiting_approval)\n`
|
||||
: "";
|
||||
|
||||
let imagesNote = "";
|
||||
if (feature.imagePaths && feature.imagePaths.length > 0) {
|
||||
const imagesList = feature.imagePaths
|
||||
.map(
|
||||
(img, idx) =>
|
||||
` ${idx + 1}. ${img.filename} (${img.mimeType})\n Path: ${
|
||||
img.path
|
||||
}`
|
||||
)
|
||||
.join("\n");
|
||||
|
||||
imagesNote = `\n**📎 Context Images Attached:**\nThe user has attached ${feature.imagePaths.length} image(s) for context. These images are provided both visually (in the initial message) and as files you can read:
|
||||
|
||||
${imagesList}
|
||||
|
||||
You can use the Read tool to view these images at any time during implementation. Review them carefully before implementing.\n`;
|
||||
}
|
||||
|
||||
// Get context files preview
|
||||
const contextFilesPreview = await contextManager.getContextFilesPreview(
|
||||
projectPath
|
||||
);
|
||||
|
||||
// Get memory content (lessons learned from previous runs)
|
||||
const memoryContent = await contextManager.getMemoryContent(projectPath);
|
||||
|
||||
// Build mode header for this feature
|
||||
const modeHeader = feature.skipTests
|
||||
? `**🔨 MODE: Manual Review (No Automated Tests)**
|
||||
This feature is set for manual review - focus on clean implementation without automated tests.`
|
||||
: `**🧪 MODE: Test-Driven Development (TDD)**
|
||||
This feature requires automated Playwright tests to verify the implementation.`;
|
||||
|
||||
return `You are working on a feature implementation task.
|
||||
|
||||
${modeHeader}
|
||||
${memoryContent}
|
||||
**Current Feature to Implement:**
|
||||
|
||||
ID: ${feature.id}
|
||||
Category: ${feature.category || "Uncategorized"}
|
||||
Description: ${feature.description || feature.summary || feature.title || "No description provided"}
|
||||
${skipTestsNote}${imagesNote}${contextFilesPreview}
|
||||
**Steps to Complete:**
|
||||
${(feature.steps || []).map((step, i) => `${i + 1}. ${step}`).join("\n") || "No specific steps provided - implement based on description"}
|
||||
|
||||
**Your Task:**
|
||||
|
||||
1. Read the project files to understand the current codebase structure
|
||||
2. Implement the feature according to the description and steps
|
||||
${
|
||||
feature.skipTests
|
||||
? "3. Test the implementation manually (no automated tests needed for skipTests features)"
|
||||
: "3. Write Playwright tests to verify the feature works correctly\n4. Run the tests and ensure they pass\n5. **DELETE the test file(s) you created** - tests are only for immediate verification"
|
||||
}
|
||||
${
|
||||
feature.skipTests ? "4" : "6"
|
||||
}. **CRITICAL: Use the UpdateFeatureStatus tool to mark this feature as verified**
|
||||
${
|
||||
feature.skipTests
|
||||
? "5. **DO NOT commit changes** - the user will review and commit manually"
|
||||
: "7. Commit your changes with git"
|
||||
}
|
||||
|
||||
**IMPORTANT - Updating Feature Status:**
|
||||
|
||||
When you have completed the feature${
|
||||
feature.skipTests ? "" : " and all tests pass"
|
||||
}, you MUST use the \`mcp__automaker-tools__UpdateFeatureStatus\` tool to update the feature status:
|
||||
- Call the tool with: featureId="${feature.id}" and status="verified"
|
||||
- **You can also include a summary parameter** to describe what was done: summary="Brief summary of changes"
|
||||
- **DO NOT manually edit feature files** - this can cause race conditions
|
||||
- The UpdateFeatureStatus tool safely updates the feature status without risk of corrupting other data
|
||||
- **If skipTests=true, the tool will automatically convert "verified" to "waiting_approval"** - this is correct behavior
|
||||
|
||||
**IMPORTANT - Feature Summary (REQUIRED):**
|
||||
|
||||
When calling UpdateFeatureStatus, you MUST include a summary parameter that describes:
|
||||
- What files were modified/created
|
||||
- What functionality was added or changed
|
||||
- Any notable implementation decisions
|
||||
|
||||
Example:
|
||||
\`\`\`
|
||||
UpdateFeatureStatus(featureId="${
|
||||
feature.id
|
||||
}", status="verified", summary="Added dark mode toggle to settings. Modified: settings.tsx, theme-provider.tsx. Created new useTheme hook.")
|
||||
\`\`\`
|
||||
|
||||
The summary will be displayed on the Kanban card so the user can see what was done without checking the code.
|
||||
|
||||
**Important Guidelines:**
|
||||
|
||||
- Focus ONLY on implementing this specific feature
|
||||
- Write clean, production-quality code
|
||||
- Add proper error handling
|
||||
${
|
||||
feature.skipTests
|
||||
? "- Skip automated testing (skipTests=true) - user will manually verify"
|
||||
: "- Write comprehensive Playwright tests\n- Ensure all existing tests still pass\n- Mark the feature as passing only when all tests are green\n- **CRITICAL: Delete test files after verification** - tests accumulate and become brittle"
|
||||
}
|
||||
- **CRITICAL: Use UpdateFeatureStatus tool instead of editing feature files directly**
|
||||
- **CRITICAL: Always include a summary when marking feature as verified**
|
||||
${
|
||||
feature.skipTests
|
||||
? "- **DO NOT commit changes** - user will review and commit manually"
|
||||
: "- Make a git commit when complete"
|
||||
}
|
||||
|
||||
**Testing Utilities (CRITICAL):**
|
||||
|
||||
1. **Create/maintain tests/utils.ts** - Add helper functions for finding elements and common test operations
|
||||
2. **Use utilities in tests** - Import and use helper functions instead of repeating selectors
|
||||
3. **Add utilities as needed** - When you write a test, if you need a new helper, add it to utils.ts
|
||||
4. **Update utilities when functionality changes** - If you modify components, update corresponding utilities
|
||||
|
||||
Example utilities to add:
|
||||
- getByTestId(page, testId) - Find elements by data-testid
|
||||
- getButtonByText(page, text) - Find buttons by text
|
||||
- clickElement(page, testId) - Click an element by test ID
|
||||
- fillForm(page, formData) - Fill form fields
|
||||
- waitForElement(page, testId) - Wait for element to appear
|
||||
|
||||
This makes future tests easier to write and maintain!
|
||||
|
||||
**Test Deletion Policy:**
|
||||
After tests pass, delete them immediately:
|
||||
\`\`\`bash
|
||||
rm tests/[feature-name].spec.ts
|
||||
\`\`\`
|
||||
|
||||
Begin by reading the project structure and then implementing the feature.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the prompt for verifying a specific feature
|
||||
*/
|
||||
async buildVerificationPrompt(feature, projectPath) {
|
||||
const skipTestsNote = feature.skipTests
|
||||
? `\n**⚠️ IMPORTANT - Manual Testing Mode:**\nThis feature has skipTests=true, which means:\n- DO NOT commit changes automatically\n- The feature will not stay "verified" - its status is automatically converted to "waiting_approval"\n- The user will manually review and commit the changes\n- Just implement the feature and mark it as verified (it will be converted to waiting_approval)\n`
|
||||
: "";
|
||||
|
||||
let imagesNote = "";
|
||||
if (feature.imagePaths && feature.imagePaths.length > 0) {
|
||||
const imagesList = feature.imagePaths
|
||||
.map(
|
||||
(img, idx) =>
|
||||
` ${idx + 1}. ${img.filename} (${img.mimeType})\n Path: ${
|
||||
img.path
|
||||
}`
|
||||
)
|
||||
.join("\n");
|
||||
|
||||
imagesNote = `\n**📎 Context Images Attached:**\nThe user has attached ${feature.imagePaths.length} image(s) for context. These images are provided both visually (in the initial message) and as files you can read:
|
||||
|
||||
${imagesList}
|
||||
|
||||
You can use the Read tool to view these images at any time during implementation. Review them carefully before implementing.\n`;
|
||||
}
|
||||
|
||||
// Get context files preview
|
||||
const contextFilesPreview = await contextManager.getContextFilesPreview(
|
||||
projectPath
|
||||
);
|
||||
|
||||
// Get memory content (lessons learned from previous runs)
|
||||
const memoryContent = await contextManager.getMemoryContent(projectPath);
|
||||
|
||||
// Build mode header for this feature
|
||||
const modeHeader = feature.skipTests
|
||||
? `**🔨 MODE: Manual Review (No Automated Tests)**
|
||||
This feature is set for manual review - focus on completing implementation without automated tests.`
|
||||
: `**🧪 MODE: Test-Driven Development (TDD)**
|
||||
This feature requires automated Playwright tests to verify the implementation.`;
|
||||
|
||||
return `You are implementing and verifying a feature until it is complete and working correctly.
|
||||
|
||||
${modeHeader}
|
||||
${memoryContent}
|
||||
|
||||
**Feature to Implement/Verify:**
|
||||
|
||||
ID: ${feature.id}
|
||||
Category: ${feature.category || "Uncategorized"}
|
||||
Description: ${feature.description || feature.summary || feature.title || "No description provided"}
|
||||
Current Status: ${feature.status}
|
||||
${skipTestsNote}${imagesNote}${contextFilesPreview}
|
||||
**Steps that should be implemented:**
|
||||
${(feature.steps || []).map((step, i) => `${i + 1}. ${step}`).join("\n") || "No specific steps provided - implement based on description"}
|
||||
|
||||
**Your Task:**
|
||||
|
||||
1. Read the project files to understand the current implementation
|
||||
2. If the feature is not fully implemented, continue implementing it
|
||||
${
|
||||
feature.skipTests
|
||||
? "3. Test the implementation manually (no automated tests needed for skipTests features)"
|
||||
: `3. Write or update Playwright tests to verify the feature works correctly
|
||||
4. Run the Playwright tests: npx playwright test tests/[feature-name].spec.ts
|
||||
5. Check if all tests pass
|
||||
6. **If ANY tests fail:**
|
||||
- Analyze the test failures and error messages
|
||||
- Fix the implementation code to make the tests pass
|
||||
- Update test utilities in tests/utils.ts if needed
|
||||
- Re-run the tests to verify the fixes
|
||||
- **REPEAT this process until ALL tests pass**
|
||||
7. **If ALL tests pass:**
|
||||
- **DELETE the test file(s) for this feature** - tests are only for immediate verification`
|
||||
}
|
||||
${
|
||||
feature.skipTests ? "4" : "8"
|
||||
}. **CRITICAL: Use the UpdateFeatureStatus tool to mark this feature as verified**
|
||||
${
|
||||
feature.skipTests
|
||||
? "5. **DO NOT commit changes** - the user will review and commit manually"
|
||||
: "9. Explain what was implemented/fixed and that all tests passed\n10. Commit your changes with git"
|
||||
}
|
||||
|
||||
**IMPORTANT - Updating Feature Status:**
|
||||
|
||||
When you have completed the feature${
|
||||
feature.skipTests ? "" : " and all tests pass"
|
||||
}, you MUST use the \`mcp__automaker-tools__UpdateFeatureStatus\` tool to update the feature status:
|
||||
- Call the tool with: featureId="${feature.id}" and status="verified"
|
||||
- **You can also include a summary parameter** to describe what was done: summary="Brief summary of changes"
|
||||
- **DO NOT manually edit feature files** - this can cause race conditions
|
||||
- The UpdateFeatureStatus tool safely updates the feature status without risk of corrupting other data
|
||||
- **If skipTests=true, the tool will automatically convert "verified" to "waiting_approval"** - this is correct behavior
|
||||
|
||||
**IMPORTANT - Feature Summary (REQUIRED):**
|
||||
|
||||
When calling UpdateFeatureStatus, you MUST include a summary parameter that describes:
|
||||
- What files were modified/created
|
||||
- What functionality was added or changed
|
||||
- Any notable implementation decisions
|
||||
|
||||
Example:
|
||||
\`\`\`
|
||||
UpdateFeatureStatus(featureId="${
|
||||
feature.id
|
||||
}", status="verified", summary="Added dark mode toggle to settings. Modified: settings.tsx, theme-provider.tsx. Created new useTheme hook.")
|
||||
\`\`\`
|
||||
|
||||
The summary will be displayed on the Kanban card so the user can see what was done without checking the code.
|
||||
|
||||
**Testing Utilities:**
|
||||
- Check if tests/utils.ts exists and is being used
|
||||
- If utilities are outdated due to functionality changes, update them
|
||||
- Add new utilities as needed for this feature's tests
|
||||
- Ensure test utilities stay in sync with code changes
|
||||
|
||||
**Test Deletion Policy:**
|
||||
After tests pass, delete them immediately:
|
||||
\`\`\`bash
|
||||
rm tests/[feature-name].spec.ts
|
||||
\`\`\`
|
||||
|
||||
**Important:**
|
||||
${
|
||||
feature.skipTests
|
||||
? "- Skip automated testing (skipTests=true) - user will manually verify\n- **DO NOT commit changes** - user will review and commit manually"
|
||||
: "- **CONTINUE IMPLEMENTING until all tests pass** - don't stop at the first failure\n- Only mark as verified if Playwright tests pass\n- **CRITICAL: Delete test files after they pass** - tests should not accumulate\n- Update test utilities if functionality changed\n- Make a git commit when the feature is complete\n- Be thorough and persistent in fixing issues"
|
||||
}
|
||||
- **CRITICAL: Use UpdateFeatureStatus tool instead of editing feature files directly**
|
||||
- **CRITICAL: Always include a summary when marking feature as verified**
|
||||
|
||||
Begin by reading the project structure and understanding what needs to be implemented or fixed.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build prompt for resuming feature with previous context
|
||||
*/
|
||||
async buildResumePrompt(feature, previousContext, projectPath) {
|
||||
const skipTestsNote = feature.skipTests
|
||||
? `\n**⚠️ IMPORTANT - Manual Testing Mode:**\nThis feature has skipTests=true, which means:\n- DO NOT commit changes automatically\n- The feature will not stay "verified" - its status is automatically converted to "waiting_approval"\n- The user will manually review and commit the changes\n- Just implement the feature and mark it as verified (it will be converted to waiting_approval)\n`
|
||||
: "";
|
||||
|
||||
// For resume, check both followUpImages and imagePaths
|
||||
const imagePaths = feature.followUpImages || feature.imagePaths;
|
||||
let imagesNote = "";
|
||||
if (imagePaths && imagePaths.length > 0) {
|
||||
const imagesList = imagePaths
|
||||
.map((img, idx) => {
|
||||
// Handle both FeatureImagePath objects and simple path strings
|
||||
const path = typeof img === "string" ? img : img.path;
|
||||
const filename =
|
||||
typeof img === "string" ? path.split("/").pop() : img.filename;
|
||||
const mimeType = typeof img === "string" ? "image/*" : img.mimeType;
|
||||
return ` ${
|
||||
idx + 1
|
||||
}. ${filename} (${mimeType})\n Path: ${path}`;
|
||||
})
|
||||
.join("\n");
|
||||
|
||||
imagesNote = `\n**📎 Context Images Attached:**\nThe user has attached ${imagePaths.length} image(s) for context. These images are provided both visually (in the initial message) and as files you can read:
|
||||
|
||||
${imagesList}
|
||||
|
||||
You can use the Read tool to view these images at any time. Review them carefully.\n`;
|
||||
}
|
||||
|
||||
// Get context files preview
|
||||
const contextFilesPreview = await contextManager.getContextFilesPreview(
|
||||
projectPath
|
||||
);
|
||||
|
||||
// Get memory content (lessons learned from previous runs)
|
||||
const memoryContent = await contextManager.getMemoryContent(projectPath);
|
||||
|
||||
// Build mode header for this feature
|
||||
const modeHeader = feature.skipTests
|
||||
? `**🔨 MODE: Manual Review (No Automated Tests)**
|
||||
This feature is set for manual review - focus on clean implementation without automated tests.`
|
||||
: `**🧪 MODE: Test-Driven Development (TDD)**
|
||||
This feature requires automated Playwright tests to verify the implementation.`;
|
||||
|
||||
return `You are resuming work on a feature implementation that was previously started.
|
||||
|
||||
${modeHeader}
|
||||
${memoryContent}
|
||||
**Current Feature:**
|
||||
|
||||
ID: ${feature.id}
|
||||
Category: ${feature.category || "Uncategorized"}
|
||||
Description: ${feature.description || feature.summary || feature.title || "No description provided"}
|
||||
${skipTestsNote}${imagesNote}${contextFilesPreview}
|
||||
**Steps to Complete:**
|
||||
${(feature.steps || []).map((step, i) => `${i + 1}. ${step}`).join("\n") || "No specific steps provided - implement based on description"}
|
||||
|
||||
**Previous Work Context:**
|
||||
|
||||
${previousContext || "No previous context available - this is a fresh start."}
|
||||
|
||||
**Your Task:**
|
||||
|
||||
Continue where you left off and complete the feature implementation:
|
||||
|
||||
1. Review the previous work context above to understand what has been done
|
||||
2. Continue implementing the feature according to the description and steps
|
||||
${
|
||||
feature.skipTests
|
||||
? "3. Test the implementation manually (no automated tests needed for skipTests features)"
|
||||
: "3. Write Playwright tests to verify the feature works correctly (if not already done)\n4. Run the tests and ensure they pass\n5. **DELETE the test file(s) you created** - tests are only for immediate verification"
|
||||
}
|
||||
${
|
||||
feature.skipTests ? "4" : "6"
|
||||
}. **CRITICAL: Use the UpdateFeatureStatus tool to mark this feature as verified**
|
||||
${
|
||||
feature.skipTests
|
||||
? "5. **DO NOT commit changes** - the user will review and commit manually"
|
||||
: "7. Commit your changes with git"
|
||||
}
|
||||
|
||||
**IMPORTANT - Updating Feature Status:**
|
||||
|
||||
When you have completed the feature${
|
||||
feature.skipTests ? "" : " and all tests pass"
|
||||
}, you MUST use the \`mcp__automaker-tools__UpdateFeatureStatus\` tool to update the feature status:
|
||||
- Call the tool with: featureId="${feature.id}" and status="verified"
|
||||
- **You can also include a summary parameter** to describe what was done: summary="Brief summary of changes"
|
||||
- **DO NOT manually edit feature files** - this can cause race conditions
|
||||
- The UpdateFeatureStatus tool safely updates the feature status without risk of corrupting other data
|
||||
- **If skipTests=true, the tool will automatically convert "verified" to "waiting_approval"** - this is correct behavior
|
||||
|
||||
**IMPORTANT - Feature Summary (REQUIRED):**
|
||||
|
||||
When calling UpdateFeatureStatus, you MUST include a summary parameter that describes:
|
||||
- What files were modified/created
|
||||
- What functionality was added or changed
|
||||
- Any notable implementation decisions
|
||||
|
||||
Example:
|
||||
\`\`\`
|
||||
UpdateFeatureStatus(featureId="${
|
||||
feature.id
|
||||
}", status="verified", summary="Added dark mode toggle to settings. Modified: settings.tsx, theme-provider.tsx. Created new useTheme hook.")
|
||||
\`\`\`
|
||||
|
||||
The summary will be displayed on the Kanban card so the user can see what was done without checking the code.
|
||||
|
||||
**Important Guidelines:**
|
||||
|
||||
- Review what was already done in the previous context
|
||||
- Don't redo work that's already complete - continue from where it left off
|
||||
- Focus on completing any remaining tasks
|
||||
${
|
||||
feature.skipTests
|
||||
? "- Skip automated testing (skipTests=true) - user will manually verify"
|
||||
: "- Write comprehensive Playwright tests if not already done\n- Ensure all tests pass before marking as verified\n- **CRITICAL: Delete test files after verification**"
|
||||
}
|
||||
- **CRITICAL: Use UpdateFeatureStatus tool instead of editing feature files directly**
|
||||
- **CRITICAL: Always include a summary when marking feature as verified**
|
||||
${
|
||||
feature.skipTests
|
||||
? "- **DO NOT commit changes** - user will review and commit manually"
|
||||
: "- Make a git commit when complete"
|
||||
}
|
||||
|
||||
Begin by assessing what's been done and what remains to be completed.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the prompt for project analysis
|
||||
*/
|
||||
buildProjectAnalysisPrompt(projectPath) {
|
||||
return `You are analyzing a new project that was just opened in Automaker, an autonomous AI development studio.
|
||||
|
||||
**Your Task:**
|
||||
|
||||
Analyze this project's codebase and update the .automaker/app_spec.txt file with accurate information about:
|
||||
|
||||
1. **Project Name** - Detect the name from package.json, README, or directory name
|
||||
2. **Overview** - Brief description of what the project does
|
||||
3. **Technology Stack** - Languages, frameworks, libraries detected
|
||||
4. **Core Capabilities** - Main features and functionality
|
||||
5. **Implemented Features** - What features are already built
|
||||
6. **Implementation Roadmap** - Break down remaining work into phases with individual features
|
||||
|
||||
**Steps to Follow:**
|
||||
|
||||
1. First, explore the project structure:
|
||||
- Look at package.json, cargo.toml, go.mod, requirements.txt, etc. for tech stack
|
||||
- Check README.md for project description
|
||||
- List key directories (src, lib, components, etc.)
|
||||
|
||||
2. Identify the tech stack:
|
||||
- Frontend framework (React, Vue, Next.js, etc.)
|
||||
- Backend framework (Express, FastAPI, etc.)
|
||||
- Database (if any config files exist)
|
||||
- Testing framework
|
||||
- Build tools
|
||||
|
||||
3. Update .automaker/app_spec.txt with your findings in this format:
|
||||
\`\`\`xml
|
||||
<project_specification>
|
||||
<project_name>Detected Name</project_name>
|
||||
|
||||
<overview>
|
||||
Clear description of what this project does based on your analysis.
|
||||
</overview>
|
||||
|
||||
<technology_stack>
|
||||
<frontend>
|
||||
<framework>Framework Name</framework>
|
||||
<!-- Add detected technologies -->
|
||||
</frontend>
|
||||
<backend>
|
||||
<!-- If applicable -->
|
||||
</backend>
|
||||
<database>
|
||||
<!-- If applicable -->
|
||||
</database>
|
||||
<testing>
|
||||
<!-- Testing frameworks detected -->
|
||||
</testing>
|
||||
</technology_stack>
|
||||
|
||||
<core_capabilities>
|
||||
<!-- List main features/capabilities you found -->
|
||||
</core_capabilities>
|
||||
|
||||
<implemented_features>
|
||||
<!-- List specific features that appear to be implemented -->
|
||||
</implemented_features>
|
||||
|
||||
<implementation_roadmap>
|
||||
<phase_1_foundation>
|
||||
<!-- List foundational features to build first -->
|
||||
</phase_1_foundation>
|
||||
<phase_2_core_logic>
|
||||
<!-- List core logic features -->
|
||||
</phase_2_core_logic>
|
||||
<phase_3_polish>
|
||||
<!-- List polish and enhancement features -->
|
||||
</phase_3_polish>
|
||||
</implementation_roadmap>
|
||||
</project_specification>
|
||||
\`\`\`
|
||||
|
||||
4. Ensure .automaker/context/ directory exists
|
||||
|
||||
5. Ensure .automaker/features/ directory exists
|
||||
|
||||
**Important:**
|
||||
- Be concise but accurate
|
||||
- Only include information you can verify from the codebase
|
||||
- If unsure about something, note it as "to be determined"
|
||||
- Don't make up features that don't exist
|
||||
- Features are stored in .automaker/features/{id}/feature.json - each feature gets its own folder
|
||||
|
||||
Begin by exploring the project structure.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the system prompt for coding agent
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {boolean} isTDD - Whether this is Test-Driven Development mode (skipTests=false)
|
||||
*/
|
||||
async getCodingPrompt(projectPath, isTDD = true) {
|
||||
// Get context files preview
|
||||
const contextFilesPreview = projectPath
|
||||
? await contextManager.getContextFilesPreview(projectPath)
|
||||
: "";
|
||||
|
||||
// Get memory content (lessons learned from previous runs)
|
||||
const memoryContent = projectPath
|
||||
? await contextManager.getMemoryContent(projectPath)
|
||||
: "";
|
||||
|
||||
// Build mode-specific instructions
|
||||
const modeHeader = isTDD
|
||||
? `**🧪 MODE: Test-Driven Development (TDD)**
|
||||
You are implementing features using TDD methodology. This means:
|
||||
- Write Playwright tests BEFORE or alongside implementation
|
||||
- Run tests frequently to verify your work
|
||||
- Tests are your validation mechanism
|
||||
- Delete tests after they pass (they're for immediate verification only)`
|
||||
: `**🔨 MODE: Manual Review (No Automated Tests)**
|
||||
You are implementing features for manual user review. This means:
|
||||
- Focus on clean, working implementation
|
||||
- NO automated test writing required
|
||||
- User will manually verify the implementation
|
||||
- DO NOT commit changes - user will review and commit`;
|
||||
|
||||
return `You are an AI coding agent working autonomously to implement features.
|
||||
|
||||
${modeHeader}
|
||||
${memoryContent}
|
||||
|
||||
**Feature Storage:**
|
||||
Features are stored in .automaker/features/{id}/feature.json - each feature has its own folder.
|
||||
|
||||
**THE ONLY WAY to update features:**
|
||||
Use the mcp__automaker-tools__UpdateFeatureStatus tool with featureId, status, and summary parameters.
|
||||
Do NOT manually edit feature.json files directly.
|
||||
|
||||
${contextFilesPreview}
|
||||
|
||||
Your role is to:
|
||||
- Implement features exactly as specified
|
||||
- Write production-quality code
|
||||
- Check if feature.skipTests is true - if so, skip automated testing and don't commit
|
||||
- Create comprehensive Playwright tests using testing utilities (only if skipTests is false)
|
||||
- Ensure all tests pass before marking features complete (only if skipTests is false)
|
||||
- **DELETE test files after successful verification** - tests are only for immediate feature verification (only if skipTests is false)
|
||||
- **Use the UpdateFeatureStatus tool to mark features as verified** - NEVER manually edit feature files
|
||||
- **Always include a summary parameter when calling UpdateFeatureStatus** - describe what was done
|
||||
- Commit working code to git (only if skipTests is false - skipTests features require manual review)
|
||||
- Be thorough and detail-oriented
|
||||
|
||||
**IMPORTANT - Manual Testing Mode (skipTests=true):**
|
||||
If a feature has skipTests=true:
|
||||
- DO NOT write automated tests
|
||||
- DO NOT commit changes - the user will review and commit manually
|
||||
- Still mark the feature as verified using UpdateFeatureStatus - it will automatically convert to "waiting_approval" for manual review
|
||||
- The user will manually verify and commit the changes
|
||||
|
||||
**IMPORTANT - UpdateFeatureStatus Tool:**
|
||||
You have access to the \`mcp__automaker-tools__UpdateFeatureStatus\` tool. When the feature is complete (and all tests pass if skipTests is false), use this tool to update the feature status:
|
||||
- Call with featureId, status="verified", and summary="Description of what was done"
|
||||
- **DO NOT manually edit feature files** - this can cause race conditions and restore old state
|
||||
- The tool safely updates the status without corrupting other feature data
|
||||
- **If skipTests=true, the tool will automatically convert "verified" to "waiting_approval"** - this is correct
|
||||
|
||||
**IMPORTANT - Feature Summary (REQUIRED):**
|
||||
When calling UpdateFeatureStatus, you MUST include a summary parameter that describes:
|
||||
- What files were modified/created
|
||||
- What functionality was added or changed
|
||||
- Any notable implementation decisions
|
||||
|
||||
Example: summary="Added dark mode toggle. Modified: settings.tsx, theme-provider.tsx. Created useTheme hook."
|
||||
|
||||
The summary will be displayed on the Kanban card so the user can quickly see what was done.
|
||||
|
||||
**Testing Utilities (CRITICAL):**
|
||||
- **Create and maintain tests/utils.ts** with helper functions for finding elements and common operations
|
||||
- **Always use utilities in tests** instead of repeating selectors
|
||||
- **Add new utilities as you write tests** - if you need a helper, add it to utils.ts
|
||||
- **Update utilities when functionality changes** - keep helpers in sync with code changes
|
||||
|
||||
This makes future tests easier to write and more maintainable!
|
||||
|
||||
**Test Deletion Policy:**
|
||||
Tests should NOT accumulate. After a feature is verified:
|
||||
1. Run the tests to ensure they pass
|
||||
2. Delete the test file for that feature
|
||||
3. Use UpdateFeatureStatus tool to mark the feature as "verified"
|
||||
|
||||
This prevents test brittleness as the app changes rapidly.
|
||||
|
||||
You have full access to:
|
||||
- Read and write files
|
||||
- Run bash commands
|
||||
- Execute tests
|
||||
- Delete files (rm command)
|
||||
- Make git commits
|
||||
- Search and analyze the codebase
|
||||
- **UpdateFeatureStatus tool** (mcp__automaker-tools__UpdateFeatureStatus) - Use this to update feature status
|
||||
|
||||
**🧠 Learning from Errors - Memory System:**
|
||||
|
||||
If you encounter an error or issue that:
|
||||
- Took multiple attempts to debug
|
||||
- Was caused by a non-obvious codebase quirk
|
||||
- Required understanding something specific about this project
|
||||
- Could trip up future agent runs
|
||||
|
||||
**ADD IT TO MEMORY** by appending to \`.automaker/memory.md\`:
|
||||
|
||||
\`\`\`markdown
|
||||
### Issue: [Brief Title]
|
||||
**Problem:** [1-2 sentence description of the issue]
|
||||
**Fix:** [Concise explanation of the solution]
|
||||
\`\`\`
|
||||
|
||||
Keep entries concise - focus on the essential information needed to avoid the issue in the future. This helps both you and other agents learn from mistakes.
|
||||
|
||||
Focus on one feature at a time and complete it fully before finishing. Always delete tests after they pass and use the UpdateFeatureStatus tool.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the system prompt for verification agent
|
||||
* @param {string} projectPath - Path to the project
|
||||
* @param {boolean} isTDD - Whether this is Test-Driven Development mode (skipTests=false)
|
||||
*/
|
||||
async getVerificationPrompt(projectPath, isTDD = true) {
|
||||
// Get context files preview
|
||||
const contextFilesPreview = projectPath
|
||||
? await contextManager.getContextFilesPreview(projectPath)
|
||||
: "";
|
||||
|
||||
// Get memory content (lessons learned from previous runs)
|
||||
const memoryContent = projectPath
|
||||
? await contextManager.getMemoryContent(projectPath)
|
||||
: "";
|
||||
|
||||
// Build mode-specific instructions
|
||||
const modeHeader = isTDD
|
||||
? `**🧪 MODE: Test-Driven Development (TDD)**
|
||||
You are verifying/completing features using TDD methodology. This means:
|
||||
- Run Playwright tests to verify implementation
|
||||
- Fix failing tests by updating code
|
||||
- Tests are your validation mechanism
|
||||
- Delete tests after they pass (they're for immediate verification only)`
|
||||
: `**🔨 MODE: Manual Review (No Automated Tests)**
|
||||
You are completing features for manual user review. This means:
|
||||
- Focus on clean, working implementation
|
||||
- NO automated test writing required
|
||||
- User will manually verify the implementation
|
||||
- DO NOT commit changes - user will review and commit`;
|
||||
|
||||
return `You are an AI implementation and verification agent focused on completing features and ensuring they work.
|
||||
|
||||
${modeHeader}
|
||||
${memoryContent}
|
||||
**Feature Storage:**
|
||||
Features are stored in .automaker/features/{id}/feature.json - each feature has its own folder.
|
||||
|
||||
**THE ONLY WAY to update features:**
|
||||
Use the mcp__automaker-tools__UpdateFeatureStatus tool with featureId, status, and summary parameters.
|
||||
Do NOT manually edit feature.json files directly.
|
||||
|
||||
${contextFilesPreview}
|
||||
|
||||
Your role is to:
|
||||
- **Continue implementing features until they are complete** - don't stop at the first failure
|
||||
- Check if feature.skipTests is true - if so, skip automated testing and don't commit
|
||||
- Write or update code to fix failing tests (only if skipTests is false)
|
||||
- Run Playwright tests to verify feature implementations (only if skipTests is false)
|
||||
- If tests fail, analyze errors and fix the implementation (only if skipTests is false)
|
||||
- If other tests fail, verify if those tests are still accurate or should be updated or deleted (only if skipTests is false)
|
||||
- Continue rerunning tests and fixing issues until ALL tests pass (only if skipTests is false)
|
||||
- **DELETE test files after successful verification** - tests are only for immediate feature verification (only if skipTests is false)
|
||||
- **Use the UpdateFeatureStatus tool to mark features as verified** - NEVER manually edit feature files
|
||||
- **Always include a summary parameter when calling UpdateFeatureStatus** - describe what was done
|
||||
- **Update test utilities (tests/utils.ts) if functionality changed** - keep helpers in sync with code (only if skipTests is false)
|
||||
- Commit working code to git (only if skipTests is false - skipTests features require manual review)
|
||||
|
||||
**IMPORTANT - Manual Testing Mode (skipTests=true):**
|
||||
If a feature has skipTests=true:
|
||||
- DO NOT write automated tests
|
||||
- DO NOT commit changes - the user will review and commit manually
|
||||
- Still mark the feature as verified using UpdateFeatureStatus - it will automatically convert to "waiting_approval" for manual review
|
||||
- The user will manually verify and commit the changes
|
||||
|
||||
**IMPORTANT - UpdateFeatureStatus Tool:**
|
||||
You have access to the \`mcp__automaker-tools__UpdateFeatureStatus\` tool. When the feature is complete (and all tests pass if skipTests is false), use this tool to update the feature status:
|
||||
- Call with featureId, status="verified", and summary="Description of what was done"
|
||||
- **DO NOT manually edit feature files** - this can cause race conditions and restore old state
|
||||
- The tool safely updates the status without corrupting other feature data
|
||||
- **If skipTests=true, the tool will automatically convert "verified" to "waiting_approval"** - this is correct
|
||||
|
||||
**IMPORTANT - Feature Summary (REQUIRED):**
|
||||
When calling UpdateFeatureStatus, you MUST include a summary parameter that describes:
|
||||
- What files were modified/created
|
||||
- What functionality was added or changed
|
||||
- Any notable implementation decisions
|
||||
|
||||
Example: summary="Fixed login validation. Modified: auth.ts, login-form.tsx. Added password strength check."
|
||||
|
||||
The summary will be displayed on the Kanban card so the user can quickly see what was done.
|
||||
|
||||
**Testing Utilities:**
|
||||
- Check if tests/utils.ts needs updates based on code changes
|
||||
- If a component's selectors or behavior changed, update the corresponding utility functions
|
||||
- Add new utilities as needed for the feature's tests
|
||||
- Ensure utilities remain accurate and helpful for future tests
|
||||
|
||||
**Test Deletion Policy:**
|
||||
Tests should NOT accumulate. After a feature is verified:
|
||||
1. Delete the test file for that feature
|
||||
2. Use UpdateFeatureStatus tool to mark the feature as "verified"
|
||||
|
||||
This prevents test brittleness as the app changes rapidly.
|
||||
|
||||
You have access to:
|
||||
- Read and edit files
|
||||
- Write new code or modify existing code
|
||||
- Run bash commands (especially Playwright tests)
|
||||
- Delete files (rm command)
|
||||
- Analyze test output
|
||||
- Make git commits
|
||||
- **UpdateFeatureStatus tool** (mcp__automaker-tools__UpdateFeatureStatus) - Use this to update feature status
|
||||
|
||||
**🧠 Learning from Errors - Memory System:**
|
||||
|
||||
If you encounter an error or issue that:
|
||||
- Took multiple attempts to debug
|
||||
- Was caused by a non-obvious codebase quirk
|
||||
- Required understanding something specific about this project
|
||||
- Could trip up future agent runs
|
||||
|
||||
**ADD IT TO MEMORY** by appending to \`.automaker/memory.md\`:
|
||||
|
||||
\`\`\`markdown
|
||||
### Issue: [Brief Title]
|
||||
**Problem:** [1-2 sentence description of the issue]
|
||||
**Fix:** [Concise explanation of the solution]
|
||||
\`\`\`
|
||||
|
||||
Keep entries concise - focus on the essential information needed to avoid the issue in the future. This helps both you and other agents learn from mistakes.
|
||||
|
||||
**CRITICAL:** Be persistent and thorough - keep iterating on the implementation until all tests pass. Don't give up after the first failure. Always delete tests after they pass, use the UpdateFeatureStatus tool with a summary, and commit your work.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get system prompt for project analysis agent
|
||||
*/
|
||||
getProjectAnalysisSystemPrompt() {
|
||||
return `You are a project analysis agent that examines codebases to understand their structure, tech stack, and implemented features.
|
||||
|
||||
Your goal is to:
|
||||
- Quickly scan and understand project structure
|
||||
- Identify programming languages, frameworks, and libraries
|
||||
- Detect existing features and capabilities
|
||||
- Update the .automaker/app_spec.txt with accurate information
|
||||
- Ensure all required .automaker files and directories exist
|
||||
|
||||
Be efficient - don't read every file, focus on:
|
||||
- Configuration files (package.json, tsconfig.json, etc.)
|
||||
- Main entry points
|
||||
- Directory structure
|
||||
- README and documentation
|
||||
|
||||
**Feature Storage:**
|
||||
Features are stored in .automaker/features/{id}/feature.json - each feature has its own folder.
|
||||
Use the UpdateFeatureStatus tool to manage features, not direct file edits.
|
||||
|
||||
You have access to Read, Write, Edit, Glob, Grep, and Bash tools. Use them to explore the structure and write the necessary files.`;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = new PromptBuilder();
|
||||
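A hedged sketch of how the builder above could be driven. The require path and the feature object are illustrative; the fields used (`id`, `category`, `description`, `steps`, `skipTests`) are the ones the builder reads.

```js
const promptBuilder = require("./prompt-builder"); // path is an assumption

async function buildPromptsForFeature(projectPath) {
  const feature = {
    id: "feature-1712345678901",           // illustrative
    category: "UI",
    description: "Add dark mode toggle to settings",
    steps: ["Add a toggle in settings", "Persist the preference"],
    skipTests: false,                       // TDD mode
  };

  const systemPrompt = await promptBuilder.getCodingPrompt(projectPath, !feature.skipTests);
  const userPrompt = await promptBuilder.buildFeaturePrompt(feature, projectPath);
  return { systemPrompt, userPrompt };
}
```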
@@ -1,84 +0,0 @@
|
||||
const os = require("os");
|
||||
|
||||
// Prefer prebuilt to avoid native build issues.
|
||||
const pty = require("@homebridge/node-pty-prebuilt-multiarch");
|
||||
|
||||
/**
|
||||
* Minimal PTY helper to run CLI commands with a pseudo-terminal.
|
||||
* Useful for CLIs (like Claude) that need raw mode on Windows.
|
||||
*
|
||||
* @param {string} command Executable path
|
||||
* @param {string[]} args Arguments for the executable
|
||||
* @param {Object} options Additional spawn options
|
||||
* @param {(chunk: string) => void} [options.onData] Data callback
|
||||
* @param {string} [options.cwd] Working directory
|
||||
* @param {Object} [options.env] Extra env vars
|
||||
* @param {number} [options.cols] Terminal columns
|
||||
* @param {number} [options.rows] Terminal rows
|
||||
* @returns {Promise<{ success: boolean, exitCode: number, signal?: number, output: string, errorOutput: string }>}
|
||||
*/
|
||||
function runPtyCommand(command, args = [], options = {}) {
|
||||
const {
|
||||
onData,
|
||||
cwd = process.cwd(),
|
||||
env = {},
|
||||
cols = 120,
|
||||
rows = 30,
|
||||
} = options;
|
||||
|
||||
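// Inherit the parent environment, make sure TERM is set, then apply caller overrides.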
const mergedEnv = {
|
||||
...process.env,
|
||||
TERM: process.env.TERM || "xterm-256color",
|
||||
...env,
|
||||
};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let ptyProcess;
|
||||
|
||||
try {
|
||||
ptyProcess = pty.spawn(command, args, {
|
||||
name: os.platform() === "win32" ? "Windows.Terminal" : "xterm-color",
|
||||
cols,
|
||||
rows,
|
||||
cwd,
|
||||
env: mergedEnv,
|
||||
useConpty: true,
|
||||
});
|
||||
} catch (error) {
|
||||
return reject(error);
|
||||
}
|
||||
|
||||
let output = "";
|
||||
let errorOutput = "";
|
||||
|
||||
ptyProcess.onData((data) => {
|
||||
output += data;
|
||||
if (typeof onData === "function") {
|
||||
onData(data);
|
||||
}
|
||||
});
|
||||
|
||||
// node-pty does not emit 'error' in practice, but guard anyway
|
||||
if (ptyProcess.on) {
|
||||
ptyProcess.on("error", (err) => {
|
||||
errorOutput += err?.message || "";
|
||||
reject(err);
|
||||
});
|
||||
}
|
||||
|
||||
ptyProcess.onExit(({ exitCode, signal }) => {
|
||||
resolve({
|
||||
success: exitCode === 0,
|
||||
exitCode,
|
||||
signal,
|
||||
output,
|
||||
errorOutput,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
runPtyCommand,
|
||||
};
|
||||
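A small usage sketch for `runPtyCommand`; the binary, the arguments, and the require path are placeholders.

```js
const { runPtyCommand } = require("./pty-exec"); // path is an assumption

async function printCliVersion() {
  const result = await runPtyCommand("claude", ["--version"], {
    cwd: process.cwd(),
    onData: (chunk) => process.stdout.write(chunk), // stream PTY output live
  });
  // result shape: { success, exitCode, signal?, output, errorOutput }
  return result.success;
}
```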
|
||||
File diff suppressed because it is too large
@@ -1,569 +0,0 @@
|
||||
const path = require("path");
|
||||
const fs = require("fs/promises");
|
||||
const { exec, spawn } = require("child_process");
|
||||
const { promisify } = require("util");
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
/**
|
||||
* Worktree Manager - Handles git worktrees for feature isolation
|
||||
*
|
||||
* This service creates isolated git worktrees for each feature, allowing:
|
||||
* - Features to be worked on in isolation without affecting the main branch
|
||||
* - Easy rollback/revert by simply deleting the worktree
|
||||
* - Checkpointing - user can see changes in the worktree before merging
|
||||
*/
|
||||
class WorktreeManager {
|
||||
constructor() {
|
||||
// Cache for worktree info
|
||||
this.worktreeCache = new Map();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the base worktree directory path
|
||||
*/
|
||||
getWorktreeBasePath(projectPath) {
|
||||
return path.join(projectPath, ".automaker", "worktrees");
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a safe branch name from feature description
|
||||
*/
|
||||
generateBranchName(feature) {
|
||||
// Create a slug from the description
|
||||
const slug = feature.description
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9\s-]/g, "") // Remove special chars
|
||||
.replace(/\s+/g, "-") // Replace spaces with hyphens
|
||||
.substring(0, 40); // Limit length
|
||||
|
||||
// Add feature ID for uniqueness
|
||||
const shortId = feature.id.replace("feature-", "").substring(0, 12);
|
||||
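// e.g. { id: "feature-1712345678901", description: "Add dark mode toggle" } -> "feature/171234567890-add-dark-mode-toggle" (illustrative values)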
return `feature/${shortId}-${slug}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the project is a git repository
|
||||
*/
|
||||
async isGitRepo(projectPath) {
|
||||
try {
|
||||
await execAsync("git rev-parse --is-inside-work-tree", { cwd: projectPath });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current branch name
|
||||
*/
|
||||
async getCurrentBranch(projectPath) {
|
||||
try {
|
||||
const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", { cwd: projectPath });
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
console.error("[WorktreeManager] Failed to get current branch:", error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a branch exists (local or remote)
|
||||
*/
|
||||
async branchExists(projectPath, branchName) {
|
||||
try {
|
||||
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all existing worktrees
|
||||
*/
|
||||
async listWorktrees(projectPath) {
|
||||
try {
|
||||
const { stdout } = await execAsync("git worktree list --porcelain", { cwd: projectPath });
|
||||
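// --porcelain prints one attribute per line per worktree, e.g.:
//   worktree /path/to/repo
//   HEAD abc1234...
//   branch refs/heads/main
// with entries separated by blank lines.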
const worktrees = [];
|
||||
const lines = stdout.split("\n");
|
||||
|
||||
let currentWorktree = null;
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("worktree ")) {
|
||||
if (currentWorktree) {
|
||||
worktrees.push(currentWorktree);
|
||||
}
|
||||
currentWorktree = { path: line.replace("worktree ", "") };
|
||||
} else if (line.startsWith("branch ") && currentWorktree) {
|
||||
currentWorktree.branch = line.replace("branch refs/heads/", "");
|
||||
} else if (line.startsWith("HEAD ") && currentWorktree) {
|
||||
currentWorktree.head = line.replace("HEAD ", "");
|
||||
}
|
||||
}
|
||||
if (currentWorktree) {
|
||||
worktrees.push(currentWorktree);
|
||||
}
|
||||
|
||||
return worktrees;
|
||||
} catch (error) {
|
||||
console.error("[WorktreeManager] Failed to list worktrees:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Create a worktree for a feature
   * @param {string} projectPath - Path to the main project
   * @param {object} feature - Feature object with id and description
   * @returns {object} - { success, worktreePath, branchName, error }
   */
  async createWorktree(projectPath, feature) {
    console.log(`[WorktreeManager] Creating worktree for feature: ${feature.id}`);

    // Check if project is a git repo
    if (!await this.isGitRepo(projectPath)) {
      return { success: false, error: "Project is not a git repository" };
    }

    const branchName = this.generateBranchName(feature);
    const worktreeBasePath = this.getWorktreeBasePath(projectPath);
    const worktreePath = path.join(worktreeBasePath, branchName.replace("feature/", ""));

    try {
      // Ensure worktree directory exists
      await fs.mkdir(worktreeBasePath, { recursive: true });

      // Check if worktree already exists
      const worktrees = await this.listWorktrees(projectPath);
      const existingWorktree = worktrees.find(
        w => w.path === worktreePath || w.branch === branchName
      );

      if (existingWorktree) {
        console.log(`[WorktreeManager] Worktree already exists for feature: ${feature.id}`);
        return {
          success: true,
          worktreePath: existingWorktree.path,
          branchName: existingWorktree.branch,
          existed: true,
        };
      }

      // Get current branch to base the new branch on
      const baseBranch = await this.getCurrentBranch(projectPath);
      if (!baseBranch) {
        return { success: false, error: "Could not determine current branch" };
      }

      // Check if branch already exists
      const branchExists = await this.branchExists(projectPath, branchName);

      if (branchExists) {
        // Use existing branch
        console.log(`[WorktreeManager] Using existing branch: ${branchName}`);
        await execAsync(`git worktree add "${worktreePath}" ${branchName}`, { cwd: projectPath });
      } else {
        // Create new worktree with new branch
        console.log(`[WorktreeManager] Creating new branch: ${branchName} based on ${baseBranch}`);
        await execAsync(`git worktree add -b ${branchName} "${worktreePath}" ${baseBranch}`, { cwd: projectPath });
      }

      // Copy .automaker directory to worktree (except worktrees directory itself to avoid recursion)
      const automakerSrc = path.join(projectPath, ".automaker");
      const automakerDst = path.join(worktreePath, ".automaker");

      try {
        await fs.mkdir(automakerDst, { recursive: true });

        // Note: Features are stored in .automaker/features/{id}/feature.json
        // These are managed by the main project, not copied to worktrees

        // Copy app_spec.txt if it exists
        const appSpecSrc = path.join(automakerSrc, "app_spec.txt");
        const appSpecDst = path.join(automakerDst, "app_spec.txt");
        try {
          const content = await fs.readFile(appSpecSrc, "utf-8");
          await fs.writeFile(appSpecDst, content, "utf-8");
        } catch {
          // App spec might not exist yet
        }

        // Copy categories.json if it exists
        const categoriesSrc = path.join(automakerSrc, "categories.json");
        const categoriesDst = path.join(automakerDst, "categories.json");
        try {
          const content = await fs.readFile(categoriesSrc, "utf-8");
          await fs.writeFile(categoriesDst, content, "utf-8");
        } catch {
          // Categories might not exist yet
        }
      } catch (error) {
        console.warn("[WorktreeManager] Failed to copy .automaker directory:", error);
      }

      // Store worktree info in cache
      this.worktreeCache.set(feature.id, {
        worktreePath,
        branchName,
        createdAt: new Date().toISOString(),
        baseBranch,
      });

      console.log(`[WorktreeManager] Worktree created at: ${worktreePath}`);
      return {
        success: true,
        worktreePath,
        branchName,
        baseBranch,
        existed: false,
      };
    } catch (error) {
      console.error("[WorktreeManager] Failed to create worktree:", error);
      return { success: false, error: error.message };
    }
  }

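  // Minimal usage sketch (illustrative; project path and feature ID are hypothetical):
  //
  //   const result = await worktreeManager.createWorktree("/home/user/project", {
  //     id: "feature-1a2b3c4d5e6f7890",
  //     description: "Add OAuth login flow",
  //   });
  //   if (result.success) {
  //     // result.worktreePath -> <project>/.automaker/worktrees/1a2b3c4d5e6f-add-oauth-login-flow
  //     // result.branchName   -> feature/1a2b3c4d5e6f-add-oauth-login-flow
  //     // result.existed      -> false on first creation, true when an existing worktree is reused
  //   }
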
  /**
   * Get worktree info for a feature
   */
  async getWorktreeInfo(projectPath, featureId) {
    // Check cache first
    if (this.worktreeCache.has(featureId)) {
      return { success: true, ...this.worktreeCache.get(featureId) };
    }

    // Scan worktrees to find matching one
    const worktrees = await this.listWorktrees(projectPath);
    const worktreeBasePath = this.getWorktreeBasePath(projectPath);

    for (const worktree of worktrees) {
      // Check if this worktree is in our worktree directory
      if (worktree.path.startsWith(worktreeBasePath)) {
        // Check if the feature ID is in the branch name
        const shortId = featureId.replace("feature-", "").substring(0, 12);
        if (worktree.branch && worktree.branch.includes(shortId)) {
          const info = {
            worktreePath: worktree.path,
            branchName: worktree.branch,
            head: worktree.head,
          };
          this.worktreeCache.set(featureId, info);
          return { success: true, ...info };
        }
      }
    }

    return { success: false, error: "Worktree not found" };
  }

  /**
   * Remove a worktree for a feature
   * This effectively reverts all changes made by the agent
   */
  async removeWorktree(projectPath, featureId, deleteBranch = false) {
    console.log(`[WorktreeManager] Removing worktree for feature: ${featureId}`);

    const worktreeInfo = await this.getWorktreeInfo(projectPath, featureId);
    if (!worktreeInfo.success) {
      console.log(`[WorktreeManager] No worktree found for feature: ${featureId}`);
      return { success: true, message: "No worktree to remove" };
    }

    const { worktreePath, branchName } = worktreeInfo;

    try {
      // Remove the worktree
      await execAsync(`git worktree remove "${worktreePath}" --force`, { cwd: projectPath });
      console.log(`[WorktreeManager] Worktree removed: ${worktreePath}`);

      // Optionally delete the branch too
      if (deleteBranch && branchName) {
        try {
          await execAsync(`git branch -D ${branchName}`, { cwd: projectPath });
          console.log(`[WorktreeManager] Branch deleted: ${branchName}`);
        } catch (error) {
          console.warn(`[WorktreeManager] Could not delete branch ${branchName}:`, error.message);
        }
      }

      // Remove from cache
      this.worktreeCache.delete(featureId);

      return { success: true, removedPath: worktreePath, removedBranch: deleteBranch ? branchName : null };
    } catch (error) {
      console.error("[WorktreeManager] Failed to remove worktree:", error);
      return { success: false, error: error.message };
    }
  }

  /**
   * Get status of changes in a worktree
   */
  async getWorktreeStatus(worktreePath) {
    try {
      const { stdout: statusOutput } = await execAsync("git status --porcelain", { cwd: worktreePath });
      const { stdout: diffStat } = await execAsync("git diff --stat", { cwd: worktreePath });
      const { stdout: commitLog } = await execAsync("git log --oneline -10", { cwd: worktreePath });

      const files = statusOutput.trim().split("\n").filter(Boolean);
      const commits = commitLog.trim().split("\n").filter(Boolean);

      return {
        success: true,
        modifiedFiles: files.length,
        files: files.slice(0, 20), // Limit to 20 files
        diffStat: diffStat.trim(),
        recentCommits: commits.slice(0, 5), // Last 5 commits
      };
    } catch (error) {
      console.error("[WorktreeManager] Failed to get worktree status:", error);
      return { success: false, error: error.message };
    }
  }

  /**
   * Get detailed file diff content for a worktree
   * Returns unified diff format for all changes
   */
  async getFileDiffs(worktreePath) {
    try {
      // Get both staged and unstaged diffs
      const { stdout: unstagedDiff } = await execAsync("git diff --no-color", {
        cwd: worktreePath,
        maxBuffer: 10 * 1024 * 1024 // 10MB buffer for large diffs
      });
      const { stdout: stagedDiff } = await execAsync("git diff --cached --no-color", {
        cwd: worktreePath,
        maxBuffer: 10 * 1024 * 1024
      });

      // Get list of files with their status
      const { stdout: statusOutput } = await execAsync("git status --porcelain", { cwd: worktreePath });
      const files = statusOutput.trim().split("\n").filter(Boolean);

      // Parse file statuses
      const fileStatuses = files.map(line => {
        const status = line.substring(0, 2);
        const filePath = line.substring(3);
        return {
          status: status.trim() || 'M',
          path: filePath,
          statusText: this.getStatusText(status)
        };
      });

      // Combine diffs
      const combinedDiff = [stagedDiff, unstagedDiff].filter(Boolean).join("\n");

      return {
        success: true,
        diff: combinedDiff,
        files: fileStatuses,
        hasChanges: files.length > 0
      };
    } catch (error) {
      console.error("[WorktreeManager] Failed to get file diffs:", error);
      return { success: false, error: error.message };
    }
  }

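  // Shape of a successful getFileDiffs() result (illustrative values only):
  //
  //   {
  //     success: true,
  //     diff: "diff --git a/src/auth.ts b/src/auth.ts\n...",   // staged + unstaged, concatenated
  //     files: [{ status: "M", path: "src/auth.ts", statusText: "Modified" }],
  //     hasChanges: true,
  //   }
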
  /**
   * Get human-readable status text from git status code
   */
  getStatusText(status) {
    const statusMap = {
      'M': 'Modified',
      'A': 'Added',
      'D': 'Deleted',
      'R': 'Renamed',
      'C': 'Copied',
      'U': 'Updated',
      '?': 'Untracked',
      '!': 'Ignored'
    };
    const firstChar = status.charAt(0);
    const secondChar = status.charAt(1);
    return statusMap[firstChar] || statusMap[secondChar] || 'Changed';
  }

  /**
   * Get diff for a specific file in a worktree
   */
  async getFileDiff(worktreePath, filePath) {
    try {
      // Try to get unstaged diff first, then staged if no unstaged changes
      let diff = '';
      try {
        const { stdout } = await execAsync(`git diff --no-color -- "${filePath}"`, {
          cwd: worktreePath,
          maxBuffer: 5 * 1024 * 1024
        });
        diff = stdout;
      } catch {
        // File might be staged
      }

      if (!diff) {
        try {
          const { stdout } = await execAsync(`git diff --cached --no-color -- "${filePath}"`, {
            cwd: worktreePath,
            maxBuffer: 5 * 1024 * 1024
          });
          diff = stdout;
        } catch {
          // File might be untracked, show the content
        }
      }

      // If still no diff, might be an untracked file - show the content
      if (!diff) {
        try {
          const fullPath = path.join(worktreePath, filePath);
          const content = await fs.readFile(fullPath, 'utf-8');
          diff = `+++ ${filePath} (new file)\n${content.split('\n').map(l => '+' + l).join('\n')}`;
        } catch {
          diff = '(Unable to read file content)';
        }
      }

      return {
        success: true,
        diff,
        filePath
      };
    } catch (error) {
      console.error(`[WorktreeManager] Failed to get diff for ${filePath}:`, error);
      return { success: false, error: error.message };
    }
  }

  /**
   * Merge worktree changes back to the main branch
   */
  async mergeWorktree(projectPath, featureId, options = {}) {
    console.log(`[WorktreeManager] Merging worktree for feature: ${featureId}`);

    const worktreeInfo = await this.getWorktreeInfo(projectPath, featureId);
    if (!worktreeInfo.success) {
      return { success: false, error: "Worktree not found" };
    }

    const { branchName, worktreePath } = worktreeInfo;
    const baseBranch = await this.getCurrentBranch(projectPath);

    try {
      // First commit any uncommitted changes in the worktree
      const { stdout: status } = await execAsync("git status --porcelain", { cwd: worktreePath });
      if (status.trim()) {
        // There are uncommitted changes - commit them
        await execAsync("git add -A", { cwd: worktreePath });
        const commitMsg = options.commitMessage || `feat: complete ${featureId}`;
        await execAsync(`git commit -m "${commitMsg}"`, { cwd: worktreePath });
      }

      // Merge the feature branch into the current branch in the main repo
      if (options.squash) {
        await execAsync(`git merge --squash ${branchName}`, { cwd: projectPath });
        const squashMsg = options.squashMessage || `feat: ${featureId} - squashed merge`;
        await execAsync(`git commit -m "${squashMsg}"`, { cwd: projectPath });
      } else {
        await execAsync(`git merge ${branchName} --no-ff -m "Merge ${branchName}"`, { cwd: projectPath });
      }

      console.log(`[WorktreeManager] Successfully merged ${branchName} into ${baseBranch}`);

      // Optionally cleanup worktree after merge
      if (options.cleanup) {
        await this.removeWorktree(projectPath, featureId, true);
      }

      return {
        success: true,
        mergedBranch: branchName,
        intoBranch: baseBranch,
      };
    } catch (error) {
      console.error("[WorktreeManager] Failed to merge worktree:", error);
      return { success: false, error: error.message };
    }
  }

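  // Usage sketch (hedged; option values are examples, not taken from the original file):
  // squash the feature branch into the branch checked out in the main repo, then drop the worktree.
  //
  //   const merged = await worktreeManager.mergeWorktree(projectPath, featureId, {
  //     squash: true,
  //     squashMessage: "feat: add OAuth login flow",
  //     cleanup: true, // calls removeWorktree(projectPath, featureId, true) after the merge
  //   });
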
  /**
   * Sync changes from main branch to worktree (rebase or merge)
   */
  async syncWorktree(projectPath, featureId, method = "rebase") {
    console.log(`[WorktreeManager] Syncing worktree for feature: ${featureId}`);

    const worktreeInfo = await this.getWorktreeInfo(projectPath, featureId);
    if (!worktreeInfo.success) {
      return { success: false, error: "Worktree not found" };
    }

    const { worktreePath, baseBranch } = worktreeInfo;

    try {
      if (method === "rebase") {
        await execAsync(`git rebase ${baseBranch}`, { cwd: worktreePath });
      } else {
        await execAsync(`git merge ${baseBranch}`, { cwd: worktreePath });
      }

      return { success: true, method };
    } catch (error) {
      console.error("[WorktreeManager] Failed to sync worktree:", error);
      return { success: false, error: error.message };
    }
  }

  /**
   * Get list of all feature worktrees
   */
  async getAllFeatureWorktrees(projectPath) {
    const worktrees = await this.listWorktrees(projectPath);
    const worktreeBasePath = this.getWorktreeBasePath(projectPath);

    return worktrees.filter(w =>
      w.path.startsWith(worktreeBasePath) &&
      w.branch &&
      w.branch.startsWith("feature/")
    );
  }

  /**
   * Cleanup orphaned worktrees (worktrees without matching features)
   */
  async cleanupOrphanedWorktrees(projectPath, activeFeatureIds) {
    console.log("[WorktreeManager] Cleaning up orphaned worktrees...");

    const worktrees = await this.getAllFeatureWorktrees(projectPath);
    const cleaned = [];

    for (const worktree of worktrees) {
      // Extract feature ID from branch name
      const branchParts = worktree.branch.replace("feature/", "").split("-");
      const shortId = branchParts[0];

      // Check if any active feature has this short ID
      const hasMatchingFeature = activeFeatureIds.some(id => {
        const featureShortId = id.replace("feature-", "").substring(0, 12);
        return featureShortId === shortId;
      });

      if (!hasMatchingFeature) {
        console.log(`[WorktreeManager] Removing orphaned worktree: ${worktree.path}`);
        try {
          await execAsync(`git worktree remove "${worktree.path}" --force`, { cwd: projectPath });
          await execAsync(`git branch -D ${worktree.branch}`, { cwd: projectPath });
          cleaned.push(worktree.path);
        } catch (error) {
          console.warn(`[WorktreeManager] Failed to cleanup worktree ${worktree.path}:`, error.message);
        }
      }
    }

    return { success: true, cleaned };
  }
}

module.exports = new WorktreeManager();
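
// Consumer sketch (illustrative, not from the original source): the singleton is typically
// required from Electron main-process code and drives one feature's worktree lifecycle end to end.
// The require path below is an assumption.
//
//   const worktreeManager = require("./worktree-manager");
//   const { worktreePath } = await worktreeManager.createWorktree(projectPath, feature);
//   // ... agent edits files inside worktreePath ...
//   const status = await worktreeManager.getWorktreeStatus(worktreePath);
//   await worktreeManager.mergeWorktree(projectPath, feature.id, { squash: true, cleanup: true });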
@@ -29,11 +29,9 @@
    "dev:electron:wsl:gpu": "concurrently \"next dev -p 3007\" \"wait-on http://localhost:3007 && MESA_D3D12_DEFAULT_ADAPTER_NAME=NVIDIA electron . --no-sandbox --disable-gpu-sandbox\""
  },
  "dependencies": {
    "@anthropic-ai/claude-agent-sdk": "^0.1.61",
    "@dnd-kit/core": "^6.3.1",
    "@dnd-kit/sortable": "^10.0.0",
    "@dnd-kit/utilities": "^3.2.2",
    "@homebridge/node-pty-prebuilt-multiarch": "^0.13.1",
    "@radix-ui/react-checkbox": "^1.3.3",
    "@radix-ui/react-dialog": "^1.1.15",
    "@radix-ui/react-dropdown-menu": "^2.1.16",
@@ -58,6 +56,17 @@
    "tailwind-merge": "^3.4.0",
    "zustand": "^5.0.9"
  },
  "optionalDependencies": {
    "lightningcss-darwin-arm64": "^1.29.2",
    "lightningcss-darwin-x64": "^1.29.2",
    "lightningcss-linux-arm-gnueabihf": "^1.29.2",
    "lightningcss-linux-arm64-gnu": "^1.29.2",
    "lightningcss-linux-arm64-musl": "^1.29.2",
    "lightningcss-linux-x64-gnu": "^1.29.2",
    "lightningcss-linux-x64-musl": "^1.29.2",
    "lightningcss-win32-arm64-msvc": "^1.29.2",
    "lightningcss-win32-x64-msvc": "^1.29.2"
  },
  "devDependencies": {
    "@electron/rebuild": "^4.0.2",
    "@playwright/test": "^1.57.0",
@@ -86,8 +95,7 @@
      "electron/**/*",
      ".next/**/*",
      "public/**/*",
      "!node_modules/**/*",
      "node_modules/@anthropic-ai/**/*"
      "!node_modules/**/*"
    ],
    "extraResources": [
      {

@@ -12,6 +12,7 @@
|
||||
@custom-variant catppuccin (&:is(.catppuccin *));
|
||||
@custom-variant onedark (&:is(.onedark *));
|
||||
@custom-variant synthwave (&:is(.synthwave *));
|
||||
@custom-variant red (&:is(.red *));
|
||||
|
||||
@theme inline {
|
||||
--color-background: var(--background);
|
||||
@@ -143,6 +144,80 @@
|
||||
--running-indicator-text: oklch(0.6 0.22 265);
|
||||
}
|
||||
|
||||
/* Apply dark mode immediately based on system preference (before JS runs) */
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
/* Deep dark backgrounds - zinc-950 family */
|
||||
--background: oklch(0.04 0 0); /* zinc-950 */
|
||||
--background-50: oklch(0.04 0 0 / 0.5); /* zinc-950/50 */
|
||||
--background-80: oklch(0.04 0 0 / 0.8); /* zinc-950/80 */
|
||||
|
||||
/* Text colors following hierarchy */
|
||||
--foreground: oklch(1 0 0); /* text-white */
|
||||
--foreground-secondary: oklch(0.588 0 0); /* text-zinc-400 */
|
||||
--foreground-muted: oklch(0.525 0 0); /* text-zinc-500 */
|
||||
|
||||
/* Card and popover backgrounds */
|
||||
--card: oklch(0.14 0 0);
|
||||
--card-foreground: oklch(1 0 0);
|
||||
--popover: oklch(0.10 0 0);
|
||||
--popover-foreground: oklch(1 0 0);
|
||||
|
||||
/* Brand colors - purple/violet theme */
|
||||
--primary: oklch(0.55 0.25 265);
|
||||
--primary-foreground: oklch(1 0 0);
|
||||
--brand-400: oklch(0.6 0.22 265);
|
||||
--brand-500: oklch(0.55 0.25 265);
|
||||
--brand-600: oklch(0.5 0.28 270);
|
||||
|
||||
/* Glass morphism borders and accents */
|
||||
--secondary: oklch(1 0 0 / 0.05);
|
||||
--secondary-foreground: oklch(1 0 0);
|
||||
--muted: oklch(0.176 0 0);
|
||||
--muted-foreground: oklch(0.588 0 0);
|
||||
--accent: oklch(1 0 0 / 0.1);
|
||||
--accent-foreground: oklch(1 0 0);
|
||||
|
||||
/* Borders with transparency for glass effect */
|
||||
--border: oklch(0.176 0 0);
|
||||
--border-glass: oklch(1 0 0 / 0.1);
|
||||
--destructive: oklch(0.6 0.25 25);
|
||||
--input: oklch(0.04 0 0 / 0.8);
|
||||
--ring: oklch(0.55 0.25 265);
|
||||
|
||||
/* Chart colors with brand theme */
|
||||
--chart-1: oklch(0.55 0.25 265);
|
||||
--chart-2: oklch(0.65 0.2 160);
|
||||
--chart-3: oklch(0.75 0.2 70);
|
||||
--chart-4: oklch(0.6 0.25 300);
|
||||
--chart-5: oklch(0.6 0.25 20);
|
||||
|
||||
/* Sidebar with glass morphism */
|
||||
--sidebar: oklch(0.04 0 0 / 0.5);
|
||||
--sidebar-foreground: oklch(1 0 0);
|
||||
--sidebar-primary: oklch(0.55 0.25 265);
|
||||
--sidebar-primary-foreground: oklch(1 0 0);
|
||||
--sidebar-accent: oklch(1 0 0 / 0.05);
|
||||
--sidebar-accent-foreground: oklch(1 0 0);
|
||||
--sidebar-border: oklch(1 0 0 / 0.1);
|
||||
--sidebar-ring: oklch(0.55 0.25 265);
|
||||
|
||||
/* Action button colors */
|
||||
--action-view: oklch(0.6 0.25 265);
|
||||
--action-view-hover: oklch(0.55 0.27 270);
|
||||
--action-followup: oklch(0.6 0.2 230);
|
||||
--action-followup-hover: oklch(0.55 0.22 230);
|
||||
--action-commit: oklch(0.55 0.2 140);
|
||||
--action-commit-hover: oklch(0.5 0.22 140);
|
||||
--action-verify: oklch(0.55 0.2 140);
|
||||
--action-verify-hover: oklch(0.5 0.22 140);
|
||||
|
||||
/* Running indicator - Purple */
|
||||
--running-indicator: oklch(0.6 0.25 265);
|
||||
--running-indicator-text: oklch(0.65 0.22 265);
|
||||
}
|
||||
}
|
||||
|
||||
.light {
|
||||
/* Explicit light mode - same as root but ensures it overrides any dark defaults */
|
||||
--background: oklch(1 0 0); /* White */
|
||||
@@ -211,10 +286,10 @@
|
||||
--foreground-secondary: oklch(0.588 0 0); /* text-zinc-400 */
|
||||
--foreground-muted: oklch(0.525 0 0); /* text-zinc-500 */
|
||||
|
||||
/* Glass morphism effects */
|
||||
--card: oklch(0.04 0 0 / 0.5); /* zinc-950/50 with transparency */
|
||||
/* Card and popover backgrounds */
|
||||
--card: oklch(0.14 0 0); /* slightly lighter than background for contrast */
|
||||
--card-foreground: oklch(1 0 0);
|
||||
--popover: oklch(0.04 0 0 / 0.8); /* zinc-950/80 for popover */
|
||||
--popover: oklch(0.10 0 0); /* slightly lighter than background */
|
||||
--popover-foreground: oklch(1 0 0);
|
||||
|
||||
/* Brand colors - purple/violet theme */
|
||||
@@ -998,6 +1073,75 @@
|
||||
--running-indicator-text: oklch(0.75 0.26 350);
|
||||
}
|
||||
|
||||
/* Red Theme - Bold crimson/red aesthetic */
|
||||
.red {
|
||||
--background: oklch(0.12 0.03 15); /* Deep dark red-tinted black */
|
||||
--background-50: oklch(0.12 0.03 15 / 0.5);
|
||||
--background-80: oklch(0.12 0.03 15 / 0.8);
|
||||
|
||||
--foreground: oklch(0.95 0.01 15); /* Off-white with warm tint */
|
||||
--foreground-secondary: oklch(0.7 0.02 15);
|
||||
--foreground-muted: oklch(0.5 0.03 15);
|
||||
|
||||
--card: oklch(0.18 0.04 15); /* Slightly lighter dark red */
|
||||
--card-foreground: oklch(0.95 0.01 15);
|
||||
--popover: oklch(0.15 0.035 15);
|
||||
--popover-foreground: oklch(0.95 0.01 15);
|
||||
|
||||
--primary: oklch(0.55 0.25 25); /* Vibrant crimson red */
|
||||
--primary-foreground: oklch(0.98 0 0);
|
||||
|
||||
--brand-400: oklch(0.6 0.23 25);
|
||||
--brand-500: oklch(0.55 0.25 25); /* Crimson */
|
||||
--brand-600: oklch(0.5 0.27 25);
|
||||
|
||||
--secondary: oklch(0.22 0.05 15);
|
||||
--secondary-foreground: oklch(0.95 0.01 15);
|
||||
|
||||
--muted: oklch(0.22 0.05 15);
|
||||
--muted-foreground: oklch(0.5 0.03 15);
|
||||
|
||||
--accent: oklch(0.28 0.06 15);
|
||||
--accent-foreground: oklch(0.95 0.01 15);
|
||||
|
||||
--destructive: oklch(0.6 0.28 30); /* Bright orange-red for destructive */
|
||||
|
||||
--border: oklch(0.35 0.08 15);
|
||||
--border-glass: oklch(0.55 0.25 25 / 0.3);
|
||||
|
||||
--input: oklch(0.18 0.04 15);
|
||||
--ring: oklch(0.55 0.25 25);
|
||||
|
||||
--chart-1: oklch(0.55 0.25 25); /* Crimson */
|
||||
--chart-2: oklch(0.7 0.2 50); /* Orange */
|
||||
--chart-3: oklch(0.8 0.18 80); /* Gold */
|
||||
--chart-4: oklch(0.6 0.22 0); /* Pure red */
|
||||
--chart-5: oklch(0.65 0.2 350); /* Pink-red */
|
||||
|
||||
--sidebar: oklch(0.1 0.025 15);
|
||||
--sidebar-foreground: oklch(0.95 0.01 15);
|
||||
--sidebar-primary: oklch(0.55 0.25 25);
|
||||
--sidebar-primary-foreground: oklch(0.98 0 0);
|
||||
--sidebar-accent: oklch(0.22 0.05 15);
|
||||
--sidebar-accent-foreground: oklch(0.95 0.01 15);
|
||||
--sidebar-border: oklch(0.35 0.08 15);
|
||||
--sidebar-ring: oklch(0.55 0.25 25);
|
||||
|
||||
/* Action button colors - Red theme */
|
||||
--action-view: oklch(0.55 0.25 25); /* Crimson */
|
||||
--action-view-hover: oklch(0.5 0.27 25);
|
||||
--action-followup: oklch(0.7 0.2 50); /* Orange */
|
||||
--action-followup-hover: oklch(0.65 0.22 50);
|
||||
--action-commit: oklch(0.6 0.2 140); /* Green for positive actions */
|
||||
--action-commit-hover: oklch(0.55 0.22 140);
|
||||
--action-verify: oklch(0.6 0.2 140); /* Green */
|
||||
--action-verify-hover: oklch(0.55 0.22 140);
|
||||
|
||||
/* Running indicator - Crimson */
|
||||
--running-indicator: oklch(0.55 0.25 25);
|
||||
--running-indicator-text: oklch(0.6 0.23 25);
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border outline-ring/50;
|
||||
@@ -1253,6 +1397,39 @@
|
||||
.text-running-indicator {
|
||||
color: var(--running-indicator-text);
|
||||
}
|
||||
|
||||
/* Animated border for in-progress cards */
|
||||
@keyframes border-rotate {
|
||||
0% {
|
||||
background-position: 0% 50%;
|
||||
}
|
||||
50% {
|
||||
background-position: 100% 50%;
|
||||
}
|
||||
100% {
|
||||
background-position: 0% 50%;
|
||||
}
|
||||
}
|
||||
|
||||
.animated-border-wrapper {
|
||||
position: relative;
|
||||
border-radius: 0.75rem;
|
||||
padding: 2px;
|
||||
background: linear-gradient(
|
||||
90deg,
|
||||
var(--running-indicator),
|
||||
color-mix(in oklch, var(--running-indicator), transparent 50%),
|
||||
var(--running-indicator),
|
||||
color-mix(in oklch, var(--running-indicator), transparent 50%),
|
||||
var(--running-indicator)
|
||||
);
|
||||
background-size: 200% 100%;
|
||||
animation: border-rotate 3s ease infinite;
|
||||
}
|
||||
|
||||
.animated-border-wrapper > * {
|
||||
border-radius: calc(0.75rem - 2px);
|
||||
}
|
||||
}
|
||||
|
||||
/* Retro Overrides for Utilities */
|
||||
|
||||
@@ -7,7 +7,6 @@ import { BoardView } from "@/components/views/board-view";
import { SpecView } from "@/components/views/spec-view";
import { AgentView } from "@/components/views/agent-view";
import { SettingsView } from "@/components/views/settings-view";
import { AgentToolsView } from "@/components/views/agent-tools-view";
import { InterviewView } from "@/components/views/interview-view";
import { ContextView } from "@/components/views/context-view";
import { ProfilesView } from "@/components/views/profiles-view";
@@ -16,12 +15,26 @@ import { RunningAgentsView } from "@/components/views/running-agents-view";
import { useAppStore } from "@/store/app-store";
import { useSetupStore } from "@/store/setup-store";
import { getElectronAPI, isElectron } from "@/lib/electron";
import {
  FileBrowserProvider,
  useFileBrowser,
  setGlobalFileBrowser,
} from "@/contexts/file-browser-context";

export default function Home() {
  const { currentView, setCurrentView, setIpcConnected, theme, currentProject } = useAppStore();
function HomeContent() {
  const {
    currentView,
    setCurrentView,
    setIpcConnected,
    theme,
    currentProject,
    previewTheme,
    getEffectiveTheme,
  } = useAppStore();
  const { isFirstRun, setupComplete } = useSetupStore();
  const [isMounted, setIsMounted] = useState(false);
  const [streamerPanelOpen, setStreamerPanelOpen] = useState(false);
  const { openFileBrowser } = useFileBrowser();

  // Hidden streamer panel - opens with "\" key
  const handleStreamerPanelShortcut = useCallback((event: KeyboardEvent) => {
@@ -29,7 +42,11 @@ export default function Home() {
    const activeElement = document.activeElement;
    if (activeElement) {
      const tagName = activeElement.tagName.toLowerCase();
      if (tagName === "input" || tagName === "textarea" || tagName === "select") {
      if (
        tagName === "input" ||
        tagName === "textarea" ||
        tagName === "select"
      ) {
        return;
      }
      if (activeElement.getAttribute("contenteditable") === "true") {
@@ -61,15 +78,20 @@ export default function Home() {
    };
  }, [handleStreamerPanelShortcut]);

  // Compute the effective theme: project theme takes priority over global theme
  // This is reactive because it depends on currentProject and theme from the store
  const effectiveTheme = currentProject?.theme || theme;
  // Compute the effective theme: previewTheme takes priority, then project theme, then global theme
  // This is reactive because it depends on previewTheme, currentProject, and theme from the store
  const effectiveTheme = getEffectiveTheme();

  // Prevent hydration issues
  useEffect(() => {
    setIsMounted(true);
  }, []);

  // Initialize global file browser for HttpApiClient
  useEffect(() => {
    setGlobalFileBrowser(openFileBrowser);
  }, [openFileBrowser]);

  // Check if this is first run and redirect to setup if needed
  useEffect(() => {
    console.log("[Setup Flow] Checking setup state:", {
@@ -81,7 +103,9 @@ export default function Home() {
    });

    if (isMounted && isFirstRun && !setupComplete) {
      console.log("[Setup Flow] Redirecting to setup wizard (first run, not complete)");
      console.log(
        "[Setup Flow] Redirecting to setup wizard (first run, not complete)"
      );
      setCurrentView("setup");
    } else if (isMounted && setupComplete) {
      console.log("[Setup Flow] Setup already complete, showing normal view");
@@ -94,7 +118,7 @@ export default function Home() {
      try {
        const api = getElectronAPI();
        const result = await api.ping();
        setIpcConnected(result === "pong" || result === "pong (mock)");
        setIpcConnected(result === "pong");
      } catch (error) {
        console.error("IPC connection failed:", error);
        setIpcConnected(false);
@@ -104,7 +128,7 @@ export default function Home() {
    testConnection();
  }, [setIpcConnected]);

  // Apply theme class to document (uses effective theme - project-specific or global)
  // Apply theme class to document (uses effective theme - preview, project-specific, or global)
  useEffect(() => {
    const root = document.documentElement;
    root.classList.remove(
@@ -119,7 +143,8 @@ export default function Home() {
      "gruvbox",
      "catppuccin",
      "onedark",
      "synthwave"
      "synthwave",
      "red"
    );

    if (effectiveTheme === "dark") {
@@ -144,6 +169,8 @@ export default function Home() {
      root.classList.add("onedark");
    } else if (effectiveTheme === "synthwave") {
      root.classList.add("synthwave");
    } else if (effectiveTheme === "red") {
      root.classList.add("red");
    } else if (effectiveTheme === "light") {
      root.classList.add("light");
    } else if (effectiveTheme === "system") {
@@ -155,7 +182,7 @@ export default function Home() {
        root.classList.add("light");
      }
    }
  }, [effectiveTheme]);
  }, [effectiveTheme, previewTheme, currentProject, theme]);

  const renderView = () => {
    switch (currentView) {
@@ -171,8 +198,6 @@ export default function Home() {
        return <AgentView />;
      case "settings":
        return <SettingsView />;
      case "tools":
        return <AgentToolsView />;
      case "interview":
        return <InterviewView />;
      case "context":
@@ -193,8 +218,8 @@ export default function Home() {
        <SetupView />
        {/* Environment indicator */}
        {isMounted && !isElectron() && (
          <div className="fixed bottom-4 right-4 px-3 py-1.5 bg-yellow-500/10 text-yellow-500 text-xs rounded-full border border-yellow-500/20 pointer-events-none">
            Web Mode (Mock IPC)
          <div className="fixed bottom-4 right-4 px-3 py-1.5 bg-blue-500/10 text-blue-500 text-xs rounded-full border border-blue-500/20 pointer-events-none">
            Web Mode
          </div>
        )}
      </main>
@@ -204,23 +229,34 @@ export default function Home() {
  return (
    <main className="flex h-screen overflow-hidden" data-testid="app-container">
      <Sidebar />
      <div className="flex-1 flex flex-col overflow-hidden transition-all duration-300" style={{ marginRight: streamerPanelOpen ? '250px' : '0' }}>
      <div
        className="flex-1 flex flex-col overflow-hidden transition-all duration-300"
        style={{ marginRight: streamerPanelOpen ? "250px" : "0" }}
      >
        {renderView()}
      </div>

      {/* Environment indicator - only show after mount to prevent hydration issues */}
      {isMounted && !isElectron() && (
        <div className="fixed bottom-4 right-4 px-3 py-1.5 bg-yellow-500/10 text-yellow-500 text-xs rounded-full border border-yellow-500/20 pointer-events-none">
          Web Mode (Mock IPC)
        <div className="fixed bottom-4 right-4 px-3 py-1.5 bg-blue-500/10 text-blue-500 text-xs rounded-full border border-blue-500/20 pointer-events-none">
          Web Mode
        </div>
      )}

      {/* Hidden streamer panel - opens with "\" key, pushes content */}
      <div
        className={`fixed top-0 right-0 h-full w-[250px] bg-background border-l border-border transition-transform duration-300 ${
          streamerPanelOpen ? 'translate-x-0' : 'translate-x-full'
          streamerPanelOpen ? "translate-x-0" : "translate-x-full"
        }`}
      />
    </main>
  );
}

export default function Home() {
  return (
    <FileBrowserProvider>
      <HomeContent />
    </FileBrowserProvider>
  );
}

52 apps/app/src/components/delete-session-dialog.tsx (new file)
@@ -0,0 +1,52 @@
import { MessageSquare } from "lucide-react";
import { DeleteConfirmDialog } from "@/components/ui/delete-confirm-dialog";
import type { SessionListItem } from "@/types/electron";

interface DeleteSessionDialogProps {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  session: SessionListItem | null;
  onConfirm: (sessionId: string) => void;
}

export function DeleteSessionDialog({
  open,
  onOpenChange,
  session,
  onConfirm,
}: DeleteSessionDialogProps) {
  const handleConfirm = () => {
    if (session) {
      onConfirm(session.id);
    }
  };

  return (
    <DeleteConfirmDialog
      open={open}
      onOpenChange={onOpenChange}
      onConfirm={handleConfirm}
      title="Delete Session"
      description="Are you sure you want to delete this session? This action cannot be undone."
      confirmText="Delete Session"
      testId="delete-session-dialog"
      confirmTestId="confirm-delete-session"
    >
      {session && (
        <div className="flex items-center gap-3 p-4 rounded-lg bg-sidebar-accent/10 border border-sidebar-border">
          <div className="w-10 h-10 rounded-lg bg-sidebar-accent/20 border border-sidebar-border flex items-center justify-center shrink-0">
            <MessageSquare className="w-5 h-5 text-brand-500" />
          </div>
          <div className="min-w-0">
            <p className="font-medium text-foreground truncate">
              {session.name}
            </p>
            <p className="text-xs text-muted-foreground">
              {session.messageCount} messages
            </p>
          </div>
        </div>
      )}
    </DeleteConfirmDialog>
  );
}

520 apps/app/src/components/dialogs/board-background-modal.tsx (new file)
@@ -0,0 +1,520 @@
"use client";
|
||||
|
||||
import { useState, useRef, useCallback, useEffect } from "react";
|
||||
import { ImageIcon, Upload, Loader2, Trash2 } from "lucide-react";
|
||||
import {
|
||||
Sheet,
|
||||
SheetContent,
|
||||
SheetDescription,
|
||||
SheetHeader,
|
||||
SheetTitle,
|
||||
} from "@/components/ui/sheet";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Slider } from "@/components/ui/slider";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useAppStore, defaultBackgroundSettings } from "@/store/app-store";
|
||||
import { getHttpApiClient } from "@/lib/http-api-client";
|
||||
import { toast } from "sonner";
|
||||
|
||||
const ACCEPTED_IMAGE_TYPES = [
|
||||
"image/jpeg",
|
||||
"image/jpg",
|
||||
"image/png",
|
||||
"image/gif",
|
||||
"image/webp",
|
||||
];
|
||||
const DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
|
||||
|
||||
interface BoardBackgroundModalProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
}
|
||||
|
||||
export function BoardBackgroundModal({
|
||||
open,
|
||||
onOpenChange,
|
||||
}: BoardBackgroundModalProps) {
|
||||
const {
|
||||
currentProject,
|
||||
boardBackgroundByProject,
|
||||
setBoardBackground,
|
||||
setCardOpacity,
|
||||
setColumnOpacity,
|
||||
setColumnBorderEnabled,
|
||||
setCardGlassmorphism,
|
||||
setCardBorderEnabled,
|
||||
setCardBorderOpacity,
|
||||
setHideScrollbar,
|
||||
clearBoardBackground,
|
||||
} = useAppStore();
|
||||
const [isDragOver, setIsDragOver] = useState(false);
|
||||
const [isProcessing, setIsProcessing] = useState(false);
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
const [previewImage, setPreviewImage] = useState<string | null>(null);
|
||||
|
||||
// Get current background settings (live from store)
|
||||
const backgroundSettings =
|
||||
(currentProject && boardBackgroundByProject[currentProject.path]) ||
|
||||
defaultBackgroundSettings;
|
||||
|
||||
const cardOpacity = backgroundSettings.cardOpacity;
|
||||
const columnOpacity = backgroundSettings.columnOpacity;
|
||||
const columnBorderEnabled = backgroundSettings.columnBorderEnabled;
|
||||
const cardGlassmorphism = backgroundSettings.cardGlassmorphism;
|
||||
const cardBorderEnabled = backgroundSettings.cardBorderEnabled;
|
||||
const cardBorderOpacity = backgroundSettings.cardBorderOpacity;
|
||||
const hideScrollbar = backgroundSettings.hideScrollbar;
|
||||
const imageVersion = backgroundSettings.imageVersion;
|
||||
|
||||
// Update preview image when background settings change
|
||||
useEffect(() => {
|
||||
if (currentProject && backgroundSettings.imagePath) {
|
||||
const serverUrl =
|
||||
process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
|
||||
// Add cache-busting query parameter to force browser to reload image
|
||||
const cacheBuster = imageVersion
|
||||
? `&v=${imageVersion}`
|
||||
: `&v=${Date.now()}`;
|
||||
const imagePath = `${serverUrl}/api/fs/image?path=${encodeURIComponent(
|
||||
backgroundSettings.imagePath
|
||||
)}&projectPath=${encodeURIComponent(currentProject.path)}${cacheBuster}`;
|
||||
setPreviewImage(imagePath);
|
||||
} else {
|
||||
setPreviewImage(null);
|
||||
}
|
||||
}, [currentProject, backgroundSettings.imagePath, imageVersion]);
|
||||
|
||||
const fileToBase64 = (file: File): Promise<string> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = () => {
|
||||
if (typeof reader.result === "string") {
|
||||
resolve(reader.result);
|
||||
} else {
|
||||
reject(new Error("Failed to read file as base64"));
|
||||
}
|
||||
};
|
||||
reader.onerror = () => reject(new Error("Failed to read file"));
|
||||
reader.readAsDataURL(file);
|
||||
});
|
||||
};
|
||||
|
||||
const processFile = useCallback(
|
||||
async (file: File) => {
|
||||
if (!currentProject) {
|
||||
toast.error("No project selected");
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate file type
|
||||
if (!ACCEPTED_IMAGE_TYPES.includes(file.type)) {
|
||||
toast.error(
|
||||
"Unsupported file type. Please use JPG, PNG, GIF, or WebP."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate file size
|
||||
if (file.size > DEFAULT_MAX_FILE_SIZE) {
|
||||
const maxSizeMB = DEFAULT_MAX_FILE_SIZE / (1024 * 1024);
|
||||
toast.error(`File too large. Maximum size is ${maxSizeMB}MB.`);
|
||||
return;
|
||||
}
|
||||
|
||||
setIsProcessing(true);
|
||||
try {
|
||||
const base64 = await fileToBase64(file);
|
||||
|
||||
// Set preview immediately
|
||||
setPreviewImage(base64);
|
||||
|
||||
// Save to server
|
||||
const httpClient = getHttpApiClient();
|
||||
const result = await httpClient.saveBoardBackground(
|
||||
base64,
|
||||
file.name,
|
||||
file.type,
|
||||
currentProject.path
|
||||
);
|
||||
|
||||
if (result.success && result.path) {
|
||||
// Update store with the relative path (live update)
|
||||
setBoardBackground(currentProject.path, result.path);
|
||||
toast.success("Background image saved");
|
||||
} else {
|
||||
toast.error(result.error || "Failed to save background image");
|
||||
setPreviewImage(null);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to process image:", error);
|
||||
toast.error("Failed to process image");
|
||||
setPreviewImage(null);
|
||||
} finally {
|
||||
setIsProcessing(false);
|
||||
}
|
||||
},
|
||||
[currentProject, setBoardBackground]
|
||||
);
|
||||
|
||||
const handleDrop = useCallback(
|
||||
(e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsDragOver(false);
|
||||
|
||||
const files = e.dataTransfer.files;
|
||||
if (files.length > 0) {
|
||||
processFile(files[0]);
|
||||
}
|
||||
},
|
||||
[processFile]
|
||||
);
|
||||
|
||||
const handleDragOver = useCallback((e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsDragOver(true);
|
||||
}, []);
|
||||
|
||||
const handleDragLeave = useCallback((e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setIsDragOver(false);
|
||||
}, []);
|
||||
|
||||
const handleFileSelect = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const files = e.target.files;
|
||||
if (files && files.length > 0) {
|
||||
processFile(files[0]);
|
||||
}
|
||||
// Reset the input so the same file can be selected again
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = "";
|
||||
}
|
||||
},
|
||||
[processFile]
|
||||
);
|
||||
|
||||
const handleBrowseClick = useCallback(() => {
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.click();
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleClear = useCallback(async () => {
|
||||
if (!currentProject) return;
|
||||
|
||||
try {
|
||||
setIsProcessing(true);
|
||||
const httpClient = getHttpApiClient();
|
||||
const result = await httpClient.deleteBoardBackground(
|
||||
currentProject.path
|
||||
);
|
||||
|
||||
if (result.success) {
|
||||
clearBoardBackground(currentProject.path);
|
||||
setPreviewImage(null);
|
||||
toast.success("Background image cleared");
|
||||
} else {
|
||||
toast.error(result.error || "Failed to clear background image");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to clear background:", error);
|
||||
toast.error("Failed to clear background");
|
||||
} finally {
|
||||
setIsProcessing(false);
|
||||
}
|
||||
}, [currentProject, clearBoardBackground]);
|
||||
|
||||
// Live update opacity when sliders change
|
||||
const handleCardOpacityChange = useCallback(
|
||||
(value: number[]) => {
|
||||
if (!currentProject) return;
|
||||
setCardOpacity(currentProject.path, value[0]);
|
||||
},
|
||||
[currentProject, setCardOpacity]
|
||||
);
|
||||
|
||||
const handleColumnOpacityChange = useCallback(
|
||||
(value: number[]) => {
|
||||
if (!currentProject) return;
|
||||
setColumnOpacity(currentProject.path, value[0]);
|
||||
},
|
||||
[currentProject, setColumnOpacity]
|
||||
);
|
||||
|
||||
const handleColumnBorderToggle = useCallback(
|
||||
(checked: boolean) => {
|
||||
if (!currentProject) return;
|
||||
setColumnBorderEnabled(currentProject.path, checked);
|
||||
},
|
||||
[currentProject, setColumnBorderEnabled]
|
||||
);
|
||||
|
||||
const handleCardGlassmorphismToggle = useCallback(
|
||||
(checked: boolean) => {
|
||||
if (!currentProject) return;
|
||||
setCardGlassmorphism(currentProject.path, checked);
|
||||
},
|
||||
[currentProject, setCardGlassmorphism]
|
||||
);
|
||||
|
||||
const handleCardBorderToggle = useCallback(
|
||||
(checked: boolean) => {
|
||||
if (!currentProject) return;
|
||||
setCardBorderEnabled(currentProject.path, checked);
|
||||
},
|
||||
[currentProject, setCardBorderEnabled]
|
||||
);
|
||||
|
||||
const handleCardBorderOpacityChange = useCallback(
|
||||
(value: number[]) => {
|
||||
if (!currentProject) return;
|
||||
setCardBorderOpacity(currentProject.path, value[0]);
|
||||
},
|
||||
[currentProject, setCardBorderOpacity]
|
||||
);
|
||||
|
||||
const handleHideScrollbarToggle = useCallback(
|
||||
(checked: boolean) => {
|
||||
if (!currentProject) return;
|
||||
setHideScrollbar(currentProject.path, checked);
|
||||
},
|
||||
[currentProject, setHideScrollbar]
|
||||
);
|
||||
|
||||
if (!currentProject) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Sheet open={open} onOpenChange={onOpenChange}>
|
||||
<SheetContent side="right" className="w-full sm:max-w-md overflow-y-auto">
|
||||
<SheetHeader className="px-6 pt-6">
|
||||
<SheetTitle className="flex items-center gap-2">
|
||||
<ImageIcon className="w-5 h-5 text-brand-500" />
|
||||
Board Background Settings
|
||||
</SheetTitle>
|
||||
<SheetDescription className="text-muted-foreground">
|
||||
Set a custom background image for your kanban board and adjust
|
||||
card/column opacity
|
||||
</SheetDescription>
|
||||
</SheetHeader>
|
||||
|
||||
<div className="space-y-6 px-6 pb-6">
|
||||
{/* Image Upload Section */}
|
||||
<div className="space-y-3">
|
||||
<Label>Background Image</Label>
|
||||
|
||||
{/* Hidden file input */}
|
||||
<input
|
||||
ref={fileInputRef}
|
||||
type="file"
|
||||
accept={ACCEPTED_IMAGE_TYPES.join(",")}
|
||||
onChange={handleFileSelect}
|
||||
className="hidden"
|
||||
disabled={isProcessing}
|
||||
/>
|
||||
|
||||
{/* Drop zone */}
|
||||
<div
|
||||
onDrop={handleDrop}
|
||||
onDragOver={handleDragOver}
|
||||
onDragLeave={handleDragLeave}
|
||||
className={cn(
|
||||
"relative rounded-lg border-2 border-dashed transition-all duration-200",
|
||||
{
|
||||
"border-brand-500/60 bg-brand-500/5 dark:bg-brand-500/10":
|
||||
isDragOver && !isProcessing,
|
||||
"border-muted-foreground/25": !isDragOver && !isProcessing,
|
||||
"border-muted-foreground/10 opacity-50 cursor-not-allowed":
|
||||
isProcessing,
|
||||
"hover:border-brand-500/40 hover:bg-brand-500/5 dark:hover:bg-brand-500/5":
|
||||
!isProcessing && !isDragOver,
|
||||
}
|
||||
)}
|
||||
>
|
||||
{previewImage ? (
|
||||
<div className="relative p-4">
|
||||
<div className="relative w-full h-48 rounded-md overflow-hidden border border-border bg-muted">
|
||||
<img
|
||||
src={previewImage}
|
||||
alt="Background preview"
|
||||
className="w-full h-full object-cover"
|
||||
/>
|
||||
{isProcessing && (
|
||||
<div className="absolute inset-0 flex items-center justify-center bg-background/80">
|
||||
<Loader2 className="w-6 h-6 animate-spin text-brand-500" />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex gap-2 mt-3">
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleBrowseClick}
|
||||
disabled={isProcessing}
|
||||
className="flex-1"
|
||||
>
|
||||
<Upload className="w-4 h-4 mr-2" />
|
||||
Change Image
|
||||
</Button>
|
||||
<Button
|
||||
variant="destructive"
|
||||
size="sm"
|
||||
onClick={handleClear}
|
||||
disabled={isProcessing}
|
||||
>
|
||||
<Trash2 className="w-4 h-4 mr-2" />
|
||||
Clear
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div
|
||||
onClick={handleBrowseClick}
|
||||
className="flex flex-col items-center justify-center p-8 text-center cursor-pointer"
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
"rounded-full p-3 mb-3",
|
||||
isDragOver && !isProcessing
|
||||
? "bg-brand-500/10 dark:bg-brand-500/20"
|
||||
: "bg-muted"
|
||||
)}
|
||||
>
|
||||
{isProcessing ? (
|
||||
<Upload className="h-6 w-6 animate-spin text-muted-foreground" />
|
||||
) : (
|
||||
<ImageIcon className="h-6 w-6 text-muted-foreground" />
|
||||
)}
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{isDragOver && !isProcessing
|
||||
? "Drop image here"
|
||||
: "Click to upload or drag and drop"}
|
||||
</p>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
JPG, PNG, GIF, or WebP (max{" "}
|
||||
{Math.round(DEFAULT_MAX_FILE_SIZE / (1024 * 1024))}MB)
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Opacity Controls */}
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label>Card Opacity</Label>
|
||||
<span className="text-sm text-muted-foreground">
|
||||
{cardOpacity}%
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[cardOpacity]}
|
||||
onValueChange={handleCardOpacityChange}
|
||||
min={0}
|
||||
max={100}
|
||||
step={1}
|
||||
className="w-full"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label>Column Opacity</Label>
|
||||
<span className="text-sm text-muted-foreground">
|
||||
{columnOpacity}%
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[columnOpacity]}
|
||||
onValueChange={handleColumnOpacityChange}
|
||||
min={0}
|
||||
max={100}
|
||||
step={1}
|
||||
className="w-full"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Column Border Toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
id="column-border-toggle"
|
||||
checked={columnBorderEnabled}
|
||||
onCheckedChange={handleColumnBorderToggle}
|
||||
/>
|
||||
<Label htmlFor="column-border-toggle" className="cursor-pointer">
|
||||
Show Column Borders
|
||||
</Label>
|
||||
</div>
|
||||
|
||||
{/* Card Glassmorphism Toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
id="card-glassmorphism-toggle"
|
||||
checked={cardGlassmorphism}
|
||||
onCheckedChange={handleCardGlassmorphismToggle}
|
||||
/>
|
||||
<Label
|
||||
htmlFor="card-glassmorphism-toggle"
|
||||
className="cursor-pointer"
|
||||
>
|
||||
Card Glassmorphism (blur effect)
|
||||
</Label>
|
||||
</div>
|
||||
|
||||
{/* Card Border Toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
id="card-border-toggle"
|
||||
checked={cardBorderEnabled}
|
||||
onCheckedChange={handleCardBorderToggle}
|
||||
/>
|
||||
<Label htmlFor="card-border-toggle" className="cursor-pointer">
|
||||
Show Card Borders
|
||||
</Label>
|
||||
</div>
|
||||
|
||||
{/* Card Border Opacity - only show when border is enabled */}
|
||||
{cardBorderEnabled && (
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<Label>Card Border Opacity</Label>
|
||||
<span className="text-sm text-muted-foreground">
|
||||
{cardBorderOpacity}%
|
||||
</span>
|
||||
</div>
|
||||
<Slider
|
||||
value={[cardBorderOpacity]}
|
||||
onValueChange={handleCardBorderOpacityChange}
|
||||
min={0}
|
||||
max={100}
|
||||
step={1}
|
||||
className="w-full"
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Hide Scrollbar Toggle */}
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
id="hide-scrollbar-toggle"
|
||||
checked={hideScrollbar}
|
||||
onCheckedChange={handleHideScrollbarToggle}
|
||||
/>
|
||||
<Label htmlFor="hide-scrollbar-toggle" className="cursor-pointer">
|
||||
Hide Board Scrollbar
|
||||
</Label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</SheetContent>
|
||||
</Sheet>
|
||||
);
|
||||
}
|
||||
231 apps/app/src/components/dialogs/file-browser-dialog.tsx (new file)
@@ -0,0 +1,231 @@
"use client";
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
import { FolderOpen, Folder, ChevronRight, Home, ArrowLeft, HardDrive } from "lucide-react";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
|
||||
interface DirectoryEntry {
|
||||
name: string;
|
||||
path: string;
|
||||
}
|
||||
|
||||
interface BrowseResult {
|
||||
success: boolean;
|
||||
currentPath: string;
|
||||
parentPath: string | null;
|
||||
directories: DirectoryEntry[];
|
||||
drives?: string[];
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface FileBrowserDialogProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
onSelect: (path: string) => void;
|
||||
title?: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
export function FileBrowserDialog({
|
||||
open,
|
||||
onOpenChange,
|
||||
onSelect,
|
||||
title = "Select Project Directory",
|
||||
description = "Navigate to your project folder",
|
||||
}: FileBrowserDialogProps) {
|
||||
const [currentPath, setCurrentPath] = useState<string>("");
|
||||
const [parentPath, setParentPath] = useState<string | null>(null);
|
||||
const [directories, setDirectories] = useState<DirectoryEntry[]>([]);
|
||||
const [drives, setDrives] = useState<string[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [error, setError] = useState("");
|
||||
|
||||
const browseDirectory = async (dirPath?: string) => {
|
||||
setLoading(true);
|
||||
setError("");
|
||||
|
||||
try {
|
||||
// Get server URL from environment or default
|
||||
const serverUrl = process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
|
||||
|
||||
const response = await fetch(`${serverUrl}/api/fs/browse`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ dirPath }),
|
||||
});
|
||||
|
||||
const result: BrowseResult = await response.json();
|
||||
|
||||
if (result.success) {
|
||||
setCurrentPath(result.currentPath);
|
||||
setParentPath(result.parentPath);
|
||||
setDirectories(result.directories);
|
||||
setDrives(result.drives || []);
|
||||
} else {
|
||||
setError(result.error || "Failed to browse directory");
|
||||
}
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : "Failed to load directories");
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
// Load home directory on mount
|
||||
useEffect(() => {
|
||||
if (open && !currentPath) {
|
||||
browseDirectory();
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
const handleSelectDirectory = (dir: DirectoryEntry) => {
|
||||
browseDirectory(dir.path);
|
||||
};
|
||||
|
||||
const handleGoToParent = () => {
|
||||
if (parentPath) {
|
||||
browseDirectory(parentPath);
|
||||
}
|
||||
};
|
||||
|
||||
const handleGoHome = () => {
|
||||
browseDirectory();
|
||||
};
|
||||
|
||||
const handleSelectDrive = (drivePath: string) => {
|
||||
browseDirectory(drivePath);
|
||||
};
|
||||
|
||||
const handleSelect = () => {
|
||||
if (currentPath) {
|
||||
onSelect(currentPath);
|
||||
onOpenChange(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent className="bg-popover border-border max-w-2xl max-h-[80vh]">
|
||||
<DialogHeader>
|
||||
<DialogTitle className="flex items-center gap-2">
|
||||
<FolderOpen className="w-5 h-5 text-brand-500" />
|
||||
{title}
|
||||
</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
{description}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="flex flex-col gap-3 min-h-[400px]">
|
||||
{/* Drives selector (Windows only) */}
|
||||
{drives.length > 0 && (
|
||||
<div className="flex flex-wrap gap-2 p-3 rounded-lg bg-sidebar-accent/10 border border-sidebar-border">
|
||||
<div className="flex items-center gap-1 text-xs text-muted-foreground mr-2">
|
||||
<HardDrive className="w-3 h-3" />
|
||||
<span>Drives:</span>
|
||||
</div>
|
||||
{drives.map((drive) => (
|
||||
<Button
|
||||
key={drive}
|
||||
variant={currentPath.startsWith(drive) ? "default" : "outline"}
|
||||
size="sm"
|
||||
onClick={() => handleSelectDrive(drive)}
|
||||
className="h-7 px-3 text-xs"
|
||||
disabled={loading}
|
||||
>
|
||||
{drive.replace("\\", "")}
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Current path breadcrumb */}
|
||||
<div className="flex items-center gap-2 p-3 rounded-lg bg-sidebar-accent/10 border border-sidebar-border">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={handleGoHome}
|
||||
className="h-7 px-2"
|
||||
disabled={loading}
|
||||
>
|
||||
<Home className="w-4 h-4" />
|
||||
</Button>
|
||||
{parentPath && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={handleGoToParent}
|
||||
className="h-7 px-2"
|
||||
disabled={loading}
|
||||
>
|
||||
<ArrowLeft className="w-4 h-4" />
|
||||
</Button>
|
||||
)}
|
||||
<div className="flex-1 font-mono text-sm truncate text-muted-foreground">
|
||||
{currentPath || "Loading..."}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Directory list */}
|
||||
<div className="flex-1 overflow-y-auto border border-sidebar-border rounded-lg">
|
||||
{loading && (
|
||||
<div className="flex items-center justify-center h-full p-8">
|
||||
<div className="text-sm text-muted-foreground">Loading directories...</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{error && (
|
||||
<div className="flex items-center justify-center h-full p-8">
|
||||
<div className="text-sm text-destructive">{error}</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!loading && !error && directories.length === 0 && (
|
||||
<div className="flex items-center justify-center h-full p-8">
|
||||
<div className="text-sm text-muted-foreground">No subdirectories found</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!loading && !error && directories.length > 0 && (
|
||||
<div className="divide-y divide-sidebar-border">
|
||||
{directories.map((dir) => (
|
||||
<button
|
||||
key={dir.path}
|
||||
onClick={() => handleSelectDirectory(dir)}
|
||||
className="w-full flex items-center gap-3 p-3 hover:bg-sidebar-accent/10 transition-colors text-left group"
|
||||
>
|
||||
<Folder className="w-5 h-5 text-brand-500 shrink-0" />
|
||||
<span className="flex-1 truncate text-sm">{dir.name}</span>
|
||||
<ChevronRight className="w-4 h-4 text-muted-foreground opacity-0 group-hover:opacity-100 transition-opacity shrink-0" />
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="text-xs text-muted-foreground">
|
||||
Click on a folder to navigate. Select the current folder or navigate to a subfolder.
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<DialogFooter className="gap-2 sm:gap-0">
|
||||
<Button variant="ghost" onClick={() => onOpenChange(false)}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button onClick={handleSelect} disabled={!currentPath || loading}>
|
||||
<FolderOpen className="w-4 h-4 mr-2" />
|
||||
Select Current Folder
|
||||
</Button>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
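For reference, a minimal sketch of the contract the directory browser above expects from POST /api/fs/browse. It is inferred only from how the component reads the response (result.success, currentPath, parentPath, directories, drives, error, and dir.path / dir.name); field optionality and anything not read above are assumptions, not the server's actual types.

// Sketch of the /api/fs/browse contract as consumed by the dialog above.
// Only the fields the component reads are grounded; the rest is assumed.
interface DirectoryEntry {
  name: string; // folder name shown in the list
  path: string; // absolute path passed back into browseDirectory()
}

interface BrowseResult {
  success: boolean;
  currentPath: string;        // directory that was listed
  parentPath: string | null;  // null at a filesystem root
  directories: DirectoryEntry[];
  drives?: string[];          // Windows drive roots, e.g. "C:\\"
  error?: string;             // populated when success is false
}

// Request body sent by browseDirectory(); omitting dirPath is assumed to
// mean "list the home directory", matching the handleGoHome() behaviour.
interface BrowseRequest {
  dirPath?: string;
}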
@@ -2,7 +2,9 @@
|
||||
|
||||
import { useState, useMemo, useEffect, useCallback, useRef } from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useAppStore, formatShortcut } from "@/store/app-store";
|
||||
import { useAppStore, formatShortcut, type ThemeMode } from "@/store/app-store";
|
||||
import { CoursePromoBadge } from "@/components/ui/course-promo-badge";
|
||||
import { IS_MARKETING } from "@/config/app-config";
|
||||
import {
|
||||
FolderOpen,
|
||||
Plus,
|
||||
@@ -12,7 +14,6 @@ import {
|
||||
Bot,
|
||||
Folder,
|
||||
X,
|
||||
Wrench,
|
||||
PanelLeft,
|
||||
PanelLeftClose,
|
||||
ChevronDown,
|
||||
@@ -26,22 +27,11 @@ import {
|
||||
UserCircle,
|
||||
MoreVertical,
|
||||
Palette,
|
||||
Moon,
|
||||
Sun,
|
||||
Terminal,
|
||||
Ghost,
|
||||
Snowflake,
|
||||
Flame,
|
||||
Sparkles as TokyoNightIcon,
|
||||
Eclipse,
|
||||
Trees,
|
||||
Cat,
|
||||
Atom,
|
||||
Radio,
|
||||
Monitor,
|
||||
Search,
|
||||
Bug,
|
||||
Activity,
|
||||
Recycle,
|
||||
} from "lucide-react";
|
||||
import {
|
||||
DropdownMenu,
|
||||
@@ -70,7 +60,12 @@ import {
|
||||
useKeyboardShortcutsConfig,
|
||||
KeyboardShortcut,
|
||||
} from "@/hooks/use-keyboard-shortcuts";
|
||||
import { getElectronAPI, Project, TrashedProject } from "@/lib/electron";
|
||||
import {
|
||||
getElectronAPI,
|
||||
Project,
|
||||
TrashedProject,
|
||||
RunningAgent,
|
||||
} from "@/lib/electron";
|
||||
import {
|
||||
initializeProject,
|
||||
hasAppSpec,
|
||||
@@ -78,8 +73,10 @@ import {
|
||||
} from "@/lib/project-init";
|
||||
import { toast } from "sonner";
|
||||
import { Sparkles, Loader2 } from "lucide-react";
|
||||
import { themeOptions } from "@/config/theme-options";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import type { SpecRegenerationEvent } from "@/types/electron";
|
||||
import { DeleteProjectDialog } from "@/components/views/settings-view/components/delete-project-dialog";
|
||||
import {
|
||||
DndContext,
|
||||
DragEndEvent,
|
||||
@@ -173,21 +170,14 @@ function SortableProjectItem({
|
||||
);
|
||||
}
|
||||
|
||||
// Theme options for project theme selector
|
||||
// Theme options for project theme selector - derived from the shared config
|
||||
const PROJECT_THEME_OPTIONS = [
|
||||
{ value: "", label: "Use Global", icon: Monitor },
|
||||
{ value: "dark", label: "Dark", icon: Moon },
|
||||
{ value: "light", label: "Light", icon: Sun },
|
||||
{ value: "retro", label: "Retro", icon: Terminal },
|
||||
{ value: "dracula", label: "Dracula", icon: Ghost },
|
||||
{ value: "nord", label: "Nord", icon: Snowflake },
|
||||
{ value: "monokai", label: "Monokai", icon: Flame },
|
||||
{ value: "tokyonight", label: "Tokyo Night", icon: TokyoNightIcon },
|
||||
{ value: "solarized", label: "Solarized", icon: Eclipse },
|
||||
{ value: "gruvbox", label: "Gruvbox", icon: Trees },
|
||||
{ value: "catppuccin", label: "Catppuccin", icon: Cat },
|
||||
{ value: "onedark", label: "One Dark", icon: Atom },
|
||||
{ value: "synthwave", label: "Synthwave", icon: Radio },
|
||||
...themeOptions.map((opt) => ({
|
||||
value: opt.value,
|
||||
label: opt.label,
|
||||
icon: opt.Icon,
|
||||
})),
|
||||
] as const;
|
||||
|
||||
export function Sidebar() {
|
||||
@@ -198,7 +188,7 @@ export function Sidebar() {
|
||||
currentView,
|
||||
sidebarOpen,
|
||||
projectHistory,
|
||||
addProject,
|
||||
upsertAndSetCurrentProject,
|
||||
setCurrentProject,
|
||||
setCurrentView,
|
||||
toggleSidebar,
|
||||
@@ -210,7 +200,10 @@ export function Sidebar() {
|
||||
cycleNextProject,
|
||||
clearProjectHistory,
|
||||
setProjectTheme,
|
||||
setTheme,
|
||||
setPreviewTheme,
|
||||
theme: globalTheme,
|
||||
moveProjectToTrash,
|
||||
} = useAppStore();
|
||||
|
||||
// Get customizable keyboard shortcuts
|
||||
@@ -224,6 +217,12 @@ export function Sidebar() {
|
||||
const [activeTrashId, setActiveTrashId] = useState<string | null>(null);
|
||||
const [isEmptyingTrash, setIsEmptyingTrash] = useState(false);
|
||||
|
||||
// State for delete project confirmation dialog
|
||||
const [showDeleteProjectDialog, setShowDeleteProjectDialog] = useState(false);
|
||||
|
||||
// State for running agents count
|
||||
const [runningAgentsCount, setRunningAgentsCount] = useState(0);
|
||||
|
||||
// State for new project setup dialog
|
||||
const [showSetupDialog, setShowSetupDialog] = useState(false);
|
||||
const [setupProjectPath, setSetupProjectPath] = useState("");
|
||||
@@ -333,6 +332,49 @@ export function Sidebar() {
|
||||
};
|
||||
}, [setCurrentView]);
|
||||
|
||||
// Fetch running agents count function - used for initial load and event-driven updates
|
||||
const fetchRunningAgentsCount = useCallback(async () => {
|
||||
try {
|
||||
const api = getElectronAPI();
|
||||
if (api.runningAgents) {
|
||||
const result = await api.runningAgents.getAll();
|
||||
if (result.success && result.runningAgents) {
|
||||
setRunningAgentsCount(result.runningAgents.length);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("[Sidebar] Error fetching running agents count:", error);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Subscribe to auto-mode events to update running agents count in real-time
|
||||
useEffect(() => {
|
||||
const api = getElectronAPI();
|
||||
if (!api.autoMode) {
|
||||
// If autoMode is not available, still fetch initial count
|
||||
fetchRunningAgentsCount();
|
||||
return;
|
||||
}
|
||||
|
||||
// Initial fetch on mount
|
||||
fetchRunningAgentsCount();
|
||||
|
||||
const unsubscribe = api.autoMode.onEvent((event) => {
|
||||
// When a feature starts, completes, or errors, refresh the count
|
||||
if (
|
||||
event.type === "auto_mode_feature_complete" ||
|
||||
event.type === "auto_mode_error" ||
|
||||
event.type === "auto_mode_feature_start"
|
||||
) {
|
||||
fetchRunningAgentsCount();
|
||||
}
|
||||
});
|
||||
|
||||
return () => {
|
||||
unsubscribe();
|
||||
};
|
||||
}, [fetchRunningAgentsCount]);
|
||||
|
||||
// Handle creating initial spec for new project
|
||||
const handleCreateInitialSpec = useCallback(async () => {
|
||||
if (!setupProjectPath || !projectOverview.trim()) return;
|
||||
@@ -413,15 +455,14 @@ export function Sidebar() {
|
||||
return;
|
||||
}
|
||||
|
||||
const project = {
|
||||
id: `project-${Date.now()}`,
|
||||
name,
|
||||
path,
|
||||
lastOpened: new Date().toISOString(),
|
||||
};
|
||||
|
||||
addProject(project);
|
||||
setCurrentProject(project);
|
||||
// Upsert project and set as current (handles both create and update cases)
|
||||
// Theme preservation is handled by the store action
|
||||
const trashedProject = trashedProjects.find((p) => p.path === path);
|
||||
const effectiveTheme =
|
||||
(trashedProject?.theme as ThemeMode | undefined) ||
|
||||
(currentProject?.theme as ThemeMode | undefined) ||
|
||||
globalTheme;
|
||||
const project = upsertAndSetCurrentProject(path, name, effectiveTheme);
|
||||
|
||||
// Check if app_spec.txt exists
|
||||
const specExists = await hasAppSpec(path);
|
||||
@@ -455,7 +496,12 @@ export function Sidebar() {
|
||||
});
|
||||
}
|
||||
}
|
||||
}, [addProject, setCurrentProject]);
|
||||
}, [
|
||||
trashedProjects,
|
||||
upsertAndSetCurrentProject,
|
||||
currentProject,
|
||||
globalTheme,
|
||||
]);
|
||||
|
||||
const handleRestoreProject = useCallback(
|
||||
(projectId: string) => {
|
||||
@@ -510,14 +556,14 @@ export function Sidebar() {
|
||||
}
|
||||
|
||||
const confirmed = window.confirm(
|
||||
"Clear all trashed projects from Automaker? This does not delete folders from disk."
|
||||
"Clear all projects from recycle bin? This does not delete folders from disk."
|
||||
);
|
||||
if (!confirmed) return;
|
||||
|
||||
setIsEmptyingTrash(true);
|
||||
try {
|
||||
emptyTrash();
|
||||
toast.success("Trash cleared");
|
||||
toast.success("Recycle bin cleared");
|
||||
setShowTrashDialog(false);
|
||||
} finally {
|
||||
setIsEmptyingTrash(false);
|
||||
@@ -557,12 +603,6 @@ export function Sidebar() {
|
||||
icon: BookOpen,
|
||||
shortcut: shortcuts.context,
|
||||
},
|
||||
{
|
||||
id: "tools",
|
||||
label: "Agent Tools",
|
||||
icon: Wrench,
|
||||
shortcut: shortcuts.tools,
|
||||
},
|
||||
{
|
||||
id: "profiles",
|
||||
label: "AI Profiles",
|
||||
@@ -743,7 +783,9 @@ export function Sidebar() {
|
||||
<div
|
||||
className={cn(
|
||||
"h-20 border-b border-sidebar-border shrink-0 titlebar-drag-region",
|
||||
sidebarOpen ? "pt-8 px-3 lg:px-6 flex items-center justify-between" : "pt-2 pb-2 px-3 flex flex-col items-center justify-center gap-2"
|
||||
sidebarOpen
|
||||
? "pt-8 px-3 lg:px-6 flex items-center justify-between"
|
||||
: "pt-2 pb-2 px-3 flex flex-col items-center justify-center gap-2"
|
||||
)}
|
||||
>
|
||||
<div
|
||||
@@ -767,14 +809,24 @@ export function Sidebar() {
|
||||
sidebarOpen ? "hidden lg:block" : "hidden"
|
||||
)}
|
||||
>
|
||||
Auto<span className="text-brand-500">maker</span>
|
||||
{IS_MARKETING ? (
|
||||
<>
|
||||
https://<span className="text-brand-500">automaker</span>.app
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
Auto<span className="text-brand-500">maker</span>
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
{/* Bug Report Button */}
|
||||
<button
|
||||
onClick={() => {
|
||||
const api = getElectronAPI();
|
||||
api.openExternalLink("https://github.com/AutoMaker-Org/automaker/issues");
|
||||
api.openExternalLink(
|
||||
"https://github.com/AutoMaker-Org/automaker/issues"
|
||||
);
|
||||
}}
|
||||
className="titlebar-no-drag p-1.5 rounded-md text-muted-foreground hover:text-foreground hover:bg-sidebar-accent/50 transition-all"
|
||||
title="Report Bug / Feature Request"
|
||||
@@ -812,10 +864,10 @@ export function Sidebar() {
|
||||
<button
|
||||
onClick={() => setShowTrashDialog(true)}
|
||||
className="group flex items-center justify-center px-3 h-[42px] rounded-lg relative overflow-hidden transition-all text-muted-foreground hover:text-primary hover:bg-destructive/10 border border-sidebar-border"
|
||||
title="Trash"
|
||||
title="Recycle Bin"
|
||||
data-testid="trash-button"
|
||||
>
|
||||
<Trash2 className="size-4 shrink-0" />
|
||||
<Recycle className="size-4 shrink-0" />
|
||||
{trashedProjects.length > 0 && (
|
||||
<span className="absolute -top-[2px] -right-[2px] flex items-center justify-center w-5 h-5 text-[10px] font-medium rounded-full text-brand-500">
|
||||
{trashedProjects.length > 9 ? "9+" : trashedProjects.length}
|
||||
@@ -916,7 +968,14 @@ export function Sidebar() {
|
||||
|
||||
{/* Project Options Menu - theme and history */}
|
||||
{currentProject && (
|
||||
<DropdownMenu>
|
||||
<DropdownMenu
|
||||
onOpenChange={(open) => {
|
||||
// Clear preview theme when the menu closes
|
||||
if (!open) {
|
||||
setPreviewTheme(null);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
className="hidden lg:flex items-center justify-center w-8 h-[42px] rounded-lg text-muted-foreground hover:text-foreground hover:bg-sidebar-accent/50 border border-sidebar-border transition-all titlebar-no-drag"
|
||||
@@ -939,8 +998,12 @@ export function Sidebar() {
|
||||
)}
|
||||
</DropdownMenuSubTrigger>
|
||||
<DropdownMenuSubContent
|
||||
className="w-48"
|
||||
className="w-56"
|
||||
data-testid="project-theme-menu"
|
||||
onPointerLeave={() => {
|
||||
// Clear preview theme when leaving the dropdown
|
||||
setPreviewTheme(null);
|
||||
}}
|
||||
>
|
||||
<DropdownMenuLabel className="text-xs text-muted-foreground">
|
||||
Select theme for this project
|
||||
@@ -950,6 +1013,15 @@ export function Sidebar() {
|
||||
value={currentProject.theme || ""}
|
||||
onValueChange={(value) => {
|
||||
if (currentProject) {
|
||||
// Clear preview theme when a theme is selected
|
||||
setPreviewTheme(null);
|
||||
// If selecting an actual theme (not "Use Global"), also update global
|
||||
if (value !== "") {
|
||||
setTheme(value as any);
|
||||
} else {
|
||||
// Restore to global theme when "Use Global" is selected
|
||||
setTheme(globalTheme);
|
||||
}
|
||||
setProjectTheme(
|
||||
currentProject.id,
|
||||
value === "" ? null : (value as any)
|
||||
@@ -959,22 +1031,54 @@ export function Sidebar() {
|
||||
>
|
||||
{PROJECT_THEME_OPTIONS.map((option) => {
|
||||
const Icon = option.icon;
|
||||
const themeValue =
|
||||
option.value === "" ? globalTheme : option.value;
|
||||
return (
|
||||
<DropdownMenuRadioItem
|
||||
<div
|
||||
key={option.value}
|
||||
value={option.value}
|
||||
data-testid={`project-theme-${
|
||||
option.value || "global"
|
||||
}`}
|
||||
onPointerEnter={() => {
|
||||
// Preview the theme on hover
|
||||
setPreviewTheme(themeValue as any);
|
||||
}}
|
||||
onPointerLeave={(e) => {
|
||||
// Clear preview theme when leaving this item
|
||||
// Only clear if we're not moving to another theme item
|
||||
const relatedTarget =
|
||||
e.relatedTarget as HTMLElement;
|
||||
if (
|
||||
!relatedTarget ||
|
||||
!relatedTarget.closest(
|
||||
'[data-testid^="project-theme-"]'
|
||||
)
|
||||
) {
|
||||
setPreviewTheme(null);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Icon className="w-4 h-4 mr-2" />
|
||||
<span>{option.label}</span>
|
||||
{option.value === "" && (
|
||||
<span className="text-[10px] text-muted-foreground ml-1 capitalize">
|
||||
({globalTheme})
|
||||
</span>
|
||||
)}
|
||||
</DropdownMenuRadioItem>
|
||||
<DropdownMenuRadioItem
|
||||
value={option.value}
|
||||
data-testid={`project-theme-${
|
||||
option.value || "global"
|
||||
}`}
|
||||
onFocus={() => {
|
||||
// Preview the theme on keyboard navigation
|
||||
setPreviewTheme(themeValue as any);
|
||||
}}
|
||||
onBlur={() => {
|
||||
// Clear preview theme when losing focus
|
||||
// If moving to another item, its onFocus will set it again
|
||||
setPreviewTheme(null);
|
||||
}}
|
||||
>
|
||||
<Icon className="w-4 h-4 mr-2" />
|
||||
<span>{option.label}</span>
|
||||
{option.value === "" && (
|
||||
<span className="text-[10px] text-muted-foreground ml-1 capitalize">
|
||||
({globalTheme})
|
||||
</span>
|
||||
)}
|
||||
</DropdownMenuRadioItem>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</DropdownMenuRadioGroup>
|
||||
@@ -1017,6 +1121,17 @@ export function Sidebar() {
|
||||
</DropdownMenuItem>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Move to Trash Section */}
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuItem
|
||||
onClick={() => setShowDeleteProjectDialog(true)}
|
||||
className="text-destructive focus:text-destructive focus:bg-destructive/10"
|
||||
data-testid="move-project-to-trash"
|
||||
>
|
||||
<Trash2 className="w-4 h-4 mr-2" />
|
||||
<span>Move to Trash</span>
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
@@ -1122,6 +1237,8 @@ export function Sidebar() {
|
||||
|
||||
{/* Bottom Section - Running Agents / Bug Report / Settings */}
|
||||
<div className="border-t border-sidebar-border bg-sidebar-accent/10 shrink-0">
|
||||
{/* Course Promo Badge */}
|
||||
<CoursePromoBadge sidebarOpen={sidebarOpen} />
|
||||
{/* Running Agents Link */}
|
||||
<div className="p-2 pb-0">
|
||||
<button
|
||||
@@ -1139,14 +1256,25 @@ export function Sidebar() {
|
||||
{isActiveRoute("running-agents") && (
|
||||
<div className="absolute inset-y-0 left-0 w-0.5 bg-brand-500 rounded-l-md"></div>
|
||||
)}
|
||||
<Activity
|
||||
className={cn(
|
||||
"w-4 h-4 shrink-0 transition-colors",
|
||||
isActiveRoute("running-agents")
|
||||
? "text-brand-500"
|
||||
: "group-hover:text-brand-400"
|
||||
<div className="relative">
|
||||
<Activity
|
||||
className={cn(
|
||||
"w-4 h-4 shrink-0 transition-colors",
|
||||
isActiveRoute("running-agents")
|
||||
? "text-brand-500"
|
||||
: "group-hover:text-brand-400"
|
||||
)}
|
||||
/>
|
||||
{/* Running agents count badge - shown in collapsed state */}
|
||||
{!sidebarOpen && runningAgentsCount > 0 && (
|
||||
<span
|
||||
className="absolute -top-1.5 -right-1.5 flex items-center justify-center min-w-5 h-5 px-1 text-[10px] font-semibold rounded-full bg-brand-500 text-white"
|
||||
data-testid="running-agents-count-collapsed"
|
||||
>
|
||||
{runningAgentsCount > 99 ? "99" : runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<span
|
||||
className={cn(
|
||||
"ml-2.5 font-medium text-sm flex-1 text-left",
|
||||
@@ -1155,6 +1283,18 @@ export function Sidebar() {
|
||||
>
|
||||
Running Agents
|
||||
</span>
|
||||
{/* Running agents count badge - shown in expanded state */}
|
||||
{sidebarOpen && runningAgentsCount > 0 && (
|
||||
<span
|
||||
className={cn(
|
||||
"hidden lg:flex items-center justify-center min-w-6 h-6 px-1.5 text-xs font-semibold rounded-full bg-brand-500 text-white",
|
||||
isActiveRoute("running-agents") && "bg-brand-600"
|
||||
)}
|
||||
data-testid="running-agents-count"
|
||||
>
|
||||
{runningAgentsCount > 99 ? "99" : runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
{!sidebarOpen && (
|
||||
<span className="absolute left-full ml-2 px-2 py-1 bg-popover text-popover-foreground text-xs rounded opacity-0 group-hover:opacity-100 transition-opacity whitespace-nowrap z-50 border border-border">
|
||||
Running Agents
|
||||
@@ -1218,7 +1358,7 @@ export function Sidebar() {
|
||||
<Dialog open={showTrashDialog} onOpenChange={setShowTrashDialog}>
|
||||
<DialogContent className="bg-popover border-border max-w-2xl">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Trash</DialogTitle>
|
||||
<DialogTitle>Recycle Bin</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
Restore projects to the sidebar or delete their folders using your
|
||||
system Trash.
|
||||
@@ -1226,7 +1366,9 @@ export function Sidebar() {
|
||||
</DialogHeader>
|
||||
|
||||
{trashedProjects.length === 0 ? (
|
||||
<p className="text-sm text-muted-foreground">Trash is empty.</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Recycle bin is empty.
|
||||
</p>
|
||||
) : (
|
||||
<div className="space-y-3 max-h-[360px] overflow-y-auto pr-1">
|
||||
{trashedProjects.map((project) => (
|
||||
@@ -1294,7 +1436,7 @@ export function Sidebar() {
|
||||
disabled={isEmptyingTrash}
|
||||
data-testid="empty-trash"
|
||||
>
|
||||
{isEmptyingTrash ? "Clearing..." : "Empty Trash"}
|
||||
{isEmptyingTrash ? "Clearing..." : "Empty Recycle Bin"}
|
||||
</Button>
|
||||
)}
|
||||
</DialogFooter>
|
||||
@@ -1397,6 +1539,14 @@ export function Sidebar() {
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Delete Project Confirmation Dialog */}
|
||||
<DeleteProjectDialog
|
||||
open={showDeleteProjectDialog}
|
||||
onOpenChange={setShowDeleteProjectDialog}
|
||||
project={currentProject}
|
||||
onConfirm={moveProjectToTrash}
|
||||
/>
|
||||
</aside>
|
||||
);
|
||||
}
|
||||
|
||||
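A minimal sketch of the Electron API surface the sidebar relies on for the running-agents badge. Only the members exercised above (runningAgents.getAll, autoMode.onEvent and its unsubscribe return value, the three event types the effect reacts to) are grounded; everything else here is an assumption.

// Sketch of the runningAgents / autoMode surface used by the sidebar above.
interface RunningAgentsApi {
  getAll(): Promise<{ success: boolean; runningAgents?: unknown[]; error?: string }>;
}

interface AutoModeEvent {
  // The sidebar refreshes its count on "auto_mode_feature_start",
  // "auto_mode_feature_complete" and "auto_mode_error"; other types may exist.
  type: string;
}

interface AutoModeApi {
  // Returns an unsubscribe function, as used in the effect's cleanup above.
  onEvent(handler: (event: AutoModeEvent) => void): () => void;
}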
apps/app/src/components/new-project-modal.tsx (new file, 453 lines)
@@ -0,0 +1,453 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { HotkeyButton } from "@/components/ui/hotkey-button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import {
|
||||
FolderPlus,
|
||||
FolderOpen,
|
||||
Rocket,
|
||||
ExternalLink,
|
||||
Check,
|
||||
Loader2,
|
||||
Link,
|
||||
Folder,
|
||||
} from "lucide-react";
|
||||
import { starterTemplates, type StarterTemplate } from "@/lib/templates";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
import { getHttpApiClient } from "@/lib/http-api-client";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useFileBrowser } from "@/contexts/file-browser-context";
|
||||
|
||||
interface ValidationErrors {
|
||||
projectName?: boolean;
|
||||
workspaceDir?: boolean;
|
||||
templateSelection?: boolean;
|
||||
customUrl?: boolean;
|
||||
}
|
||||
|
||||
interface NewProjectModalProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
onCreateBlankProject: (projectName: string, parentDir: string) => Promise<void>;
|
||||
onCreateFromTemplate: (
|
||||
template: StarterTemplate,
|
||||
projectName: string,
|
||||
parentDir: string
|
||||
) => Promise<void>;
|
||||
onCreateFromCustomUrl: (
|
||||
repoUrl: string,
|
||||
projectName: string,
|
||||
parentDir: string
|
||||
) => Promise<void>;
|
||||
isCreating: boolean;
|
||||
}
|
||||
|
||||
export function NewProjectModal({
|
||||
open,
|
||||
onOpenChange,
|
||||
onCreateBlankProject,
|
||||
onCreateFromTemplate,
|
||||
onCreateFromCustomUrl,
|
||||
isCreating,
|
||||
}: NewProjectModalProps) {
|
||||
const [activeTab, setActiveTab] = useState<"blank" | "template">("blank");
|
||||
const [projectName, setProjectName] = useState("");
|
||||
const [workspaceDir, setWorkspaceDir] = useState<string>("");
|
||||
const [isLoadingWorkspace, setIsLoadingWorkspace] = useState(false);
|
||||
const [selectedTemplate, setSelectedTemplate] = useState<StarterTemplate | null>(null);
|
||||
const [useCustomUrl, setUseCustomUrl] = useState(false);
|
||||
const [customUrl, setCustomUrl] = useState("");
|
||||
const [errors, setErrors] = useState<ValidationErrors>({});
|
||||
const { openFileBrowser } = useFileBrowser();
|
||||
|
||||
// Fetch workspace directory when modal opens
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
setIsLoadingWorkspace(true);
|
||||
const httpClient = getHttpApiClient();
|
||||
httpClient.workspace.getConfig()
|
||||
.then((result) => {
|
||||
if (result.success && result.workspaceDir) {
|
||||
setWorkspaceDir(result.workspaceDir);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Failed to get workspace config:", error);
|
||||
})
|
||||
.finally(() => {
|
||||
setIsLoadingWorkspace(false);
|
||||
});
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
// Reset form when modal closes
|
||||
useEffect(() => {
|
||||
if (!open) {
|
||||
setProjectName("");
|
||||
setSelectedTemplate(null);
|
||||
setUseCustomUrl(false);
|
||||
setCustomUrl("");
|
||||
setActiveTab("blank");
|
||||
setErrors({});
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
// Clear specific errors when user fixes them
|
||||
useEffect(() => {
|
||||
if (projectName && errors.projectName) {
|
||||
setErrors((prev) => ({ ...prev, projectName: false }));
|
||||
}
|
||||
}, [projectName, errors.projectName]);
|
||||
|
||||
useEffect(() => {
|
||||
if ((selectedTemplate || (useCustomUrl && customUrl)) && errors.templateSelection) {
|
||||
setErrors((prev) => ({ ...prev, templateSelection: false }));
|
||||
}
|
||||
}, [selectedTemplate, useCustomUrl, customUrl, errors.templateSelection]);
|
||||
|
||||
useEffect(() => {
|
||||
if (customUrl && errors.customUrl) {
|
||||
setErrors((prev) => ({ ...prev, customUrl: false }));
|
||||
}
|
||||
}, [customUrl, errors.customUrl]);
|
||||
|
||||
const validateAndCreate = async () => {
|
||||
const newErrors: ValidationErrors = {};
|
||||
|
||||
// Check project name
|
||||
if (!projectName.trim()) {
|
||||
newErrors.projectName = true;
|
||||
}
|
||||
|
||||
// Check workspace dir
|
||||
if (!workspaceDir) {
|
||||
newErrors.workspaceDir = true;
|
||||
}
|
||||
|
||||
// Check template selection (only for template tab)
|
||||
if (activeTab === "template") {
|
||||
if (useCustomUrl) {
|
||||
if (!customUrl.trim()) {
|
||||
newErrors.customUrl = true;
|
||||
}
|
||||
} else if (!selectedTemplate) {
|
||||
newErrors.templateSelection = true;
|
||||
}
|
||||
}
|
||||
|
||||
// If there are errors, show them and don't proceed
|
||||
if (Object.values(newErrors).some(Boolean)) {
|
||||
setErrors(newErrors);
|
||||
return;
|
||||
}
|
||||
|
||||
// Clear errors and proceed
|
||||
setErrors({});
|
||||
|
||||
if (activeTab === "blank") {
|
||||
await onCreateBlankProject(projectName, workspaceDir);
|
||||
} else if (useCustomUrl && customUrl) {
|
||||
await onCreateFromCustomUrl(customUrl, projectName, workspaceDir);
|
||||
} else if (selectedTemplate) {
|
||||
await onCreateFromTemplate(selectedTemplate, projectName, workspaceDir);
|
||||
}
|
||||
};
|
||||
|
||||
const handleOpenRepo = (url: string) => {
|
||||
const api = getElectronAPI();
|
||||
api.openExternalLink(url);
|
||||
};
|
||||
|
||||
const handleSelectTemplate = (template: StarterTemplate) => {
|
||||
setSelectedTemplate(template);
|
||||
setUseCustomUrl(false);
|
||||
setCustomUrl("");
|
||||
};
|
||||
|
||||
const handleToggleCustomUrl = () => {
|
||||
setUseCustomUrl(!useCustomUrl);
|
||||
if (!useCustomUrl) {
|
||||
setSelectedTemplate(null);
|
||||
}
|
||||
};
|
||||
|
||||
const handleBrowseDirectory = async () => {
|
||||
const selectedPath = await openFileBrowser({
|
||||
title: "Select Base Project Directory",
|
||||
description: "Choose the parent directory where your project will be created",
|
||||
});
|
||||
if (selectedPath) {
|
||||
setWorkspaceDir(selectedPath);
|
||||
// Clear any workspace error when a valid directory is selected
|
||||
if (errors.workspaceDir) {
|
||||
setErrors((prev) => ({ ...prev, workspaceDir: false }));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const projectPath = workspaceDir && projectName ? `${workspaceDir}/${projectName}` : "";
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent
|
||||
className="bg-card border-border max-w-2xl max-h-[85vh] overflow-hidden flex flex-col"
|
||||
data-testid="new-project-modal"
|
||||
>
|
||||
<DialogHeader className="pb-2">
|
||||
<DialogTitle className="text-foreground">Create New Project</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
Start with a blank project or choose from a starter template.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
{/* Project Name Input - Always visible at top */}
|
||||
<div className="space-y-3 pb-4 border-b border-border">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="project-name" className={cn("text-foreground", errors.projectName && "text-red-500")}>
|
||||
Project Name {errors.projectName && <span className="text-red-500">*</span>}
|
||||
</Label>
|
||||
<Input
|
||||
id="project-name"
|
||||
placeholder="my-awesome-project"
|
||||
value={projectName}
|
||||
onChange={(e) => setProjectName(e.target.value)}
|
||||
className={cn(
|
||||
"bg-input text-foreground placeholder:text-muted-foreground",
|
||||
errors.projectName
|
||||
? "border-red-500 focus:border-red-500 focus:ring-red-500/20"
|
||||
: "border-border"
|
||||
)}
|
||||
data-testid="project-name-input"
|
||||
autoFocus
|
||||
/>
|
||||
{errors.projectName && (
|
||||
<p className="text-xs text-red-500">Project name is required</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Workspace Directory Display */}
|
||||
<div className={cn(
|
||||
"flex items-center gap-2 text-sm",
|
||||
errors.workspaceDir ? "text-red-500" : "text-muted-foreground"
|
||||
)}>
|
||||
<Folder className="w-4 h-4 shrink-0" />
|
||||
<span className="flex-1 min-w-0">
|
||||
{isLoadingWorkspace ? (
|
||||
"Loading workspace..."
|
||||
) : workspaceDir ? (
|
||||
<>Will be created at: <code className="text-xs bg-muted px-1.5 py-0.5 rounded truncate">{projectPath || "..."}</code></>
|
||||
) : (
|
||||
<span className="text-red-500">No workspace configured</span>
|
||||
)}
|
||||
</span>
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleBrowseDirectory}
|
||||
disabled={isLoadingWorkspace}
|
||||
className="shrink-0 h-7 px-2 text-xs"
|
||||
data-testid="browse-directory-button"
|
||||
>
|
||||
<FolderOpen className="w-3.5 h-3.5 mr-1" />
|
||||
Browse
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Tabs
|
||||
value={activeTab}
|
||||
onValueChange={(v) => setActiveTab(v as "blank" | "template")}
|
||||
className="flex-1 flex flex-col overflow-hidden"
|
||||
>
|
||||
<TabsList className="w-full justify-start">
|
||||
<TabsTrigger value="blank" className="gap-2">
|
||||
<FolderPlus className="w-4 h-4" />
|
||||
Blank Project
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="template" className="gap-2">
|
||||
<Rocket className="w-4 h-4" />
|
||||
Starter Kit
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
|
||||
<div className="flex-1 overflow-y-auto py-4">
|
||||
<TabsContent value="blank" className="mt-0">
|
||||
<div className="p-4 rounded-lg bg-muted/50 border border-border">
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Create an empty project with the standard .automaker directory
|
||||
structure. Perfect for starting from scratch or importing an
|
||||
existing codebase.
|
||||
</p>
|
||||
</div>
|
||||
</TabsContent>
|
||||
|
||||
<TabsContent value="template" className="mt-0">
|
||||
<div className="space-y-4">
|
||||
{/* Error message for template selection */}
|
||||
{errors.templateSelection && (
|
||||
<p className="text-sm text-red-500">Please select a template or enter a custom GitHub URL</p>
|
||||
)}
|
||||
|
||||
{/* Preset Templates */}
|
||||
<div className={cn(
|
||||
"space-y-3 rounded-lg p-1 -m-1",
|
||||
errors.templateSelection && "ring-2 ring-red-500/50"
|
||||
)}>
|
||||
{starterTemplates.map((template) => (
|
||||
<div
|
||||
key={template.id}
|
||||
className={cn(
|
||||
"p-4 rounded-lg border cursor-pointer transition-all",
|
||||
selectedTemplate?.id === template.id && !useCustomUrl
|
||||
? "border-brand-500 bg-brand-500/10"
|
||||
: "border-border bg-muted/30 hover:border-border-glass hover:bg-muted/50"
|
||||
)}
|
||||
onClick={() => handleSelectTemplate(template)}
|
||||
data-testid={`template-${template.id}`}
|
||||
>
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2 mb-1">
|
||||
<h4 className="font-medium text-foreground">
|
||||
{template.name}
|
||||
</h4>
|
||||
{selectedTemplate?.id === template.id && !useCustomUrl && (
|
||||
<Check className="w-4 h-4 text-brand-500" />
|
||||
)}
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground mb-3">
|
||||
{template.description}
|
||||
</p>
|
||||
|
||||
{/* Tech Stack */}
|
||||
<div className="flex flex-wrap gap-1.5 mb-3">
|
||||
{template.techStack.slice(0, 6).map((tech) => (
|
||||
<Badge
|
||||
key={tech}
|
||||
variant="secondary"
|
||||
className="text-xs"
|
||||
>
|
||||
{tech}
|
||||
</Badge>
|
||||
))}
|
||||
{template.techStack.length > 6 && (
|
||||
<Badge variant="secondary" className="text-xs">
|
||||
+{template.techStack.length - 6} more
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Key Features */}
|
||||
<div className="text-xs text-muted-foreground">
|
||||
<span className="font-medium">Features: </span>
|
||||
{template.features.slice(0, 3).join(" · ")}
|
||||
{template.features.length > 3 &&
|
||||
` · +${template.features.length - 3} more`}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="shrink-0"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleOpenRepo(template.repoUrl);
|
||||
}}
|
||||
>
|
||||
<ExternalLink className="w-4 h-4 mr-1" />
|
||||
View
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
{/* Custom URL Option */}
|
||||
<div
|
||||
className={cn(
|
||||
"p-4 rounded-lg border cursor-pointer transition-all",
|
||||
useCustomUrl
|
||||
? "border-brand-500 bg-brand-500/10"
|
||||
: "border-border bg-muted/30 hover:border-border-glass hover:bg-muted/50"
|
||||
)}
|
||||
onClick={handleToggleCustomUrl}
|
||||
>
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<Link className="w-4 h-4 text-muted-foreground" />
|
||||
<h4 className="font-medium text-foreground">Custom GitHub URL</h4>
|
||||
{useCustomUrl && <Check className="w-4 h-4 text-brand-500" />}
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground mb-3">
|
||||
Clone any public GitHub repository as a starting point.
|
||||
</p>
|
||||
|
||||
{useCustomUrl && (
|
||||
<div onClick={(e) => e.stopPropagation()} className="space-y-1">
|
||||
<Input
|
||||
placeholder="https://github.com/username/repository"
|
||||
value={customUrl}
|
||||
onChange={(e) => setCustomUrl(e.target.value)}
|
||||
className={cn(
|
||||
"bg-input text-foreground placeholder:text-muted-foreground",
|
||||
errors.customUrl
|
||||
? "border-red-500 focus:border-red-500 focus:ring-red-500/20"
|
||||
: "border-border"
|
||||
)}
|
||||
data-testid="custom-url-input"
|
||||
/>
|
||||
{errors.customUrl && (
|
||||
<p className="text-xs text-red-500">GitHub URL is required</p>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</TabsContent>
|
||||
</div>
|
||||
</Tabs>
|
||||
|
||||
<DialogFooter className="border-t border-border pt-4">
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => onOpenChange(false)}
|
||||
className="text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<HotkeyButton
|
||||
onClick={validateAndCreate}
|
||||
disabled={isCreating}
|
||||
className="bg-gradient-to-r from-brand-500 to-brand-600 hover:from-brand-600 hover:to-brand-600 text-white border-0"
|
||||
hotkey={{ key: "Enter", cmdCtrl: true }}
|
||||
hotkeyActive={open}
|
||||
data-testid="confirm-create-project"
|
||||
>
|
||||
{isCreating ? (
|
||||
<>
|
||||
<Loader2 className="w-4 h-4 mr-2 animate-spin" />
|
||||
{activeTab === "template" ? "Cloning..." : "Creating..."}
|
||||
</>
|
||||
) : (
|
||||
<>Create Project</>
|
||||
)}
|
||||
</HotkeyButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
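A hedged usage sketch for the modal above, showing how a parent view might wire the three create callbacks and the isCreating flag. Only the prop names come from NewProjectModalProps; the handler bodies, the wrapper component, and the import path are placeholders.

// Hypothetical parent wiring for NewProjectModal.
// Assumes: import { useState } from "react";
//          import { NewProjectModal } from "@/components/new-project-modal";
function NewProjectLauncher() {
  const [open, setOpen] = useState(false);
  const [isCreating, setIsCreating] = useState(false);

  // Shared helper: flag the modal as busy, close it on success.
  const withBusy = async (work: () => Promise<void>) => {
    setIsCreating(true);
    try {
      await work();
      setOpen(false);
    } finally {
      setIsCreating(false);
    }
  };

  return (
    <NewProjectModal
      open={open}
      onOpenChange={setOpen}
      isCreating={isCreating}
      onCreateBlankProject={(name, parentDir) =>
        withBusy(async () => {
          // e.g. create `${parentDir}/${name}` and initialize .automaker
        })
      }
      onCreateFromTemplate={(template, name, parentDir) =>
        withBusy(async () => {
          // e.g. clone template.repoUrl into `${parentDir}/${name}`
        })
      }
      onCreateFromCustomUrl={(repoUrl, name, parentDir) =>
        withBusy(async () => {
          // e.g. clone repoUrl into `${parentDir}/${name}`
        })
      }
    />
  );
}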
@@ -4,14 +4,13 @@ import { useState, useEffect } from "react";
|
||||
import {
|
||||
Card,
|
||||
CardContent,
|
||||
CardDescription,
|
||||
CardHeader,
|
||||
CardTitle,
|
||||
} from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { HotkeyButton } from "@/components/ui/hotkey-button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Tabs, TabsList, TabsTrigger, TabsContent } from "@/components/ui/tabs";
|
||||
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import {
|
||||
Plus,
|
||||
MessageSquare,
|
||||
@@ -26,7 +25,8 @@ import {
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { SessionListItem } from "@/types/electron";
|
||||
import { useKeyboardShortcutsConfig } from "@/hooks/use-keyboard-shortcuts";
|
||||
import { useAppStore } from "@/store/app-store";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
import { DeleteSessionDialog } from "@/components/delete-session-dialog";
|
||||
|
||||
// Random session name generator
|
||||
const adjectives = [
|
||||
@@ -114,17 +114,20 @@ export function SessionManager({
|
||||
const [runningSessions, setRunningSessions] = useState<Set<string>>(
|
||||
new Set()
|
||||
);
|
||||
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false);
|
||||
const [sessionToDelete, setSessionToDelete] = useState<SessionListItem | null>(null);
|
||||
|
||||
// Check running state for all sessions
|
||||
const checkRunningSessions = async (sessionList: SessionListItem[]) => {
|
||||
if (!window.electronAPI?.agent) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) return;
|
||||
|
||||
const runningIds = new Set<string>();
|
||||
|
||||
// Check each session's running state
|
||||
for (const session of sessionList) {
|
||||
try {
|
||||
const result = await window.electronAPI.agent.getHistory(session.id);
|
||||
const result = await api.agent.getHistory(session.id);
|
||||
if (result.success && result.isRunning) {
|
||||
runningIds.add(session.id);
|
||||
}
|
||||
@@ -142,10 +145,11 @@ export function SessionManager({
|
||||
|
||||
// Load sessions
|
||||
const loadSessions = async () => {
|
||||
if (!window.electronAPI?.sessions) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.sessions) return;
|
||||
|
||||
// Always load all sessions and filter client-side
|
||||
const result = await window.electronAPI.sessions.list(true);
|
||||
const result = await api.sessions.list(true);
|
||||
if (result.success && result.sessions) {
|
||||
setSessions(result.sessions);
|
||||
// Check running state for all sessions
|
||||
@@ -173,39 +177,41 @@ export function SessionManager({
|
||||
|
||||
// Create new session with random name
|
||||
const handleCreateSession = async () => {
|
||||
if (!window.electronAPI?.sessions) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.sessions) return;
|
||||
|
||||
const sessionName = newSessionName.trim() || generateRandomSessionName();
|
||||
|
||||
const result = await window.electronAPI.sessions.create(
|
||||
const result = await api.sessions.create(
|
||||
sessionName,
|
||||
projectPath,
|
||||
projectPath
|
||||
);
|
||||
|
||||
if (result.success && result.sessionId) {
|
||||
if (result.success && result.session?.id) {
|
||||
setNewSessionName("");
|
||||
setIsCreating(false);
|
||||
await loadSessions();
|
||||
onSelectSession(result.sessionId);
|
||||
onSelectSession(result.session.id);
|
||||
}
|
||||
};
|
||||
|
||||
// Create new session directly with a random name (one-click)
|
||||
const handleQuickCreateSession = async () => {
|
||||
if (!window.electronAPI?.sessions) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.sessions) return;
|
||||
|
||||
const sessionName = generateRandomSessionName();
|
||||
|
||||
const result = await window.electronAPI.sessions.create(
|
||||
const result = await api.sessions.create(
|
||||
sessionName,
|
||||
projectPath,
|
||||
projectPath
|
||||
);
|
||||
|
||||
if (result.success && result.sessionId) {
|
||||
if (result.success && result.session?.id) {
|
||||
await loadSessions();
|
||||
onSelectSession(result.sessionId);
|
||||
onSelectSession(result.session.id);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -223,9 +229,10 @@ export function SessionManager({
|
||||
|
||||
// Rename session
|
||||
const handleRenameSession = async (sessionId: string) => {
|
||||
if (!editingName.trim() || !window.electronAPI?.sessions) return;
|
||||
const api = getElectronAPI();
|
||||
if (!editingName.trim() || !api?.sessions) return;
|
||||
|
||||
const result = await window.electronAPI.sessions.update(
|
||||
const result = await api.sessions.update(
|
||||
sessionId,
|
||||
editingName,
|
||||
undefined
|
||||
@@ -240,34 +247,60 @@ export function SessionManager({
|
||||
|
||||
// Archive session
|
||||
const handleArchiveSession = async (sessionId: string) => {
|
||||
if (!window.electronAPI?.sessions) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.sessions) {
|
||||
console.error("[SessionManager] Sessions API not available");
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await window.electronAPI.sessions.archive(sessionId);
|
||||
if (result.success) {
|
||||
// If the archived session was currently selected, deselect it
|
||||
if (currentSessionId === sessionId) {
|
||||
onSelectSession(null);
|
||||
try {
|
||||
const result = await api.sessions.archive(sessionId);
|
||||
if (result.success) {
|
||||
// If the archived session was currently selected, deselect it
|
||||
if (currentSessionId === sessionId) {
|
||||
onSelectSession(null);
|
||||
}
|
||||
await loadSessions();
|
||||
} else {
|
||||
console.error("[SessionManager] Archive failed:", result.error);
|
||||
}
|
||||
await loadSessions();
|
||||
} catch (error) {
|
||||
console.error("[SessionManager] Archive error:", error);
|
||||
}
|
||||
};
|
||||
|
||||
// Unarchive session
|
||||
const handleUnarchiveSession = async (sessionId: string) => {
|
||||
if (!window.electronAPI?.sessions) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.sessions) {
|
||||
console.error("[SessionManager] Sessions API not available");
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await window.electronAPI.sessions.unarchive(sessionId);
|
||||
if (result.success) {
|
||||
await loadSessions();
|
||||
try {
|
||||
const result = await api.sessions.unarchive(sessionId);
|
||||
if (result.success) {
|
||||
await loadSessions();
|
||||
} else {
|
||||
console.error("[SessionManager] Unarchive failed:", result.error);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("[SessionManager] Unarchive error:", error);
|
||||
}
|
||||
};
|
||||
|
||||
// Delete session
|
||||
const handleDeleteSession = async (sessionId: string) => {
|
||||
if (!window.electronAPI?.sessions) return;
|
||||
if (!confirm("Are you sure you want to delete this session?")) return;
|
||||
// Open delete session dialog
|
||||
const handleDeleteSession = (session: SessionListItem) => {
|
||||
setSessionToDelete(session);
|
||||
setIsDeleteDialogOpen(true);
|
||||
};
|
||||
|
||||
const result = await window.electronAPI.sessions.delete(sessionId);
|
||||
// Confirm delete session
|
||||
const confirmDeleteSession = async (sessionId: string) => {
|
||||
const api = getElectronAPI();
|
||||
if (!api?.sessions) return;
|
||||
|
||||
const result = await api.sessions.delete(sessionId);
|
||||
if (result.success) {
|
||||
await loadSessions();
|
||||
if (currentSessionId === sessionId) {
|
||||
@@ -278,6 +311,7 @@ export function SessionManager({
|
||||
}
|
||||
}
|
||||
}
|
||||
setSessionToDelete(null);
|
||||
};
|
||||
|
||||
const activeSessions = sessions.filter((s) => !s.isArchived);
|
||||
@@ -290,20 +324,24 @@ export function SessionManager({
|
||||
<CardHeader className="pb-3">
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<CardTitle>Agent Sessions</CardTitle>
|
||||
{activeTab === "active" && (
|
||||
<HotkeyButton
|
||||
variant="default"
|
||||
size="sm"
|
||||
onClick={handleQuickCreateSession}
|
||||
hotkey={shortcuts.newSession}
|
||||
hotkeyActive={false}
|
||||
data-testid="new-session-button"
|
||||
title={`New Session (${shortcuts.newSession})`}
|
||||
>
|
||||
<Plus className="w-4 h-4 mr-1" />
|
||||
New
|
||||
</HotkeyButton>
|
||||
)}
|
||||
<HotkeyButton
|
||||
variant="default"
|
||||
size="sm"
|
||||
onClick={() => {
|
||||
// Switch to active tab if on archived tab
|
||||
if (activeTab === "archived") {
|
||||
setActiveTab("active");
|
||||
}
|
||||
handleQuickCreateSession();
|
||||
}}
|
||||
hotkey={shortcuts.newSession}
|
||||
hotkeyActive={false}
|
||||
data-testid="new-session-button"
|
||||
title={`New Session (${shortcuts.newSession})`}
|
||||
>
|
||||
<Plus className="w-4 h-4 mr-1" />
|
||||
New
|
||||
</HotkeyButton>
|
||||
</div>
|
||||
|
||||
<Tabs
|
||||
@@ -500,8 +538,9 @@ export function SessionManager({
|
||||
<Button
|
||||
size="sm"
|
||||
variant="ghost"
|
||||
onClick={() => handleDeleteSession(session.id)}
|
||||
onClick={() => handleDeleteSession(session)}
|
||||
className="h-7 w-7 p-0 text-destructive"
|
||||
data-testid={`delete-session-${session.id}`}
|
||||
>
|
||||
<Trash2 className="w-3 h-3" />
|
||||
</Button>
|
||||
@@ -527,6 +566,14 @@ export function SessionManager({
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
|
||||
{/* Delete Session Confirmation Dialog */}
|
||||
<DeleteSessionDialog
|
||||
open={isDeleteDialogOpen}
|
||||
onOpenChange={setIsDeleteDialogOpen}
|
||||
session={sessionToDelete}
|
||||
onConfirm={confirmDeleteSession}
|
||||
/>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
apps/app/src/components/ui/course-promo-badge.tsx (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
"use client";
|
||||
|
||||
import * as React from "react";
|
||||
import { Sparkles, X } from "lucide-react";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
|
||||
interface CoursePromoBadgeProps {
|
||||
sidebarOpen?: boolean;
|
||||
}
|
||||
|
||||
export function CoursePromoBadge({ sidebarOpen = true }: CoursePromoBadgeProps) {
|
||||
const [dismissed, setDismissed] = React.useState(false);
|
||||
|
||||
if (dismissed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Collapsed state - show only icon with tooltip
|
||||
if (!sidebarOpen) {
|
||||
return (
|
||||
<div className="p-2 pb-0 flex justify-center">
|
||||
<TooltipProvider delayDuration={300}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<a
|
||||
href="https://agenticjumpstart.com"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="group cursor-pointer flex items-center justify-center w-10 h-10 bg-primary/10 text-primary rounded-lg hover:bg-primary/20 transition-all border border-primary/30"
|
||||
data-testid="course-promo-badge-collapsed"
|
||||
>
|
||||
<Sparkles className="size-4 shrink-0" />
|
||||
</a>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" className="flex items-center gap-2">
|
||||
<span>Become a 10x Dev</span>
|
||||
<span
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setDismissed(true);
|
||||
}}
|
||||
className="p-0.5 rounded-full hover:bg-primary/30 transition-colors cursor-pointer"
|
||||
aria-label="Dismiss"
|
||||
>
|
||||
<X className="size-3" />
|
||||
</span>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Expanded state - show full badge
|
||||
return (
|
||||
<div className="p-2 pb-0">
|
||||
<a
|
||||
href="https://agenticjumpstart.com"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="group cursor-pointer flex items-center justify-between w-full px-2 lg:px-3 py-2.5 bg-primary/10 text-primary rounded-lg font-medium text-sm hover:bg-primary/20 transition-all border border-primary/30"
|
||||
data-testid="course-promo-badge"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<Sparkles className="size-4 shrink-0" />
|
||||
<span className="hidden lg:block">Become a 10x Dev</span>
|
||||
</div>
|
||||
<span
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setDismissed(true);
|
||||
}}
|
||||
className="hidden lg:block p-1 rounded-full hover:bg-primary/30 transition-colors cursor-pointer"
|
||||
aria-label="Dismiss"
|
||||
>
|
||||
<X className="size-3.5" />
|
||||
</span>
|
||||
</a>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
apps/app/src/components/ui/delete-confirm-dialog.tsx (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import { Trash2 } from "lucide-react";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { HotkeyButton } from "@/components/ui/hotkey-button";
|
||||
import type { ReactNode } from "react";
|
||||
|
||||
interface DeleteConfirmDialogProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
onConfirm: () => void;
|
||||
title: string;
|
||||
description: string;
|
||||
/** Optional content to show between description and buttons (e.g., item preview card) */
|
||||
children?: ReactNode;
|
||||
/** Text for the confirm button. Defaults to "Delete" */
|
||||
confirmText?: string;
|
||||
/** Test ID for the dialog */
|
||||
testId?: string;
|
||||
/** Test ID for the confirm button */
|
||||
confirmTestId?: string;
|
||||
}
|
||||
|
||||
export function DeleteConfirmDialog({
|
||||
open,
|
||||
onOpenChange,
|
||||
onConfirm,
|
||||
title,
|
||||
description,
|
||||
children,
|
||||
confirmText = "Delete",
|
||||
testId = "delete-confirm-dialog",
|
||||
confirmTestId = "confirm-delete-button",
|
||||
}: DeleteConfirmDialogProps) {
|
||||
const handleConfirm = () => {
|
||||
onConfirm();
|
||||
onOpenChange(false);
|
||||
};
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent
|
||||
className="bg-popover border-border max-w-md"
|
||||
data-testid={testId}
|
||||
>
|
||||
<DialogHeader>
|
||||
<DialogTitle className="flex items-center gap-2">
|
||||
<Trash2 className="w-5 h-5 text-destructive" />
|
||||
{title}
|
||||
</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
{description}
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
{children}
|
||||
|
||||
<DialogFooter className="gap-2 sm:gap-2 pt-4">
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => onOpenChange(false)}
|
||||
className="px-4"
|
||||
data-testid="cancel-delete-button"
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<HotkeyButton
|
||||
variant="destructive"
|
||||
onClick={handleConfirm}
|
||||
data-testid={confirmTestId}
|
||||
hotkey={{ key: "Enter", cmdCtrl: true }}
|
||||
hotkeyActive={open}
|
||||
className="px-4"
|
||||
>
|
||||
<Trash2 className="w-4 h-4 mr-2" />
|
||||
{confirmText}
|
||||
</HotkeyButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
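A short, hedged usage example for the generic dialog above. The props match DeleteConfirmDialogProps; the item, deleteItem, wrapper component, and import paths are placeholders for illustration only.

// Hypothetical caller for DeleteConfirmDialog.
// Assumes: import { useState } from "react";
//          import { DeleteConfirmDialog } from "@/components/ui/delete-confirm-dialog";
function DeleteFeatureExample({
  item,
  deleteItem,
}: {
  item: { id: string; name: string };
  deleteItem: (id: string) => void;
}) {
  const [confirmOpen, setConfirmOpen] = useState(false);

  return (
    <>
      <button onClick={() => setConfirmOpen(true)}>Delete…</button>
      <DeleteConfirmDialog
        open={confirmOpen}
        onOpenChange={setConfirmOpen}
        onConfirm={() => deleteItem(item.id)} // runs before the dialog closes itself
        title="Delete feature"
        description={`This will remove "${item.name}" from the board.`}
        confirmText="Delete"
        testId="delete-feature-dialog"
        confirmTestId="confirm-delete-feature"
      >
        {/* optional preview rendered between the description and the buttons */}
        <div className="rounded-md border border-border p-3 text-sm">{item.name}</div>
      </DeleteConfirmDialog>
    </>
  );
}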
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import React, { useState, useRef, useCallback, useEffect } from "react";
|
||||
import React, { useState, useRef, useCallback } from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { ImageIcon, X, Loader2 } from "lucide-react";
|
||||
import { Textarea } from "@/components/ui/textarea";
|
||||
@@ -83,6 +83,13 @@ export function DescriptionImageDropZone({
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
const currentProject = useAppStore((state) => state.currentProject);
|
||||
|
||||
// Construct server URL for loading saved images
|
||||
const getImageServerUrl = useCallback((imagePath: string): string => {
|
||||
const serverUrl = process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
|
||||
const projectPath = currentProject?.path || "";
|
||||
return `${serverUrl}/api/fs/image?path=${encodeURIComponent(imagePath)}&projectPath=${encodeURIComponent(projectPath)}`;
|
||||
}, [currentProject?.path]);
|
||||
|
||||
const fileToBase64 = (file: File): Promise<string> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
@@ -98,7 +105,7 @@ export function DescriptionImageDropZone({
|
||||
});
|
||||
};
|
||||
|
||||
const saveImageToTemp = async (
|
||||
const saveImageToTemp = useCallback(async (
|
||||
base64Data: string,
|
||||
filename: string,
|
||||
mimeType: string
|
||||
@@ -107,8 +114,8 @@ export function DescriptionImageDropZone({
|
||||
const api = getElectronAPI();
|
||||
// Check if saveImageToTemp method exists
|
||||
if (!api.saveImageToTemp) {
|
||||
// Fallback for mock API - return a mock path in .automaker/images
|
||||
console.log("[DescriptionImageDropZone] Using mock path for image");
|
||||
// Fallback path when saveImageToTemp is not available
|
||||
console.log("[DescriptionImageDropZone] Using fallback path for image");
|
||||
return `.automaker/images/${Date.now()}_${filename}`;
|
||||
}
|
||||
|
||||
@@ -124,7 +131,7 @@ export function DescriptionImageDropZone({
|
||||
console.error("[DescriptionImageDropZone] Error saving image:", error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
}, [currentProject?.path]);
|
||||
|
||||
const processFiles = useCallback(
|
||||
async (files: FileList) => {
|
||||
@@ -193,7 +200,7 @@ export function DescriptionImageDropZone({
|
||||
|
||||
setIsProcessing(false);
|
||||
},
|
||||
[disabled, isProcessing, images, maxFiles, maxFileSize, onImagesChange, previewImages]
|
||||
[disabled, isProcessing, images, maxFiles, maxFileSize, onImagesChange, previewImages, saveImageToTemp]
|
||||
);
|
||||
|
||||
const handleDrop = useCallback(
|
||||
@@ -374,7 +381,15 @@ export function DescriptionImageDropZone({
|
||||
className="max-w-full max-h-full object-contain"
|
||||
/>
|
||||
) : (
|
||||
<ImageIcon className="w-6 h-6 text-muted-foreground" />
|
||||
<img
|
||||
src={getImageServerUrl(image.path)}
|
||||
alt={image.filename}
|
||||
className="max-w-full max-h-full object-contain"
|
||||
onError={(e) => {
|
||||
// If image fails to load, hide it
|
||||
(e.target as HTMLImageElement).style.display = 'none';
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
{/* Remove button */}
|
||||
|
||||
@@ -88,7 +88,6 @@ const SHORTCUT_LABELS: Record<keyof KeyboardShortcuts, string> = {
|
||||
agent: "Agent Runner",
|
||||
spec: "Spec Editor",
|
||||
context: "Context",
|
||||
tools: "Agent Tools",
|
||||
settings: "Settings",
|
||||
profiles: "AI Profiles",
|
||||
toggleSidebar: "Toggle Sidebar",
|
||||
@@ -109,7 +108,6 @@ const SHORTCUT_CATEGORIES: Record<keyof KeyboardShortcuts, "navigation" | "ui" |
|
||||
agent: "navigation",
|
||||
spec: "navigation",
|
||||
context: "navigation",
|
||||
tools: "navigation",
|
||||
settings: "navigation",
|
||||
profiles: "navigation",
|
||||
toggleSidebar: "ui",
|
||||
|
||||
@@ -149,12 +149,12 @@ export function AgentToolsView() {
|
||||
setTerminalResult(null);
|
||||
|
||||
try {
|
||||
// Simulate agent requesting terminal command execution
|
||||
console.log(`[Agent Tool] Requesting to run command: ${terminalCommand}`);
|
||||
// Terminal command simulation for demonstration purposes
|
||||
console.log(`[Agent Tool] Simulating command: ${terminalCommand}`);
|
||||
|
||||
// In mock mode, simulate terminal output
|
||||
// In real Electron mode, this would use child_process
|
||||
const mockOutputs: Record<string, string> = {
|
||||
// Simulated outputs for common commands (preview mode)
|
||||
// In production, the agent executes commands via Claude SDK
|
||||
const simulatedOutputs: Record<string, string> = {
|
||||
ls: "app_spec.txt\nfeatures\nnode_modules\npackage.json\nsrc\ntests\ntsconfig.json",
|
||||
pwd: currentProject?.path || "/Users/demo/project",
|
||||
"echo hello": "hello",
|
||||
@@ -168,8 +168,8 @@ export function AgentToolsView() {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
const output =
|
||||
mockOutputs[terminalCommand.toLowerCase()] ||
|
||||
`Command executed: ${terminalCommand}\n(Mock output - real execution requires Electron mode)`;
|
||||
simulatedOutputs[terminalCommand.toLowerCase()] ||
|
||||
`[Preview] ${terminalCommand}\n(Terminal commands are executed by the agent during feature implementation)`;
|
||||
|
||||
setTerminalResult({
|
||||
success: true,
|
||||
|
||||
@@ -399,7 +399,7 @@ ${Object.entries(projectAnalysis.filesByExtension)
|
||||
`;
|
||||
|
||||
// Write the spec file
|
||||
const specPath = `${currentProject.path}/app_spec.txt`;
|
||||
const specPath = `${currentProject.path}/.automaker/app_spec.txt`;
|
||||
const writeResult = await api.writeFile(specPath, specContent);
|
||||
|
||||
if (writeResult.success) {
|
||||
|
||||
@@ -24,6 +24,7 @@ import {
|
||||
AgentModel,
|
||||
ThinkingLevel,
|
||||
AIProfile,
|
||||
defaultBackgroundSettings,
|
||||
} from "@/store/app-store";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
import { cn, modelSupportsThinking } from "@/lib/utils";
|
||||
@@ -58,6 +59,7 @@ import { KanbanColumn } from "./kanban-column";
|
||||
import { KanbanCard } from "./kanban-card";
|
||||
import { AgentOutputModal } from "./agent-output-modal";
|
||||
import { FeatureSuggestionsDialog } from "./feature-suggestions-dialog";
|
||||
import { BoardBackgroundModal } from "@/components/dialogs/board-background-modal";
|
||||
import {
|
||||
Plus,
|
||||
RefreshCw,
|
||||
@@ -86,6 +88,7 @@ import {
|
||||
Square,
|
||||
Maximize2,
|
||||
Shuffle,
|
||||
ImageIcon,
|
||||
} from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import { Slider } from "@/components/ui/slider";
|
||||
@@ -206,6 +209,7 @@ export function BoardView() {
|
||||
aiProfiles,
|
||||
kanbanCardDetailLevel,
|
||||
setKanbanCardDetailLevel,
|
||||
boardBackgroundByProject,
|
||||
} = useAppStore();
|
||||
const shortcuts = useKeyboardShortcutsConfig();
|
||||
const [activeFeature, setActiveFeature] = useState<Feature | null>(null);
|
||||
@@ -230,6 +234,8 @@ export function BoardView() {
|
||||
);
|
||||
const [showDeleteAllVerifiedDialog, setShowDeleteAllVerifiedDialog] =
|
||||
useState(false);
|
||||
const [showBoardBackgroundModal, setShowBoardBackgroundModal] =
|
||||
useState(false);
|
||||
const [persistedCategories, setPersistedCategories] = useState<string[]>([]);
|
||||
const [showFollowUpDialog, setShowFollowUpDialog] = useState(false);
|
||||
const [followUpFeature, setFollowUpFeature] = useState<Feature | null>(null);
|
||||
@@ -394,22 +400,26 @@ export function BoardView() {
|
||||
}, []);
|
||||
|
||||
// Load features using features API
|
||||
// IMPORTANT: Do NOT add 'features' to dependency array - it would cause infinite reload loop
|
||||
const loadFeatures = useCallback(async () => {
|
||||
if (!currentProject) return;
|
||||
|
||||
const currentPath = currentProject.path;
|
||||
const previousPath = prevProjectPathRef.current;
|
||||
const isProjectSwitch =
|
||||
previousPath !== null && currentPath !== previousPath;
|
||||
|
||||
// If project switched, clear features first to prevent cross-contamination
|
||||
// Also treat this as an initial load for the new project
|
||||
if (previousPath !== null && currentPath !== previousPath) {
|
||||
// Get cached features from store (without adding to dependencies)
|
||||
const cachedFeatures = useAppStore.getState().features;
|
||||
|
||||
// If project switched, mark it but don't clear features yet
|
||||
// We'll clear after successful API load to prevent data loss
|
||||
if (isProjectSwitch) {
|
||||
console.log(
|
||||
`[BoardView] Project switch detected: ${previousPath} -> ${currentPath}, clearing features`
|
||||
`[BoardView] Project switch detected: ${previousPath} -> ${currentPath}`
|
||||
);
|
||||
isSwitchingProjectRef.current = true;
|
||||
isInitialLoadRef.current = true;
|
||||
setFeatures([]);
|
||||
setPersistedCategories([]); // Also clear categories
|
||||
}
|
||||
|
||||
// Update the ref to track current project
|
||||
@@ -424,6 +434,7 @@ export function BoardView() {
|
||||
const api = getElectronAPI();
|
||||
if (!api.features) {
|
||||
console.error("[BoardView] Features API not available");
|
||||
// Keep cached features if API is unavailable
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -441,10 +452,31 @@ export function BoardView() {
|
||||
thinkingLevel: f.thinkingLevel || "none",
|
||||
})
|
||||
);
|
||||
// Successfully loaded features - now safe to set them
|
||||
setFeatures(featuresWithIds);
|
||||
|
||||
// Only clear categories on project switch AFTER successful load
|
||||
if (isProjectSwitch) {
|
||||
setPersistedCategories([]);
|
||||
}
|
||||
} else if (!result.success && result.error) {
|
||||
console.error("[BoardView] API returned error:", result.error);
|
||||
// If it's a new project or the error indicates no features found,
|
||||
// that's expected - start with empty array
|
||||
if (isProjectSwitch) {
|
||||
setFeatures([]);
|
||||
setPersistedCategories([]);
|
||||
}
|
||||
// Otherwise keep cached features
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to load features:", error);
|
||||
// On error, keep existing cached features for the current project
|
||||
// Only clear on project switch if we have no features from server
|
||||
if (isProjectSwitch && cachedFeatures.length === 0) {
|
||||
setFeatures([]);
|
||||
setPersistedCategories([]);
|
||||
}
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
isInitialLoadRef.current = false;
|
||||
@@ -531,7 +563,8 @@ export function BoardView() {
|
||||
const unsubscribe = api.autoMode.onEvent((event) => {
|
||||
// Use event's projectPath or projectId if available, otherwise use current project
|
||||
// Board view only reacts to events for the currently selected project
|
||||
const eventProjectId = ('projectId' in event && event.projectId) || projectId;
|
||||
const eventProjectId =
|
||||
("projectId" in event && event.projectId) || projectId;
|
||||
|
||||
if (event.type === "auto_mode_feature_complete") {
|
||||
// Reload features when a feature is completed
|
||||
@@ -558,10 +591,25 @@ export function BoardView() {
|
||||
}
|
||||
|
||||
loadFeatures();
|
||||
// Show error toast
|
||||
toast.error("Agent encountered an error", {
|
||||
description: event.error || "Check the logs for details",
|
||||
});
|
||||
|
||||
// Check for authentication errors and show a more helpful message
|
||||
const isAuthError =
|
||||
event.errorType === "authentication" ||
|
||||
(event.error &&
|
||||
(event.error.includes("Authentication failed") ||
|
||||
event.error.includes("Invalid API key")));
|
||||
|
||||
if (isAuthError) {
|
||||
toast.error("Authentication Failed", {
|
||||
description:
|
||||
"Your API key is invalid or expired. Please check Settings or run 'claude login' in terminal.",
|
||||
duration: 10000,
|
||||
});
|
||||
} else {
|
||||
toast.error("Agent encountered an error", {
|
||||
description: event.error || "Check the logs for details",
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -828,7 +876,11 @@ export function BoardView() {
|
||||
// features often have skipTests=true, and we want status-based handling first
|
||||
if (targetStatus === "verified") {
|
||||
moveFeature(featureId, "verified");
|
||||
persistFeatureUpdate(featureId, { status: "verified" });
|
||||
// Clear justFinishedAt timestamp when manually verifying via drag
|
||||
persistFeatureUpdate(featureId, {
|
||||
status: "verified",
|
||||
justFinishedAt: undefined,
|
||||
});
|
||||
toast.success("Feature verified", {
|
||||
description: `Manually verified: ${draggedFeature.description.slice(
|
||||
0,
|
||||
@@ -838,7 +890,11 @@ export function BoardView() {
|
||||
} else if (targetStatus === "backlog") {
|
||||
// Allow moving waiting_approval cards back to backlog
|
||||
moveFeature(featureId, "backlog");
|
||||
persistFeatureUpdate(featureId, { status: "backlog" });
|
||||
// Clear justFinishedAt timestamp when moving back to backlog
|
||||
persistFeatureUpdate(featureId, {
|
||||
status: "backlog",
|
||||
justFinishedAt: undefined,
|
||||
});
|
||||
toast.info("Feature moved to backlog", {
|
||||
description: `Moved to Backlog: ${draggedFeature.description.slice(
|
||||
0,
|
||||
@@ -1159,7 +1215,11 @@ export function BoardView() {
|
||||
description: feature.description,
|
||||
});
|
||||
moveFeature(feature.id, "verified");
|
||||
persistFeatureUpdate(feature.id, { status: "verified" });
|
||||
// Clear justFinishedAt timestamp when manually verifying
|
||||
persistFeatureUpdate(feature.id, {
|
||||
status: "verified",
|
||||
justFinishedAt: undefined,
|
||||
});
|
||||
toast.success("Feature verified", {
|
||||
description: `Marked as verified: ${feature.description.slice(0, 50)}${
|
||||
feature.description.length > 50 ? "..." : ""
|
||||
@@ -1225,9 +1285,11 @@ export function BoardView() {
|
||||
}
|
||||
|
||||
// Move feature back to in_progress before sending follow-up
|
||||
// Clear justFinishedAt timestamp since user is now interacting with it
|
||||
const updates = {
|
||||
status: "in_progress" as const,
|
||||
startedAt: new Date().toISOString(),
|
||||
justFinishedAt: undefined,
|
||||
};
|
||||
updateFeature(featureId, updates);
|
||||
persistFeatureUpdate(featureId, updates);
|
||||
@@ -1475,8 +1537,14 @@ export function BoardView() {
|
||||
if (isRunning) {
|
||||
map.in_progress.push(f);
|
||||
} else {
|
||||
// Otherwise, use the feature's status
|
||||
map[f.status].push(f);
|
||||
// Otherwise, use the feature's status (fallback to backlog for unknown statuses)
|
||||
const status = f.status as ColumnId;
|
||||
if (map[status]) {
|
||||
map[status].push(f);
|
||||
} else {
|
||||
// Unknown status, default to backlog
|
||||
map.backlog.push(f);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1581,7 +1649,7 @@ export function BoardView() {
|
||||
return;
|
||||
}
|
||||
|
||||
const featuresToStart = backlogFeatures.slice(0, availableSlots);
|
||||
const featuresToStart = backlogFeatures.slice(0, 1);
|
||||
|
||||
for (const feature of featuresToStart) {
|
||||
// Update the feature status with startedAt timestamp
|
||||
@@ -1790,202 +1858,282 @@ export function BoardView() {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Kanban Card Detail Level Toggle */}
|
||||
{/* Board Background & Detail Level Controls */}
|
||||
{isMounted && (
|
||||
<TooltipProvider>
|
||||
<div
|
||||
className="flex items-center rounded-lg bg-secondary border border-border ml-4"
|
||||
data-testid="kanban-detail-toggle"
|
||||
>
|
||||
<div className="flex items-center gap-2 ml-4">
|
||||
{/* Board Background Button */}
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => setKanbanCardDetailLevel("minimal")}
|
||||
className={cn(
|
||||
"p-2 rounded-l-lg transition-colors",
|
||||
kanbanCardDetailLevel === "minimal"
|
||||
? "bg-brand-500/20 text-brand-500"
|
||||
: "text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
)}
|
||||
data-testid="kanban-toggle-minimal"
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => setShowBoardBackgroundModal(true)}
|
||||
className="h-8 px-2"
|
||||
data-testid="board-background-button"
|
||||
>
|
||||
<Minimize2 className="w-4 h-4" />
|
||||
</button>
|
||||
<ImageIcon className="w-4 h-4" />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Minimal - Title & category only</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => setKanbanCardDetailLevel("standard")}
|
||||
className={cn(
|
||||
"p-2 transition-colors",
|
||||
kanbanCardDetailLevel === "standard"
|
||||
? "bg-brand-500/20 text-brand-500"
|
||||
: "text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
)}
|
||||
data-testid="kanban-toggle-standard"
|
||||
>
|
||||
<Square className="w-4 h-4" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Standard - Steps & progress</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => setKanbanCardDetailLevel("detailed")}
|
||||
className={cn(
|
||||
"p-2 rounded-r-lg transition-colors",
|
||||
kanbanCardDetailLevel === "detailed"
|
||||
? "bg-brand-500/20 text-brand-500"
|
||||
: "text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
)}
|
||||
data-testid="kanban-toggle-detailed"
|
||||
>
|
||||
<Maximize2 className="w-4 h-4" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Detailed - Model, tools & tasks</p>
|
||||
<p>Board Background Settings</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
{/* Kanban Card Detail Level Toggle */}
|
||||
<div
|
||||
className="flex items-center rounded-lg bg-secondary border border-border"
|
||||
data-testid="kanban-detail-toggle"
|
||||
>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => setKanbanCardDetailLevel("minimal")}
|
||||
className={cn(
|
||||
"p-2 rounded-l-lg transition-colors",
|
||||
kanbanCardDetailLevel === "minimal"
|
||||
? "bg-brand-500/20 text-brand-500"
|
||||
: "text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
)}
|
||||
data-testid="kanban-toggle-minimal"
|
||||
>
|
||||
<Minimize2 className="w-4 h-4" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Minimal - Title & category only</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => setKanbanCardDetailLevel("standard")}
|
||||
className={cn(
|
||||
"p-2 transition-colors",
|
||||
kanbanCardDetailLevel === "standard"
|
||||
? "bg-brand-500/20 text-brand-500"
|
||||
: "text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
)}
|
||||
data-testid="kanban-toggle-standard"
|
||||
>
|
||||
<Square className="w-4 h-4" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Standard - Steps & progress</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => setKanbanCardDetailLevel("detailed")}
|
||||
className={cn(
|
||||
"p-2 rounded-r-lg transition-colors",
|
||||
kanbanCardDetailLevel === "detailed"
|
||||
? "bg-brand-500/20 text-brand-500"
|
||||
: "text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
)}
|
||||
data-testid="kanban-toggle-detailed"
|
||||
>
|
||||
<Maximize2 className="w-4 h-4" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Detailed - Model, tools & tasks</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</TooltipProvider>
|
||||
)}
|
||||
</div>
|
||||
{/* Kanban Columns */}
|
||||
<div className="flex-1 overflow-x-auto px-4 pb-4">
|
||||
<DndContext
|
||||
sensors={sensors}
|
||||
collisionDetection={collisionDetectionStrategy}
|
||||
onDragStart={handleDragStart}
|
||||
onDragEnd={handleDragEnd}
|
||||
>
|
||||
<div className="flex gap-4 h-full min-w-max">
|
||||
{COLUMNS.map((column) => {
|
||||
const columnFeatures = getColumnFeatures(column.id);
|
||||
return (
|
||||
<KanbanColumn
|
||||
key={column.id}
|
||||
id={column.id}
|
||||
title={column.title}
|
||||
color={column.color}
|
||||
count={columnFeatures.length}
|
||||
headerAction={
|
||||
column.id === "verified" && columnFeatures.length > 0 ? (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 px-2 text-xs text-destructive hover:text-destructive hover:bg-destructive/10"
|
||||
onClick={() => setShowDeleteAllVerifiedDialog(true)}
|
||||
data-testid="delete-all-verified-button"
|
||||
>
|
||||
<Trash2 className="w-3 h-3 mr-1" />
|
||||
Delete All
|
||||
</Button>
|
||||
) : column.id === "backlog" ? (
|
||||
<div className="flex items-center gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 w-6 p-0 text-yellow-500 hover:text-yellow-400 hover:bg-yellow-500/10 relative"
|
||||
onClick={() => setShowSuggestionsDialog(true)}
|
||||
title="Feature Suggestions"
|
||||
data-testid="feature-suggestions-button"
|
||||
>
|
||||
<Lightbulb className="w-3.5 h-3.5" />
|
||||
{suggestionsCount > 0 && (
|
||||
<span
|
||||
className="absolute -top-1 -right-1 w-4 h-4 text-[9px] font-mono rounded-full bg-yellow-500 text-black flex items-center justify-center"
|
||||
data-testid="suggestions-count"
|
||||
>
|
||||
{suggestionsCount}
|
||||
</span>
|
||||
)}
|
||||
</Button>
|
||||
{columnFeatures.length > 0 && (
|
||||
<HotkeyButton
|
||||
{(() => {
|
||||
// Get background settings for current project
|
||||
const backgroundSettings =
|
||||
(currentProject && boardBackgroundByProject[currentProject.path]) ||
|
||||
defaultBackgroundSettings;
|
||||
|
||||
// Build background image style if image exists
|
||||
const backgroundImageStyle = backgroundSettings.imagePath
|
||||
? {
|
||||
backgroundImage: `url(${
|
||||
process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008"
|
||||
}/api/fs/image?path=${encodeURIComponent(
|
||||
backgroundSettings.imagePath
|
||||
)}&projectPath=${encodeURIComponent(
|
||||
currentProject?.path || ""
|
||||
)}${
|
||||
backgroundSettings.imageVersion
|
||||
? `&v=${backgroundSettings.imageVersion}`
|
||||
: ""
|
||||
})`,
|
||||
backgroundSize: "cover",
|
||||
backgroundPosition: "center",
|
||||
backgroundRepeat: "no-repeat",
|
||||
}
|
||||
: {};
|
||||
|
||||
return (
|
||||
<div
|
||||
className="flex-1 overflow-x-auto px-4 pb-4 relative"
|
||||
style={backgroundImageStyle}
|
||||
>
|
||||
<DndContext
|
||||
sensors={sensors}
|
||||
collisionDetection={collisionDetectionStrategy}
|
||||
onDragStart={handleDragStart}
|
||||
onDragEnd={handleDragEnd}
|
||||
>
|
||||
<div className="flex gap-4 h-full min-w-max">
|
||||
{COLUMNS.map((column) => {
|
||||
const columnFeatures = getColumnFeatures(column.id);
|
||||
return (
|
||||
<KanbanColumn
|
||||
key={column.id}
|
||||
id={column.id}
|
||||
title={column.title}
|
||||
color={column.color}
|
||||
count={columnFeatures.length}
|
||||
opacity={backgroundSettings.columnOpacity}
|
||||
showBorder={backgroundSettings.columnBorderEnabled}
|
||||
hideScrollbar={backgroundSettings.hideScrollbar}
|
||||
headerAction={
|
||||
column.id === "verified" &&
|
||||
columnFeatures.length > 0 ? (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 px-2 text-xs text-primary hover:text-primary hover:bg-primary/10"
|
||||
onClick={handleStartNextFeatures}
|
||||
hotkey={shortcuts.startNext}
|
||||
hotkeyActive={false}
|
||||
data-testid="start-next-button"
|
||||
className="h-6 px-2 text-xs text-destructive hover:text-destructive hover:bg-destructive/10"
|
||||
onClick={() =>
|
||||
setShowDeleteAllVerifiedDialog(true)
|
||||
}
|
||||
data-testid="delete-all-verified-button"
|
||||
>
|
||||
<FastForward className="w-3 h-3 mr-1" />
|
||||
Pull Top
|
||||
</HotkeyButton>
|
||||
)}
|
||||
</div>
|
||||
) : undefined
|
||||
}
|
||||
>
|
||||
<SortableContext
|
||||
items={columnFeatures.map((f) => f.id)}
|
||||
strategy={verticalListSortingStrategy}
|
||||
>
|
||||
{columnFeatures.map((feature, index) => {
|
||||
// Calculate shortcut key for in-progress cards (first 10 get 1-9, 0)
|
||||
let shortcutKey: string | undefined;
|
||||
if (column.id === "in_progress" && index < 10) {
|
||||
shortcutKey = index === 9 ? "0" : String(index + 1);
|
||||
<Trash2 className="w-3 h-3 mr-1" />
|
||||
Delete All
|
||||
</Button>
|
||||
) : column.id === "backlog" ? (
|
||||
<div className="flex items-center gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 w-6 p-0 text-yellow-500 hover:text-yellow-400 hover:bg-yellow-500/10 relative"
|
||||
onClick={() => setShowSuggestionsDialog(true)}
|
||||
title="Feature Suggestions"
|
||||
data-testid="feature-suggestions-button"
|
||||
>
|
||||
<Lightbulb className="w-3.5 h-3.5" />
|
||||
{suggestionsCount > 0 && (
|
||||
<span
|
||||
className="absolute -top-1 -right-1 w-4 h-4 text-[9px] font-mono rounded-full bg-yellow-500 text-black flex items-center justify-center"
|
||||
data-testid="suggestions-count"
|
||||
>
|
||||
{suggestionsCount}
|
||||
</span>
|
||||
)}
|
||||
</Button>
|
||||
{columnFeatures.length > 0 && (
|
||||
<HotkeyButton
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 px-2 text-xs text-primary hover:text-primary hover:bg-primary/10"
|
||||
onClick={handleStartNextFeatures}
|
||||
hotkey={shortcuts.startNext}
|
||||
hotkeyActive={false}
|
||||
data-testid="start-next-button"
|
||||
>
|
||||
<FastForward className="w-3 h-3 mr-1" />
|
||||
Pull Top
|
||||
</HotkeyButton>
|
||||
)}
|
||||
</div>
|
||||
) : undefined
|
||||
}
|
||||
return (
|
||||
<KanbanCard
|
||||
key={feature.id}
|
||||
feature={feature}
|
||||
onEdit={() => setEditingFeature(feature)}
|
||||
onDelete={() => handleDeleteFeature(feature.id)}
|
||||
onViewOutput={() => handleViewOutput(feature)}
|
||||
onVerify={() => handleVerifyFeature(feature)}
|
||||
onResume={() => handleResumeFeature(feature)}
|
||||
onForceStop={() => handleForceStopFeature(feature)}
|
||||
onManualVerify={() => handleManualVerify(feature)}
|
||||
onMoveBackToInProgress={() =>
|
||||
handleMoveBackToInProgress(feature)
|
||||
>
|
||||
<SortableContext
|
||||
items={columnFeatures.map((f) => f.id)}
|
||||
strategy={verticalListSortingStrategy}
|
||||
>
|
||||
{columnFeatures.map((feature, index) => {
|
||||
// Calculate shortcut key for in-progress cards (first 10 get 1-9, 0)
|
||||
let shortcutKey: string | undefined;
|
||||
if (column.id === "in_progress" && index < 10) {
|
||||
shortcutKey =
|
||||
index === 9 ? "0" : String(index + 1);
|
||||
}
|
||||
onFollowUp={() => handleOpenFollowUp(feature)}
|
||||
onCommit={() => handleCommitFeature(feature)}
|
||||
onRevert={() => handleRevertFeature(feature)}
|
||||
onMerge={() => handleMergeFeature(feature)}
|
||||
hasContext={featuresWithContext.has(feature.id)}
|
||||
isCurrentAutoTask={runningAutoTasks.includes(
|
||||
feature.id
|
||||
)}
|
||||
shortcutKey={shortcutKey}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</SortableContext>
|
||||
</KanbanColumn>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
return (
|
||||
<KanbanCard
|
||||
key={feature.id}
|
||||
feature={feature}
|
||||
onEdit={() => setEditingFeature(feature)}
|
||||
onDelete={() => handleDeleteFeature(feature.id)}
|
||||
onViewOutput={() => handleViewOutput(feature)}
|
||||
onVerify={() => handleVerifyFeature(feature)}
|
||||
onResume={() => handleResumeFeature(feature)}
|
||||
onForceStop={() =>
|
||||
handleForceStopFeature(feature)
|
||||
}
|
||||
onManualVerify={() =>
|
||||
handleManualVerify(feature)
|
||||
}
|
||||
onMoveBackToInProgress={() =>
|
||||
handleMoveBackToInProgress(feature)
|
||||
}
|
||||
onFollowUp={() => handleOpenFollowUp(feature)}
|
||||
onCommit={() => handleCommitFeature(feature)}
|
||||
onRevert={() => handleRevertFeature(feature)}
|
||||
onMerge={() => handleMergeFeature(feature)}
|
||||
hasContext={featuresWithContext.has(feature.id)}
|
||||
isCurrentAutoTask={runningAutoTasks.includes(
|
||||
feature.id
|
||||
)}
|
||||
shortcutKey={shortcutKey}
|
||||
opacity={backgroundSettings.cardOpacity}
|
||||
glassmorphism={
|
||||
backgroundSettings.cardGlassmorphism
|
||||
}
|
||||
cardBorderEnabled={
|
||||
backgroundSettings.cardBorderEnabled
|
||||
}
|
||||
cardBorderOpacity={
|
||||
backgroundSettings.cardBorderOpacity
|
||||
}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</SortableContext>
|
||||
</KanbanColumn>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
<DragOverlay>
|
||||
{activeFeature && (
|
||||
<Card className="w-72 opacity-90 rotate-3 shadow-xl">
|
||||
<CardHeader className="p-3">
|
||||
<CardTitle className="text-sm">
|
||||
{activeFeature.description}
|
||||
</CardTitle>
|
||||
<CardDescription className="text-xs">
|
||||
{activeFeature.category}
|
||||
</CardDescription>
|
||||
</CardHeader>
|
||||
</Card>
|
||||
)}
|
||||
</DragOverlay>
|
||||
</DndContext>
|
||||
</div>
|
||||
<DragOverlay>
|
||||
{activeFeature && (
|
||||
<Card className="w-72 opacity-90 rotate-3 shadow-xl">
|
||||
<CardHeader className="p-3">
|
||||
<CardTitle className="text-sm">
|
||||
{activeFeature.description}
|
||||
</CardTitle>
|
||||
<CardDescription className="text-xs">
|
||||
{activeFeature.category}
|
||||
</CardDescription>
|
||||
</CardHeader>
|
||||
</Card>
|
||||
)}
|
||||
</DragOverlay>
|
||||
</DndContext>
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
</div>
|
||||
|
||||
{/* Board Background Modal */}
|
||||
<BoardBackgroundModal
|
||||
open={showBoardBackgroundModal}
|
||||
onOpenChange={setShowBoardBackgroundModal}
|
||||
/>
|
||||
|
||||
{/* Add Feature Dialog */}
|
||||
<Dialog
|
||||
open={showAddDialog}
|
||||
|
||||
@@ -17,6 +17,8 @@ import {
|
||||
File,
|
||||
X,
|
||||
BookOpen,
|
||||
EditIcon,
|
||||
Eye,
|
||||
} from "lucide-react";
|
||||
import {
|
||||
useKeyboardShortcuts,
|
||||
@@ -34,6 +36,7 @@ import {
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { Markdown } from "../ui/markdown";
|
||||
|
||||
interface ContextFile {
|
||||
name: string;
|
||||
@@ -60,6 +63,7 @@ export function ContextView() {
|
||||
);
|
||||
const [newFileContent, setNewFileContent] = useState("");
|
||||
const [isDropHovering, setIsDropHovering] = useState(false);
|
||||
const [isPreviewMode, setIsPreviewMode] = useState(false);
|
||||
|
||||
// Keyboard shortcuts for this view
|
||||
const contextShortcuts: KeyboardShortcut[] = useMemo(
|
||||
@@ -80,6 +84,11 @@ export function ContextView() {
|
||||
return `${currentProject.path}/.automaker/context`;
|
||||
}, [currentProject]);
|
||||
|
||||
const isMarkdownFile = (filename: string): boolean => {
|
||||
const ext = filename.toLowerCase().substring(filename.lastIndexOf("."));
|
||||
return ext === ".md" || ext === ".markdown";
|
||||
};
|
||||
|
||||
// Determine if a file is an image based on extension
|
||||
const isImageFile = (filename: string): boolean => {
|
||||
const imageExtensions = [
|
||||
@@ -151,6 +160,7 @@ export function ContextView() {
|
||||
// Could add a confirmation dialog here
|
||||
}
|
||||
loadFileContent(file);
|
||||
setIsPreviewMode(isMarkdownFile(file.name));
|
||||
};
|
||||
|
||||
// Save current file
|
||||
@@ -448,6 +458,27 @@ export function ContextView() {
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex gap-2">
|
||||
{selectedFile.type === "text" &&
|
||||
isMarkdownFile(selectedFile.name) && (
|
||||
<Button
|
||||
variant={"outline"}
|
||||
size="sm"
|
||||
onClick={() => setIsPreviewMode(!isPreviewMode)}
|
||||
data-testid="toggle-preview-mode"
|
||||
>
|
||||
{isPreviewMode ? (
|
||||
<>
|
||||
<EditIcon className="w-4 h-4 mr-2" />
|
||||
Edit
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Eye className="w-4 h-4 mr-2" />
|
||||
Preview
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
{selectedFile.type === "text" && (
|
||||
<Button
|
||||
size="sm"
|
||||
@@ -484,6 +515,10 @@ export function ContextView() {
|
||||
className="max-w-full max-h-full object-contain"
|
||||
/>
|
||||
</div>
|
||||
) : isPreviewMode ? (
|
||||
<Card className="h-full overflow-auto p-4" data-testid="markdown-preview">
|
||||
<Markdown>{editedContent}</Markdown>
|
||||
</Card>
|
||||
) : (
|
||||
<Card className="h-full overflow-hidden">
|
||||
<textarea
|
||||
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
import { cn } from "@/lib/utils";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
import { Markdown } from "@/components/ui/markdown";
|
||||
import { useFileBrowser } from "@/contexts/file-browser-context";
|
||||
|
||||
interface InterviewMessage {
|
||||
id: string;
|
||||
@@ -65,6 +66,7 @@ const INTERVIEW_QUESTIONS = [
|
||||
export function InterviewView() {
|
||||
const { setCurrentView, addProject, setCurrentProject, setAppSpec } =
|
||||
useAppStore();
|
||||
const { openFileBrowser } = useFileBrowser();
|
||||
const [input, setInput] = useState("");
|
||||
const [messages, setMessages] = useState<InterviewMessage[]>([]);
|
||||
const [currentQuestionIndex, setCurrentQuestionIndex] = useState(0);
|
||||
@@ -286,11 +288,13 @@ export function InterviewView() {
|
||||
};
|
||||
|
||||
const handleSelectDirectory = async () => {
|
||||
const api = getElectronAPI();
|
||||
const result = await api.openDirectory();
|
||||
const selectedPath = await openFileBrowser({
|
||||
title: "Select Base Directory",
|
||||
description: "Choose the parent directory where your new project will be created",
|
||||
});
|
||||
|
||||
if (!result.canceled && result.filePaths[0]) {
|
||||
setProjectPath(result.filePaths[0]);
|
||||
if (selectedPath) {
|
||||
setProjectPath(selectedPath);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect, memo } from "react";
|
||||
import { useState, useEffect, useMemo, memo } from "react";
|
||||
import { useSortable } from "@dnd-kit/sortable";
|
||||
import { CSS } from "@dnd-kit/utilities";
|
||||
import { cn } from "@/lib/utils";
|
||||
@@ -21,13 +21,14 @@ import {
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { DeleteConfirmDialog } from "@/components/ui/delete-confirm-dialog";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
import { Feature, useAppStore } from "@/store/app-store";
|
||||
import { Feature, useAppStore, ThinkingLevel } from "@/store/app-store";
|
||||
import {
|
||||
GripVertical,
|
||||
Edit,
|
||||
@@ -55,6 +56,7 @@ import {
|
||||
GitMerge,
|
||||
ChevronDown,
|
||||
ChevronUp,
|
||||
Brain,
|
||||
} from "lucide-react";
|
||||
import { CountUpTimer } from "@/components/ui/count-up-timer";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
@@ -72,6 +74,21 @@ import {
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
|
||||
/**
|
||||
* Formats thinking level for compact display
|
||||
*/
|
||||
function formatThinkingLevel(level: ThinkingLevel | undefined): string {
|
||||
if (!level || level === "none") return "";
|
||||
const labels: Record<ThinkingLevel, string> = {
|
||||
none: "",
|
||||
low: "Low",
|
||||
medium: "Med",
|
||||
high: "High",
|
||||
ultrathink: "Ultra",
|
||||
};
|
||||
return labels[level];
|
||||
}
|
||||
|
||||
interface KanbanCardProps {
|
||||
feature: Feature;
|
||||
onEdit: () => void;
|
||||
@@ -93,6 +110,14 @@ interface KanbanCardProps {
|
||||
contextContent?: string;
|
||||
/** Feature summary from agent completion */
|
||||
summary?: string;
|
||||
/** Opacity percentage (0-100) */
|
||||
opacity?: number;
|
||||
/** Whether to use glassmorphism (backdrop-blur) effect */
|
||||
glassmorphism?: boolean;
|
||||
/** Whether to show card borders */
|
||||
cardBorderEnabled?: boolean;
|
||||
/** Card border opacity percentage (0-100) */
|
||||
cardBorderOpacity?: number;
|
||||
}
|
||||
|
||||
export const KanbanCard = memo(function KanbanCard({
|
||||
@@ -114,12 +139,17 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
shortcutKey,
|
||||
contextContent,
|
||||
summary,
|
||||
opacity = 100,
|
||||
glassmorphism = true,
|
||||
cardBorderEnabled = true,
|
||||
cardBorderOpacity = 100,
|
||||
}: KanbanCardProps) {
|
||||
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false);
|
||||
const [isSummaryDialogOpen, setIsSummaryDialogOpen] = useState(false);
|
||||
const [isRevertDialogOpen, setIsRevertDialogOpen] = useState(false);
|
||||
const [agentInfo, setAgentInfo] = useState<AgentTaskInfo | null>(null);
|
||||
const [isDescriptionExpanded, setIsDescriptionExpanded] = useState(false);
|
||||
const [currentTime, setCurrentTime] = useState(() => Date.now());
|
||||
const { kanbanCardDetailLevel } = useAppStore();
|
||||
|
||||
// Check if feature has worktree
|
||||
@@ -131,6 +161,43 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
kanbanCardDetailLevel === "detailed";
|
||||
const showAgentInfo = kanbanCardDetailLevel === "detailed";
|
||||
|
||||
// Helper to check if "just finished" badge should be shown (within 2 minutes)
|
||||
const isJustFinished = useMemo(() => {
|
||||
if (
|
||||
!feature.justFinishedAt ||
|
||||
feature.status !== "waiting_approval" ||
|
||||
feature.error
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
const finishedTime = new Date(feature.justFinishedAt).getTime();
|
||||
const twoMinutes = 2 * 60 * 1000; // 2 minutes in milliseconds
|
||||
return currentTime - finishedTime < twoMinutes;
|
||||
}, [feature.justFinishedAt, feature.status, feature.error, currentTime]);
|
||||
|
||||
// Update current time periodically to check if badge should be hidden
|
||||
useEffect(() => {
|
||||
if (!feature.justFinishedAt || feature.status !== "waiting_approval") {
|
||||
return;
|
||||
}
|
||||
|
||||
const finishedTime = new Date(feature.justFinishedAt).getTime();
|
||||
const twoMinutes = 2 * 60 * 1000; // 2 minutes in milliseconds
|
||||
const timeRemaining = twoMinutes - (currentTime - finishedTime);
|
||||
|
||||
if (timeRemaining <= 0) {
|
||||
// Already past 2 minutes
|
||||
return;
|
||||
}
|
||||
|
||||
// Update time every second to check if 2 minutes have passed
|
||||
const interval = setInterval(() => {
|
||||
setCurrentTime(Date.now());
|
||||
}, 1000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [feature.justFinishedAt, feature.status, currentTime]);
|
||||
|
||||
// Load context file for in_progress, waiting_approval, and verified features
|
||||
useEffect(() => {
|
||||
const loadContext = async () => {
|
||||
@@ -167,11 +234,11 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
} else {
|
||||
// Fallback to direct file read for backward compatibility
|
||||
const contextPath = `${currentProject.path}/.automaker/features/${feature.id}/agent-output.md`;
|
||||
const result = await api.readFile(contextPath);
|
||||
const result = await api.readFile(contextPath);
|
||||
|
||||
if (result.success && result.content) {
|
||||
const info = parseAgentContext(result.content);
|
||||
setAgentInfo(info);
|
||||
if (result.success && result.content) {
|
||||
const info = parseAgentContext(result.content);
|
||||
setAgentInfo(info);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
@@ -195,22 +262,19 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
};
|
||||
|
||||
const handleConfirmDelete = () => {
|
||||
setIsDeleteDialogOpen(false);
|
||||
onDelete();
|
||||
};
|
||||
|
||||
const handleCancelDelete = () => {
|
||||
setIsDeleteDialogOpen(false);
|
||||
};
|
||||
|
||||
// Dragging logic:
|
||||
// - Backlog items can always be dragged
|
||||
// - skipTests items can be dragged even when in_progress or verified (unless currently running)
|
||||
// - waiting_approval items can always be dragged (to allow manual verification via drag)
|
||||
// - Non-skipTests (TDD) items in progress or verified cannot be dragged
|
||||
// - verified items can always be dragged (to allow moving back to waiting_approval or backlog)
|
||||
// - Non-skipTests (TDD) items in progress cannot be dragged (they are running)
|
||||
const isDraggable =
|
||||
feature.status === "backlog" ||
|
||||
feature.status === "waiting_approval" ||
|
||||
feature.status === "verified" ||
|
||||
(feature.skipTests && !isCurrentAutoTask);
|
||||
const {
|
||||
attributes,
|
||||
@@ -227,17 +291,48 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
const style = {
|
||||
transform: CSS.Transform.toString(transform),
|
||||
transition,
|
||||
opacity: isDragging ? 0.5 : undefined,
|
||||
};
|
||||
|
||||
return (
|
||||
// Calculate border style based on enabled state and opacity
|
||||
const borderStyle: React.CSSProperties = { ...style };
|
||||
if (!cardBorderEnabled) {
|
||||
(borderStyle as Record<string, string>).borderWidth = "0px";
|
||||
(borderStyle as Record<string, string>).borderColor = "transparent";
|
||||
} else if (cardBorderOpacity !== 100) {
|
||||
// Apply border opacity using color-mix to blend the border color with transparent
|
||||
// The --border variable uses oklch format, so we use color-mix in oklch space
|
||||
// Ensure border width is set (1px is the default Tailwind border width)
|
||||
(borderStyle as Record<string, string>).borderWidth = "1px";
|
||||
(
|
||||
borderStyle as Record<string, string>
|
||||
).borderColor = `color-mix(in oklch, var(--border) ${cardBorderOpacity}%, transparent)`;
|
||||
}
|
||||
|
||||
const cardElement = (
|
||||
<Card
|
||||
ref={setNodeRef}
|
||||
style={style}
|
||||
style={isCurrentAutoTask ? style : borderStyle}
|
||||
className={cn(
|
||||
"cursor-grab active:cursor-grabbing transition-all backdrop-blur-sm border-border relative kanban-card-content select-none",
|
||||
isDragging && "opacity-50 scale-105 shadow-lg",
|
||||
isCurrentAutoTask &&
|
||||
"border-running-indicator border-2 shadow-running-indicator/50 shadow-lg animate-pulse",
|
||||
"cursor-grab active:cursor-grabbing transition-all relative kanban-card-content select-none",
|
||||
// Apply border class when border is enabled and opacity is 100%
|
||||
// When opacity is not 100%, we use inline styles for border color
|
||||
// Skip border classes when animated border is active (isCurrentAutoTask)
|
||||
!isCurrentAutoTask &&
|
||||
cardBorderEnabled &&
|
||||
cardBorderOpacity === 100 &&
|
||||
"border-border",
|
||||
// When border is enabled but opacity is not 100%, we still need border width
|
||||
!isCurrentAutoTask &&
|
||||
cardBorderEnabled &&
|
||||
cardBorderOpacity !== 100 &&
|
||||
"border",
|
||||
// Remove default background when using opacity overlay
|
||||
!isDragging && "bg-transparent",
|
||||
// Remove default backdrop-blur-sm from Card component when glassmorphism is disabled
|
||||
!glassmorphism && "backdrop-blur-[0px]!",
|
||||
isDragging && "scale-105 shadow-lg",
|
||||
// Error state border (only when not in progress)
|
||||
feature.error &&
|
||||
!isCurrentAutoTask &&
|
||||
"border-red-500 border-2 shadow-red-500/30 shadow-lg",
|
||||
@@ -248,6 +343,16 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
{...attributes}
|
||||
{...(isDraggable ? listeners : {})}
|
||||
>
|
||||
{/* Background overlay with opacity - only affects background, not content */}
|
||||
{!isDragging && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute inset-0 rounded-xl bg-card -z-10",
|
||||
glassmorphism && "backdrop-blur-sm"
|
||||
)}
|
||||
style={{ opacity: opacity / 100 }}
|
||||
/>
|
||||
)}
|
||||
{/* Skip Tests indicator badge */}
|
||||
{feature.skipTests && !feature.error && (
|
||||
<div
|
||||
@@ -278,6 +383,21 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
<span>Errored</span>
|
||||
</div>
|
||||
)}
|
||||
{/* Just Finished indicator badge - shows when agent just completed work (for 2 minutes) */}
|
||||
{isJustFinished && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute px-1.5 py-0.5 text-[10px] font-medium rounded flex items-center gap-1 z-10",
|
||||
feature.skipTests ? "top-8 left-2" : "top-2 left-2",
|
||||
"bg-green-500/20 border border-green-500/50 text-green-400 animate-pulse"
|
||||
)}
|
||||
data-testid={`just-finished-badge-${feature.id}`}
|
||||
title="Agent just finished working on this feature"
|
||||
>
|
||||
<Sparkles className="w-3 h-3" />
|
||||
<span>Fresh Baked</span>
|
||||
</div>
|
||||
)}
|
||||
{/* Branch badge - show when feature has a worktree */}
|
||||
{hasWorktree && !isCurrentAutoTask && (
|
||||
<TooltipProvider delayDuration={300}>
|
||||
@@ -287,19 +407,23 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
className={cn(
|
||||
"absolute px-1.5 py-0.5 text-[10px] font-medium rounded flex items-center gap-1 z-10 cursor-default",
|
||||
"bg-purple-500/20 border border-purple-500/50 text-purple-400",
|
||||
// Position below error badge if present, otherwise use normal position
|
||||
feature.error || feature.skipTests
|
||||
// Position below other badges if present, otherwise use normal position
|
||||
feature.error || feature.skipTests || isJustFinished
|
||||
? "top-8 left-2"
|
||||
: "top-2 left-2"
|
||||
)}
|
||||
data-testid={`branch-badge-${feature.id}`}
|
||||
>
|
||||
<GitBranch className="w-3 h-3 shrink-0" />
|
||||
<span className="truncate max-w-[80px]">{feature.branchName?.replace("feature/", "")}</span>
|
||||
<span className="truncate max-w-[80px]">
|
||||
{feature.branchName?.replace("feature/", "")}
|
||||
</span>
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="bottom" className="max-w-[300px]">
|
||||
<p className="font-mono text-xs break-all">{feature.branchName}</p>
|
||||
<p className="font-mono text-xs break-all">
|
||||
{feature.branchName}
|
||||
</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
@@ -308,14 +432,19 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
className={cn(
|
||||
"p-3 pb-2 block", // Reset grid layout to block for custom kanban card layout
|
||||
// Add extra top padding when badges are present to prevent text overlap
|
||||
(feature.skipTests || feature.error) && "pt-10",
|
||||
(feature.skipTests || feature.error || isJustFinished) && "pt-10",
|
||||
// Add even more top padding when both badges and branch are shown
|
||||
hasWorktree && (feature.skipTests || feature.error) && "pt-14"
|
||||
hasWorktree &&
|
||||
(feature.skipTests || feature.error || isJustFinished) &&
|
||||
"pt-14"
|
||||
)}
|
||||
>
|
||||
{isCurrentAutoTask && (
|
||||
<div className="absolute top-2 right-2 flex items-center justify-center gap-2 bg-running-indicator/20 border border-running-indicator rounded px-2 py-0.5">
|
||||
<Loader2 className="w-4 h-4 text-running-indicator animate-spin" />
|
||||
<span className="text-xs text-running-indicator font-medium">
|
||||
{formatModelName(feature.model ?? DEFAULT_MODEL)}
|
||||
</span>
|
||||
{feature.startedAt && (
|
||||
<CountUpTimer
|
||||
startedAt={feature.startedAt}
|
||||
@@ -439,7 +568,9 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
) : (
|
||||
<Circle className="w-3 h-3 mt-0.5 shrink-0" />
|
||||
)}
|
||||
<span className="break-words hyphens-auto line-clamp-2 leading-relaxed">{step}</span>
|
||||
<span className="break-words hyphens-auto line-clamp-2 leading-relaxed">
|
||||
{step}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
{feature.steps.length > 3 && (
|
||||
@@ -450,6 +581,28 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Model/Preset Info for Backlog Cards - Show in Detailed mode */}
|
||||
{showAgentInfo && feature.status === "backlog" && (
|
||||
<div className="mb-3 space-y-2 overflow-hidden">
|
||||
<div className="flex items-center gap-2 text-xs flex-wrap">
|
||||
<div className="flex items-center gap-1 text-cyan-400">
|
||||
<Cpu className="w-3 h-3" />
|
||||
<span className="font-medium">
|
||||
{formatModelName(feature.model ?? DEFAULT_MODEL)}
|
||||
</span>
|
||||
</div>
|
||||
{feature.thinkingLevel && feature.thinkingLevel !== "none" && (
|
||||
<div className="flex items-center gap-1 text-purple-400">
|
||||
<Brain className="w-3 h-3" />
|
||||
<span className="font-medium">
|
||||
{formatThinkingLevel(feature.thinkingLevel)}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Agent Info Panel - shows for in_progress, waiting_approval, verified */}
|
||||
{/* Detailed mode: Show all agent info */}
|
||||
{showAgentInfo && feature.status !== "backlog" && agentInfo && (
|
||||
@@ -511,7 +664,8 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
todo.status === "completed" &&
|
||||
"text-muted-foreground line-through",
|
||||
todo.status === "in_progress" && "text-amber-400",
|
||||
todo.status === "pending" && "text-foreground-secondary"
|
||||
todo.status === "pending" &&
|
||||
"text-foreground-secondary"
|
||||
)}
|
||||
>
|
||||
{todo.content}
|
||||
@@ -803,35 +957,15 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
</CardContent>
|
||||
|
||||
{/* Delete Confirmation Dialog */}
|
||||
<Dialog open={isDeleteDialogOpen} onOpenChange={setIsDeleteDialogOpen}>
|
||||
<DialogContent data-testid="delete-confirmation-dialog">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Delete Feature</DialogTitle>
|
||||
<DialogDescription>
|
||||
Are you sure you want to delete this feature? This action cannot
|
||||
be undone.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<DialogFooter className="mt-6">
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={handleCancelDelete}
|
||||
data-testid="cancel-delete-button"
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<HotkeyButton
|
||||
variant="destructive"
|
||||
onClick={handleConfirmDelete}
|
||||
data-testid="confirm-delete-button"
|
||||
hotkey={{ key: "Enter", cmdCtrl: true }}
|
||||
hotkeyActive={isDeleteDialogOpen}
|
||||
>
|
||||
Delete
|
||||
</HotkeyButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
<DeleteConfirmDialog
|
||||
open={isDeleteDialogOpen}
|
||||
onOpenChange={setIsDeleteDialogOpen}
|
||||
onConfirm={handleConfirmDelete}
|
||||
title="Delete Feature"
|
||||
description="Are you sure you want to delete this feature? This action cannot be undone."
|
||||
testId="delete-confirmation-dialog"
|
||||
confirmTestId="confirm-delete-button"
|
||||
/>
|
||||
|
||||
{/* Summary Modal */}
|
||||
<Dialog open={isSummaryDialogOpen} onOpenChange={setIsSummaryDialogOpen}>
|
||||
@@ -844,9 +978,13 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
<Sparkles className="w-5 h-5 text-green-400" />
|
||||
Implementation Summary
|
||||
</DialogTitle>
|
||||
<DialogDescription className="text-sm" title={feature.description || feature.summary || ""}>
|
||||
<DialogDescription
|
||||
className="text-sm"
|
||||
title={feature.description || feature.summary || ""}
|
||||
>
|
||||
{(() => {
|
||||
const displayText = feature.description || feature.summary || "No description";
|
||||
const displayText =
|
||||
feature.description || feature.summary || "No description";
|
||||
return displayText.length > 100
|
||||
? `${displayText.slice(0, 100)}...`
|
||||
: displayText;
|
||||
@@ -882,10 +1020,15 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
Revert Changes
|
||||
</DialogTitle>
|
||||
<DialogDescription>
|
||||
This will discard all changes made by the agent and move the feature back to the backlog.
|
||||
This will discard all changes made by the agent and move the
|
||||
feature back to the backlog.
|
||||
{feature.branchName && (
|
||||
<span className="block mt-2 font-medium">
|
||||
Branch <code className="bg-muted px-1 py-0.5 rounded">{feature.branchName}</code> will be deleted.
|
||||
Branch{" "}
|
||||
<code className="bg-muted px-1 py-0.5 rounded">
|
||||
{feature.branchName}
|
||||
</code>{" "}
|
||||
will be deleted.
|
||||
</span>
|
||||
)}
|
||||
<span className="block mt-2 text-red-400 font-medium">
|
||||
@@ -917,4 +1060,11 @@ export const KanbanCard = memo(function KanbanCard({
|
||||
</Dialog>
|
||||
</Card>
|
||||
);
|
||||
|
||||
// Wrap with animated border when in progress
|
||||
if (isCurrentAutoTask) {
|
||||
return <div className="animated-border-wrapper">{cardElement}</div>;
|
||||
}
|
||||
|
||||
return cardElement;
|
||||
});
|
||||
|
||||
@@ -12,6 +12,9 @@ interface KanbanColumnProps {
|
||||
count: number;
|
||||
children: ReactNode;
|
||||
headerAction?: ReactNode;
|
||||
opacity?: number; // Opacity percentage (0-100) - only affects background
|
||||
showBorder?: boolean; // Whether to show column border
|
||||
hideScrollbar?: boolean; // Whether to hide the column scrollbar
|
||||
}
|
||||
|
||||
export const KanbanColumn = memo(function KanbanColumn({
|
||||
@@ -21,6 +24,9 @@ export const KanbanColumn = memo(function KanbanColumn({
|
||||
count,
|
||||
children,
|
||||
headerAction,
|
||||
opacity = 100,
|
||||
showBorder = true,
|
||||
hideScrollbar = false,
|
||||
}: KanbanColumnProps) {
|
||||
const { setNodeRef, isOver } = useDroppable({ id });
|
||||
|
||||
@@ -28,13 +34,27 @@ export const KanbanColumn = memo(function KanbanColumn({
|
||||
<div
|
||||
ref={setNodeRef}
|
||||
className={cn(
|
||||
"flex flex-col h-full rounded-lg bg-card backdrop-blur-sm border border-border transition-colors w-72",
|
||||
isOver && "bg-accent"
|
||||
"relative flex flex-col h-full rounded-lg transition-colors w-72",
|
||||
showBorder && "border border-border"
|
||||
)}
|
||||
data-testid={`kanban-column-${id}`}
|
||||
>
|
||||
{/* Column Header */}
|
||||
<div className="flex items-center gap-2 p-3 border-b border-border">
|
||||
{/* Background layer with opacity - only this layer is affected by opacity */}
|
||||
<div
|
||||
className={cn(
|
||||
"absolute inset-0 rounded-lg backdrop-blur-sm transition-colors",
|
||||
isOver ? "bg-accent" : "bg-card"
|
||||
)}
|
||||
style={{ opacity: opacity / 100 }}
|
||||
/>
|
||||
|
||||
{/* Column Header - positioned above the background */}
|
||||
<div
|
||||
className={cn(
|
||||
"relative z-10 flex items-center gap-2 p-3",
|
||||
showBorder && "border-b border-border"
|
||||
)}
|
||||
>
|
||||
<div className={cn("w-3 h-3 rounded-full", color)} />
|
||||
<h3 className="font-medium text-sm flex-1">{title}</h3>
|
||||
{headerAction}
|
||||
@@ -43,8 +63,14 @@ export const KanbanColumn = memo(function KanbanColumn({
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Column Content */}
|
||||
<div className="flex-1 overflow-y-auto p-2 space-y-2">
|
||||
{/* Column Content - positioned above the background */}
|
||||
<div
|
||||
className={cn(
|
||||
"relative z-10 flex-1 overflow-y-auto p-2 space-y-2",
|
||||
hideScrollbar &&
|
||||
"[&::-webkit-scrollbar]:hidden [-ms-overflow-style:none] [scrollbar-width:none]"
|
||||
)}
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -82,12 +82,14 @@ export function SettingsView() {
|
||||
// Compute the effective theme for the current project
|
||||
const effectiveTheme = (settingsProject?.theme || theme) as Theme;
|
||||
|
||||
// Handler to set theme - saves to project if one is selected, otherwise to global
|
||||
// Handler to set theme - always updates global theme (user's preference),
|
||||
// and also sets per-project theme if a project is selected
|
||||
const handleSetTheme = (newTheme: typeof theme) => {
|
||||
// Always update global theme so user's preference persists across all projects
|
||||
setTheme(newTheme);
|
||||
// Also set per-project theme if a project is selected
|
||||
if (currentProject) {
|
||||
setProjectTheme(currentProject.id, newTheme);
|
||||
} else {
|
||||
setTheme(newTheme);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -61,12 +61,16 @@ export function AuthenticationStatusDisplay({
|
||||
{claudeAuthStatus.method === "oauth_token_env"
|
||||
? "Using CLAUDE_CODE_OAUTH_TOKEN"
|
||||
: claudeAuthStatus.method === "oauth_token"
|
||||
? "Using stored OAuth token"
|
||||
? "Using stored OAuth token (subscription)"
|
||||
: claudeAuthStatus.method === "api_key_env"
|
||||
? "Using ANTHROPIC_API_KEY"
|
||||
: claudeAuthStatus.method === "api_key"
|
||||
? "Using stored API key"
|
||||
: "Unknown method"}
|
||||
: claudeAuthStatus.method === "credentials_file"
|
||||
? "Using credentials file"
|
||||
: claudeAuthStatus.method === "cli_authenticated"
|
||||
? "Using Claude CLI authentication"
|
||||
: `Using ${claudeAuthStatus.method || "detected"} authentication`}
|
||||
</span>
|
||||
</div>
|
||||
</>
|
||||
@@ -107,14 +111,16 @@ export function AuthenticationStatusDisplay({
|
||||
<div className="flex items-center gap-2 text-muted-foreground">
|
||||
<Info className="w-3 h-3 shrink-0" />
|
||||
<span>
|
||||
{codexAuthStatus.method === "cli_verified" ||
|
||||
codexAuthStatus.method === "cli_tokens"
|
||||
{codexAuthStatus.method === "subscription"
|
||||
? "Using Codex subscription (Plus/Team)"
|
||||
: codexAuthStatus.method === "cli_verified" ||
|
||||
codexAuthStatus.method === "cli_tokens"
|
||||
? "Using CLI login (OpenAI account)"
|
||||
: codexAuthStatus.method === "api_key"
|
||||
? "Using stored API key"
|
||||
: codexAuthStatus.method === "env"
|
||||
? "Using OPENAI_API_KEY"
|
||||
: "Unknown method"}
|
||||
: `Using ${codexAuthStatus.method || "unknown"} authentication`}
|
||||
</span>
|
||||
</div>
|
||||
</>
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
import { Trash2, Folder } from "lucide-react";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Folder, Trash2 } from "lucide-react";
|
||||
import { DeleteConfirmDialog } from "@/components/ui/delete-confirm-dialog";
|
||||
import type { Project } from "@/lib/electron";
|
||||
|
||||
interface DeleteProjectDialogProps {
|
||||
@@ -26,24 +18,22 @@ export function DeleteProjectDialog({
|
||||
const handleConfirm = () => {
|
||||
if (project) {
|
||||
onConfirm(project.id);
|
||||
onOpenChange(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent className="bg-popover border-border max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle className="flex items-center gap-2">
|
||||
<Trash2 className="w-5 h-5 text-destructive" />
|
||||
Delete Project
|
||||
</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
Are you sure you want to move this project to Trash?
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
{project && (
|
||||
<DeleteConfirmDialog
|
||||
open={open}
|
||||
onOpenChange={onOpenChange}
|
||||
onConfirm={handleConfirm}
|
||||
title="Delete Project"
|
||||
description="Are you sure you want to move this project to Trash?"
|
||||
confirmText="Move to Trash"
|
||||
testId="delete-project-dialog"
|
||||
confirmTestId="confirm-delete-project"
|
||||
>
|
||||
{project && (
|
||||
<>
|
||||
<div className="flex items-center gap-3 p-4 rounded-lg bg-sidebar-accent/10 border border-sidebar-border">
|
||||
<div className="w-10 h-10 rounded-lg bg-sidebar-accent/20 border border-sidebar-border flex items-center justify-center shrink-0">
|
||||
<Folder className="w-5 h-5 text-brand-500" />
|
||||
@@ -57,27 +47,13 @@ export function DeleteProjectDialog({
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<p className="text-sm text-muted-foreground">
|
||||
The folder will remain on disk until you permanently delete it from
|
||||
Trash.
|
||||
</p>
|
||||
|
||||
<DialogFooter className="gap-2 sm:gap-0">
|
||||
<Button variant="ghost" onClick={() => onOpenChange(false)}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant="destructive"
|
||||
onClick={handleConfirm}
|
||||
data-testid="confirm-delete-project"
|
||||
>
|
||||
<Trash2 className="w-4 h-4 mr-2" />
|
||||
Move to Trash
|
||||
</Button>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
The folder will remain on disk until you permanently delete it from
|
||||
Trash.
|
||||
</p>
|
||||
</>
|
||||
)}
|
||||
</DeleteConfirmDialog>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -68,19 +68,24 @@ export function useCliStatus() {
|
||||
try {
|
||||
const result = await api.setup.getClaudeStatus();
|
||||
if (result.success && result.auth) {
|
||||
const auth = result.auth;
|
||||
// Validate method is one of the expected values, default to "none"
|
||||
const validMethods = ["oauth_token_env", "oauth_token", "api_key", "api_key_env", "none"] as const;
|
||||
// Cast to extended type that includes server-added fields
|
||||
const auth = result.auth as typeof result.auth & {
|
||||
oauthTokenValid?: boolean;
|
||||
apiKeyValid?: boolean;
|
||||
};
|
||||
// Map server method names to client method types
|
||||
// Server returns: oauth_token_env, oauth_token, api_key_env, api_key, credentials_file, cli_authenticated, none
|
||||
const validMethods = ["oauth_token_env", "oauth_token", "api_key", "api_key_env", "credentials_file", "cli_authenticated", "none"] as const;
|
||||
type AuthMethod = typeof validMethods[number];
|
||||
const method: AuthMethod = validMethods.includes(auth.method as AuthMethod)
|
||||
? (auth.method as AuthMethod)
|
||||
: "none";
|
||||
: auth.authenticated ? "api_key" : "none"; // Default authenticated to api_key, not none
|
||||
const authStatus = {
|
||||
authenticated: auth.authenticated,
|
||||
method,
|
||||
hasCredentialsFile: auth.hasCredentialsFile ?? false,
|
||||
oauthTokenValid: auth.hasStoredOAuthToken || auth.hasEnvOAuthToken,
|
||||
apiKeyValid: auth.hasStoredApiKey || auth.hasEnvApiKey,
|
||||
oauthTokenValid: auth.oauthTokenValid || auth.hasStoredOAuthToken || auth.hasEnvOAuthToken,
|
||||
apiKeyValid: auth.apiKeyValid || auth.hasStoredApiKey || auth.hasEnvApiKey,
|
||||
hasEnvOAuthToken: auth.hasEnvOAuthToken,
|
||||
hasEnvApiKey: auth.hasEnvApiKey,
|
||||
};
|
||||
@@ -96,27 +101,30 @@ export function useCliStatus() {
|
||||
try {
|
||||
const result = await api.setup.getCodexStatus();
|
||||
if (result.success && result.auth) {
|
||||
const auth = result.auth;
|
||||
// Determine method - prioritize cli_verified and cli_tokens over auth_file
|
||||
const method =
|
||||
auth.method === "cli_verified" || auth.method === "cli_tokens"
|
||||
? auth.method === "cli_verified"
|
||||
? ("cli_verified" as const)
|
||||
: ("cli_tokens" as const)
|
||||
: auth.method === "auth_file"
|
||||
? ("api_key" as const)
|
||||
: auth.method === "env_var"
|
||||
? ("env" as const)
|
||||
: ("none" as const);
|
||||
// Cast to extended type that includes server-added fields
|
||||
const auth = result.auth as typeof result.auth & {
|
||||
hasSubscription?: boolean;
|
||||
cliLoggedIn?: boolean;
|
||||
hasEnvApiKey?: boolean;
|
||||
};
|
||||
// Map server method names to client method types
|
||||
// Server returns: subscription, cli_verified, cli_tokens, api_key, env, none
|
||||
const validMethods = ["subscription", "cli_verified", "cli_tokens", "api_key", "env", "none"] as const;
|
||||
type CodexMethod = typeof validMethods[number];
|
||||
const method: CodexMethod = validMethods.includes(auth.method as CodexMethod)
|
||||
? (auth.method as CodexMethod)
|
||||
: auth.authenticated ? "api_key" : "none"; // Default authenticated to api_key
|
||||
|
||||
const authStatus = {
|
||||
authenticated: auth.authenticated,
|
||||
method,
|
||||
// Only set apiKeyValid for actual API key methods, not CLI login
|
||||
// Only set apiKeyValid for actual API key methods, not CLI login or subscription
|
||||
apiKeyValid:
|
||||
method === "cli_verified" || method === "cli_tokens"
|
||||
method === "cli_verified" || method === "cli_tokens" || method === "subscription"
|
||||
? undefined
|
||||
: auth.hasAuthFile || auth.hasEnvKey,
|
||||
: auth.hasAuthFile || auth.hasEnvKey || auth.hasEnvApiKey,
|
||||
hasSubscription: auth.hasSubscription,
|
||||
cliLoggedIn: auth.cliLoggedIn,
|
||||
};
|
||||
setCodexAuthStatus(authStatus);
|
||||
}
|
||||
|
||||
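Both CLI status branches above narrow the server-reported auth method against a fixed whitelist before storing it. A minimal standalone sketch of that pattern, assuming the Claude method names shown in the diff (normalizeAuthMethod is a hypothetical helper, not part of the codebase):

    // Sketch only: validate an untyped server string against the known method set.
    const validMethods = [
      "oauth_token_env",
      "oauth_token",
      "api_key",
      "api_key_env",
      "credentials_file",
      "cli_authenticated",
      "none",
    ] as const;

    type AuthMethod = (typeof validMethods)[number];

    // Hypothetical helper: unknown-but-authenticated methods fall back to "api_key",
    // everything else falls back to "none".
    function normalizeAuthMethod(raw: string, authenticated: boolean): AuthMethod {
      return validMethods.includes(raw as AuthMethod)
        ? (raw as AuthMethod)
        : authenticated
          ? "api_key"
          : "none";
    }

    // e.g. normalizeAuthMethod("credentials_file", true) -> "credentials_file"
    //      normalizeAuthMethod("something_new", true)    -> "api_key"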
@@ -29,7 +29,8 @@ export type Theme =
|
||||
| "gruvbox"
|
||||
| "catppuccin"
|
||||
| "onedark"
|
||||
| "synthwave";
|
||||
| "synthwave"
|
||||
| "red";
|
||||
|
||||
export type KanbanDetailLevel = "minimal" | "standard" | "detailed";
|
||||
|
||||
|
||||
@@ -40,6 +40,8 @@ export function useCliStatus({
|
||||
"oauth_token",
|
||||
"api_key",
|
||||
"api_key_env",
|
||||
"credentials_file",
|
||||
"cli_authenticated",
|
||||
"none",
|
||||
] as const;
|
||||
type AuthMethod = (typeof validMethods)[number];
|
||||
|
||||
@@ -14,7 +14,8 @@ import {
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Save, RefreshCw, FileText, Sparkles, Loader2, FilePlus2, AlertCircle, ListPlus } from "lucide-react";
|
||||
import { Save, RefreshCw, FileText, Sparkles, Loader2, FilePlus2, AlertCircle, ListPlus, CheckCircle2 } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import { XmlSyntaxEditor } from "@/components/ui/xml-syntax-editor";
|
||||
import type { SpecRegenerationEvent } from "@/types/electron";
|
||||
@@ -311,14 +312,22 @@ export function SpecView() {
|
||||
// The backend sends explicit signals for completion:
|
||||
// 1. "All tasks completed" in the message
|
||||
// 2. [Phase: complete] marker in logs
|
||||
// 3. "Spec regeneration complete!" for regeneration
|
||||
// 4. "Initial spec creation complete!" for creation without features
|
||||
const isFinalCompletionMessage = event.message?.includes("All tasks completed") ||
|
||||
event.message === "All tasks completed!" ||
|
||||
event.message === "All tasks completed";
|
||||
event.message === "All tasks completed" ||
|
||||
event.message === "Spec regeneration complete!" ||
|
||||
event.message === "Initial spec creation complete!";
|
||||
|
||||
const hasCompletePhase = logsRef.current.includes("[Phase: complete]");
|
||||
|
||||
// Intermediate completion means features are being generated after spec creation
|
||||
const isIntermediateCompletion = event.message?.includes("Features are being generated") ||
|
||||
event.message?.includes("features are being generated");
|
||||
|
||||
// Rely solely on explicit backend signals
|
||||
const shouldComplete = isFinalCompletionMessage || hasCompletePhase;
|
||||
const shouldComplete = (isFinalCompletionMessage || hasCompletePhase) && !isIntermediateCompletion;
|
||||
|
||||
if (shouldComplete) {
|
||||
// Fully complete - clear all states immediately
|
||||
@@ -337,9 +346,29 @@ export function SpecView() {
|
||||
setProjectOverview("");
|
||||
setErrorMessage("");
|
||||
stateRestoredRef.current = false;
|
||||
// Reload the spec to show the new content
|
||||
loadSpec();
|
||||
} else {
|
||||
|
||||
// Reload the spec with delay to ensure file is written to disk
|
||||
setTimeout(() => {
|
||||
loadSpec();
|
||||
}, SPEC_FILE_WRITE_DELAY);
|
||||
|
||||
// Show success toast notification
|
||||
const isRegeneration = event.message?.includes("regeneration");
|
||||
const isFeatureGeneration = event.message?.includes("Feature generation");
|
||||
toast.success(
|
||||
isFeatureGeneration
|
||||
? "Feature Generation Complete"
|
||||
: isRegeneration
|
||||
? "Spec Regeneration Complete"
|
||||
: "Spec Creation Complete",
|
||||
{
|
||||
description: isFeatureGeneration
|
||||
? "Features have been created from the app specification."
|
||||
: "Your app specification has been saved.",
|
||||
icon: <CheckCircle2 className="w-4 h-4" />,
|
||||
}
|
||||
);
|
||||
} else if (isIntermediateCompletion) {
|
||||
// Intermediate completion - keep state active for feature generation
|
||||
setIsCreating(true);
|
||||
setIsRegenerating(true);
|
||||
|
||||
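The completion handling above leans entirely on explicit backend signals rather than heuristics. A compact sketch of that decision, assuming message is the latest progress event text and logs is the accumulated log buffer:

    // Sketch of the completion check: final messages or a complete-phase marker win,
    // unless the backend says features are still being generated.
    function shouldMarkSpecComplete(message: string | undefined, logs: string): boolean {
      const isFinalCompletionMessage =
        !!message &&
        (message.includes("All tasks completed") ||
          message === "Spec regeneration complete!" ||
          message === "Initial spec creation complete!");

      const hasCompletePhase = logs.includes("[Phase: complete]");

      // Intermediate completion: the spec is written but feature generation continues,
      // so the view should stay in its working state.
      const isIntermediateCompletion =
        !!message && message.toLowerCase().includes("features are being generated");

      return (isFinalCompletionMessage || hasCompletePhase) && !isIntermediateCompletion;
    }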
@@ -2,9 +2,6 @@
|
||||
|
||||
import { useState, useCallback } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { HotkeyButton } from "@/components/ui/hotkey-button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
@@ -13,15 +10,8 @@ import {
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import {
|
||||
Card,
|
||||
CardContent,
|
||||
CardDescription,
|
||||
CardHeader,
|
||||
CardTitle,
|
||||
} from "@/components/ui/card";
|
||||
import { useAppStore } from "@/store/app-store";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
import { useAppStore, type ThemeMode } from "@/store/app-store";
|
||||
import { getElectronAPI, type Project } from "@/lib/electron";
|
||||
import { initializeProject } from "@/lib/project-init";
|
||||
import {
|
||||
FolderOpen,
|
||||
@@ -40,13 +30,23 @@ import {
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
import { toast } from "sonner";
|
||||
import { WorkspacePickerModal } from "@/components/workspace-picker-modal";
|
||||
import { NewProjectModal } from "@/components/new-project-modal";
|
||||
import { getHttpApiClient } from "@/lib/http-api-client";
|
||||
import type { StarterTemplate } from "@/lib/templates";
|
||||
|
||||
export function WelcomeView() {
|
||||
const { projects, addProject, setCurrentProject, setCurrentView } =
|
||||
useAppStore();
|
||||
const [showNewProjectDialog, setShowNewProjectDialog] = useState(false);
|
||||
const [newProjectName, setNewProjectName] = useState("");
|
||||
const [newProjectPath, setNewProjectPath] = useState("");
|
||||
const {
|
||||
projects,
|
||||
trashedProjects,
|
||||
currentProject,
|
||||
upsertAndSetCurrentProject,
|
||||
addProject,
|
||||
setCurrentProject,
|
||||
setCurrentView,
|
||||
theme: globalTheme,
|
||||
} = useAppStore();
|
||||
const [showNewProjectModal, setShowNewProjectModal] = useState(false);
|
||||
const [isCreating, setIsCreating] = useState(false);
|
||||
const [isOpening, setIsOpening] = useState(false);
|
||||
const [showInitDialog, setShowInitDialog] = useState(false);
|
||||
@@ -57,6 +57,7 @@ export function WelcomeView() {
|
||||
projectName: string;
|
||||
projectPath: string;
|
||||
} | null>(null);
|
||||
const [showWorkspacePicker, setShowWorkspacePicker] = useState(false);
|
||||
|
||||
/**
|
||||
* Kick off project analysis agent to analyze the codebase
|
||||
@@ -105,15 +106,14 @@ export function WelcomeView() {
|
||||
return;
|
||||
}
|
||||
|
||||
const project = {
|
||||
id: `project-${Date.now()}`,
|
||||
name,
|
||||
path,
|
||||
lastOpened: new Date().toISOString(),
|
||||
};
|
||||
|
||||
addProject(project);
|
||||
setCurrentProject(project);
|
||||
// Upsert project and set as current (handles both create and update cases)
|
||||
// Theme preservation is handled by the store action
|
||||
const trashedProject = trashedProjects.find((p) => p.path === path);
|
||||
const effectiveTheme =
|
||||
(trashedProject?.theme as ThemeMode | undefined) ||
|
||||
(currentProject?.theme as ThemeMode | undefined) ||
|
||||
globalTheme;
|
||||
const project = upsertAndSetCurrentProject(path, name, effectiveTheme);
|
||||
|
||||
// Show initialization dialog if files were created
|
||||
if (initResult.createdFiles && initResult.createdFiles.length > 0) {
|
||||
@@ -148,21 +148,63 @@ export function WelcomeView() {
|
||||
setIsOpening(false);
|
||||
}
|
||||
},
|
||||
[addProject, setCurrentProject, analyzeProject]
|
||||
[
|
||||
trashedProjects,
|
||||
currentProject,
|
||||
globalTheme,
|
||||
upsertAndSetCurrentProject,
|
||||
analyzeProject,
|
||||
]
|
||||
);
|
||||
|
||||
const handleOpenProject = useCallback(async () => {
|
||||
const api = getElectronAPI();
|
||||
const result = await api.openDirectory();
|
||||
try {
|
||||
// Check if workspace is configured
|
||||
const httpClient = getHttpApiClient();
|
||||
const configResult = await httpClient.workspace.getConfig();
|
||||
|
||||
if (!result.canceled && result.filePaths[0]) {
|
||||
const path = result.filePaths[0];
|
||||
// Extract folder name from path (works on both Windows and Mac/Linux)
|
||||
const name = path.split(/[/\\]/).filter(Boolean).pop() || "Untitled Project";
|
||||
await initializeAndOpenProject(path, name);
|
||||
if (configResult.success && configResult.configured) {
|
||||
// Show workspace picker modal
|
||||
setShowWorkspacePicker(true);
|
||||
} else {
|
||||
// Fall back to current behavior (native dialog or manual input)
|
||||
const api = getElectronAPI();
|
||||
const result = await api.openDirectory();
|
||||
|
||||
if (!result.canceled && result.filePaths[0]) {
|
||||
const path = result.filePaths[0];
|
||||
// Extract folder name from path (works on both Windows and Mac/Linux)
|
||||
const name =
|
||||
path.split(/[/\\]/).filter(Boolean).pop() || "Untitled Project";
|
||||
await initializeAndOpenProject(path, name);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("[Welcome] Failed to check workspace config:", error);
|
||||
// Fall back to current behavior on error
|
||||
const api = getElectronAPI();
|
||||
const result = await api.openDirectory();
|
||||
|
||||
if (!result.canceled && result.filePaths[0]) {
|
||||
const path = result.filePaths[0];
|
||||
const name =
|
||||
path.split(/[/\\]/).filter(Boolean).pop() || "Untitled Project";
|
||||
await initializeAndOpenProject(path, name);
|
||||
}
|
||||
}
|
||||
}, [initializeAndOpenProject]);
|
||||
|
||||
/**
|
||||
* Handle selecting a project from workspace picker
|
||||
*/
|
||||
const handleWorkspaceSelect = useCallback(
|
||||
async (path: string, name: string) => {
|
||||
setShowWorkspacePicker(false);
|
||||
await initializeAndOpenProject(path, name);
|
||||
},
|
||||
[initializeAndOpenProject]
|
||||
);
|
||||
|
||||
/**
|
||||
* Handle clicking on a recent project
|
||||
*/
|
||||
@@ -174,31 +216,24 @@ export function WelcomeView() {
|
||||
);
|
||||
|
||||
const handleNewProject = () => {
|
||||
setNewProjectName("");
|
||||
setNewProjectPath("");
|
||||
setShowNewProjectDialog(true);
|
||||
setShowNewProjectModal(true);
|
||||
};
|
||||
|
||||
const handleInteractiveMode = () => {
|
||||
setCurrentView("interview");
|
||||
};
|
||||
|
||||
const handleSelectDirectory = async () => {
|
||||
const api = getElectronAPI();
|
||||
const result = await api.openDirectory();
|
||||
|
||||
if (!result.canceled && result.filePaths[0]) {
|
||||
setNewProjectPath(result.filePaths[0]);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCreateProject = async () => {
|
||||
if (!newProjectName || !newProjectPath) return;
|
||||
|
||||
/**
|
||||
* Create a blank project with just .automaker directory structure
|
||||
*/
|
||||
const handleCreateBlankProject = async (
|
||||
projectName: string,
|
||||
parentDir: string
|
||||
) => {
|
||||
setIsCreating(true);
|
||||
try {
|
||||
const api = getElectronAPI();
|
||||
const projectPath = `${newProjectPath}/${newProjectName}`;
|
||||
const projectPath = `${parentDir}/${projectName}`;
|
||||
|
||||
// Create project directory
|
||||
await api.mkdir(projectPath);
|
||||
@@ -217,7 +252,7 @@ export function WelcomeView() {
|
||||
await api.writeFile(
|
||||
`${projectPath}/.automaker/app_spec.txt`,
|
||||
`<project_specification>
|
||||
<project_name>${newProjectName}</project_name>
|
||||
<project_name>${projectName}</project_name>
|
||||
|
||||
<overview>
|
||||
Describe your project here. This file will be analyzed by an AI agent
|
||||
@@ -240,24 +275,24 @@ export function WelcomeView() {
|
||||
|
||||
const project = {
|
||||
id: `project-${Date.now()}`,
|
||||
name: newProjectName,
|
||||
name: projectName,
|
||||
path: projectPath,
|
||||
lastOpened: new Date().toISOString(),
|
||||
};
|
||||
|
||||
addProject(project);
|
||||
setCurrentProject(project);
|
||||
setShowNewProjectDialog(false);
|
||||
setShowNewProjectModal(false);
|
||||
|
||||
toast.success("Project created", {
|
||||
description: `Created ${newProjectName} with .automaker directory`,
|
||||
description: `Created ${projectName} with .automaker directory`,
|
||||
});
|
||||
|
||||
// Set init status to show the dialog
|
||||
setInitStatus({
|
||||
isNewProject: true,
|
||||
createdFiles: initResult.createdFiles || [],
|
||||
projectName: newProjectName,
|
||||
projectName: projectName,
|
||||
projectPath: projectPath,
|
||||
});
|
||||
setShowInitDialog(true);
|
||||
@@ -271,6 +306,210 @@ export function WelcomeView() {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a project from a GitHub starter template
|
||||
*/
|
||||
const handleCreateFromTemplate = async (
|
||||
template: StarterTemplate,
|
||||
projectName: string,
|
||||
parentDir: string
|
||||
) => {
|
||||
setIsCreating(true);
|
||||
try {
|
||||
const httpClient = getHttpApiClient();
|
||||
const api = getElectronAPI();
|
||||
|
||||
// Clone the template repository
|
||||
const cloneResult = await httpClient.templates.clone(
|
||||
template.repoUrl,
|
||||
projectName,
|
||||
parentDir
|
||||
);
|
||||
|
||||
if (!cloneResult.success || !cloneResult.projectPath) {
|
||||
toast.error("Failed to clone template", {
|
||||
description: cloneResult.error || "Unknown error occurred",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const projectPath = cloneResult.projectPath;
|
||||
|
||||
// Initialize .automaker directory with all necessary files
|
||||
const initResult = await initializeProject(projectPath);
|
||||
|
||||
if (!initResult.success) {
|
||||
toast.error("Failed to initialize project", {
|
||||
description: initResult.error || "Unknown error occurred",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Update the app_spec.txt with template-specific info
|
||||
await api.writeFile(
|
||||
`${projectPath}/.automaker/app_spec.txt`,
|
||||
`<project_specification>
|
||||
<project_name>${projectName}</project_name>
|
||||
|
||||
<overview>
|
||||
This project was created from the "${template.name}" starter template.
|
||||
${template.description}
|
||||
</overview>
|
||||
|
||||
<technology_stack>
|
||||
${template.techStack
|
||||
.map((tech) => `<technology>${tech}</technology>`)
|
||||
.join("\n ")}
|
||||
</technology_stack>
|
||||
|
||||
<core_capabilities>
|
||||
${template.features
|
||||
.map((feature) => `<capability>${feature}</capability>`)
|
||||
.join("\n ")}
|
||||
</core_capabilities>
|
||||
|
||||
<implemented_features>
|
||||
<!-- The AI agent will populate this based on code analysis -->
|
||||
</implemented_features>
|
||||
</project_specification>`
|
||||
);
|
||||
|
||||
const project = {
|
||||
id: `project-${Date.now()}`,
|
||||
name: projectName,
|
||||
path: projectPath,
|
||||
lastOpened: new Date().toISOString(),
|
||||
};
|
||||
|
||||
addProject(project);
|
||||
setCurrentProject(project);
|
||||
setShowNewProjectModal(false);
|
||||
|
||||
toast.success("Project created from template", {
|
||||
description: `Created ${projectName} from ${template.name}`,
|
||||
});
|
||||
|
||||
// Set init status to show the dialog
|
||||
setInitStatus({
|
||||
isNewProject: true,
|
||||
createdFiles: initResult.createdFiles || [],
|
||||
projectName: projectName,
|
||||
projectPath: projectPath,
|
||||
});
|
||||
setShowInitDialog(true);
|
||||
|
||||
// Kick off project analysis
|
||||
analyzeProject(projectPath);
|
||||
} catch (error) {
|
||||
console.error("Failed to create project from template:", error);
|
||||
toast.error("Failed to create project", {
|
||||
description: error instanceof Error ? error.message : "Unknown error",
|
||||
});
|
||||
} finally {
|
||||
setIsCreating(false);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a project from a custom GitHub URL
|
||||
*/
|
||||
const handleCreateFromCustomUrl = async (
|
||||
repoUrl: string,
|
||||
projectName: string,
|
||||
parentDir: string
|
||||
) => {
|
||||
setIsCreating(true);
|
||||
try {
|
||||
const httpClient = getHttpApiClient();
|
||||
const api = getElectronAPI();
|
||||
|
||||
// Clone the repository
|
||||
const cloneResult = await httpClient.templates.clone(
|
||||
repoUrl,
|
||||
projectName,
|
||||
parentDir
|
||||
);
|
||||
|
||||
if (!cloneResult.success || !cloneResult.projectPath) {
|
||||
toast.error("Failed to clone repository", {
|
||||
description: cloneResult.error || "Unknown error occurred",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const projectPath = cloneResult.projectPath;
|
||||
|
||||
// Initialize .automaker directory with all necessary files
|
||||
const initResult = await initializeProject(projectPath);
|
||||
|
||||
if (!initResult.success) {
|
||||
toast.error("Failed to initialize project", {
|
||||
description: initResult.error || "Unknown error occurred",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Update the app_spec.txt with basic info
|
||||
await api.writeFile(
|
||||
`${projectPath}/.automaker/app_spec.txt`,
|
||||
`<project_specification>
|
||||
<project_name>${projectName}</project_name>
|
||||
|
||||
<overview>
|
||||
This project was cloned from ${repoUrl}.
|
||||
The AI agent will analyze the project structure.
|
||||
</overview>
|
||||
|
||||
<technology_stack>
|
||||
<!-- The AI agent will fill this in after analyzing your project -->
|
||||
</technology_stack>
|
||||
|
||||
<core_capabilities>
|
||||
<!-- List core features and capabilities -->
|
||||
</core_capabilities>
|
||||
|
||||
<implemented_features>
|
||||
<!-- The AI agent will populate this based on code analysis -->
|
||||
</implemented_features>
|
||||
</project_specification>`
|
||||
);
|
||||
|
||||
const project = {
|
||||
id: `project-${Date.now()}`,
|
||||
name: projectName,
|
||||
path: projectPath,
|
||||
lastOpened: new Date().toISOString(),
|
||||
};
|
||||
|
||||
addProject(project);
|
||||
setCurrentProject(project);
|
||||
setShowNewProjectModal(false);
|
||||
|
||||
toast.success("Project created from repository", {
|
||||
description: `Created ${projectName} from ${repoUrl}`,
|
||||
});
|
||||
|
||||
// Set init status to show the dialog
|
||||
setInitStatus({
|
||||
isNewProject: true,
|
||||
createdFiles: initResult.createdFiles || [],
|
||||
projectName: projectName,
|
||||
projectPath: projectPath,
|
||||
});
|
||||
setShowInitDialog(true);
|
||||
|
||||
// Kick off project analysis
|
||||
analyzeProject(projectPath);
|
||||
} catch (error) {
|
||||
console.error("Failed to create project from custom URL:", error);
|
||||
toast.error("Failed to create project", {
|
||||
description: error instanceof Error ? error.message : "Unknown error",
|
||||
});
|
||||
} finally {
|
||||
setIsCreating(false);
|
||||
}
|
||||
};
|
||||
|
||||
const recentProjects = [...projects]
|
||||
.sort((a, b) => {
|
||||
const dateA = a.lastOpened ? new Date(a.lastOpened).getTime() : 0;
|
||||
@@ -451,82 +690,15 @@ export function WelcomeView() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* New Project Dialog */}
|
||||
<Dialog
|
||||
open={showNewProjectDialog}
|
||||
onOpenChange={setShowNewProjectDialog}
|
||||
>
|
||||
<DialogContent
|
||||
className="bg-card border-border"
|
||||
data-testid="new-project-dialog"
|
||||
>
|
||||
<DialogHeader>
|
||||
<DialogTitle className="text-foreground">
|
||||
Create New Project
|
||||
</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
Set up a new project directory with initial configuration files.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<div className="space-y-4 py-4">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="project-name" className="text-foreground">
|
||||
Project Name
|
||||
</Label>
|
||||
<Input
|
||||
id="project-name"
|
||||
placeholder="my-awesome-project"
|
||||
value={newProjectName}
|
||||
onChange={(e) => setNewProjectName(e.target.value)}
|
||||
className="bg-input border-border text-foreground placeholder:text-muted-foreground"
|
||||
data-testid="project-name-input"
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="project-path" className="text-foreground">
|
||||
Parent Directory
|
||||
</Label>
|
||||
<div className="flex gap-2">
|
||||
<Input
|
||||
id="project-path"
|
||||
placeholder="/path/to/projects"
|
||||
value={newProjectPath}
|
||||
onChange={(e) => setNewProjectPath(e.target.value)}
|
||||
className="flex-1 bg-input border-border text-foreground placeholder:text-muted-foreground"
|
||||
data-testid="project-path-input"
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
onClick={handleSelectDirectory}
|
||||
className="bg-secondary hover:bg-secondary/80 text-foreground border border-border"
|
||||
data-testid="browse-directory"
|
||||
>
|
||||
Browse
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<DialogFooter>
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => setShowNewProjectDialog(false)}
|
||||
className="text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<HotkeyButton
|
||||
onClick={handleCreateProject}
|
||||
disabled={!newProjectName || !newProjectPath || isCreating}
|
||||
className="bg-gradient-to-r from-brand-500 to-brand-600 hover:from-brand-600 hover:to-brand-600 text-white border-0"
|
||||
hotkey={{ key: "Enter", cmdCtrl: true }}
|
||||
hotkeyActive={showNewProjectDialog}
|
||||
data-testid="confirm-create-project"
|
||||
>
|
||||
{isCreating ? "Creating..." : "Create Project"}
|
||||
</HotkeyButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
{/* New Project Modal */}
|
||||
<NewProjectModal
|
||||
open={showNewProjectModal}
|
||||
onOpenChange={setShowNewProjectModal}
|
||||
onCreateBlankProject={handleCreateBlankProject}
|
||||
onCreateFromTemplate={handleCreateFromTemplate}
|
||||
onCreateFromCustomUrl={handleCreateFromCustomUrl}
|
||||
isCreating={isCreating}
|
||||
/>
|
||||
|
||||
{/* Project Initialization Dialog */}
|
||||
<Dialog open={showInitDialog} onOpenChange={setShowInitDialog}>
|
||||
@@ -601,6 +773,13 @@ export function WelcomeView() {
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
|
||||
{/* Workspace Picker Modal */}
|
||||
<WorkspacePickerModal
|
||||
open={showWorkspacePicker}
|
||||
onOpenChange={setShowWorkspacePicker}
|
||||
onSelect={handleWorkspaceSelect}
|
||||
/>
|
||||
|
||||
{/* Loading overlay when opening project */}
|
||||
{isOpening && (
|
||||
<div
|
||||
|
||||
154
apps/app/src/components/workspace-picker-modal.tsx
Normal file
@@ -0,0 +1,154 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Folder, Loader2, FolderOpen, AlertCircle } from "lucide-react";
|
||||
import { getHttpApiClient } from "@/lib/http-api-client";
|
||||
|
||||
interface WorkspaceDirectory {
|
||||
name: string;
|
||||
path: string;
|
||||
}
|
||||
|
||||
interface WorkspacePickerModalProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
onSelect: (path: string, name: string) => void;
|
||||
}
|
||||
|
||||
export function WorkspacePickerModal({
|
||||
open,
|
||||
onOpenChange,
|
||||
onSelect,
|
||||
}: WorkspacePickerModalProps) {
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [directories, setDirectories] = useState<WorkspaceDirectory[]>([]);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const loadDirectories = useCallback(async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const client = getHttpApiClient();
|
||||
const result = await client.workspace.getDirectories();
|
||||
|
||||
if (result.success && result.directories) {
|
||||
setDirectories(result.directories);
|
||||
} else {
|
||||
setError(result.error || "Failed to load directories");
|
||||
}
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : "Failed to load directories");
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Load directories when modal opens
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
loadDirectories();
|
||||
}
|
||||
}, [open, loadDirectories]);
|
||||
|
||||
const handleSelect = (dir: WorkspaceDirectory) => {
|
||||
onSelect(dir.path, dir.name);
|
||||
};
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent className="bg-card border-border max-w-lg max-h-[80vh] flex flex-col">
|
||||
<DialogHeader>
|
||||
<DialogTitle className="flex items-center gap-2 text-foreground">
|
||||
<FolderOpen className="w-5 h-5 text-brand-500" />
|
||||
Select Project
|
||||
</DialogTitle>
|
||||
<DialogDescription className="text-muted-foreground">
|
||||
Choose a project from your workspace directory
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="flex-1 overflow-y-auto py-4 min-h-[200px]">
|
||||
{isLoading && (
|
||||
<div className="flex flex-col items-center justify-center h-full gap-3">
|
||||
<Loader2 className="w-8 h-8 text-brand-500 animate-spin" />
|
||||
<p className="text-sm text-muted-foreground">Loading projects...</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{error && !isLoading && (
|
||||
<div className="flex flex-col items-center justify-center h-full gap-3 text-center px-4">
|
||||
<div className="w-12 h-12 rounded-full bg-destructive/10 flex items-center justify-center">
|
||||
<AlertCircle className="w-6 h-6 text-destructive" />
|
||||
</div>
|
||||
<p className="text-sm text-destructive">{error}</p>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
onClick={loadDirectories}
|
||||
className="mt-2"
|
||||
>
|
||||
Try Again
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!isLoading && !error && directories.length === 0 && (
|
||||
<div className="flex flex-col items-center justify-center h-full gap-3 text-center px-4">
|
||||
<div className="w-12 h-12 rounded-full bg-muted flex items-center justify-center">
|
||||
<Folder className="w-6 h-6 text-muted-foreground" />
|
||||
</div>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
No projects found in workspace directory
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!isLoading && !error && directories.length > 0 && (
|
||||
<div className="space-y-2">
|
||||
{directories.map((dir) => (
|
||||
<button
|
||||
key={dir.path}
|
||||
onClick={() => handleSelect(dir)}
|
||||
className="w-full flex items-center gap-3 p-3 rounded-lg border border-border bg-card hover:bg-card/70 hover:border-brand-500/50 transition-all duration-200 text-left group"
|
||||
data-testid={`workspace-dir-${dir.name}`}
|
||||
>
|
||||
<div className="w-10 h-10 rounded-lg bg-muted border border-border flex items-center justify-center group-hover:border-brand-500/50 transition-colors shrink-0">
|
||||
<Folder className="w-5 h-5 text-muted-foreground group-hover:text-brand-500 transition-colors" />
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<p className="font-medium text-foreground truncate group-hover:text-brand-500 transition-colors">
|
||||
{dir.name}
|
||||
</p>
|
||||
<p className="text-xs text-muted-foreground/70 truncate">
|
||||
{dir.path}
|
||||
</p>
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => onOpenChange(false)}
|
||||
className="text-muted-foreground hover:text-foreground hover:bg-accent"
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
6
apps/app/src/config/app-config.ts
Normal file
@@ -0,0 +1,6 @@
/**
 * Marketing mode flag
 * When set to true, displays "https://automaker.app" with "maker" in theme color
 */

export const IS_MARKETING = process.env.NEXT_PUBLIC_IS_MARKETING === "true";
|
||||
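A hedged example of how the flag above might be consumed; the component and markup are illustrative only, not taken from the codebase:

    import { IS_MARKETING } from "@/config/app-config";

    // Hypothetical component: renders the marketing URL with "maker" in the theme color.
    export function BrandUrl() {
      if (!IS_MARKETING) return null;
      return (
        <span>
          https://auto<span className="text-brand-500">maker</span>.app
        </span>
      );
    }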
@@ -5,6 +5,7 @@ import {
  Eclipse,
  Flame,
  Ghost,
  Heart,
  Moon,
  Radio,
  Snowflake,
@@ -85,4 +86,10 @@ export const themeOptions: ReadonlyArray<ThemeOption> = [
    Icon: Radio,
    testId: "synthwave-mode-button",
  },
  {
    value: "red",
    label: "Red",
    Icon: Heart,
    testId: "red-mode-button",
  },
];

84
apps/app/src/contexts/file-browser-context.tsx
Normal file
@@ -0,0 +1,84 @@
|
||||
"use client";
|
||||
|
||||
import { createContext, useContext, useState, useCallback, type ReactNode } from "react";
|
||||
import { FileBrowserDialog } from "@/components/dialogs/file-browser-dialog";
|
||||
|
||||
interface FileBrowserOptions {
|
||||
title?: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
interface FileBrowserContextValue {
|
||||
openFileBrowser: (options?: FileBrowserOptions) => Promise<string | null>;
|
||||
}
|
||||
|
||||
const FileBrowserContext = createContext<FileBrowserContextValue | null>(null);
|
||||
|
||||
export function FileBrowserProvider({ children }: { children: ReactNode }) {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [resolver, setResolver] = useState<((value: string | null) => void) | null>(null);
|
||||
const [dialogOptions, setDialogOptions] = useState<FileBrowserOptions>({});
|
||||
|
||||
const openFileBrowser = useCallback((options?: FileBrowserOptions): Promise<string | null> => {
|
||||
return new Promise((resolve) => {
|
||||
setDialogOptions(options || {});
|
||||
setIsOpen(true);
|
||||
setResolver(() => resolve);
|
||||
});
|
||||
}, []);
|
||||
|
||||
const handleSelect = useCallback((path: string) => {
|
||||
if (resolver) {
|
||||
resolver(path);
|
||||
setResolver(null);
|
||||
}
|
||||
setIsOpen(false);
|
||||
setDialogOptions({});
|
||||
}, [resolver]);
|
||||
|
||||
const handleOpenChange = useCallback((open: boolean) => {
|
||||
if (!open && resolver) {
|
||||
resolver(null);
|
||||
setResolver(null);
|
||||
}
|
||||
setIsOpen(open);
|
||||
if (!open) {
|
||||
setDialogOptions({});
|
||||
}
|
||||
}, [resolver]);
|
||||
|
||||
return (
|
||||
<FileBrowserContext.Provider value={{ openFileBrowser }}>
|
||||
{children}
|
||||
<FileBrowserDialog
|
||||
open={isOpen}
|
||||
onOpenChange={handleOpenChange}
|
||||
onSelect={handleSelect}
|
||||
title={dialogOptions.title}
|
||||
description={dialogOptions.description}
|
||||
/>
|
||||
</FileBrowserContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
export function useFileBrowser() {
|
||||
const context = useContext(FileBrowserContext);
|
||||
if (!context) {
|
||||
throw new Error("useFileBrowser must be used within FileBrowserProvider");
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
// Global reference for non-React code (like HttpApiClient)
|
||||
let globalFileBrowserFn: ((options?: FileBrowserOptions) => Promise<string | null>) | null = null;
|
||||
|
||||
export function setGlobalFileBrowser(fn: (options?: FileBrowserOptions) => Promise<string | null>) {
|
||||
globalFileBrowserFn = fn;
|
||||
}
|
||||
|
||||
export function getGlobalFileBrowser() {
|
||||
return globalFileBrowserFn;
|
||||
}
|
||||
|
||||
// Export the options type for consumers
|
||||
export type { FileBrowserOptions };
|
||||
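A sketch of how a consumer might use the provider and hook above; the component name and copy are illustrative:

    "use client";

    import { useFileBrowser } from "@/contexts/file-browser-context";

    export function PickDirectoryButton() {
      const { openFileBrowser } = useFileBrowser();

      const handleClick = async () => {
        // Resolves to the chosen path, or null if the dialog is dismissed.
        const path = await openFileBrowser({
          title: "Select a directory",
          description: "Choose the folder to open",
        });
        if (path) {
          console.log("Selected:", path);
        }
      };

      return <button onClick={handleClick}>Browse…</button>;
    }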
@@ -121,11 +121,26 @@ export function useAutoMode() {
|
||||
case "auto_mode_error":
|
||||
console.error("[AutoMode Error]", event.error);
|
||||
if (event.featureId && event.error) {
|
||||
// Check for authentication errors and provide a more helpful message
|
||||
const isAuthError = event.errorType === "authentication" ||
|
||||
event.error.includes("Authentication failed") ||
|
||||
event.error.includes("Invalid API key");
|
||||
|
||||
const errorMessage = isAuthError
|
||||
? `Authentication failed: Please check your API key in Settings or run 'claude login' in terminal to re-authenticate.`
|
||||
: event.error;
|
||||
|
||||
addAutoModeActivity({
|
||||
featureId: event.featureId,
|
||||
type: "error",
|
||||
message: event.error,
|
||||
message: errorMessage,
|
||||
errorType: isAuthError ? "authentication" : "execution",
|
||||
});
|
||||
|
||||
// Remove the task from running since it failed
|
||||
if (eventProjectId) {
|
||||
removeRunningTask(eventProjectId, event.featureId);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
|
||||
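The same classification could be factored into a small helper; this is a sketch with a hypothetical name, not code from the repository:

    type AutoModeErrorType = "authentication" | "execution";

    // Sketch: mirror the auth-error detection above and return a friendlier message.
    function classifyAutoModeError(
      error: string,
      errorType?: string
    ): { type: AutoModeErrorType; message: string } {
      const isAuthError =
        errorType === "authentication" ||
        error.includes("Authentication failed") ||
        error.includes("Invalid API key");

      return {
        type: isAuthError ? "authentication" : "execution",
        message: isAuthError
          ? "Authentication failed: Please check your API key in Settings or run 'claude login' in terminal to re-authenticate."
          : error,
      };
    }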
@@ -2,6 +2,7 @@ import { useState, useEffect, useCallback, useRef } from "react";
|
||||
import type { Message, StreamEvent } from "@/types/electron";
|
||||
import { useMessageQueue } from "./use-message-queue";
|
||||
import type { ImageAttachment } from "@/store/app-store";
|
||||
import { getElectronAPI } from "@/lib/electron";
|
||||
|
||||
interface UseElectronAgentOptions {
|
||||
sessionId: string;
|
||||
@@ -44,8 +45,9 @@ export function useElectronAgent({
|
||||
// Send message directly to the agent (bypassing queue)
|
||||
const sendMessageDirectly = useCallback(
|
||||
async (content: string, images?: ImageAttachment[]) => {
|
||||
if (!window.electronAPI?.agent) {
|
||||
setError("Electron API not available");
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) {
|
||||
setError("API not available");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -64,10 +66,10 @@ export function useElectronAgent({
|
||||
|
||||
// Save images to .automaker/images and get paths
|
||||
let imagePaths: string[] | undefined;
|
||||
if (images && images.length > 0) {
|
||||
if (images && images.length > 0 && api.saveImageToTemp) {
|
||||
imagePaths = [];
|
||||
for (const image of images) {
|
||||
const result = await window.electronAPI.saveImageToTemp(
|
||||
const result = await api.saveImageToTemp(
|
||||
image.data,
|
||||
image.filename,
|
||||
image.mimeType,
|
||||
@@ -82,7 +84,7 @@ export function useElectronAgent({
|
||||
}
|
||||
}
|
||||
|
||||
const result = await window.electronAPI.agent.send(
|
||||
const result = await api.agent!.send(
|
||||
sessionId,
|
||||
content,
|
||||
workingDirectory,
|
||||
@@ -120,8 +122,9 @@ export function useElectronAgent({
|
||||
|
||||
// Initialize connection and load history
|
||||
useEffect(() => {
|
||||
if (!window.electronAPI?.agent) {
|
||||
setError("Electron API not available. Please run in Electron.");
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) {
|
||||
setError("API not available.");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -142,7 +145,7 @@ export function useElectronAgent({
|
||||
|
||||
try {
|
||||
console.log("[useElectronAgent] Starting session:", sessionId);
|
||||
const result = await window.electronAPI.agent.start(
|
||||
const result = await api.agent!.start(
|
||||
sessionId,
|
||||
workingDirectory
|
||||
);
|
||||
@@ -155,7 +158,7 @@ export function useElectronAgent({
|
||||
setIsConnected(true);
|
||||
|
||||
// Check if the agent is currently running for this session
|
||||
const historyResult = await window.electronAPI.agent.getHistory(sessionId);
|
||||
const historyResult = await api.agent!.getHistory(sessionId);
|
||||
if (mounted && historyResult.success) {
|
||||
const isRunning = historyResult.isRunning || false;
|
||||
console.log("[useElectronAgent] Session running state:", isRunning);
|
||||
@@ -190,7 +193,8 @@ export function useElectronAgent({
|
||||
|
||||
// Subscribe to streaming events
|
||||
useEffect(() => {
|
||||
if (!window.electronAPI?.agent) return;
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) return;
|
||||
if (!sessionId) return; // Don't subscribe if no session
|
||||
|
||||
console.log("[useElectronAgent] Subscribing to stream events for session:", sessionId);
|
||||
@@ -282,7 +286,7 @@ export function useElectronAgent({
|
||||
}
|
||||
};
|
||||
|
||||
unsubscribeRef.current = window.electronAPI.agent.onStream(handleStream);
|
||||
unsubscribeRef.current = api.agent!.onStream(handleStream as (data: unknown) => void);
|
||||
|
||||
return () => {
|
||||
if (unsubscribeRef.current) {
|
||||
@@ -296,8 +300,9 @@ export function useElectronAgent({
|
||||
// Send a message to the agent
|
||||
const sendMessage = useCallback(
|
||||
async (content: string, images?: ImageAttachment[]) => {
|
||||
if (!window.electronAPI?.agent) {
|
||||
setError("Electron API not available");
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) {
|
||||
setError("API not available");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -317,10 +322,10 @@ export function useElectronAgent({
|
||||
|
||||
// Save images to .automaker/images and get paths
|
||||
let imagePaths: string[] | undefined;
|
||||
if (images && images.length > 0) {
|
||||
if (images && images.length > 0 && api.saveImageToTemp) {
|
||||
imagePaths = [];
|
||||
for (const image of images) {
|
||||
const result = await window.electronAPI.saveImageToTemp(
|
||||
const result = await api.saveImageToTemp(
|
||||
image.data,
|
||||
image.filename,
|
||||
image.mimeType,
|
||||
@@ -335,7 +340,7 @@ export function useElectronAgent({
|
||||
}
|
||||
}
|
||||
|
||||
const result = await window.electronAPI.agent.send(
|
||||
const result = await api.agent!.send(
|
||||
sessionId,
|
||||
content,
|
||||
workingDirectory,
|
||||
@@ -359,14 +364,15 @@ export function useElectronAgent({
|
||||
|
||||
// Stop current execution
|
||||
const stopExecution = useCallback(async () => {
|
||||
if (!window.electronAPI?.agent) {
|
||||
setError("Electron API not available");
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) {
|
||||
setError("API not available");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
console.log("[useElectronAgent] Stopping execution");
|
||||
const result = await window.electronAPI.agent.stop(sessionId);
|
||||
const result = await api.agent!.stop(sessionId);
|
||||
|
||||
if (!result.success) {
|
||||
setError(result.error || "Failed to stop execution");
|
||||
@@ -381,14 +387,15 @@ export function useElectronAgent({
|
||||
|
||||
// Clear conversation history
|
||||
const clearHistory = useCallback(async () => {
|
||||
if (!window.electronAPI?.agent) {
|
||||
setError("Electron API not available");
|
||||
const api = getElectronAPI();
|
||||
if (!api?.agent) {
|
||||
setError("API not available");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
console.log("[useElectronAgent] Clearing history");
|
||||
const result = await window.electronAPI.agent.clear(sessionId);
|
||||
const result = await api.agent!.clear(sessionId);
|
||||
|
||||
if (result.success) {
|
||||
setMessages([]);
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
// Type definitions for Electron IPC API
|
||||
import type { SessionListItem, Message } from "@/types/electron";
|
||||
|
||||
export interface FileEntry {
|
||||
name: string;
|
||||
@@ -413,6 +414,68 @@ export interface ElectronAPI {
|
||||
onInstallProgress?: (callback: (progress: any) => void) => () => void;
|
||||
onAuthProgress?: (callback: (progress: any) => void) => () => void;
|
||||
};
|
||||
agent?: {
|
||||
start: (
|
||||
sessionId: string,
|
||||
workingDirectory?: string
|
||||
) => Promise<{
|
||||
success: boolean;
|
||||
messages?: Message[];
|
||||
error?: string;
|
||||
}>;
|
||||
send: (
|
||||
sessionId: string,
|
||||
message: string,
|
||||
workingDirectory?: string,
|
||||
imagePaths?: string[]
|
||||
) => Promise<{ success: boolean; error?: string }>;
|
||||
getHistory: (sessionId: string) => Promise<{
|
||||
success: boolean;
|
||||
messages?: Message[];
|
||||
isRunning?: boolean;
|
||||
error?: string;
|
||||
}>;
|
||||
stop: (sessionId: string) => Promise<{ success: boolean; error?: string }>;
|
||||
clear: (sessionId: string) => Promise<{ success: boolean; error?: string }>;
|
||||
onStream: (callback: (data: unknown) => void) => () => void;
|
||||
};
|
||||
sessions?: {
|
||||
list: (includeArchived?: boolean) => Promise<{
|
||||
success: boolean;
|
||||
sessions?: SessionListItem[];
|
||||
error?: string;
|
||||
}>;
|
||||
create: (
|
||||
name: string,
|
||||
projectPath: string,
|
||||
workingDirectory?: string
|
||||
) => Promise<{
|
||||
success: boolean;
|
||||
session?: {
|
||||
id: string;
|
||||
name: string;
|
||||
projectPath: string;
|
||||
workingDirectory?: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
};
|
||||
error?: string;
|
||||
}>;
|
||||
update: (
|
||||
sessionId: string,
|
||||
name?: string,
|
||||
tags?: string[]
|
||||
) => Promise<{ success: boolean; error?: string }>;
|
||||
archive: (
|
||||
sessionId: string
|
||||
) => Promise<{ success: boolean; error?: string }>;
|
||||
unarchive: (
|
||||
sessionId: string
|
||||
) => Promise<{ success: boolean; error?: string }>;
|
||||
delete: (
|
||||
sessionId: string
|
||||
) => Promise<{ success: boolean; error?: string }>;
|
||||
};
|
||||
}
|
||||
|
||||
// Note: Window interface is declared in @/types/electron.d.ts
|
||||
@@ -438,18 +501,92 @@ const STORAGE_KEYS = {
|
||||
// Mock file system using localStorage
|
||||
const mockFileSystem: Record<string, string> = {};
|
||||
|
||||
// Check if we're in Electron
|
||||
// Check if we're in Electron (for UI indicators only)
|
||||
export const isElectron = (): boolean => {
|
||||
return typeof window !== "undefined" && window.isElectron === true;
|
||||
};
|
||||
|
||||
// Get the Electron API or a mock for web development
|
||||
// Check if backend server is available
|
||||
let serverAvailable: boolean | null = null;
|
||||
let serverCheckPromise: Promise<boolean> | null = null;
|
||||
|
||||
export const checkServerAvailable = async (): Promise<boolean> => {
|
||||
if (serverAvailable !== null) return serverAvailable;
|
||||
if (serverCheckPromise) return serverCheckPromise;
|
||||
|
||||
serverCheckPromise = (async () => {
|
||||
try {
|
||||
const serverUrl =
|
||||
process.env.NEXT_PUBLIC_SERVER_URL || "http://localhost:3008";
|
||||
const response = await fetch(`${serverUrl}/api/health`, {
|
||||
method: "GET",
|
||||
signal: AbortSignal.timeout(2000),
|
||||
});
|
||||
serverAvailable = response.ok;
|
||||
} catch {
|
||||
serverAvailable = false;
|
||||
}
|
||||
return serverAvailable;
|
||||
})();
|
||||
|
||||
return serverCheckPromise;
|
||||
};
|
||||
|
||||
// Reset server check (useful for retrying connection)
|
||||
export const resetServerCheck = (): void => {
|
||||
serverAvailable = null;
|
||||
serverCheckPromise = null;
|
||||
};
|
||||
|
||||
// Cached HTTP client instance
|
||||
let httpClientInstance: ElectronAPI | null = null;
|
||||
|
||||
/**
|
||||
* Get the HTTP API client
|
||||
*
|
||||
* All API calls go through HTTP to the backend server.
|
||||
* This is the only transport mode supported.
|
||||
*/
|
||||
export const getElectronAPI = (): ElectronAPI => {
|
||||
if (isElectron() && window.electronAPI) {
|
||||
return window.electronAPI;
|
||||
if (typeof window === "undefined") {
|
||||
throw new Error("Cannot get API during SSR");
|
||||
}
|
||||
|
||||
// Return mock API for web development
|
||||
if (!httpClientInstance) {
|
||||
const { getHttpApiClient } = require("./http-api-client");
|
||||
httpClientInstance = getHttpApiClient();
|
||||
}
|
||||
return httpClientInstance!;
|
||||
};
|
||||
|
||||
// Async version (same as sync since HTTP client is synchronously instantiated)
|
||||
export const getElectronAPIAsync = async (): Promise<ElectronAPI> => {
|
||||
return getElectronAPI();
|
||||
};
|
||||
|
||||
// Check if backend is connected (for showing connection status in UI)
|
||||
export const isBackendConnected = async (): Promise<boolean> => {
|
||||
return await checkServerAvailable();
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the current API mode being used
|
||||
* Always returns "http" since that's the only mode now
|
||||
*/
|
||||
export const getCurrentApiMode = (): "http" => {
|
||||
return "http";
|
||||
};
|
||||
|
||||
// Debug helpers
|
||||
if (typeof window !== "undefined") {
|
||||
(window as any).__checkApiMode = () => {
|
||||
console.log("Current API mode:", getCurrentApiMode());
|
||||
console.log("isElectron():", isElectron());
|
||||
};
|
||||
}
|
||||
|
||||
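A sketch of gating a call on backend availability with the helpers above, assuming the exports shown in this file:

    import {
      checkServerAvailable,
      getElectronAPI,
      resetServerCheck,
    } from "@/lib/electron";

    // Returns the ping response, or null when the backend health check fails.
    export async function pingBackend(): Promise<string | null> {
      const available = await checkServerAvailable();
      if (!available) {
        // Clear the cached result so a later retry re-runs the health check.
        resetServerCheck();
        return null;
      }
      return getElectronAPI().ping();
    }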
// Mock API for development/fallback when no backend is available
|
||||
const getMockElectronAPI = (): ElectronAPI => {
|
||||
return {
|
||||
ping: async () => "pong (mock)",
|
||||
|
||||
@@ -748,17 +885,21 @@ interface SetupAPI {
|
||||
getClaudeStatus: () => Promise<{
|
||||
success: boolean;
|
||||
status?: string;
|
||||
installed?: boolean;
|
||||
method?: string;
|
||||
version?: string;
|
||||
path?: string;
|
||||
auth?: {
|
||||
authenticated: boolean;
|
||||
method: string;
|
||||
hasCredentialsFile: boolean;
|
||||
hasToken: boolean;
|
||||
hasCredentialsFile?: boolean;
|
||||
hasToken?: boolean;
|
||||
hasStoredOAuthToken?: boolean;
|
||||
hasStoredApiKey?: boolean;
|
||||
hasEnvApiKey?: boolean;
|
||||
hasEnvOAuthToken?: boolean;
|
||||
hasCliAuth?: boolean;
|
||||
hasRecentActivity?: boolean;
|
||||
};
|
||||
error?: string;
|
||||
}>;
|
||||
@@ -838,11 +979,14 @@ function createMockSetupAPI(): SetupAPI {
|
||||
return {
|
||||
success: true,
|
||||
status: "not_installed",
|
||||
installed: false,
|
||||
auth: {
|
||||
authenticated: false,
|
||||
method: "none",
|
||||
hasCredentialsFile: false,
|
||||
hasToken: false,
|
||||
hasCliAuth: false,
|
||||
hasRecentActivity: false,
|
||||
},
|
||||
};
|
||||
},
|
||||
@@ -1866,7 +2010,9 @@ function createMockSpecRegenerationAPI(): SpecRegenerationAPI {
|
||||
}
|
||||
|
||||
mockSpecRegenerationRunning = true;
|
||||
console.log(`[Mock] Generating features from existing spec for: ${projectPath}`);
|
||||
console.log(
|
||||
`[Mock] Generating features from existing spec for: ${projectPath}`
|
||||
);
|
||||
|
||||
// Simulate async feature generation
|
||||
simulateFeatureGeneration(projectPath);
|
||||
@@ -2053,7 +2199,8 @@ async function simulateFeatureGeneration(projectPath: string) {
|
||||
mockSpecRegenerationPhase = "initialization";
|
||||
emitSpecRegenerationEvent({
|
||||
type: "spec_regeneration_progress",
|
||||
content: "[Phase: initialization] Starting feature generation from existing app_spec.txt...\n",
|
||||
content:
|
||||
"[Phase: initialization] Starting feature generation from existing app_spec.txt...\n",
|
||||
});
|
||||
|
||||
await new Promise((resolve) => {
|
||||
|
||||
279
apps/app/src/lib/file-picker.ts
Normal file
@@ -0,0 +1,279 @@
|
||||
/**
|
||||
* File Picker Utility for Web Browsers
|
||||
*
|
||||
* Provides cross-platform file and directory selection using:
|
||||
* 1. HTML5 webkitdirectory input - primary method (works on Windows)
|
||||
* 2. File System Access API (showDirectoryPicker) - fallback for modern browsers
|
||||
*
|
||||
* Note: Browsers don't expose absolute file paths for security reasons.
|
||||
* This implementation extracts directory information and may require
|
||||
* user confirmation or server-side path resolution.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Directory picker result with structure information for server-side resolution
|
||||
*/
|
||||
export interface DirectoryPickerResult {
|
||||
directoryName: string;
|
||||
sampleFiles: string[]; // Relative paths of sample files for identification
|
||||
fileCount: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens a directory picker dialog
|
||||
* @returns Promise resolving to directory information, or null if canceled
|
||||
*
|
||||
* Note: Browsers don't expose absolute file paths for security reasons.
|
||||
* This function returns directory structure information that the server
|
||||
* can use to locate the actual directory path.
|
||||
*/
|
||||
export async function openDirectoryPicker(): Promise<DirectoryPickerResult | null> {
|
||||
// Use webkitdirectory (works on Windows and all modern browsers)
|
||||
return new Promise<DirectoryPickerResult | null>((resolve) => {
|
||||
let resolved = false;
|
||||
const input = document.createElement("input");
|
||||
input.type = "file";
|
||||
input.webkitdirectory = true;
|
||||
input.style.display = "none";
|
||||
|
||||
const cleanup = () => {
|
||||
if (input.parentNode) {
|
||||
document.body.removeChild(input);
|
||||
}
|
||||
};
|
||||
|
||||
let changeEventFired = false;
|
||||
let focusTimeout: ReturnType<typeof setTimeout> | null = null;
|
||||
|
||||
const safeResolve = (value: DirectoryPickerResult | null) => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
changeEventFired = true;
|
||||
if (focusTimeout) {
|
||||
clearTimeout(focusTimeout);
|
||||
focusTimeout = null;
|
||||
}
|
||||
cleanup();
|
||||
resolve(value);
|
||||
}
|
||||
};
|
||||
|
||||
input.addEventListener("change", (e) => {
|
||||
changeEventFired = true;
|
||||
if (focusTimeout) {
|
||||
clearTimeout(focusTimeout);
|
||||
focusTimeout = null;
|
||||
}
|
||||
|
||||
console.log("[FilePicker] Change event fired");
|
||||
const files = input.files;
|
||||
console.log("[FilePicker] Files selected:", files?.length || 0);
|
||||
|
||||
if (!files || files.length === 0) {
|
||||
console.log("[FilePicker] No files selected");
|
||||
safeResolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
const firstFile = files[0];
|
||||
console.log("[FilePicker] First file:", {
|
||||
name: firstFile.name,
|
||||
webkitRelativePath: firstFile.webkitRelativePath,
|
||||
// @ts-expect-error
|
||||
path: firstFile.path,
|
||||
});
|
||||
|
||||
// Extract directory name from webkitRelativePath
|
||||
// webkitRelativePath format: "directoryName/subfolder/file.txt" or "directoryName/file.txt"
|
||||
let directoryName = "Selected Directory";
|
||||
|
||||
// Method 1: Try to get absolute path from File object (non-standard, works in Electron/Chromium)
|
||||
// @ts-expect-error - path property is non-standard but available in some browsers
|
||||
if (firstFile.path) {
|
||||
// @ts-expect-error
|
||||
const filePath = firstFile.path as string;
|
||||
console.log("[FilePicker] Found file.path:", filePath);
|
||||
// Extract directory path (remove filename)
|
||||
const lastSeparator = Math.max(
|
||||
filePath.lastIndexOf("\\"),
|
||||
filePath.lastIndexOf("/")
|
||||
);
|
||||
if (lastSeparator > 0) {
|
||||
const absolutePath = filePath.substring(0, lastSeparator);
|
||||
console.log("[FilePicker] Found absolute path:", absolutePath);
|
||||
// Return as directory name for now - server can validate it directly
|
||||
directoryName = absolutePath;
|
||||
}
|
||||
}
|
||||
|
||||
// Method 2: Extract directory name from webkitRelativePath
|
||||
if (directoryName === "Selected Directory" && firstFile.webkitRelativePath) {
|
||||
const relativePath = firstFile.webkitRelativePath;
|
||||
console.log("[FilePicker] Using webkitRelativePath:", relativePath);
|
||||
const pathParts = relativePath.split("/");
|
||||
if (pathParts.length > 0) {
|
||||
directoryName = pathParts[0]; // Top-level directory name
|
||||
console.log("[FilePicker] Extracted directory name:", directoryName);
|
||||
}
|
||||
}
|
||||
|
||||
// Collect sample file paths for server-side directory matching
|
||||
// Take first 10 files to identify the directory
|
||||
const sampleFiles: string[] = [];
|
||||
const maxSamples = 10;
|
||||
for (let i = 0; i < Math.min(files.length, maxSamples); i++) {
|
||||
const file = files[i];
|
||||
if (file.webkitRelativePath) {
|
||||
sampleFiles.push(file.webkitRelativePath);
|
||||
} else if (file.name) {
|
||||
sampleFiles.push(file.name);
|
||||
}
|
||||
}
|
||||
|
||||
console.log("[FilePicker] Directory info:", {
|
||||
directoryName,
|
||||
fileCount: files.length,
|
||||
sampleFiles: sampleFiles.slice(0, 5), // Log first 5
|
||||
});
|
||||
|
||||
safeResolve({
|
||||
directoryName,
|
||||
sampleFiles,
|
||||
fileCount: files.length,
|
||||
});
|
||||
});
|
||||
|
||||
// Handle cancellation - but be very careful not to interfere with change event
|
||||
// On Windows, the dialog might take time to process, so we wait longer
|
||||
const handleFocus = () => {
|
||||
// Wait longer on Windows - the dialog might take time to process
|
||||
// Only resolve as canceled if change event hasn't fired after a delay
|
||||
focusTimeout = setTimeout(() => {
|
||||
if (!resolved && !changeEventFired && (!input.files || input.files.length === 0)) {
|
||||
console.log("[FilePicker] Dialog canceled (no files after focus and no change event)");
|
||||
safeResolve(null);
|
||||
}
|
||||
}, 2000); // Increased timeout for Windows - give it time
|
||||
};
|
||||
|
||||
// Add to DOM temporarily
|
||||
document.body.appendChild(input);
|
||||
console.log("[FilePicker] Opening directory picker...");
|
||||
|
||||
// Try to show picker programmatically
|
||||
if ("showPicker" in HTMLInputElement.prototype) {
|
||||
try {
|
||||
(input as any).showPicker();
|
||||
console.log("[FilePicker] Using showPicker()");
|
||||
} catch (error) {
|
||||
console.log("[FilePicker] showPicker() failed, using click()", error);
|
||||
input.click();
|
||||
}
|
||||
} else {
|
||||
console.log("[FilePicker] Using click()");
|
||||
input.click();
|
||||
}
|
||||
|
||||
// Set up cancellation detection with longer delay
|
||||
// Only add focus listener if we're not already resolved
|
||||
window.addEventListener("focus", handleFocus, { once: true });
|
||||
|
||||
// Also handle blur as a cancellation signal (but with delay)
|
||||
window.addEventListener("blur", () => {
|
||||
// Dialog opened, wait for it to close
|
||||
setTimeout(() => {
|
||||
window.addEventListener("focus", handleFocus, { once: true });
|
||||
}, 100);
|
||||
}, { once: true });
|
||||
});
|
||||
}
|
||||
|
||||
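A sketch of consuming openDirectoryPicker; since browsers only expose a directory name and relative sample paths, the absolute path still has to be resolved elsewhere (for example on the server):

    import { openDirectoryPicker, type DirectoryPickerResult } from "@/lib/file-picker";

    async function chooseProjectDirectory(): Promise<void> {
      const result: DirectoryPickerResult | null = await openDirectoryPicker();
      if (!result) {
        console.log("Directory selection canceled");
        return;
      }

      // The server can match these against a configured workspace to find the real path.
      console.log(result.directoryName, result.fileCount, result.sampleFiles.slice(0, 3));
    }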
/**
|
||||
* Opens a file picker dialog
|
||||
* @param options Optional configuration (multiple files, file types, etc.)
|
||||
* @returns Promise resolving to selected file path(s), or null if canceled
|
||||
*/
|
||||
export async function openFilePicker(
|
||||
options?: {
|
||||
multiple?: boolean;
|
||||
accept?: string;
|
||||
}
|
||||
): Promise<string | string[] | null> {
|
||||
// Use standard file input (works on all browsers including Windows)
|
||||
return new Promise<string | string[] | null>((resolve) => {
|
||||
const input = document.createElement("input");
|
||||
input.type = "file";
|
||||
input.multiple = options?.multiple ?? false;
|
||||
if (options?.accept) {
|
||||
input.accept = options.accept;
|
||||
}
|
||||
input.style.display = "none";
|
||||
|
||||
const cleanup = () => {
|
||||
if (input.parentNode) {
|
||||
document.body.removeChild(input);
|
||||
}
|
||||
};
|
||||
|
||||
input.addEventListener("change", () => {
|
||||
const files = input.files;
|
||||
if (!files || files.length === 0) {
|
||||
cleanup();
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
// Try to extract paths from File objects
|
||||
const extractPath = (file: File): string => {
|
||||
// Try to get path from File object (non-standard, but available in some browsers)
|
||||
// @ts-expect-error - path property is non-standard
|
||||
if (file.path) {
|
||||
// @ts-expect-error
|
||||
return file.path as string;
|
||||
}
|
||||
// Fallback to filename (server will need to resolve)
|
||||
return file.name;
|
||||
};
|
||||
|
||||
if (options?.multiple) {
|
||||
const paths = Array.from(files).map(extractPath);
|
||||
cleanup();
|
||||
resolve(paths);
|
||||
} else {
|
||||
const path = extractPath(files[0]);
|
||||
cleanup();
|
||||
resolve(path);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle window focus (user may have canceled)
|
||||
const handleFocus = () => {
|
||||
setTimeout(() => {
|
||||
if (!input.files || input.files.length === 0) {
|
||||
cleanup();
|
||||
resolve(null);
|
||||
}
|
||||
}, 200);
|
||||
};
|
||||
|
||||
// Add to DOM temporarily
|
||||
document.body.appendChild(input);
|
||||
|
||||
// Try to show picker programmatically
|
||||
// Note: showPicker() is available in modern browsers but TypeScript types it as void
|
||||
// In practice, it may return a Promise in some implementations, but we'll handle errors via try/catch
|
||||
if ("showPicker" in HTMLInputElement.prototype) {
|
||||
try {
|
||||
(input as any).showPicker();
|
||||
} catch {
|
||||
// Fallback to click if showPicker fails
|
||||
input.click();
|
||||
}
|
||||
} else {
|
||||
input.click();
|
||||
}
|
||||
|
||||
// Set up cancellation detection
|
||||
window.addEventListener("focus", handleFocus, { once: true });
|
||||
});
|
||||
}
|
||||
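
Reviewer note: a minimal usage sketch for the openFilePicker helper above, assuming a renderer-side caller inside an async handler and the "@/lib/file-picker" alias path used elsewhere in this app; the accept filter and log messages are illustrative only.

import { openFilePicker } from "@/lib/file-picker"; // assumed alias path

// Let the user pick one or more images; the helper resolves to null when the dialog is canceled.
const selection = await openFilePicker({ multiple: true, accept: "image/*" });
if (selection !== null) {
  // With multiple: true the result is string[], otherwise a single string.
  const paths = Array.isArray(selection) ? selection : [selection];
  console.log("Selected files:", paths);
}
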
apps/app/src/lib/http-api-client.ts (new file, 826 lines)
@@ -0,0 +1,826 @@
|
||||
/**
|
||||
* HTTP API Client for web mode
|
||||
*
|
||||
* This client provides the same API as the Electron IPC bridge,
|
||||
* but communicates with the backend server via HTTP/WebSocket.
|
||||
*/
|
||||
|
||||
import type {
|
||||
ElectronAPI,
|
||||
FileResult,
|
||||
WriteResult,
|
||||
ReaddirResult,
|
||||
StatResult,
|
||||
DialogResult,
|
||||
SaveImageResult,
|
||||
AutoModeAPI,
|
||||
FeaturesAPI,
|
||||
SuggestionsAPI,
|
||||
SpecRegenerationAPI,
|
||||
AutoModeEvent,
|
||||
SuggestionsEvent,
|
||||
SpecRegenerationEvent,
|
||||
FeatureSuggestion,
|
||||
SuggestionType,
|
||||
} from "./electron";
|
||||
import type { Message, SessionListItem } from "@/types/electron";
|
||||
import type { Feature } from "@/store/app-store";
|
||||
import type {
|
||||
WorktreeAPI,
|
||||
GitAPI,
|
||||
ModelDefinition,
|
||||
ProviderStatus,
|
||||
} from "@/types/electron";
|
||||
import { getGlobalFileBrowser } from "@/contexts/file-browser-context";
|
||||
|
||||
// Server URL - configurable via environment variable
|
||||
const getServerUrl = (): string => {
|
||||
if (typeof window !== "undefined") {
|
||||
const envUrl = process.env.NEXT_PUBLIC_SERVER_URL;
|
||||
if (envUrl) return envUrl;
|
||||
}
|
||||
return "http://localhost:3008";
|
||||
};
|
||||
|
||||
// Get API key from environment variable
|
||||
const getApiKey = (): string | null => {
|
||||
if (typeof window !== "undefined") {
|
||||
return process.env.NEXT_PUBLIC_AUTOMAKER_API_KEY || null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
type EventType =
|
||||
| "agent:stream"
|
||||
| "auto-mode:event"
|
||||
| "suggestions:event"
|
||||
| "spec-regeneration:event";
|
||||
|
||||
type EventCallback = (payload: unknown) => void;
|
||||
|
||||
/**
|
||||
* HTTP API Client that implements ElectronAPI interface
|
||||
*/
|
||||
export class HttpApiClient implements ElectronAPI {
|
||||
private serverUrl: string;
|
||||
private ws: WebSocket | null = null;
|
||||
private eventCallbacks: Map<EventType, Set<EventCallback>> = new Map();
|
||||
private reconnectTimer: NodeJS.Timeout | null = null;
|
||||
private isConnecting = false;
|
||||
|
||||
constructor() {
|
||||
this.serverUrl = getServerUrl();
|
||||
this.connectWebSocket();
|
||||
}
|
||||
|
||||
private connectWebSocket(): void {
|
||||
if (
|
||||
this.isConnecting ||
|
||||
(this.ws && this.ws.readyState === WebSocket.OPEN)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.isConnecting = true;
|
||||
|
||||
try {
|
||||
const wsUrl = this.serverUrl.replace(/^http/, "ws") + "/api/events";
|
||||
this.ws = new WebSocket(wsUrl);
|
||||
|
||||
this.ws.onopen = () => {
|
||||
console.log("[HttpApiClient] WebSocket connected");
|
||||
this.isConnecting = false;
|
||||
if (this.reconnectTimer) {
|
||||
clearTimeout(this.reconnectTimer);
|
||||
this.reconnectTimer = null;
|
||||
}
|
||||
};
|
||||
|
||||
this.ws.onmessage = (event) => {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
const callbacks = this.eventCallbacks.get(data.type);
|
||||
if (callbacks) {
|
||||
callbacks.forEach((cb) => cb(data.payload));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
"[HttpApiClient] Failed to parse WebSocket message:",
|
||||
error
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
this.ws.onclose = () => {
|
||||
console.log("[HttpApiClient] WebSocket disconnected");
|
||||
this.isConnecting = false;
|
||||
this.ws = null;
|
||||
// Attempt to reconnect after 5 seconds
|
||||
if (!this.reconnectTimer) {
|
||||
this.reconnectTimer = setTimeout(() => {
|
||||
this.reconnectTimer = null;
|
||||
this.connectWebSocket();
|
||||
}, 5000);
|
||||
}
|
||||
};
|
||||
|
||||
this.ws.onerror = (error) => {
|
||||
console.error("[HttpApiClient] WebSocket error:", error);
|
||||
this.isConnecting = false;
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("[HttpApiClient] Failed to create WebSocket:", error);
|
||||
this.isConnecting = false;
|
||||
}
|
||||
}
|
||||
|
||||
private subscribeToEvent(
|
||||
type: EventType,
|
||||
callback: EventCallback
|
||||
): () => void {
|
||||
if (!this.eventCallbacks.has(type)) {
|
||||
this.eventCallbacks.set(type, new Set());
|
||||
}
|
||||
this.eventCallbacks.get(type)!.add(callback);
|
||||
|
||||
// Ensure WebSocket is connected
|
||||
this.connectWebSocket();
|
||||
|
||||
return () => {
|
||||
const callbacks = this.eventCallbacks.get(type);
|
||||
if (callbacks) {
|
||||
callbacks.delete(callback);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private getHeaders(): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
"Content-Type": "application/json",
|
||||
};
|
||||
const apiKey = getApiKey();
|
||||
if (apiKey) {
|
||||
headers["X-API-Key"] = apiKey;
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
private async post<T>(endpoint: string, body?: unknown): Promise<T> {
|
||||
const response = await fetch(`${this.serverUrl}${endpoint}`, {
|
||||
method: "POST",
|
||||
headers: this.getHeaders(),
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
});
|
||||
return response.json();
|
||||
}
|
||||
|
||||
private async get<T>(endpoint: string): Promise<T> {
|
||||
const headers = this.getHeaders();
|
||||
const response = await fetch(`${this.serverUrl}${endpoint}`, { headers });
|
||||
return response.json();
|
||||
}
|
||||
|
||||
private async put<T>(endpoint: string, body?: unknown): Promise<T> {
|
||||
const response = await fetch(`${this.serverUrl}${endpoint}`, {
|
||||
method: "PUT",
|
||||
headers: this.getHeaders(),
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
});
|
||||
return response.json();
|
||||
}
|
||||
|
||||
private async httpDelete<T>(endpoint: string): Promise<T> {
|
||||
const response = await fetch(`${this.serverUrl}${endpoint}`, {
|
||||
method: "DELETE",
|
||||
headers: this.getHeaders(),
|
||||
});
|
||||
return response.json();
|
||||
}
|
||||
|
||||
// Basic operations
|
||||
async ping(): Promise<string> {
|
||||
const result = await this.get<{ status: string }>("/api/health");
|
||||
return result.status === "ok" ? "pong" : "error";
|
||||
}
|
||||
|
||||
async openExternalLink(
|
||||
url: string
|
||||
): Promise<{ success: boolean; error?: string }> {
|
||||
// Open in new tab
|
||||
window.open(url, "_blank", "noopener,noreferrer");
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
// File picker - uses server-side file browser dialog
|
||||
async openDirectory(): Promise<DialogResult> {
|
||||
const fileBrowser = getGlobalFileBrowser();
|
||||
|
||||
if (!fileBrowser) {
|
||||
console.error("File browser not initialized");
|
||||
return { canceled: true, filePaths: [] };
|
||||
}
|
||||
|
||||
const path = await fileBrowser();
|
||||
|
||||
if (!path) {
|
||||
return { canceled: true, filePaths: [] };
|
||||
}
|
||||
|
||||
// Validate with server
|
||||
const result = await this.post<{
|
||||
success: boolean;
|
||||
path?: string;
|
||||
error?: string;
|
||||
}>("/api/fs/validate-path", { filePath: path });
|
||||
|
||||
if (result.success && result.path) {
|
||||
return { canceled: false, filePaths: [result.path] };
|
||||
}
|
||||
|
||||
console.error("Invalid directory:", result.error);
|
||||
return { canceled: true, filePaths: [] };
|
||||
}
|
||||
|
||||
async openFile(options?: object): Promise<DialogResult> {
|
||||
const fileBrowser = getGlobalFileBrowser();
|
||||
|
||||
if (!fileBrowser) {
|
||||
console.error("File browser not initialized");
|
||||
return { canceled: true, filePaths: [] };
|
||||
}
|
||||
|
||||
// For now, use the same directory browser (could be enhanced for file selection)
|
||||
const path = await fileBrowser();
|
||||
|
||||
if (!path) {
|
||||
return { canceled: true, filePaths: [] };
|
||||
}
|
||||
|
||||
const result = await this.post<{ success: boolean; exists: boolean }>(
|
||||
"/api/fs/exists",
|
||||
{ filePath: path }
|
||||
);
|
||||
|
||||
if (result.success && result.exists) {
|
||||
return { canceled: false, filePaths: [path] };
|
||||
}
|
||||
|
||||
console.error("File not found");
|
||||
return { canceled: true, filePaths: [] };
|
||||
}
|
||||
|
||||
// File system operations
|
||||
async readFile(filePath: string): Promise<FileResult> {
|
||||
return this.post("/api/fs/read", { filePath });
|
||||
}
|
||||
|
||||
async writeFile(filePath: string, content: string): Promise<WriteResult> {
|
||||
return this.post("/api/fs/write", { filePath, content });
|
||||
}
|
||||
|
||||
async mkdir(dirPath: string): Promise<WriteResult> {
|
||||
return this.post("/api/fs/mkdir", { dirPath });
|
||||
}
|
||||
|
||||
async readdir(dirPath: string): Promise<ReaddirResult> {
|
||||
return this.post("/api/fs/readdir", { dirPath });
|
||||
}
|
||||
|
||||
async exists(filePath: string): Promise<boolean> {
|
||||
const result = await this.post<{ success: boolean; exists: boolean }>(
|
||||
"/api/fs/exists",
|
||||
{ filePath }
|
||||
);
|
||||
return result.exists;
|
||||
}
|
||||
|
||||
async stat(filePath: string): Promise<StatResult> {
|
||||
return this.post("/api/fs/stat", { filePath });
|
||||
}
|
||||
|
||||
async deleteFile(filePath: string): Promise<WriteResult> {
|
||||
return this.post("/api/fs/delete", { filePath });
|
||||
}
|
||||
|
||||
async trashItem(filePath: string): Promise<WriteResult> {
|
||||
// In web mode, trash is just delete
|
||||
return this.deleteFile(filePath);
|
||||
}
|
||||
|
||||
async getPath(name: string): Promise<string> {
|
||||
// Server provides data directory
|
||||
if (name === "userData") {
|
||||
const result = await this.get<{ dataDir: string }>(
|
||||
"/api/health/detailed"
|
||||
);
|
||||
return result.dataDir || "/data";
|
||||
}
|
||||
return `/data/${name}`;
|
||||
}
|
||||
|
||||
async saveImageToTemp(
|
||||
data: string,
|
||||
filename: string,
|
||||
mimeType: string,
|
||||
projectPath?: string
|
||||
): Promise<SaveImageResult> {
|
||||
return this.post("/api/fs/save-image", {
|
||||
data,
|
||||
filename,
|
||||
mimeType,
|
||||
projectPath,
|
||||
});
|
||||
}
|
||||
|
||||
async saveBoardBackground(
|
||||
data: string,
|
||||
filename: string,
|
||||
mimeType: string,
|
||||
projectPath: string
|
||||
): Promise<{ success: boolean; path?: string; error?: string }> {
|
||||
return this.post("/api/fs/save-board-background", {
|
||||
data,
|
||||
filename,
|
||||
mimeType,
|
||||
projectPath,
|
||||
});
|
||||
}
|
||||
|
||||
async deleteBoardBackground(
|
||||
projectPath: string
|
||||
): Promise<{ success: boolean; error?: string }> {
|
||||
return this.post("/api/fs/delete-board-background", { projectPath });
|
||||
}
|
||||
|
||||
// CLI checks - server-side
|
||||
async checkClaudeCli(): Promise<{
|
||||
success: boolean;
|
||||
status?: string;
|
||||
method?: string;
|
||||
version?: string;
|
||||
path?: string;
|
||||
recommendation?: string;
|
||||
installCommands?: {
|
||||
macos?: string;
|
||||
windows?: string;
|
||||
linux?: string;
|
||||
npm?: string;
|
||||
};
|
||||
error?: string;
|
||||
}> {
|
||||
return this.get("/api/setup/claude-status");
|
||||
}
|
||||
|
||||
async checkCodexCli(): Promise<{
|
||||
success: boolean;
|
||||
status?: string;
|
||||
method?: string;
|
||||
version?: string;
|
||||
path?: string;
|
||||
hasApiKey?: boolean;
|
||||
recommendation?: string;
|
||||
installCommands?: {
|
||||
macos?: string;
|
||||
windows?: string;
|
||||
linux?: string;
|
||||
npm?: string;
|
||||
};
|
||||
error?: string;
|
||||
}> {
|
||||
return this.get("/api/setup/codex-status");
|
||||
}
|
||||
|
||||
// Model API
|
||||
model = {
|
||||
getAvailable: async (): Promise<{
|
||||
success: boolean;
|
||||
models?: ModelDefinition[];
|
||||
error?: string;
|
||||
}> => {
|
||||
return this.get("/api/models/available");
|
||||
},
|
||||
checkProviders: async (): Promise<{
|
||||
success: boolean;
|
||||
providers?: Record<string, ProviderStatus>;
|
||||
error?: string;
|
||||
}> => {
|
||||
return this.get("/api/models/providers");
|
||||
},
|
||||
};
|
||||
|
||||
async testOpenAIConnection(apiKey?: string): Promise<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
}> {
|
||||
return this.post("/api/setup/test-openai", { apiKey });
|
||||
}
|
||||
|
||||
// Setup API
|
||||
setup = {
|
||||
getClaudeStatus: (): Promise<{
|
||||
success: boolean;
|
||||
status?: string;
|
||||
installed?: boolean;
|
||||
method?: string;
|
||||
version?: string;
|
||||
path?: string;
|
||||
auth?: {
|
||||
authenticated: boolean;
|
||||
method: string;
|
||||
hasCredentialsFile?: boolean;
|
||||
hasToken?: boolean;
|
||||
hasStoredOAuthToken?: boolean;
|
||||
hasStoredApiKey?: boolean;
|
||||
hasEnvApiKey?: boolean;
|
||||
hasEnvOAuthToken?: boolean;
|
||||
hasCliAuth?: boolean;
|
||||
hasRecentActivity?: boolean;
|
||||
};
|
||||
error?: string;
|
||||
}> => this.get("/api/setup/claude-status"),
|
||||
|
||||
getCodexStatus: (): Promise<{
|
||||
success: boolean;
|
||||
status?: string;
|
||||
method?: string;
|
||||
version?: string;
|
||||
path?: string;
|
||||
auth?: {
|
||||
authenticated: boolean;
|
||||
method: string;
|
||||
hasAuthFile: boolean;
|
||||
hasEnvKey: boolean;
|
||||
hasStoredApiKey?: boolean;
|
||||
hasEnvApiKey?: boolean;
|
||||
};
|
||||
error?: string;
|
||||
}> => this.get("/api/setup/codex-status"),
|
||||
|
||||
installClaude: (): Promise<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
}> => this.post("/api/setup/install-claude"),
|
||||
|
||||
installCodex: (): Promise<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
}> => this.post("/api/setup/install-codex"),
|
||||
|
||||
authClaude: (): Promise<{
|
||||
success: boolean;
|
||||
token?: string;
|
||||
requiresManualAuth?: boolean;
|
||||
terminalOpened?: boolean;
|
||||
command?: string;
|
||||
error?: string;
|
||||
message?: string;
|
||||
output?: string;
|
||||
}> => this.post("/api/setup/auth-claude"),
|
||||
|
||||
authCodex: (
|
||||
apiKey?: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
requiresManualAuth?: boolean;
|
||||
command?: string;
|
||||
error?: string;
|
||||
}> => this.post("/api/setup/auth-codex", { apiKey }),
|
||||
|
||||
storeApiKey: (
|
||||
provider: string,
|
||||
apiKey: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
error?: string;
|
||||
}> => this.post("/api/setup/store-api-key", { provider, apiKey }),
|
||||
|
||||
getApiKeys: (): Promise<{
|
||||
success: boolean;
|
||||
hasAnthropicKey: boolean;
|
||||
hasOpenAIKey: boolean;
|
||||
hasGoogleKey: boolean;
|
||||
}> => this.get("/api/setup/api-keys"),
|
||||
|
||||
configureCodexMcp: (
|
||||
projectPath: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
configPath?: string;
|
||||
error?: string;
|
||||
}> => this.post("/api/setup/configure-codex-mcp", { projectPath }),
|
||||
|
||||
getPlatform: (): Promise<{
|
||||
success: boolean;
|
||||
platform: string;
|
||||
arch: string;
|
||||
homeDir: string;
|
||||
isWindows: boolean;
|
||||
isMac: boolean;
|
||||
isLinux: boolean;
|
||||
}> => this.get("/api/setup/platform"),
|
||||
|
||||
onInstallProgress: (callback: (progress: unknown) => void) => {
|
||||
return this.subscribeToEvent("agent:stream", callback);
|
||||
},
|
||||
|
||||
onAuthProgress: (callback: (progress: unknown) => void) => {
|
||||
return this.subscribeToEvent("agent:stream", callback);
|
||||
},
|
||||
};
|
||||
|
||||
// Features API
|
||||
features: FeaturesAPI = {
|
||||
getAll: (projectPath: string) =>
|
||||
this.post("/api/features/list", { projectPath }),
|
||||
get: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/features/get", { projectPath, featureId }),
|
||||
create: (projectPath: string, feature: Feature) =>
|
||||
this.post("/api/features/create", { projectPath, feature }),
|
||||
update: (
|
||||
projectPath: string,
|
||||
featureId: string,
|
||||
updates: Partial<Feature>
|
||||
) => this.post("/api/features/update", { projectPath, featureId, updates }),
|
||||
delete: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/features/delete", { projectPath, featureId }),
|
||||
getAgentOutput: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/features/agent-output", { projectPath, featureId }),
|
||||
};
|
||||
|
||||
// Auto Mode API
|
||||
autoMode: AutoModeAPI = {
|
||||
start: (projectPath: string, maxConcurrency?: number) =>
|
||||
this.post("/api/auto-mode/start", { projectPath, maxConcurrency }),
|
||||
stop: (projectPath: string) =>
|
||||
this.post("/api/auto-mode/stop", { projectPath }),
|
||||
stopFeature: (featureId: string) =>
|
||||
this.post("/api/auto-mode/stop-feature", { featureId }),
|
||||
status: (projectPath?: string) =>
|
||||
this.post("/api/auto-mode/status", { projectPath }),
|
||||
runFeature: (
|
||||
projectPath: string,
|
||||
featureId: string,
|
||||
useWorktrees?: boolean
|
||||
) =>
|
||||
this.post("/api/auto-mode/run-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
useWorktrees,
|
||||
}),
|
||||
verifyFeature: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/auto-mode/verify-feature", { projectPath, featureId }),
|
||||
resumeFeature: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/auto-mode/resume-feature", { projectPath, featureId }),
|
||||
contextExists: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/auto-mode/context-exists", { projectPath, featureId }),
|
||||
analyzeProject: (projectPath: string) =>
|
||||
this.post("/api/auto-mode/analyze-project", { projectPath }),
|
||||
followUpFeature: (
|
||||
projectPath: string,
|
||||
featureId: string,
|
||||
prompt: string,
|
||||
imagePaths?: string[]
|
||||
) =>
|
||||
this.post("/api/auto-mode/follow-up-feature", {
|
||||
projectPath,
|
||||
featureId,
|
||||
prompt,
|
||||
imagePaths,
|
||||
}),
|
||||
commitFeature: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/auto-mode/commit-feature", { projectPath, featureId }),
|
||||
onEvent: (callback: (event: AutoModeEvent) => void) => {
|
||||
return this.subscribeToEvent(
|
||||
"auto-mode:event",
|
||||
callback as EventCallback
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
// Worktree API
|
||||
worktree: WorktreeAPI = {
|
||||
revertFeature: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/worktree/revert", { projectPath, featureId }),
|
||||
mergeFeature: (projectPath: string, featureId: string, options?: object) =>
|
||||
this.post("/api/worktree/merge", { projectPath, featureId, options }),
|
||||
getInfo: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/worktree/info", { projectPath, featureId }),
|
||||
getStatus: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/worktree/status", { projectPath, featureId }),
|
||||
list: (projectPath: string) =>
|
||||
this.post("/api/worktree/list", { projectPath }),
|
||||
getDiffs: (projectPath: string, featureId: string) =>
|
||||
this.post("/api/worktree/diffs", { projectPath, featureId }),
|
||||
getFileDiff: (projectPath: string, featureId: string, filePath: string) =>
|
||||
this.post("/api/worktree/file-diff", {
|
||||
projectPath,
|
||||
featureId,
|
||||
filePath,
|
||||
}),
|
||||
};
|
||||
|
||||
// Git API
|
||||
git: GitAPI = {
|
||||
getDiffs: (projectPath: string) =>
|
||||
this.post("/api/git/diffs", { projectPath }),
|
||||
getFileDiff: (projectPath: string, filePath: string) =>
|
||||
this.post("/api/git/file-diff", { projectPath, filePath }),
|
||||
};
|
||||
|
||||
// Suggestions API
|
||||
suggestions: SuggestionsAPI = {
|
||||
generate: (projectPath: string, suggestionType?: SuggestionType) =>
|
||||
this.post("/api/suggestions/generate", { projectPath, suggestionType }),
|
||||
stop: () => this.post("/api/suggestions/stop"),
|
||||
status: () => this.get("/api/suggestions/status"),
|
||||
onEvent: (callback: (event: SuggestionsEvent) => void) => {
|
||||
return this.subscribeToEvent(
|
||||
"suggestions:event",
|
||||
callback as EventCallback
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
// Spec Regeneration API
|
||||
specRegeneration: SpecRegenerationAPI = {
|
||||
create: (
|
||||
projectPath: string,
|
||||
projectOverview: string,
|
||||
generateFeatures?: boolean
|
||||
) =>
|
||||
this.post("/api/spec-regeneration/create", {
|
||||
projectPath,
|
||||
projectOverview,
|
||||
generateFeatures,
|
||||
}),
|
||||
generate: (projectPath: string, projectDefinition: string) =>
|
||||
this.post("/api/spec-regeneration/generate", {
|
||||
projectPath,
|
||||
projectDefinition,
|
||||
}),
|
||||
generateFeatures: (projectPath: string) =>
|
||||
this.post("/api/spec-regeneration/generate-features", { projectPath }),
|
||||
stop: () => this.post("/api/spec-regeneration/stop"),
|
||||
status: () => this.get("/api/spec-regeneration/status"),
|
||||
onEvent: (callback: (event: SpecRegenerationEvent) => void) => {
|
||||
return this.subscribeToEvent(
|
||||
"spec-regeneration:event",
|
||||
callback as EventCallback
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
// Running Agents API
|
||||
runningAgents = {
|
||||
getAll: (): Promise<{
|
||||
success: boolean;
|
||||
runningAgents?: Array<{
|
||||
featureId: string;
|
||||
projectPath: string;
|
||||
projectName: string;
|
||||
isAutoMode: boolean;
|
||||
}>;
|
||||
totalCount?: number;
|
||||
autoLoopRunning?: boolean;
|
||||
error?: string;
|
||||
}> => this.get("/api/running-agents"),
|
||||
};
|
||||
|
||||
// Workspace API
|
||||
workspace = {
|
||||
getConfig: (): Promise<{
|
||||
success: boolean;
|
||||
configured: boolean;
|
||||
workspaceDir?: string;
|
||||
error?: string;
|
||||
}> => this.get("/api/workspace/config"),
|
||||
|
||||
getDirectories: (): Promise<{
|
||||
success: boolean;
|
||||
directories?: Array<{ name: string; path: string }>;
|
||||
error?: string;
|
||||
}> => this.get("/api/workspace/directories"),
|
||||
};
|
||||
|
||||
// Agent API
|
||||
agent = {
|
||||
start: (
|
||||
sessionId: string,
|
||||
workingDirectory?: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
messages?: Message[];
|
||||
error?: string;
|
||||
}> => this.post("/api/agent/start", { sessionId, workingDirectory }),
|
||||
|
||||
send: (
|
||||
sessionId: string,
|
||||
message: string,
|
||||
workingDirectory?: string,
|
||||
imagePaths?: string[]
|
||||
): Promise<{ success: boolean; error?: string }> =>
|
||||
this.post("/api/agent/send", {
|
||||
sessionId,
|
||||
message,
|
||||
workingDirectory,
|
||||
imagePaths,
|
||||
}),
|
||||
|
||||
getHistory: (
|
||||
sessionId: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
messages?: Message[];
|
||||
isRunning?: boolean;
|
||||
error?: string;
|
||||
}> => this.post("/api/agent/history", { sessionId }),
|
||||
|
||||
stop: (sessionId: string): Promise<{ success: boolean; error?: string }> =>
|
||||
this.post("/api/agent/stop", { sessionId }),
|
||||
|
||||
clear: (sessionId: string): Promise<{ success: boolean; error?: string }> =>
|
||||
this.post("/api/agent/clear", { sessionId }),
|
||||
|
||||
onStream: (callback: (data: unknown) => void): (() => void) => {
|
||||
return this.subscribeToEvent("agent:stream", callback as EventCallback);
|
||||
},
|
||||
};
|
||||
|
||||
// Templates API
|
||||
templates = {
|
||||
clone: (
|
||||
repoUrl: string,
|
||||
projectName: string,
|
||||
parentDir: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
projectPath?: string;
|
||||
projectName?: string;
|
||||
error?: string;
|
||||
}> =>
|
||||
this.post("/api/templates/clone", { repoUrl, projectName, parentDir }),
|
||||
};
|
||||
|
||||
// Sessions API
|
||||
sessions = {
|
||||
list: (
|
||||
includeArchived?: boolean
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
sessions?: SessionListItem[];
|
||||
error?: string;
|
||||
}> => this.get(`/api/sessions?includeArchived=${includeArchived || false}`),
|
||||
|
||||
create: (
|
||||
name: string,
|
||||
projectPath: string,
|
||||
workingDirectory?: string
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
session?: {
|
||||
id: string;
|
||||
name: string;
|
||||
projectPath: string;
|
||||
workingDirectory?: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
};
|
||||
error?: string;
|
||||
}> => this.post("/api/sessions", { name, projectPath, workingDirectory }),
|
||||
|
||||
update: (
|
||||
sessionId: string,
|
||||
name?: string,
|
||||
tags?: string[]
|
||||
): Promise<{ success: boolean; error?: string }> =>
|
||||
this.put(`/api/sessions/${sessionId}`, { name, tags }),
|
||||
|
||||
archive: (
|
||||
sessionId: string
|
||||
): Promise<{ success: boolean; error?: string }> =>
|
||||
this.post(`/api/sessions/${sessionId}/archive`, {}),
|
||||
|
||||
unarchive: (
|
||||
sessionId: string
|
||||
): Promise<{ success: boolean; error?: string }> =>
|
||||
this.post(`/api/sessions/${sessionId}/unarchive`, {}),
|
||||
|
||||
delete: (
|
||||
sessionId: string
|
||||
): Promise<{ success: boolean; error?: string }> =>
|
||||
this.httpDelete(`/api/sessions/${sessionId}`),
|
||||
};
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
let httpApiClientInstance: HttpApiClient | null = null;
|
||||
|
||||
export function getHttpApiClient(): HttpApiClient {
|
||||
if (!httpApiClientInstance) {
|
||||
httpApiClientInstance = new HttpApiClient();
|
||||
}
|
||||
return httpApiClientInstance;
|
||||
}
|
||||
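
Reviewer note: a short, hedged sketch of how the web build might consume this client; getHttpApiClient, ping, features.getAll, and agent.onStream all come from the class above, while the project path is a placeholder.

import { getHttpApiClient } from "@/lib/http-api-client";

async function probeServer() {
  const api = getHttpApiClient(); // lazily creates the singleton and opens the WebSocket

  const pong = await api.ping(); // "pong" when /api/health reports ok
  const features = await api.features.getAll("/path/to/project"); // placeholder path
  console.log(pong, features);

  // Streaming events arrive over the /api/events WebSocket; call the returned
  // function to unsubscribe the callback again.
  const unsubscribe = api.agent.onStream((chunk) => console.log("agent chunk:", chunk));
  unsubscribe();
}
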
apps/app/src/lib/templates.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
/**
 * Starter Kit Templates
 *
 * Define GitHub templates that users can clone when creating new projects.
 */

export interface StarterTemplate {
  id: string;
  name: string;
  description: string;
  repoUrl: string;
  techStack: string[];
  features: string[];
  category: "fullstack" | "frontend" | "backend" | "ai" | "other";
  author: string;
}

export const starterTemplates: StarterTemplate[] = [
  {
    id: "agentic-jumpstart",
    name: "Agentic Jumpstart",
    description: "A starter template for building agentic AI applications with a pre-configured development environment including database setup, Docker support, and TypeScript configuration.",
    repoUrl: "https://github.com/webdevcody/agentic-jumpstart-starter-kit",
    techStack: ["TypeScript", "Vite", "Drizzle ORM", "Docker", "PostCSS"],
    features: [
      "Pre-configured VS Code settings",
      "Docker Compose setup",
      "Database migrations with Drizzle",
      "Type-safe development",
      "Environment setup with .env.example"
    ],
    category: "ai",
    author: "webdevcody"
  },
  {
    id: "full-stack-campus",
    name: "Full Stack Campus",
    description: "A feature-driven development template for building community platforms. Includes authentication, Stripe payments, file uploads, and real-time features using TanStack Start.",
    repoUrl: "https://github.com/webdevcody/full-stack-campus",
    techStack: ["TanStack Start", "PostgreSQL", "Drizzle ORM", "Better Auth", "Tailwind CSS", "Radix UI", "Stripe", "AWS S3/R2"],
    features: [
      "Community posts with comments and reactions",
      "User profiles and portfolios",
      "Calendar event management",
      "Direct messaging",
      "Member discovery directory",
      "Real-time notifications",
      "Tiered subscriptions (free/basic/pro)",
      "File uploads with presigned URLs"
    ],
    category: "fullstack",
    author: "webdevcody"
  }
];

export function getTemplateById(id: string): StarterTemplate | undefined {
  return starterTemplates.find(t => t.id === id);
}

export function getTemplatesByCategory(category: StarterTemplate["category"]): StarterTemplate[] {
  return starterTemplates.filter(t => t.category === category);
}
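
Reviewer note: a hedged sketch connecting templates.ts to the templates.clone endpoint of the HTTP client earlier in this diff; the project name and parent directory are example values only.

import { getTemplateById } from "@/lib/templates"; // assumed alias path
import { getHttpApiClient } from "@/lib/http-api-client";

async function cloneStarter() {
  const template = getTemplateById("agentic-jumpstart");
  if (!template) return;

  const result = await getHttpApiClient().templates.clone(
    template.repoUrl,
    "my-new-project", // example project name
    "/home/user/workspace" // example parent directory
  );
  if (!result.success) console.error("Clone failed:", result.error);
}
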
@@ -9,7 +9,6 @@ export type ViewMode =
|
||||
| "board"
|
||||
| "agent"
|
||||
| "settings"
|
||||
| "tools"
|
||||
| "interview"
|
||||
| "context"
|
||||
| "profiles"
|
||||
@@ -28,7 +27,8 @@ export type ThemeMode =
|
||||
| "gruvbox"
|
||||
| "catppuccin"
|
||||
| "onedark"
|
||||
| "synthwave";
|
||||
| "synthwave"
|
||||
| "red";
|
||||
|
||||
export type KanbanCardDetailLevel = "minimal" | "standard" | "detailed";
|
||||
|
||||
@@ -40,23 +40,39 @@ export interface ApiKeys {
|
||||
|
||||
// Keyboard Shortcut with optional modifiers
|
||||
export interface ShortcutKey {
|
||||
key: string; // The main key (e.g., "K", "N", "1")
|
||||
shift?: boolean; // Shift key modifier
|
||||
cmdCtrl?: boolean; // Cmd on Mac, Ctrl on Windows/Linux
|
||||
alt?: boolean; // Alt/Option key modifier
|
||||
key: string; // The main key (e.g., "K", "N", "1")
|
||||
shift?: boolean; // Shift key modifier
|
||||
cmdCtrl?: boolean; // Cmd on Mac, Ctrl on Windows/Linux
|
||||
alt?: boolean; // Alt/Option key modifier
|
||||
}
|
||||
|
||||
// Helper to parse shortcut string to ShortcutKey object
|
||||
export function parseShortcut(shortcut: string): ShortcutKey {
|
||||
const parts = shortcut.split("+").map(p => p.trim());
|
||||
const parts = shortcut.split("+").map((p) => p.trim());
|
||||
const result: ShortcutKey = { key: parts[parts.length - 1] };
|
||||
|
||||
// Normalize common OS-specific modifiers (Cmd/Ctrl/Win/Super symbols) into cmdCtrl
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const modifier = parts[i].toLowerCase();
|
||||
if (modifier === "shift") result.shift = true;
|
||||
else if (modifier === "cmd" || modifier === "ctrl" || modifier === "win" || modifier === "super" || modifier === "⌘" || modifier === "^" || modifier === "⊞" || modifier === "◆") result.cmdCtrl = true;
|
||||
else if (modifier === "alt" || modifier === "opt" || modifier === "option" || modifier === "⌥") result.alt = true;
|
||||
else if (
|
||||
modifier === "cmd" ||
|
||||
modifier === "ctrl" ||
|
||||
modifier === "win" ||
|
||||
modifier === "super" ||
|
||||
modifier === "⌘" ||
|
||||
modifier === "^" ||
|
||||
modifier === "⊞" ||
|
||||
modifier === "◆"
|
||||
)
|
||||
result.cmdCtrl = true;
|
||||
else if (
|
||||
modifier === "alt" ||
|
||||
modifier === "opt" ||
|
||||
modifier === "option" ||
|
||||
modifier === "⌥"
|
||||
)
|
||||
result.alt = true;
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -68,36 +84,49 @@ export function formatShortcut(shortcut: string, forDisplay = false): string {
|
||||
const parts: string[] = [];
|
||||
|
||||
// Prefer User-Agent Client Hints when available; fall back to legacy
|
||||
const platform: 'darwin' | 'win32' | 'linux' = (() => {
|
||||
if (typeof navigator === 'undefined') return 'linux';
|
||||
const platform: "darwin" | "win32" | "linux" = (() => {
|
||||
if (typeof navigator === "undefined") return "linux";
|
||||
|
||||
const uaPlatform = (navigator as Navigator & { userAgentData?: { platform?: string } })
|
||||
.userAgentData?.platform?.toLowerCase?.();
|
||||
const uaPlatform = (
|
||||
navigator as Navigator & { userAgentData?: { platform?: string } }
|
||||
).userAgentData?.platform?.toLowerCase?.();
|
||||
const legacyPlatform = navigator.platform?.toLowerCase?.();
|
||||
const platformString = uaPlatform || legacyPlatform || '';
|
||||
const platformString = uaPlatform || legacyPlatform || "";
|
||||
|
||||
if (platformString.includes('mac')) return 'darwin';
|
||||
if (platformString.includes('win')) return 'win32';
|
||||
return 'linux';
|
||||
if (platformString.includes("mac")) return "darwin";
|
||||
if (platformString.includes("win")) return "win32";
|
||||
return "linux";
|
||||
})();
|
||||
|
||||
// Primary modifier - OS-specific
|
||||
if (parsed.cmdCtrl) {
|
||||
if (forDisplay) {
|
||||
parts.push(platform === 'darwin' ? '⌘' : platform === 'win32' ? '⊞' : '◆');
|
||||
parts.push(
|
||||
platform === "darwin" ? "⌘" : platform === "win32" ? "⊞" : "◆"
|
||||
);
|
||||
} else {
|
||||
parts.push(platform === 'darwin' ? 'Cmd' : platform === 'win32' ? 'Win' : 'Super');
|
||||
parts.push(
|
||||
platform === "darwin" ? "Cmd" : platform === "win32" ? "Win" : "Super"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Alt/Option
|
||||
if (parsed.alt) {
|
||||
parts.push(forDisplay ? (platform === 'darwin' ? '⌥' : 'Alt') : (platform === 'darwin' ? 'Opt' : 'Alt'));
|
||||
parts.push(
|
||||
forDisplay
|
||||
? platform === "darwin"
|
||||
? "⌥"
|
||||
: "Alt"
|
||||
: platform === "darwin"
|
||||
? "Opt"
|
||||
: "Alt"
|
||||
);
|
||||
}
|
||||
|
||||
// Shift
|
||||
if (parsed.shift) {
|
||||
parts.push(forDisplay ? '⇧' : 'Shift');
|
||||
parts.push(forDisplay ? "⇧" : "Shift");
|
||||
}
|
||||
|
||||
parts.push(parsed.key.toUpperCase());
|
||||
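
Reviewer note: to make the modifier normalization above concrete, this is the expected round trip for a typical shortcut string; the formatted output in the comment assumes macOS display mode, and the final join happens outside this hunk.

const parsed = parseShortcut("Cmd + Shift + K");
// parsed === { key: "K", cmdCtrl: true, shift: true }

formatShortcut("Cmd + Shift + K", true);
// Pushes "⌘", "⇧", "K" on macOS (or "⊞"/"Win" for the primary modifier on Windows)
// before the parts are joined for display further down in this function.
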
@@ -113,7 +142,6 @@ export interface KeyboardShortcuts {
|
||||
agent: string;
|
||||
spec: string;
|
||||
context: string;
|
||||
tools: string;
|
||||
settings: string;
|
||||
profiles: string;
|
||||
|
||||
@@ -139,25 +167,24 @@ export const DEFAULT_KEYBOARD_SHORTCUTS: KeyboardShortcuts = {
|
||||
agent: "A",
|
||||
spec: "D",
|
||||
context: "C",
|
||||
tools: "T",
|
||||
settings: "S",
|
||||
profiles: "M",
|
||||
|
||||
|
||||
// UI
|
||||
toggleSidebar: "`",
|
||||
|
||||
|
||||
// Actions
|
||||
// Note: Some shortcuts share the same key (e.g., "N" for addFeature, newSession, addProfile)
|
||||
// This is intentional as they are context-specific and only active in their respective views
|
||||
addFeature: "N", // Only active in board view
|
||||
addContextFile: "F", // Only active in context view
|
||||
startNext: "G", // Only active in board view
|
||||
newSession: "N", // Only active in agent view
|
||||
openProject: "O", // Global shortcut
|
||||
projectPicker: "P", // Global shortcut
|
||||
cyclePrevProject: "Q", // Global shortcut
|
||||
cycleNextProject: "E", // Global shortcut
|
||||
addProfile: "N", // Only active in profiles view
|
||||
addFeature: "N", // Only active in board view
|
||||
addContextFile: "N", // Only active in context view
|
||||
startNext: "G", // Only active in board view
|
||||
newSession: "N", // Only active in agent view
|
||||
openProject: "O", // Global shortcut
|
||||
projectPicker: "P", // Global shortcut
|
||||
cyclePrevProject: "Q", // Global shortcut
|
||||
cycleNextProject: "E", // Global shortcut
|
||||
addProfile: "N", // Only active in profiles view
|
||||
};
|
||||
|
||||
export interface ImageAttachment {
|
||||
@@ -248,6 +275,7 @@ export interface Feature {
|
||||
// Worktree info - set when a feature is being worked on in an isolated git worktree
|
||||
worktreePath?: string; // Path to the worktree directory
|
||||
branchName?: string; // Name of the feature branch
|
||||
justFinishedAt?: string; // ISO timestamp when agent just finished and moved to waiting_approval (shows badge for 2 minutes)
|
||||
}
|
||||
|
||||
// File tree node for project analysis
|
||||
@@ -304,10 +332,13 @@ export interface AppState {
|
||||
chatHistoryOpen: boolean;
|
||||
|
||||
// Auto Mode (per-project state, keyed by project ID)
|
||||
autoModeByProject: Record<string, {
|
||||
isRunning: boolean;
|
||||
runningTasks: string[]; // Feature IDs being worked on
|
||||
}>;
|
||||
autoModeByProject: Record<
|
||||
string,
|
||||
{
|
||||
isRunning: boolean;
|
||||
runningTasks: string[]; // Feature IDs being worked on
|
||||
}
|
||||
>;
|
||||
autoModeActivityLog: AutoModeActivity[];
|
||||
maxConcurrency: number; // Maximum number of concurrent agent tasks
|
||||
|
||||
@@ -335,8 +366,49 @@ export interface AppState {
|
||||
// Project Analysis
|
||||
projectAnalysis: ProjectAnalysis | null;
|
||||
isAnalyzing: boolean;
|
||||
|
||||
// Board Background Settings (per-project, keyed by project path)
|
||||
boardBackgroundByProject: Record<
|
||||
string,
|
||||
{
|
||||
imagePath: string | null; // Path to background image in .automaker directory
|
||||
imageVersion?: number; // Timestamp to bust browser cache when image is updated
|
||||
cardOpacity: number; // Opacity of cards (0-100)
|
||||
columnOpacity: number; // Opacity of columns (0-100)
|
||||
columnBorderEnabled: boolean; // Whether to show column borders
|
||||
cardGlassmorphism: boolean; // Whether to use glassmorphism (backdrop-blur) on cards
|
||||
cardBorderEnabled: boolean; // Whether to show card borders
|
||||
cardBorderOpacity: number; // Opacity of card borders (0-100)
|
||||
hideScrollbar: boolean; // Whether to hide the board scrollbar
|
||||
}
|
||||
>;
|
||||
|
||||
// Theme Preview (for hover preview in theme selectors)
|
||||
previewTheme: ThemeMode | null;
|
||||
}
|
||||
|
||||
// Default background settings for board backgrounds
|
||||
export const defaultBackgroundSettings: {
|
||||
imagePath: string | null;
|
||||
imageVersion?: number;
|
||||
cardOpacity: number;
|
||||
columnOpacity: number;
|
||||
columnBorderEnabled: boolean;
|
||||
cardGlassmorphism: boolean;
|
||||
cardBorderEnabled: boolean;
|
||||
cardBorderOpacity: number;
|
||||
hideScrollbar: boolean;
|
||||
} = {
|
||||
imagePath: null,
|
||||
cardOpacity: 100,
|
||||
columnOpacity: 100,
|
||||
columnBorderEnabled: true,
|
||||
cardGlassmorphism: true,
|
||||
cardBorderEnabled: true,
|
||||
cardBorderOpacity: 100,
|
||||
hideScrollbar: false,
|
||||
};
|
||||
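
Reviewer note: a hedged sketch of how a board component could consume these per-project settings; the query-string cache busting is an assumption based on the imageVersion comment above, and projectPath is assumed to be in scope.

const bg = useAppStore.getState().getBoardBackground(projectPath);

// Appending the imageVersion timestamp forces the browser to reload the image
// after setBoardBackground bumps it, instead of serving a stale cached copy.
const backgroundUrl = bg.imagePath
  ? `${bg.imagePath}?v=${bg.imageVersion ?? 0}` // URL scheme/prefix depends on the host shell
  : undefined;
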
|
||||
export interface AutoModeActivity {
|
||||
id: string;
|
||||
featureId: string;
|
||||
@@ -354,6 +426,7 @@ export interface AutoModeActivity {
|
||||
tool?: string;
|
||||
passes?: boolean;
|
||||
phase?: "planning" | "action" | "verification";
|
||||
errorType?: "authentication" | "execution";
|
||||
}
|
||||
|
||||
export interface AppActions {
|
||||
@@ -366,6 +439,11 @@ export interface AppActions {
|
||||
deleteTrashedProject: (projectId: string) => void;
|
||||
emptyTrash: () => void;
|
||||
setCurrentProject: (project: Project | null) => void;
|
||||
upsertAndSetCurrentProject: (
|
||||
path: string,
|
||||
name: string,
|
||||
theme?: ThemeMode
|
||||
) => Project; // Upsert project by path and set as current
|
||||
reorderProjects: (oldIndex: number, newIndex: number) => void;
|
||||
cyclePrevProject: () => void; // Cycle back through project history (Q)
|
||||
cycleNextProject: () => void; // Cycle forward through project history (E)
|
||||
@@ -379,7 +457,8 @@ export interface AppActions {
|
||||
// Theme actions
|
||||
setTheme: (theme: ThemeMode) => void;
|
||||
setProjectTheme: (projectId: string, theme: ThemeMode | null) => void; // Set per-project theme (null to clear)
|
||||
getEffectiveTheme: () => ThemeMode; // Get the effective theme (project or global)
|
||||
getEffectiveTheme: () => ThemeMode; // Get the effective theme (project, global, or preview if set)
|
||||
setPreviewTheme: (theme: ThemeMode | null) => void; // Set preview theme for hover preview (null to clear)
|
||||
|
||||
// Feature actions
|
||||
setFeatures: (features: Feature[]) => void;
|
||||
@@ -415,7 +494,10 @@ export interface AppActions {
|
||||
addRunningTask: (projectId: string, taskId: string) => void;
|
||||
removeRunningTask: (projectId: string, taskId: string) => void;
|
||||
clearRunningTasks: (projectId: string) => void;
|
||||
getAutoModeState: (projectId: string) => { isRunning: boolean; runningTasks: string[] };
|
||||
getAutoModeState: (projectId: string) => {
|
||||
isRunning: boolean;
|
||||
runningTasks: string[];
|
||||
};
|
||||
addAutoModeActivity: (
|
||||
activity: Omit<AutoModeActivity, "id" | "timestamp">
|
||||
) => void;
|
||||
@@ -454,9 +536,33 @@ export interface AppActions {
|
||||
clearAnalysis: () => void;
|
||||
|
||||
// Agent Session actions
|
||||
setLastSelectedSession: (projectPath: string, sessionId: string | null) => void;
|
||||
setLastSelectedSession: (
|
||||
projectPath: string,
|
||||
sessionId: string | null
|
||||
) => void;
|
||||
getLastSelectedSession: (projectPath: string) => string | null;
|
||||
|
||||
// Board Background actions
|
||||
setBoardBackground: (projectPath: string, imagePath: string | null) => void;
|
||||
setCardOpacity: (projectPath: string, opacity: number) => void;
|
||||
setColumnOpacity: (projectPath: string, opacity: number) => void;
|
||||
setColumnBorderEnabled: (projectPath: string, enabled: boolean) => void;
|
||||
getBoardBackground: (projectPath: string) => {
|
||||
imagePath: string | null;
|
||||
cardOpacity: number;
|
||||
columnOpacity: number;
|
||||
columnBorderEnabled: boolean;
|
||||
cardGlassmorphism: boolean;
|
||||
cardBorderEnabled: boolean;
|
||||
cardBorderOpacity: number;
|
||||
hideScrollbar: boolean;
|
||||
};
|
||||
setCardGlassmorphism: (projectPath: string, enabled: boolean) => void;
|
||||
setCardBorderEnabled: (projectPath: string, enabled: boolean) => void;
|
||||
setCardBorderOpacity: (projectPath: string, opacity: number) => void;
|
||||
setHideScrollbar: (projectPath: string, hide: boolean) => void;
|
||||
clearBoardBackground: (projectPath: string) => void;
|
||||
|
||||
// Reset
|
||||
reset: () => void;
|
||||
}
|
||||
@@ -466,7 +572,8 @@ const DEFAULT_AI_PROFILES: AIProfile[] = [
|
||||
{
|
||||
id: "profile-heavy-task",
|
||||
name: "Heavy Task",
|
||||
description: "Claude Opus with Ultrathink for complex architecture, migrations, or deep debugging.",
|
||||
description:
|
||||
"Claude Opus with Ultrathink for complex architecture, migrations, or deep debugging.",
|
||||
model: "opus",
|
||||
thinkingLevel: "ultrathink",
|
||||
provider: "claude",
|
||||
@@ -476,7 +583,8 @@ const DEFAULT_AI_PROFILES: AIProfile[] = [
|
||||
{
|
||||
id: "profile-balanced",
|
||||
name: "Balanced",
|
||||
description: "Claude Sonnet with medium thinking for typical development tasks.",
|
||||
description:
|
||||
"Claude Sonnet with medium thinking for typical development tasks.",
|
||||
model: "sonnet",
|
||||
thinkingLevel: "medium",
|
||||
provider: "claude",
|
||||
@@ -548,6 +656,8 @@ const initialState: AppState = {
|
||||
aiProfiles: DEFAULT_AI_PROFILES,
|
||||
projectAnalysis: null,
|
||||
isAnalyzing: false,
|
||||
boardBackgroundByProject: {},
|
||||
previewTheme: null,
|
||||
};
|
||||
|
||||
export const useAppStore = create<AppState & AppActions>()(
|
||||
@@ -638,6 +748,7 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
name: trashed.name,
|
||||
path: trashed.path,
|
||||
lastOpened: new Date().toISOString(),
|
||||
theme: trashed.theme, // Preserve theme from trashed project
|
||||
};
|
||||
|
||||
set({
|
||||
@@ -672,7 +783,9 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
// Add to project history (MRU order)
|
||||
const currentHistory = get().projectHistory;
|
||||
// Remove this project if it's already in history
|
||||
const filteredHistory = currentHistory.filter((id) => id !== project.id);
|
||||
const filteredHistory = currentHistory.filter(
|
||||
(id) => id !== project.id
|
||||
);
|
||||
// Add to the front (most recent)
|
||||
const newHistory = [project.id, ...filteredHistory];
|
||||
// Reset history index to 0 (current project)
|
||||
@@ -682,6 +795,58 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
}
|
||||
},
|
||||
|
||||
upsertAndSetCurrentProject: (path, name, theme) => {
|
||||
const {
|
||||
projects,
|
||||
trashedProjects,
|
||||
currentProject,
|
||||
theme: globalTheme,
|
||||
} = get();
|
||||
const existingProject = projects.find((p) => p.path === path);
|
||||
let project: Project;
|
||||
|
||||
if (existingProject) {
|
||||
// Update existing project, preserving theme and other properties
|
||||
project = {
|
||||
...existingProject,
|
||||
name, // Update name in case it changed
|
||||
lastOpened: new Date().toISOString(),
|
||||
};
|
||||
// Update the project in the store
|
||||
const updatedProjects = projects.map((p) =>
|
||||
p.id === existingProject.id ? project : p
|
||||
);
|
||||
set({ projects: updatedProjects });
|
||||
} else {
|
||||
// Create new project - check for trashed project with same path first (preserves theme if deleted/recreated)
|
||||
// Then fall back to provided theme, then current project theme, then global theme
|
||||
const trashedProject = trashedProjects.find((p) => p.path === path);
|
||||
const effectiveTheme =
|
||||
theme ||
|
||||
trashedProject?.theme ||
|
||||
currentProject?.theme ||
|
||||
globalTheme;
|
||||
project = {
|
||||
id: `project-${Date.now()}`,
|
||||
name,
|
||||
path,
|
||||
lastOpened: new Date().toISOString(),
|
||||
theme: effectiveTheme,
|
||||
};
|
||||
// Add the new project to the store
|
||||
set({
|
||||
projects: [
|
||||
...projects,
|
||||
{ ...project, lastOpened: new Date().toISOString() },
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
// Set as current project (this will also update history and view)
|
||||
get().setCurrentProject(project);
|
||||
return project;
|
||||
},
|
||||
|
||||
cyclePrevProject: () => {
|
||||
const { projectHistory, projectHistoryIndex, projects } = get();
|
||||
|
||||
@@ -712,7 +877,7 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
currentProject: targetProject,
|
||||
projectHistory: validHistory,
|
||||
projectHistoryIndex: newIndex,
|
||||
currentView: "board"
|
||||
currentView: "board",
|
||||
});
|
||||
}
|
||||
},
|
||||
@@ -737,9 +902,8 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
if (currentIndex === -1) currentIndex = 0;
|
||||
|
||||
// Move to the previous index (going forward = lower index), wrapping around
|
||||
const newIndex = currentIndex <= 0
|
||||
? validHistory.length - 1
|
||||
: currentIndex - 1;
|
||||
const newIndex =
|
||||
currentIndex <= 0 ? validHistory.length - 1 : currentIndex - 1;
|
||||
const targetProjectId = validHistory[newIndex];
|
||||
const targetProject = projects.find((p) => p.id === targetProjectId);
|
||||
|
||||
@@ -749,7 +913,7 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
currentProject: targetProject,
|
||||
projectHistory: validHistory,
|
||||
projectHistoryIndex: newIndex,
|
||||
currentView: "board"
|
||||
currentView: "board",
|
||||
});
|
||||
}
|
||||
},
|
||||
@@ -801,6 +965,11 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
},
|
||||
|
||||
getEffectiveTheme: () => {
|
||||
// If preview theme is set, use it (for hover preview)
|
||||
const previewTheme = get().previewTheme;
|
||||
if (previewTheme) {
|
||||
return previewTheme;
|
||||
}
|
||||
const currentProject = get().currentProject;
|
||||
// If current project has a theme set, use it
|
||||
if (currentProject?.theme) {
|
||||
@@ -810,6 +979,8 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
return get().theme;
|
||||
},
|
||||
|
||||
setPreviewTheme: (theme) => set({ previewTheme: theme }),
|
||||
|
||||
// Feature actions
|
||||
setFeatures: (features) => set({ features }),
|
||||
|
||||
@@ -961,7 +1132,10 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
// Auto Mode actions (per-project)
|
||||
setAutoModeRunning: (projectId, running) => {
|
||||
const current = get().autoModeByProject;
|
||||
const projectState = current[projectId] || { isRunning: false, runningTasks: [] };
|
||||
const projectState = current[projectId] || {
|
||||
isRunning: false,
|
||||
runningTasks: [],
|
||||
};
|
||||
set({
|
||||
autoModeByProject: {
|
||||
...current,
|
||||
@@ -972,7 +1146,10 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
|
||||
addRunningTask: (projectId, taskId) => {
|
||||
const current = get().autoModeByProject;
|
||||
const projectState = current[projectId] || { isRunning: false, runningTasks: [] };
|
||||
const projectState = current[projectId] || {
|
||||
isRunning: false,
|
||||
runningTasks: [],
|
||||
};
|
||||
if (!projectState.runningTasks.includes(taskId)) {
|
||||
set({
|
||||
autoModeByProject: {
|
||||
@@ -988,13 +1165,18 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
|
||||
removeRunningTask: (projectId, taskId) => {
|
||||
const current = get().autoModeByProject;
|
||||
const projectState = current[projectId] || { isRunning: false, runningTasks: [] };
|
||||
const projectState = current[projectId] || {
|
||||
isRunning: false,
|
||||
runningTasks: [],
|
||||
};
|
||||
set({
|
||||
autoModeByProject: {
|
||||
...current,
|
||||
[projectId]: {
|
||||
...projectState,
|
||||
runningTasks: projectState.runningTasks.filter((id) => id !== taskId),
|
||||
runningTasks: projectState.runningTasks.filter(
|
||||
(id) => id !== taskId
|
||||
),
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -1002,7 +1184,10 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
|
||||
clearRunningTasks: (projectId) => {
|
||||
const current = get().autoModeByProject;
|
||||
const projectState = current[projectId] || { isRunning: false, runningTasks: [] };
|
||||
const projectState = current[projectId] || {
|
||||
isRunning: false,
|
||||
runningTasks: [],
|
||||
};
|
||||
set({
|
||||
autoModeByProject: {
|
||||
...current,
|
||||
@@ -1117,7 +1302,9 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
const current = get().lastSelectedSessionByProject;
|
||||
if (sessionId === null) {
|
||||
// Remove the entry for this project
|
||||
const { [projectPath]: _, ...rest } = current;
|
||||
const rest = Object.fromEntries(
|
||||
Object.entries(current).filter(([key]) => key !== projectPath)
|
||||
);
|
||||
set({ lastSelectedSessionByProject: rest });
|
||||
} else {
|
||||
set({
|
||||
@@ -1132,33 +1319,200 @@ export const useAppStore = create<AppState & AppActions>()(
|
||||
getLastSelectedSession: (projectPath) => {
|
||||
return get().lastSelectedSessionByProject[projectPath] || null;
|
||||
},
|
||||
|
||||
// Board Background actions
|
||||
setBoardBackground: (projectPath, imagePath) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || {
|
||||
imagePath: null,
|
||||
cardOpacity: 100,
|
||||
columnOpacity: 100,
|
||||
columnBorderEnabled: true,
|
||||
cardGlassmorphism: true,
|
||||
cardBorderEnabled: true,
|
||||
cardBorderOpacity: 100,
|
||||
hideScrollbar: false,
|
||||
};
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
imagePath,
|
||||
// Update imageVersion timestamp to bust browser cache when image changes
|
||||
imageVersion: imagePath ? Date.now() : undefined,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
setCardOpacity: (projectPath, opacity) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
cardOpacity: opacity,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
setColumnOpacity: (projectPath, opacity) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
columnOpacity: opacity,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
getBoardBackground: (projectPath) => {
|
||||
const settings = get().boardBackgroundByProject[projectPath];
|
||||
return settings || defaultBackgroundSettings;
|
||||
},
|
||||
|
||||
setColumnBorderEnabled: (projectPath, enabled) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
columnBorderEnabled: enabled,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
setCardGlassmorphism: (projectPath, enabled) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
cardGlassmorphism: enabled,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
setCardBorderEnabled: (projectPath, enabled) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
cardBorderEnabled: enabled,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
setCardBorderOpacity: (projectPath, opacity) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
cardBorderOpacity: opacity,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
setHideScrollbar: (projectPath, hide) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
hideScrollbar: hide,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
clearBoardBackground: (projectPath) => {
|
||||
const current = get().boardBackgroundByProject;
|
||||
const existing = current[projectPath] || defaultBackgroundSettings;
|
||||
set({
|
||||
boardBackgroundByProject: {
|
||||
...current,
|
||||
[projectPath]: {
|
||||
...existing,
|
||||
imagePath: null, // Only clear the image, preserve other settings
|
||||
imageVersion: undefined, // Clear version when clearing image
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
// Reset
|
||||
reset: () => set(initialState),
|
||||
}),
|
||||
{
|
||||
name: "automaker-storage",
|
||||
version: 1, // Increment when making breaking changes to persisted state
|
||||
migrate: (persistedState: unknown, version: number) => {
|
||||
const state = persistedState as Partial<AppState>;
|
||||
|
||||
// Migration from version 0 (no version) to version 1:
|
||||
// - Change addContextFile shortcut from "F" to "N"
|
||||
if (version === 0) {
|
||||
if (state.keyboardShortcuts?.addContextFile === "F") {
|
||||
state.keyboardShortcuts.addContextFile = "N";
|
||||
}
|
||||
}
|
||||
|
||||
return state as AppState;
|
||||
},
|
||||
partialize: (state) => ({
|
||||
// Project management
|
||||
projects: state.projects,
|
||||
currentProject: state.currentProject,
|
||||
trashedProjects: state.trashedProjects,
|
||||
projectHistory: state.projectHistory,
|
||||
projectHistoryIndex: state.projectHistoryIndex,
|
||||
// Features - cached locally for faster hydration (authoritative source is server)
|
||||
features: state.features,
|
||||
// UI state
|
||||
currentView: state.currentView,
|
||||
theme: state.theme,
|
||||
sidebarOpen: state.sidebarOpen,
|
||||
apiKeys: state.apiKeys,
|
||||
chatSessions: state.chatSessions,
|
||||
chatHistoryOpen: state.chatHistoryOpen,
|
||||
kanbanCardDetailLevel: state.kanbanCardDetailLevel,
|
||||
// Settings
|
||||
apiKeys: state.apiKeys,
|
||||
maxConcurrency: state.maxConcurrency,
|
||||
autoModeByProject: state.autoModeByProject,
|
||||
kanbanCardDetailLevel: state.kanbanCardDetailLevel,
|
||||
defaultSkipTests: state.defaultSkipTests,
|
||||
useWorktrees: state.useWorktrees,
|
||||
showProfilesOnly: state.showProfilesOnly,
|
||||
keyboardShortcuts: state.keyboardShortcuts,
|
||||
muteDoneSound: state.muteDoneSound,
|
||||
// Profiles and sessions
|
||||
aiProfiles: state.aiProfiles,
|
||||
chatSessions: state.chatSessions,
|
||||
lastSelectedSessionByProject: state.lastSelectedSessionByProject,
|
||||
// Board background settings
|
||||
boardBackgroundByProject: state.boardBackgroundByProject,
|
||||
}),
|
||||
}
|
||||
)
|
||||
|
||||
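The version / migrate pair above is the hook for future breaking changes to persisted state: bump the version, then translate old shapes inside migrate. A minimal sketch of what a hypothetical version-2 step could look like (the theme-to-colorTheme rename is purely illustrative, not a planned store change):

// Hypothetical sketch of a future persisted-state migration (version 1 -> 2).
// The theme -> colorTheme rename below is illustrative only.
interface PersistedV1 {
  theme?: string;
}
interface PersistedV2 {
  colorTheme?: string;
}

function migrateToV2(persistedState: unknown, version: number): PersistedV2 {
  const state = persistedState as PersistedV1 & PersistedV2;
  if (version < 2 && state.theme !== undefined && state.colorTheme === undefined) {
    // Copy the old field into its new name, then drop the stale key.
    state.colorTheme = state.theme;
    delete state.theme;
  }
  return state;
}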
@@ -10,10 +10,20 @@ export interface CliStatus {
|
||||
error?: string;
|
||||
}
|
||||
|
||||
// Claude Auth Method - all possible authentication sources
|
||||
export type ClaudeAuthMethod =
|
||||
| "oauth_token_env" // CLAUDE_CODE_OAUTH_TOKEN environment variable
|
||||
| "oauth_token" // Stored OAuth token from claude login
|
||||
| "api_key_env" // ANTHROPIC_API_KEY environment variable
|
||||
| "api_key" // Manually stored API key
|
||||
| "credentials_file" // Generic credentials file detection
|
||||
| "cli_authenticated" // Claude CLI is installed and has active sessions/activity
|
||||
| "none";
|
||||
|
||||
// Claude Auth Status
|
||||
export interface ClaudeAuthStatus {
|
||||
authenticated: boolean;
|
||||
method: "oauth_token_env" | "oauth_token" | "api_key" | "api_key_env" | "none";
|
||||
method: ClaudeAuthMethod;
|
||||
hasCredentialsFile?: boolean;
|
||||
oauthTokenValid?: boolean;
|
||||
apiKeyValid?: boolean;
|
||||
@@ -22,12 +32,23 @@ export interface ClaudeAuthStatus {
|
||||
error?: string;
|
||||
}
|
||||
|
||||
// Codex Auth Method - all possible authentication sources
|
||||
export type CodexAuthMethod =
|
||||
| "subscription" // Codex/OpenAI Plus or Team subscription
|
||||
| "cli_verified" // CLI logged in with OpenAI account
|
||||
| "cli_tokens" // CLI with stored access tokens
|
||||
| "api_key" // Manually stored API key
|
||||
| "env" // OPENAI_API_KEY environment variable
|
||||
| "none";
|
||||
|
||||
// Codex Auth Status
|
||||
export interface CodexAuthStatus {
|
||||
authenticated: boolean;
|
||||
method: "api_key" | "env" | "cli_verified" | "cli_tokens" | "none";
|
||||
method: CodexAuthMethod;
|
||||
apiKeyValid?: boolean;
|
||||
mcpConfigured?: boolean;
|
||||
hasSubscription?: boolean;
|
||||
cliLoggedIn?: boolean;
|
||||
error?: string;
|
||||
}
|
||||
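Because ClaudeAuthStatus.method is now the wider ClaudeAuthMethod union, any code that switches on it has to cover the new credentials_file and cli_authenticated members. A minimal sketch of exhaustive handling (the label strings are illustrative):

// Sketch: mapping each ClaudeAuthMethod to a user-facing label.
// The never check in the default branch makes the compiler flag any future union members.
type ClaudeAuthMethod =
  | "oauth_token_env"
  | "oauth_token"
  | "api_key_env"
  | "api_key"
  | "credentials_file"
  | "cli_authenticated"
  | "none";

function describeClaudeAuth(method: ClaudeAuthMethod): string {
  switch (method) {
    case "oauth_token_env": return "OAuth token from CLAUDE_CODE_OAUTH_TOKEN";
    case "oauth_token": return "Stored OAuth token (claude login)";
    case "api_key_env": return "API key from ANTHROPIC_API_KEY";
    case "api_key": return "Manually stored API key";
    case "credentials_file": return "Credentials file detected";
    case "cli_authenticated": return "Active Claude CLI session detected";
    case "none": return "Not authenticated";
    default: {
      const exhaustive: never = method;
      return exhaustive;
    }
  }
}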
|
||||
|
||||
1
apps/app/src/types/electron.d.ts
vendored
@@ -192,6 +192,7 @@ export type AutoModeEvent =
|
||||
| {
|
||||
type: "auto_mode_error";
|
||||
error: string;
|
||||
errorType?: "authentication" | "execution";
|
||||
featureId?: string;
|
||||
projectId?: string;
|
||||
projectPath?: string;
|
||||
|
||||
@@ -298,6 +298,109 @@
|
||||
.feature-card:nth-child(4) { animation-delay: 0.4s; }
|
||||
.feature-card:nth-child(5) { animation-delay: 0.5s; }
|
||||
.feature-card:nth-child(6) { animation-delay: 0.6s; }
|
||||
|
||||
/* Download Buttons */
|
||||
.download-section {
|
||||
margin-top: 2.5rem;
|
||||
}
|
||||
|
||||
.download-label {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.9rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.download-buttons {
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
justify-content: center;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.btn-download {
|
||||
padding: 0.75rem 1.5rem;
|
||||
border-radius: 0.5rem;
|
||||
text-decoration: none;
|
||||
font-weight: 600;
|
||||
transition: all 0.3s;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
background: rgba(30, 41, 59, 0.8);
|
||||
color: var(--text);
|
||||
border: 1px solid rgba(148, 163, 184, 0.2);
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.btn-download:hover {
|
||||
background: rgba(99, 102, 241, 0.2);
|
||||
border-color: var(--primary);
|
||||
transform: translateY(-2px);
|
||||
}
|
||||
|
||||
.btn-download svg {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
}
|
||||
|
||||
.download-subtitle {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.9rem;
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.download-subtitle a {
|
||||
color: var(--primary);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.download-subtitle a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* Video Demo Section */
|
||||
.video-demo {
|
||||
margin-top: 3rem;
|
||||
max-width: 900px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
padding: 0 2rem;
|
||||
}
|
||||
|
||||
.video-container {
|
||||
position: relative;
|
||||
margin-left: -2rem;
|
||||
margin-right: -2rem;
|
||||
width: calc(100% + 4rem);
|
||||
padding-bottom: 66.67%; /* Taller aspect ratio to show more height */
|
||||
background: rgba(30, 41, 59, 0.5);
|
||||
border-radius: 1rem;
|
||||
overflow: hidden;
|
||||
border: 1px solid rgba(148, 163, 184, 0.2);
|
||||
box-shadow: 0 10px 40px rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
|
||||
.video-container video {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.video-demo {
|
||||
margin-top: 2rem;
|
||||
padding: 0 1rem;
|
||||
}
|
||||
|
||||
.video-container {
|
||||
margin-left: -1rem;
|
||||
margin-right: -1rem;
|
||||
width: calc(100% + 2rem);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -307,6 +410,7 @@
|
||||
<ul class="nav-links">
|
||||
<li><a href="#features">Features</a></li>
|
||||
<li><a href="#tech">Tech Stack</a></li>
|
||||
<li><a href="releases.html">Releases</a></li>
|
||||
<li><a href="https://github.com/AutoMaker-Org/automaker" target="_blank">GitHub</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
@@ -321,6 +425,36 @@
|
||||
<a href="https://github.com/AutoMaker-Org/automaker" class="btn btn-primary" target="_blank">View on GitHub</a>
|
||||
<a href="https://github.com/AutoMaker-Org/automaker#getting-started" class="btn btn-secondary" target="_blank">Get Started</a>
|
||||
</div>
|
||||
|
||||
<div class="video-demo">
|
||||
<div class="video-container">
|
||||
<video controls autoplay muted loop playsinline>
|
||||
<source src="https://releases.automaker.app/demo.mp4" type="video/mp4">
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="download-section" id="downloadSection" style="display: none;">
|
||||
<p class="download-label">Download for your platform:</p>
|
||||
<div class="download-buttons">
|
||||
<a href="#" class="btn-download" id="download-windows" style="display: none;">
|
||||
<svg viewBox="0 0 24 24" fill="currentColor"><path d="M0 3.449L9.75 2.1v9.451H0m10.949-9.602L24 0v11.4H10.949M0 12.6h9.75v9.451L0 20.699M10.949 12.6H24V24l-12.9-1.801"/></svg>
|
||||
Windows
|
||||
</a>
|
||||
<a href="#" class="btn-download" id="download-macos" style="display: none;">
|
||||
<svg viewBox="0 0 24 24" fill="currentColor"><path d="M18.71 19.5c-.83 1.24-1.71 2.45-3.05 2.47-1.34.03-1.77-.79-3.29-.79-1.53 0-2 .77-3.27.82-1.31.05-2.3-1.32-3.14-2.53C4.25 17 2.94 12.45 4.7 9.39c.87-1.52 2.43-2.48 4.12-2.51 1.28-.02 2.5.87 3.29.87.78 0 2.26-1.07 3.81-.91.65.03 2.47.26 3.64 1.98-.09.06-2.17 1.28-2.15 3.81.03 3.02 2.65 4.03 2.68 4.04-.03.07-.42 1.44-1.38 2.83M13 3.5c.73-.83 1.94-1.46 2.94-1.5.13 1.17-.34 2.35-1.04 3.19-.69.85-1.83 1.51-2.95 1.42-.15-1.15.41-2.35 1.05-3.11z"/></svg>
|
||||
macOS
|
||||
</a>
|
||||
<a href="#" class="btn-download" id="download-linux" style="display: none;">
|
||||
<svg viewBox="0 0 24 24" fill="currentColor"><path d="M12.504 0c-.155 0-.315.008-.48.021-4.226.333-3.105 4.807-3.17 6.298-.076 1.092-.3 1.953-1.05 3.02-.885 1.051-2.127 2.75-2.716 4.521-.278.832-.41 1.684-.287 2.489a.424.424 0 00-.11.135c-.26.268-.45.6-.663.839-.199.199-.485.267-.797.4-.313.136-.658.269-.864.68-.09.189-.136.394-.132.602 0 .199.027.4.055.536.058.399.116.728.04.97-.249.68-.28 1.145-.106 1.484.174.334.535.47.94.601.81.2 1.91.135 2.774.6.926.466 1.866.67 2.616.47.526-.116.97-.464 1.208-.946.587-.003 1.23-.269 2.26-.334.699-.058 1.574.267 2.577.2.025.134.063.198.114.333l.003.003c.391.778 1.113 1.132 1.884 1.071.771-.06 1.592-.536 2.257-1.306.631-.765 1.683-1.084 2.378-1.503.348-.199.629-.469.649-.853.023-.4-.2-.811-.714-1.376v-.097l-.003-.003c-.17-.2-.25-.535-.338-.926-.085-.401-.182-.786-.492-1.046h-.003c-.059-.054-.123-.067-.188-.135a.357.357 0 00-.19-.064c.431-1.278.264-2.55-.173-3.694-.533-1.41-1.465-2.638-2.175-3.483-.796-1.005-1.576-1.957-1.56-3.368.026-2.152.236-6.133-3.544-6.139zm.529 3.405h.013c.213 0 .396.062.584.198.19.135.33.332.438.533.105.259.158.459.166.724 0-.02.006-.04.006-.06v.105a.086.086 0 01-.004-.021l-.004-.024a1.807 1.807 0 01-.15.706.953.953 0 01-.213.335.71.71 0 00-.088-.042c-.104-.045-.198-.064-.284-.133a1.312 1.312 0 00-.22-.066c.05-.06.146-.133.183-.198.053-.128.082-.264.088-.402v-.02a1.21 1.21 0 00-.061-.4c-.045-.134-.101-.2-.183-.333-.084-.066-.167-.132-.267-.132h-.016c-.093 0-.176.03-.262.132a.8.8 0 00-.205.334 1.18 1.18 0 00-.09.4v.019c.002.089.008.179.02.267-.193-.067-.438-.135-.607-.202a1.635 1.635 0 01-.018-.2v-.02a1.772 1.772 0 01.15-.768c.082-.22.232-.406.43-.533a.985.985 0 01.594-.2zm-2.962.059h.036c.142 0 .27.048.399.135.146.129.264.288.344.465.09.199.14.4.153.667v.004c.007.134.006.2-.002.266v.08c-.03.007-.056.018-.083.024-.152.055-.274.135-.393.2.012-.09.013-.18.003-.267v-.015c-.012-.133-.04-.2-.082-.333a.613.613 0 00-.166-.267.248.248 0 00-.183-.064h-.021c-.071.006-.13.04-.186.132a.552.552 0 00-.12.27.944.944 0 00-.023.33v.015c.012.135.037.2.08.334.046.134.098.2.166.268.01.009.02.018.034.024-.07.057-.117.07-.176.136a.304.304 0 01-.131.068 2.62 2.62 0 01-.275-.402 1.772 1.772 0 01-.155-.667 1.759 1.759 0 01.08-.668 1.43 1.43 0 01.283-.535c.128-.133.26-.2.418-.2zm1.37 1.706c.332 0 .733.065 1.216.399.293.2.523.269 1.052.468h.003c.255.136.405.266.478.399v-.131a.571.571 0 01.016.47c-.123.31-.516.643-1.063.842v.002c-.268.135-.501.333-.775.465-.276.135-.588.292-1.012.267a1.139 1.139 0 01-.448-.067 3.566 3.566 0 01-.322-.198c-.195-.135-.363-.332-.612-.465v-.005h-.005c-.4-.246-.616-.512-.686-.71-.07-.268-.005-.47.193-.6.224-.135.38-.271.483-.336.104-.074.143-.102.176-.131h.002v-.003c.169-.202.436-.47.839-.601.139-.036.294-.065.466-.065zm2.8 2.142c.358 1.417 1.196 3.475 1.735 4.473.286.534.855 1.659 1.102 3.024.156-.005.33.018.513.064.646-1.671-.546-3.467-1.089-3.966-.22-.2-.232-.335-.123-.335.59.534 1.365 1.572 1.646 2.757.13.535.16 1.104.021 1.67.067.028.135.06.205.067 1.032.534 1.413.938 1.23 1.537v-.002c-.06-.003-.12 0-.18 0h-.016c.151-.467-.182-.825-1.065-1.224-.915-.4-1.646-.336-1.77.465-.008.043-.013.066-.018.135-.068.023-.139.053-.209.064-.43.268-.662.669-.793 1.187-.13.533-.17 1.156-.205 1.869v.003c-.02.482-.04 1.053-.158 1.425-.06.134-.133.27-.238.465h-.003c-.067-.004-.003-.401-.004-.469.006-.534.011-1.2.036-1.534.006-.468.011-.534-.021-.267-.18.936-.323 1.2-.608 1.67a1.016 1.016 0 
01-.112.134v.003l-.005-.003c-.07-.2-.044-.401-.044-.535-.002-.468.006-.869-.089-1.334-.066-.468-.353-.935-.711-1.469-.074-.104-.264-.333-.376-.533-.073-.133-.067-.267.123-.336.104-.037.2-.135.29-.2.09-.067.18-.136.27-.2.02-.015.04-.018.059-.036.14-.083.267-.2.368-.335a.838.838 0 00.145-.262l.002-.004c.028-.087.042-.133.034-.2-.034-.135-.232-.333-.393-.468-.226-.2-.4-.333-.673-.467l-.005-.002c-.569-.27-1.322-.534-1.927-.8a.082.082 0 01-.026-.013c-.136-.071-.27-.2-.406-.4-.466-.735-.727-1.536-.727-1.936 0-.2.067-.4.129-.533.032-.067.065-.135.102-.2.036-.067.257-.2.378-.267.143-.095.287-.191.441-.263z"/></svg>
|
||||
Linux
|
||||
</a>
|
||||
</div>
|
||||
<p class="download-subtitle">
|
||||
<span id="latestVersion"></span> | <a href="releases.html">All releases</a>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
@@ -408,5 +542,53 @@
|
||||
</p>
|
||||
</div>
|
||||
</footer>
|
||||
|
||||
<script>
|
||||
(function() {
|
||||
const R2_RELEASES_URL = window.RELEASES_JSON_URL || 'https://releases.automaker.app/releases.json';
|
||||
|
||||
async function loadLatestRelease() {
|
||||
try {
|
||||
const response = await fetch(R2_RELEASES_URL);
|
||||
if (!response.ok) throw new Error('Failed to fetch releases');
|
||||
|
||||
const data = await response.json();
|
||||
if (!data.releases || data.releases.length === 0) return;
|
||||
|
||||
const latest = data.releases[0];
|
||||
let hasAnyAsset = false;
|
||||
|
||||
if (latest.assets.windows) {
|
||||
const btn = document.getElementById('download-windows');
|
||||
btn.href = latest.assets.windows.url;
|
||||
btn.style.display = 'inline-flex';
|
||||
hasAnyAsset = true;
|
||||
}
|
||||
if (latest.assets.macos || latest.assets.macosArm) {
|
||||
const btn = document.getElementById('download-macos');
|
||||
const macAsset = latest.assets.macosArm || latest.assets.macos;
|
||||
btn.href = macAsset.url;
|
||||
btn.style.display = 'inline-flex';
|
||||
hasAnyAsset = true;
|
||||
}
|
||||
if (latest.assets.linux) {
|
||||
const btn = document.getElementById('download-linux');
|
||||
btn.href = latest.assets.linux.url;
|
||||
btn.style.display = 'inline-flex';
|
||||
hasAnyAsset = true;
|
||||
}
|
||||
|
||||
if (hasAnyAsset) {
|
||||
document.getElementById('latestVersion').textContent = latest.version;
|
||||
document.getElementById('downloadSection').style.display = 'block';
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load releases:', error);
|
||||
}
|
||||
}
|
||||
|
||||
loadLatestRelease();
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
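Both the landing-page loader above and the releases page that follows read the same releases.json document from R2, and they only rely on the fields they actually access. A TypeScript sketch of that inferred shape (field names come from the two scripts; everything else, including optionality, is an assumption):

// Inferred from how index.html and releases.html consume releases.json.
// A platform entry may be absent for a given release.
interface ReleaseAsset {
  url: string;   // direct download URL
  size: number;  // size in bytes, shown as MB by formatSize()
}

interface Release {
  version: string;            // e.g. "v0.2.0"
  date: string;               // ISO 8601 date, rendered by formatDate()
  githubReleaseUrl: string;   // link target for "View release notes on GitHub"
  assets: {
    windows?: ReleaseAsset;
    macos?: ReleaseAsset;     // Intel build
    macosArm?: ReleaseAsset;  // Apple Silicon build
    linux?: ReleaseAsset;
  };
}

interface ReleasesDocument {
  releases: Release[];        // newest first; index 0 is treated as "Latest"
}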
422
apps/marketing/public/releases.html
Normal file
@@ -0,0 +1,422 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Releases - Automaker</title>
|
||||
<style>
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
:root {
|
||||
--primary: #6366f1;
|
||||
--primary-dark: #4f46e5;
|
||||
--secondary: #8b5cf6;
|
||||
--accent: #ec4899;
|
||||
--dark: #0f172a;
|
||||
--dark-light: #1e293b;
|
||||
--text: #e2e8f0;
|
||||
--text-muted: #94a3b8;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
|
||||
background: linear-gradient(135deg, #0f172a 0%, #1e293b 50%, #0f172a 100%);
|
||||
color: var(--text);
|
||||
line-height: 1.6;
|
||||
overflow-x: hidden;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.container {
|
||||
max-width: 1200px;
|
||||
margin: 0 auto;
|
||||
padding: 0 2rem;
|
||||
}
|
||||
|
||||
/* Header */
|
||||
header {
|
||||
padding: 2rem 0;
|
||||
position: sticky;
|
||||
top: 0;
|
||||
background: rgba(15, 23, 42, 0.8);
|
||||
backdrop-filter: blur(10px);
|
||||
z-index: 100;
|
||||
border-bottom: 1px solid rgba(148, 163, 184, 0.1);
|
||||
}
|
||||
|
||||
nav {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.logo {
|
||||
font-size: 1.5rem;
|
||||
font-weight: 700;
|
||||
background: linear-gradient(135deg, var(--primary), var(--secondary));
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
background-clip: text;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.nav-links {
|
||||
display: flex;
|
||||
gap: 2rem;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.nav-links a {
|
||||
color: var(--text-muted);
|
||||
text-decoration: none;
|
||||
transition: color 0.3s;
|
||||
}
|
||||
|
||||
.nav-links a:hover,
|
||||
.nav-links a.active {
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
/* Page Header */
|
||||
.page-header {
|
||||
padding: 4rem 0 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.page-header h1 {
|
||||
font-size: 3rem;
|
||||
font-weight: 800;
|
||||
margin-bottom: 1rem;
|
||||
background: linear-gradient(135deg, #ffffff 0%, var(--text-muted) 100%);
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
background-clip: text;
|
||||
}
|
||||
|
||||
.page-header p {
|
||||
font-size: 1.2rem;
|
||||
color: var(--text-muted);
|
||||
max-width: 600px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
/* Releases Section */
|
||||
.releases-section {
|
||||
padding: 2rem 0 6rem;
|
||||
}
|
||||
|
||||
.releases-list {
|
||||
max-width: 800px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.release-card {
|
||||
background: rgba(30, 41, 59, 0.5);
|
||||
border: 1px solid rgba(148, 163, 184, 0.1);
|
||||
border-radius: 1rem;
|
||||
padding: 2rem;
|
||||
margin-bottom: 1.5rem;
|
||||
transition: all 0.3s;
|
||||
}
|
||||
|
||||
.release-card:first-child {
|
||||
border-color: var(--primary);
|
||||
background: rgba(99, 102, 241, 0.1);
|
||||
}
|
||||
|
||||
.release-card:hover {
|
||||
border-color: rgba(99, 102, 241, 0.5);
|
||||
}
|
||||
|
||||
.release-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 1rem;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.release-version {
|
||||
font-size: 1.5rem;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.release-date {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.latest-badge {
|
||||
background: linear-gradient(135deg, var(--primary), var(--secondary));
|
||||
color: white;
|
||||
padding: 0.25rem 0.75rem;
|
||||
border-radius: 1rem;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.release-downloads {
|
||||
display: flex;
|
||||
gap: 1rem;
|
||||
flex-wrap: wrap;
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.download-link {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
padding: 0.5rem 1rem;
|
||||
background: rgba(148, 163, 184, 0.1);
|
||||
border: 1px solid rgba(148, 163, 184, 0.2);
|
||||
border-radius: 0.5rem;
|
||||
color: var(--text);
|
||||
text-decoration: none;
|
||||
font-size: 0.9rem;
|
||||
transition: all 0.3s;
|
||||
}
|
||||
|
||||
.download-link:hover {
|
||||
background: rgba(99, 102, 241, 0.2);
|
||||
border-color: var(--primary);
|
||||
}
|
||||
|
||||
.download-link svg {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
}
|
||||
|
||||
.download-size {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.release-notes-link {
|
||||
color: var(--primary);
|
||||
text-decoration: none;
|
||||
font-size: 0.9rem;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.25rem;
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
||||
.release-notes-link:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.loading-spinner {
|
||||
text-align: center;
|
||||
padding: 4rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.error-message {
|
||||
text-align: center;
|
||||
padding: 2rem;
|
||||
color: var(--accent);
|
||||
background: rgba(236, 72, 153, 0.1);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
|
||||
.error-message a {
|
||||
color: var(--primary);
|
||||
}
|
||||
|
||||
.no-releases {
|
||||
text-align: center;
|
||||
padding: 4rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
footer {
|
||||
padding: 3rem 0;
|
||||
text-align: center;
|
||||
border-top: 1px solid rgba(148, 163, 184, 0.1);
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
footer a {
|
||||
color: var(--primary);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
footer a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* Responsive */
|
||||
@media (max-width: 768px) {
|
||||
.page-header h1 {
|
||||
font-size: 2rem;
|
||||
}
|
||||
|
||||
.nav-links {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.release-header {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
.release-downloads {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.download-link {
|
||||
width: 100%;
|
||||
justify-content: center;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<nav class="container">
|
||||
<a href="index.html" class="logo">Automaker</a>
|
||||
<ul class="nav-links">
|
||||
<li><a href="index.html#features">Features</a></li>
|
||||
<li><a href="index.html#tech">Tech Stack</a></li>
|
||||
<li><a href="releases.html" class="active">Releases</a></li>
|
||||
<li><a href="https://github.com/AutoMaker-Org/automaker" target="_blank">GitHub</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
</header>
|
||||
|
||||
<main>
|
||||
<section class="page-header">
|
||||
<div class="container">
|
||||
<h1>Releases</h1>
|
||||
<p>Download Automaker for your platform. All versions are available below.</p>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section class="releases-section">
|
||||
<div class="container">
|
||||
<div class="releases-list" id="releasesList">
|
||||
<div class="loading-spinner">Loading releases...</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</main>
|
||||
|
||||
<footer>
|
||||
<div class="container">
|
||||
<p>Made with love by <a href="mailto:webdevcody@gmail.com">Cody Seibert</a></p>
|
||||
<p style="margin-top: 1rem;">
|
||||
<a href="https://github.com/AutoMaker-Org/automaker" target="_blank">GitHub</a> |
|
||||
<a href="https://github.com/AutoMaker-Org/automaker/blob/main/LICENSE" target="_blank">License</a>
|
||||
</p>
|
||||
</div>
|
||||
</footer>
|
||||
|
||||
<script>
|
||||
(function() {
|
||||
const R2_RELEASES_URL = window.RELEASES_JSON_URL || 'https://releases.automaker.app/releases.json';
|
||||
|
||||
const platformIcons = {
|
||||
windows: '<svg viewBox="0 0 24 24" fill="currentColor"><path d="M0 3.449L9.75 2.1v9.451H0m10.949-9.602L24 0v11.4H10.949M0 12.6h9.75v9.451L0 20.699M10.949 12.6H24V24l-12.9-1.801"/></svg>',
|
||||
macos: '<svg viewBox="0 0 24 24" fill="currentColor"><path d="M18.71 19.5c-.83 1.24-1.71 2.45-3.05 2.47-1.34.03-1.77-.79-3.29-.79-1.53 0-2 .77-3.27.82-1.31.05-2.3-1.32-3.14-2.53C4.25 17 2.94 12.45 4.7 9.39c.87-1.52 2.43-2.48 4.12-2.51 1.28-.02 2.5.87 3.29.87.78 0 2.26-1.07 3.81-.91.65.03 2.47.26 3.64 1.98-.09.06-2.17 1.28-2.15 3.81.03 3.02 2.65 4.03 2.68 4.04-.03.07-.42 1.44-1.38 2.83M13 3.5c.73-.83 1.94-1.46 2.94-1.5.13 1.17-.34 2.35-1.04 3.19-.69.85-1.83 1.51-2.95 1.42-.15-1.15.41-2.35 1.05-3.11z"/></svg>',
|
||||
macosArm: '<svg viewBox="0 0 24 24" fill="currentColor"><path d="M18.71 19.5c-.83 1.24-1.71 2.45-3.05 2.47-1.34.03-1.77-.79-3.29-.79-1.53 0-2 .77-3.27.82-1.31.05-2.3-1.32-3.14-2.53C4.25 17 2.94 12.45 4.7 9.39c.87-1.52 2.43-2.48 4.12-2.51 1.28-.02 2.5.87 3.29.87.78 0 2.26-1.07 3.81-.91.65.03 2.47.26 3.64 1.98-.09.06-2.17 1.28-2.15 3.81.03 3.02 2.65 4.03 2.68 4.04-.03.07-.42 1.44-1.38 2.83M13 3.5c.73-.83 1.94-1.46 2.94-1.5.13 1.17-.34 2.35-1.04 3.19-.69.85-1.83 1.51-2.95 1.42-.15-1.15.41-2.35 1.05-3.11z"/></svg>',
|
||||
linux: '<svg viewBox="0 0 24 24" fill="currentColor"><path d="M12.504 0c-.155 0-.315.008-.48.021-4.226.333-3.105 4.807-3.17 6.298-.076 1.092-.3 1.953-1.05 3.02-.885 1.051-2.127 2.75-2.716 4.521-.278.832-.41 1.684-.287 2.489a.424.424 0 00-.11.135c-.26.268-.45.6-.663.839-.199.199-.485.267-.797.4-.313.136-.658.269-.864.68-.09.189-.136.394-.132.602 0 .199.027.4.055.536.058.399.116.728.04.97-.249.68-.28 1.145-.106 1.484.174.334.535.47.94.601.81.2 1.91.135 2.774.6.926.466 1.866.67 2.616.47.526-.116.97-.464 1.208-.946.587-.003 1.23-.269 2.26-.334.699-.058 1.574.267 2.577.2.025.134.063.198.114.333l.003.003c.391.778 1.113 1.132 1.884 1.071.771-.06 1.592-.536 2.257-1.306.631-.765 1.683-1.084 2.378-1.503.348-.199.629-.469.649-.853.023-.4-.2-.811-.714-1.376v-.097l-.003-.003c-.17-.2-.25-.535-.338-.926-.085-.401-.182-.786-.492-1.046h-.003c-.059-.054-.123-.067-.188-.135a.357.357 0 00-.19-.064c.431-1.278.264-2.55-.173-3.694-.533-1.41-1.465-2.638-2.175-3.483-.796-1.005-1.576-1.957-1.56-3.368.026-2.152.236-6.133-3.544-6.139zm.529 3.405h.013c.213 0 .396.062.584.198.19.135.33.332.438.533.105.259.158.459.166.724 0-.02.006-.04.006-.06v.105a.086.086 0 01-.004-.021l-.004-.024a1.807 1.807 0 01-.15.706.953.953 0 01-.213.335.71.71 0 00-.088-.042c-.104-.045-.198-.064-.284-.133a1.312 1.312 0 00-.22-.066c.05-.06.146-.133.183-.198.053-.128.082-.264.088-.402v-.02a1.21 1.21 0 00-.061-.4c-.045-.134-.101-.2-.183-.333-.084-.066-.167-.132-.267-.132h-.016c-.093 0-.176.03-.262.132a.8.8 0 00-.205.334 1.18 1.18 0 00-.09.4v.019c.002.089.008.179.02.267-.193-.067-.438-.135-.607-.202a1.635 1.635 0 01-.018-.2v-.02a1.772 1.772 0 01.15-.768c.082-.22.232-.406.43-.533a.985.985 0 01.594-.2zm-2.962.059h.036c.142 0 .27.048.399.135.146.129.264.288.344.465.09.199.14.4.153.667v.004c.007.134.006.2-.002.266v.08c-.03.007-.056.018-.083.024-.152.055-.274.135-.393.2.012-.09.013-.18.003-.267v-.015c-.012-.133-.04-.2-.082-.333a.613.613 0 00-.166-.267.248.248 0 00-.183-.064h-.021c-.071.006-.13.04-.186.132a.552.552 0 00-.12.27.944.944 0 00-.023.33v.015c.012.135.037.2.08.334.046.134.098.2.166.268.01.009.02.018.034.024-.07.057-.117.07-.176.136a.304.304 0 01-.131.068 2.62 2.62 0 01-.275-.402 1.772 1.772 0 01-.155-.667 1.759 1.759 0 01.08-.668 1.43 1.43 0 01.283-.535c.128-.133.26-.2.418-.2zm1.37 1.706c.332 0 .733.065 1.216.399.293.2.523.269 1.052.468h.003c.255.136.405.266.478.399v-.131a.571.571 0 01.016.47c-.123.31-.516.643-1.063.842v.002c-.268.135-.501.333-.775.465-.276.135-.588.292-1.012.267a1.139 1.139 0 01-.448-.067 3.566 3.566 0 01-.322-.198c-.195-.135-.363-.332-.612-.465v-.005h-.005c-.4-.246-.616-.512-.686-.71-.07-.268-.005-.47.193-.6.224-.135.38-.271.483-.336.104-.074.143-.102.176-.131h.002v-.003c.169-.202.436-.47.839-.601.139-.036.294-.065.466-.065zm2.8 2.142c.358 1.417 1.196 3.475 1.735 4.473.286.534.855 1.659 1.102 3.024.156-.005.33.018.513.064.646-1.671-.546-3.467-1.089-3.966-.22-.2-.232-.335-.123-.335.59.534 1.365 1.572 1.646 2.757.13.535.16 1.104.021 1.67.067.028.135.06.205.067 1.032.534 1.413.938 1.23 1.537v-.002c-.06-.003-.12 0-.18 0h-.016c.151-.467-.182-.825-1.065-1.224-.915-.4-1.646-.336-1.77.465-.008.043-.013.066-.018.135-.068.023-.139.053-.209.064-.43.268-.662.669-.793 1.187-.13.533-.17 1.156-.205 1.869v.003c-.02.482-.04 1.053-.158 1.425-.06.134-.133.27-.238.465h-.003c-.067-.004-.003-.401-.004-.469.006-.534.011-1.2.036-1.534.006-.468.011-.534-.021-.267-.18.936-.323 1.2-.608 1.67a1.016 1.016 0 
01-.112.134v.003l-.005-.003c-.07-.2-.044-.401-.044-.535-.002-.468.006-.869-.089-1.334-.066-.468-.353-.935-.711-1.469-.074-.104-.264-.333-.376-.533-.073-.133-.067-.267.123-.336.104-.037.2-.135.29-.2.09-.067.18-.136.27-.2.02-.015.04-.018.059-.036.14-.083.267-.2.368-.335a.838.838 0 00.145-.262l.002-.004c.028-.087.042-.133.034-.2-.034-.135-.232-.333-.393-.468-.226-.2-.4-.333-.673-.467l-.005-.002c-.569-.27-1.322-.534-1.927-.8a.082.082 0 01-.026-.013c-.136-.071-.27-.2-.406-.4-.466-.735-.727-1.536-.727-1.936 0-.2.067-.4.129-.533.032-.067.065-.135.102-.2.036-.067.257-.2.378-.267.143-.095.287-.191.441-.263z"/></svg>'
|
||||
};
|
||||
|
||||
const platformLabels = {
|
||||
windows: 'Windows',
|
||||
macos: 'macOS (Intel)',
|
||||
macosArm: 'macOS (Apple Silicon)',
|
||||
linux: 'Linux'
|
||||
};
|
||||
|
||||
function formatDate(isoString) {
|
||||
const date = new Date(isoString);
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric'
|
||||
});
|
||||
}
|
||||
|
||||
function formatSize(bytes) {
|
||||
const mb = bytes / (1024 * 1024);
|
||||
return mb.toFixed(1) + ' MB';
|
||||
}
|
||||
|
||||
function renderRelease(release, isLatest) {
|
||||
const assets = Object.entries(release.assets)
|
||||
.filter(([_, asset]) => asset)
|
||||
.map(([platform, asset]) => `
|
||||
<a href="${asset.url}" class="download-link">
|
||||
${platformIcons[platform] || ''}
|
||||
<span>${platformLabels[platform] || platform}</span>
|
||||
<span class="download-size">${formatSize(asset.size)}</span>
|
||||
</a>
|
||||
`).join('');
|
||||
|
||||
return `
|
||||
<div class="release-card">
|
||||
<div class="release-header">
|
||||
<div>
|
||||
<span class="release-version">${release.version}</span>
|
||||
${isLatest ? '<span class="latest-badge">Latest</span>' : ''}
|
||||
</div>
|
||||
<span class="release-date">${formatDate(release.date)}</span>
|
||||
</div>
|
||||
<div class="release-downloads">
|
||||
${assets}
|
||||
</div>
|
||||
<a href="${release.githubReleaseUrl}" class="release-notes-link" target="_blank">
|
||||
View release notes on GitHub
|
||||
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||
<path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"></path>
|
||||
<polyline points="15 3 21 3 21 9"></polyline>
|
||||
<line x1="10" y1="14" x2="21" y2="3"></line>
|
||||
</svg>
|
||||
</a>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
async function loadReleases() {
|
||||
const container = document.getElementById('releasesList');
|
||||
|
||||
try {
|
||||
const response = await fetch(R2_RELEASES_URL);
|
||||
if (!response.ok) throw new Error('Failed to fetch releases');
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (!data.releases || data.releases.length === 0) {
|
||||
container.innerHTML = '<div class="no-releases">No releases available yet. Check back soon!</div>';
|
||||
return;
|
||||
}
|
||||
|
||||
container.innerHTML = data.releases
|
||||
.map((release, index) => renderRelease(release, index === 0))
|
||||
.join('');
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to load releases:', error);
|
||||
container.innerHTML = `
|
||||
<div class="error-message">
|
||||
<p>Unable to load releases. Please try again later or visit our
|
||||
<a href="https://github.com/AutoMaker-Org/automaker/releases" target="_blank">GitHub releases page</a>.</p>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
}
|
||||
|
||||
loadReleases();
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
45
apps/server/.env.example
Normal file
@@ -0,0 +1,45 @@
|
||||
# Automaker Server Configuration
|
||||
# Copy this file to .env and configure your settings
|
||||
|
||||
# ============================================
|
||||
# REQUIRED
|
||||
# ============================================
|
||||
|
||||
# Your Anthropic API key for Claude models
|
||||
ANTHROPIC_API_KEY=sk-ant-...
|
||||
|
||||
# ============================================
|
||||
# OPTIONAL - Security
|
||||
# ============================================
|
||||
|
||||
# API key for authenticating requests (leave empty to disable auth)
|
||||
# If set, all API requests must include X-API-Key header
|
||||
AUTOMAKER_API_KEY=
|
||||
|
||||
# Restrict file operations to these directories (comma-separated)
|
||||
# Important for security in multi-tenant environments
|
||||
ALLOWED_PROJECT_DIRS=/home/user/projects,/var/www
|
||||
|
||||
# CORS origin - which domains can access the API
|
||||
# Use "*" for development, set specific origin for production
|
||||
CORS_ORIGIN=*
|
||||
|
||||
# ============================================
|
||||
# OPTIONAL - Server
|
||||
# ============================================
|
||||
|
||||
# Port to run the server on
|
||||
PORT=3008
|
||||
|
||||
# Data directory for sessions and metadata
|
||||
DATA_DIR=./data
|
||||
|
||||
# ============================================
|
||||
# OPTIONAL - Additional AI Providers
|
||||
# ============================================
|
||||
|
||||
# OpenAI API key (for Codex CLI support)
|
||||
OPENAI_API_KEY=
|
||||
|
||||
# Google API key (for future Gemini support)
|
||||
GOOGLE_API_KEY=
|
||||
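When AUTOMAKER_API_KEY is set, every request outside /api/health has to carry the key in an X-API-Key header. A minimal client sketch against the defaults above (the base URL and the status route are taken from this diff; wiring the key from the environment is an assumption):

// Sketch: calling the server with the optional API key.
const BASE_URL = "http://localhost:3008";
const API_KEY = process.env.AUTOMAKER_API_KEY ?? "";

async function getAutoModeStatus(): Promise<unknown> {
  const response = await fetch(`${BASE_URL}/api/auto-mode/status`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // Only required when AUTOMAKER_API_KEY is configured on the server.
      ...(API_KEY ? { "X-API-Key": API_KEY } : {}),
    },
    body: JSON.stringify({}),
  });
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status}`);
  }
  return response.json();
}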
2
apps/server/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
.env
|
||||
data
|
||||
55
apps/server/Dockerfile
Normal file
@@ -0,0 +1,55 @@
|
||||
# Automaker Backend Server
|
||||
# Multi-stage build for minimal production image
|
||||
|
||||
# Build stage
|
||||
FROM node:20-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy package files
|
||||
COPY package*.json ./
|
||||
COPY apps/server/package*.json ./apps/server/
|
||||
|
||||
# Install dependencies
|
||||
RUN npm ci --workspace=apps/server
|
||||
|
||||
# Copy source
|
||||
COPY apps/server ./apps/server
|
||||
|
||||
# Build TypeScript
|
||||
RUN npm run build --workspace=apps/server
|
||||
|
||||
# Production stage
|
||||
FROM node:20-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Create non-root user
|
||||
RUN addgroup -g 1001 -S automaker && \
|
||||
adduser -S automaker -u 1001
|
||||
|
||||
# Copy built files and production dependencies
|
||||
COPY --from=builder /app/apps/server/dist ./dist
|
||||
COPY --from=builder /app/apps/server/package*.json ./
|
||||
COPY --from=builder /app/node_modules ./node_modules
|
||||
|
||||
# Create data directory
|
||||
RUN mkdir -p /data && chown automaker:automaker /data
|
||||
|
||||
# Switch to non-root user
|
||||
USER automaker
|
||||
|
||||
# Environment variables
|
||||
ENV NODE_ENV=production
|
||||
ENV PORT=3008
|
||||
ENV DATA_DIR=/data
|
||||
|
||||
# Expose port
|
||||
EXPOSE 3008
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
|
||||
CMD wget --no-verbose --tries=1 --spider http://localhost:3008/api/health || exit 1
|
||||
|
||||
# Start server
|
||||
CMD ["node", "dist/index.js"]
|
||||
29
apps/server/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "@automaker/server",
|
||||
"version": "0.1.0",
|
||||
"description": "Backend server for Automaker - provides API for both web and Electron modes",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js",
|
||||
"lint": "eslint src/"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.1.61",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^17.2.3",
|
||||
"express": "^5.1.0",
|
||||
"ws": "^8.18.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cors": "^2.8.18",
|
||||
"@types/express": "^5.0.1",
|
||||
"@types/node": "^20",
|
||||
"@types/ws": "^8.18.1",
|
||||
"tsx": "^4.19.4",
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
175
apps/server/src/index.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
/**
|
||||
* Automaker Backend Server
|
||||
*
|
||||
* Provides HTTP/WebSocket API for both web and Electron modes.
|
||||
* In Electron mode, this server runs locally.
|
||||
* In web mode, this server runs on a remote host.
|
||||
*/
|
||||
|
||||
import express from "express";
|
||||
import cors from "cors";
|
||||
import { WebSocketServer, WebSocket } from "ws";
|
||||
import { createServer } from "http";
|
||||
import dotenv from "dotenv";
|
||||
|
||||
import { createEventEmitter, type EventEmitter } from "./lib/events.js";
|
||||
import { initAllowedPaths } from "./lib/security.js";
|
||||
import { authMiddleware, getAuthStatus } from "./lib/auth.js";
|
||||
import { createFsRoutes } from "./routes/fs.js";
|
||||
import { createHealthRoutes } from "./routes/health.js";
|
||||
import { createAgentRoutes } from "./routes/agent.js";
|
||||
import { createSessionsRoutes } from "./routes/sessions.js";
|
||||
import { createFeaturesRoutes } from "./routes/features.js";
|
||||
import { createAutoModeRoutes } from "./routes/auto-mode.js";
|
||||
import { createWorktreeRoutes } from "./routes/worktree.js";
|
||||
import { createGitRoutes } from "./routes/git.js";
|
||||
import { createSetupRoutes } from "./routes/setup.js";
|
||||
import { createSuggestionsRoutes } from "./routes/suggestions.js";
|
||||
import { createModelsRoutes } from "./routes/models.js";
|
||||
import { createSpecRegenerationRoutes } from "./routes/spec-regeneration.js";
|
||||
import { createRunningAgentsRoutes } from "./routes/running-agents.js";
|
||||
import { createWorkspaceRoutes } from "./routes/workspace.js";
|
||||
import { createTemplatesRoutes } from "./routes/templates.js";
|
||||
import { AgentService } from "./services/agent-service.js";
|
||||
import { FeatureLoader } from "./services/feature-loader.js";
|
||||
import { AutoModeService } from "./services/auto-mode-service.js";
|
||||
|
||||
// Load environment variables
|
||||
dotenv.config();
|
||||
|
||||
const PORT = parseInt(process.env.PORT || "3008", 10);
|
||||
const DATA_DIR = process.env.DATA_DIR || "./data";
|
||||
|
||||
// Check for required environment variables
|
||||
// Claude Agent SDK supports EITHER OAuth token (subscription) OR API key (pay-per-use)
|
||||
const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;
|
||||
const hasOAuthToken = !!process.env.CLAUDE_CODE_OAUTH_TOKEN;
|
||||
|
||||
if (!hasAnthropicKey && !hasOAuthToken) {
|
||||
console.warn(`
|
||||
╔═══════════════════════════════════════════════════════════════════════╗
|
||||
║ ⚠️ WARNING: No Claude authentication configured ║
|
||||
║ ║
|
||||
║ The Claude Agent SDK requires authentication to function. ║
|
||||
║ ║
|
||||
║ Option 1 - Subscription (OAuth Token): ║
|
||||
║ export CLAUDE_CODE_OAUTH_TOKEN="your-oauth-token" ║
|
||||
║ ║
|
||||
║ Option 2 - Pay-per-use (API Key): ║
|
||||
║ export ANTHROPIC_API_KEY="sk-ant-..." ║
|
||||
║ ║
|
||||
║ Or use the setup wizard in Settings to configure authentication. ║
|
||||
╚═══════════════════════════════════════════════════════════════════════╝
|
||||
`);
|
||||
} else if (hasOAuthToken) {
|
||||
console.log("[Server] ✓ CLAUDE_CODE_OAUTH_TOKEN detected (subscription auth)");
|
||||
} else {
|
||||
console.log("[Server] ✓ ANTHROPIC_API_KEY detected (API key auth)");
|
||||
}
|
||||
|
||||
// Initialize security
|
||||
initAllowedPaths();
|
||||
|
||||
// Create Express app
|
||||
const app = express();
|
||||
|
||||
// Middleware
|
||||
app.use(
|
||||
cors({
|
||||
origin: process.env.CORS_ORIGIN || "*",
|
||||
credentials: true,
|
||||
})
|
||||
);
|
||||
app.use(express.json({ limit: "50mb" }));
|
||||
|
||||
// Create shared event emitter for streaming
|
||||
const events: EventEmitter = createEventEmitter();
|
||||
|
||||
// Create services
|
||||
const agentService = new AgentService(DATA_DIR, events);
|
||||
const featureLoader = new FeatureLoader();
|
||||
const autoModeService = new AutoModeService(events);
|
||||
|
||||
// Initialize services
|
||||
(async () => {
|
||||
await agentService.initialize();
|
||||
console.log("[Server] Agent service initialized");
|
||||
})();
|
||||
|
||||
// Mount API routes - health is unauthenticated for monitoring
|
||||
app.use("/api/health", createHealthRoutes());
|
||||
|
||||
// Apply authentication to all other routes
|
||||
app.use("/api", authMiddleware);
|
||||
|
||||
app.use("/api/fs", createFsRoutes(events));
|
||||
app.use("/api/agent", createAgentRoutes(agentService, events));
|
||||
app.use("/api/sessions", createSessionsRoutes(agentService));
|
||||
app.use("/api/features", createFeaturesRoutes(featureLoader));
|
||||
app.use("/api/auto-mode", createAutoModeRoutes(autoModeService));
|
||||
app.use("/api/worktree", createWorktreeRoutes());
|
||||
app.use("/api/git", createGitRoutes());
|
||||
app.use("/api/setup", createSetupRoutes());
|
||||
app.use("/api/suggestions", createSuggestionsRoutes(events));
|
||||
app.use("/api/models", createModelsRoutes());
|
||||
app.use("/api/spec-regeneration", createSpecRegenerationRoutes(events));
|
||||
app.use("/api/running-agents", createRunningAgentsRoutes(autoModeService));
|
||||
app.use("/api/workspace", createWorkspaceRoutes());
|
||||
app.use("/api/templates", createTemplatesRoutes());
|
||||
|
||||
// Create HTTP server
|
||||
const server = createServer(app);
|
||||
|
||||
// WebSocket server for streaming events
|
||||
const wss = new WebSocketServer({ server, path: "/api/events" });
|
||||
|
||||
wss.on("connection", (ws: WebSocket) => {
|
||||
console.log("[WebSocket] Client connected");
|
||||
|
||||
// Subscribe to all events and forward to this client
|
||||
const unsubscribe = events.subscribe((type, payload) => {
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(JSON.stringify({ type, payload }));
|
||||
}
|
||||
});
|
||||
|
||||
ws.on("close", () => {
|
||||
console.log("[WebSocket] Client disconnected");
|
||||
unsubscribe();
|
||||
});
|
||||
|
||||
ws.on("error", (error) => {
|
||||
console.error("[WebSocket] Error:", error);
|
||||
unsubscribe();
|
||||
});
|
||||
});
|
||||
|
||||
// Start server
|
||||
server.listen(PORT, () => {
|
||||
console.log(`
|
||||
╔═══════════════════════════════════════════════════════╗
|
||||
║ Automaker Backend Server ║
|
||||
╠═══════════════════════════════════════════════════════╣
|
||||
║ HTTP API: http://localhost:${PORT} ║
|
||||
║ WebSocket: ws://localhost:${PORT}/api/events ║
|
||||
║ Health: http://localhost:${PORT}/api/health ║
|
||||
╚═══════════════════════════════════════════════════════╝
|
||||
`);
|
||||
});
|
||||
|
||||
// Graceful shutdown
|
||||
process.on("SIGTERM", () => {
|
||||
console.log("SIGTERM received, shutting down...");
|
||||
server.close(() => {
|
||||
console.log("Server closed");
|
||||
process.exit(0);
|
||||
});
|
||||
});
|
||||
|
||||
process.on("SIGINT", () => {
|
||||
console.log("SIGINT received, shutting down...");
|
||||
server.close(() => {
|
||||
console.log("Server closed");
|
||||
process.exit(0);
|
||||
});
|
||||
});
|
||||
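Every event emitted through the shared emitter is forwarded to connected clients as a { type, payload } JSON frame on /api/events. A minimal browser-side sketch of consuming that stream (the URL and the handling of agent:stream are illustrative):

// Sketch: subscribing to streamed server events over WebSocket.
// Works in the browser; a Node client would use the "ws" package instead.
const socket = new WebSocket("ws://localhost:3008/api/events");

socket.addEventListener("message", (event) => {
  const { type, payload } = JSON.parse(event.data) as {
    type: string;
    payload: unknown;
  };
  if (type === "agent:stream") {
    // Streamed agent output arrives here; rendering is left to the caller.
    console.log("agent chunk:", payload);
  }
});

socket.addEventListener("close", () => {
  console.log("event stream closed");
});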
62
apps/server/src/lib/auth.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
/**
|
||||
* Authentication middleware for API security
|
||||
*
|
||||
* Supports API key authentication via header or environment variable.
|
||||
*/
|
||||
|
||||
import type { Request, Response, NextFunction } from "express";
|
||||
|
||||
// API key from environment (optional - if not set, auth is disabled)
|
||||
const API_KEY = process.env.AUTOMAKER_API_KEY;
|
||||
|
||||
/**
|
||||
* Authentication middleware
|
||||
*
|
||||
* If AUTOMAKER_API_KEY is set, requires matching key in X-API-Key header.
|
||||
* If not set, allows all requests (development mode).
|
||||
*/
|
||||
export function authMiddleware(req: Request, res: Response, next: NextFunction): void {
|
||||
// If no API key is configured, allow all requests
|
||||
if (!API_KEY) {
|
||||
next();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for API key in header
|
||||
const providedKey = req.headers["x-api-key"] as string | undefined;
|
||||
|
||||
if (!providedKey) {
|
||||
res.status(401).json({
|
||||
success: false,
|
||||
error: "Authentication required. Provide X-API-Key header.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (providedKey !== API_KEY) {
|
||||
res.status(403).json({
|
||||
success: false,
|
||||
error: "Invalid API key.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if authentication is enabled
|
||||
*/
|
||||
export function isAuthEnabled(): boolean {
|
||||
return !!API_KEY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authentication status for health endpoint
|
||||
*/
|
||||
export function getAuthStatus(): { enabled: boolean; method: string } {
|
||||
return {
|
||||
enabled: !!API_KEY,
|
||||
method: API_KEY ? "api_key" : "none",
|
||||
};
|
||||
}
|
||||
57
apps/server/src/lib/events.ts
Normal file
57
apps/server/src/lib/events.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* Event emitter for streaming events to WebSocket clients
|
||||
*/
|
||||
|
||||
export type EventType =
|
||||
| "agent:stream"
|
||||
| "auto-mode:event"
|
||||
| "auto-mode:started"
|
||||
| "auto-mode:stopped"
|
||||
| "auto-mode:idle"
|
||||
| "auto-mode:error"
|
||||
| "feature:started"
|
||||
| "feature:completed"
|
||||
| "feature:stopped"
|
||||
| "feature:error"
|
||||
| "feature:progress"
|
||||
| "feature:tool-use"
|
||||
| "feature:follow-up-started"
|
||||
| "feature:follow-up-completed"
|
||||
| "feature:verified"
|
||||
| "feature:committed"
|
||||
| "project:analysis-started"
|
||||
| "project:analysis-progress"
|
||||
| "project:analysis-completed"
|
||||
| "project:analysis-error"
|
||||
| "suggestions:event"
|
||||
| "spec-regeneration:event";
|
||||
|
||||
export type EventCallback = (type: EventType, payload: unknown) => void;
|
||||
|
||||
export interface EventEmitter {
|
||||
emit: (type: EventType, payload: unknown) => void;
|
||||
subscribe: (callback: EventCallback) => () => void;
|
||||
}
|
||||
|
||||
export function createEventEmitter(): EventEmitter {
|
||||
const subscribers = new Set<EventCallback>();
|
||||
|
||||
return {
|
||||
emit(type: EventType, payload: unknown) {
|
||||
for (const callback of subscribers) {
|
||||
try {
|
||||
callback(type, payload);
|
||||
} catch (error) {
|
||||
console.error("Error in event subscriber:", error);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
subscribe(callback: EventCallback) {
|
||||
subscribers.add(callback);
|
||||
return () => {
|
||||
subscribers.delete(callback);
|
||||
};
|
||||
},
|
||||
};
|
||||
}
|
||||
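A quick usage sketch of the emitter above: every subscriber sees every emitted event, and the function returned by subscribe detaches it again (the featureId value is illustrative):

// Sketch assuming this file sits next to src/index.ts, like the other server modules.
import { createEventEmitter } from "./lib/events.js";

const events = createEventEmitter();

const unsubscribe = events.subscribe((type, payload) => {
  console.log(`[event] ${type}`, payload);
});

events.emit("feature:started", { featureId: "feature-123" }); // logged
unsubscribe();
events.emit("feature:completed", { featureId: "feature-123" }); // not logged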
72
apps/server/src/lib/security.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* Security utilities for path validation
|
||||
*/
|
||||
|
||||
import path from "path";
|
||||
|
||||
// Allowed project directories - loaded from environment
|
||||
const allowedPaths = new Set<string>();
|
||||
|
||||
/**
|
||||
* Initialize allowed paths from environment variable
|
||||
*/
|
||||
export function initAllowedPaths(): void {
|
||||
const dirs = process.env.ALLOWED_PROJECT_DIRS;
|
||||
if (dirs) {
|
||||
for (const dir of dirs.split(",")) {
|
||||
const trimmed = dir.trim();
|
||||
if (trimmed) {
|
||||
allowedPaths.add(path.resolve(trimmed));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Always allow the data directory
|
||||
const dataDir = process.env.DATA_DIR;
|
||||
if (dataDir) {
|
||||
allowedPaths.add(path.resolve(dataDir));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a path to the allowed list
|
||||
*/
|
||||
export function addAllowedPath(filePath: string): void {
|
||||
allowedPaths.add(path.resolve(filePath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a path is allowed
|
||||
*/
|
||||
export function isPathAllowed(filePath: string): boolean {
|
||||
const resolved = path.resolve(filePath);
|
||||
|
||||
// Check if the path is under any allowed directory
|
||||
for (const allowed of allowedPaths) {
|
||||
if (resolved.startsWith(allowed + path.sep) || resolved === allowed) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a path and throw if not allowed
|
||||
*/
|
||||
export function validatePath(filePath: string): string {
|
||||
const resolved = path.resolve(filePath);
|
||||
|
||||
if (!isPathAllowed(resolved)) {
|
||||
throw new Error(`Access denied: ${filePath} is not in an allowed directory`);
|
||||
}
|
||||
|
||||
return resolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of allowed paths (for debugging)
|
||||
*/
|
||||
export function getAllowedPaths(): string[] {
|
||||
return Array.from(allowedPaths);
|
||||
}
|
||||
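A usage sketch of the path guard above: after initAllowedPaths reads ALLOWED_PROJECT_DIRS, isPathAllowed answers membership and validatePath throws for anything outside the allowed roots (directory values are illustrative):

// Sketch assuming the exports from ./lib/security.js.
import { initAllowedPaths, isPathAllowed, validatePath } from "./lib/security.js";

process.env.ALLOWED_PROJECT_DIRS = "/home/user/projects"; // illustrative
initAllowedPaths();

console.log(isPathAllowed("/home/user/projects/my-app/src/index.ts")); // true
console.log(isPathAllowed("/etc/passwd")); // false

try {
  validatePath("/etc/passwd");
} catch (error) {
  console.error((error as Error).message); // "Access denied: ..."
}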
132
apps/server/src/routes/agent.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
/**
|
||||
* Agent routes - HTTP API for Claude agent interactions
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { AgentService } from "../services/agent-service.js";
|
||||
import type { EventEmitter } from "../lib/events.js";
|
||||
|
||||
export function createAgentRoutes(
|
||||
agentService: AgentService,
|
||||
_events: EventEmitter
|
||||
): Router {
|
||||
const router = Router();
|
||||
|
||||
// Start a conversation
|
||||
router.post("/start", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId, workingDirectory } = req.body as {
|
||||
sessionId: string;
|
||||
workingDirectory?: string;
|
||||
};
|
||||
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ success: false, error: "sessionId is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await agentService.startConversation({
|
||||
sessionId,
|
||||
workingDirectory,
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Send a message
|
||||
router.post("/send", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId, message, workingDirectory, imagePaths } = req.body as {
|
||||
sessionId: string;
|
||||
message: string;
|
||||
workingDirectory?: string;
|
||||
imagePaths?: string[];
|
||||
};
|
||||
|
||||
if (!sessionId || !message) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "sessionId and message are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Start the message processing (don't await - it streams via WebSocket)
|
||||
agentService
|
||||
.sendMessage({
|
||||
sessionId,
|
||||
message,
|
||||
workingDirectory,
|
||||
imagePaths,
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("[Agent Route] Error sending message:", error);
|
||||
});
|
||||
|
||||
// Return immediately - responses come via WebSocket
|
||||
res.json({ success: true, message: "Message sent" });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get conversation history
|
||||
router.post("/history", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.body as { sessionId: string };
|
||||
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ success: false, error: "sessionId is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = agentService.getHistory(sessionId);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Stop execution
|
||||
router.post("/stop", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.body as { sessionId: string };
|
||||
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ success: false, error: "sessionId is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await agentService.stopExecution(sessionId);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Clear conversation
|
||||
router.post("/clear", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.body as { sessionId: string };
|
||||
|
||||
if (!sessionId) {
|
||||
res.status(400).json({ success: false, error: "sessionId is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await agentService.clearSession(sessionId);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
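The agent routes are fire-and-forget: /api/agent/send returns immediately and the streamed output arrives on the /api/events WebSocket shown earlier. A client-side sketch of starting a conversation and sending a message (the base URL, session id, and working directory are illustrative; assumes AUTOMAKER_API_KEY is unset, so no X-API-Key header is needed):

// Sketch: driving the agent routes over HTTP.
const BASE = "http://localhost:3008";

async function post(path: string, body: unknown): Promise<unknown> {
  const res = await fetch(`${BASE}${path}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  return res.json();
}

async function run(): Promise<void> {
  const sessionId = "session-demo"; // illustrative id
  await post("/api/agent/start", {
    sessionId,
    workingDirectory: "/home/user/projects/my-app", // illustrative path
  });
  // This resolves with { success, message }; the agent's replies stream via WebSocket.
  await post("/api/agent/send", { sessionId, message: "List the project structure" });
}

run().catch(console.error);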
261
apps/server/src/routes/auto-mode.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
/**
|
||||
* Auto Mode routes - HTTP API for autonomous feature implementation
|
||||
*
|
||||
* Uses the AutoModeService for real feature execution with Claude Agent SDK
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import type { AutoModeService } from "../services/auto-mode-service.js";
|
||||
|
||||
export function createAutoModeRoutes(autoModeService: AutoModeService): Router {
|
||||
const router = Router();
|
||||
|
||||
// Start auto mode loop
|
||||
router.post("/start", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, maxConcurrency } = req.body as {
|
||||
projectPath: string;
|
||||
maxConcurrency?: number;
|
||||
};
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
await autoModeService.startAutoLoop(projectPath, maxConcurrency || 3);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Stop auto mode loop
|
||||
router.post("/stop", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const runningCount = await autoModeService.stopAutoLoop();
|
||||
res.json({ success: true, runningFeatures: runningCount });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Stop a specific feature
|
||||
router.post("/stop-feature", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { featureId } = req.body as { featureId: string };
|
||||
|
||||
if (!featureId) {
|
||||
res.status(400).json({ success: false, error: "featureId is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const stopped = await autoModeService.stopFeature(featureId);
|
||||
res.json({ success: true, stopped });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get auto mode status
|
||||
router.post("/status", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const status = autoModeService.getStatus();
|
||||
res.json({
|
||||
success: true,
|
||||
...status,
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Run a single feature
|
||||
router.post("/run-feature", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId, useWorktrees } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
useWorktrees?: boolean;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Start execution in background
|
||||
autoModeService
|
||||
.executeFeature(projectPath, featureId, useWorktrees ?? true, false)
|
||||
.catch((error) => {
|
||||
console.error(`[AutoMode] Feature ${featureId} error:`, error);
|
||||
});
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Verify a feature
|
||||
router.post("/verify-feature", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const passes = await autoModeService.verifyFeature(projectPath, featureId);
|
||||
res.json({ success: true, passes });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Resume a feature
|
||||
router.post("/resume-feature", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId, useWorktrees } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
useWorktrees?: boolean;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Start resume in background
|
||||
autoModeService
|
||||
.resumeFeature(projectPath, featureId, useWorktrees ?? true)
|
||||
.catch((error) => {
|
||||
console.error(`[AutoMode] Resume feature ${featureId} error:`, error);
|
||||
});
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Check if context exists for a feature
|
||||
router.post("/context-exists", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const exists = await autoModeService.contextExists(projectPath, featureId);
|
||||
res.json({ success: true, exists });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Analyze project
|
||||
router.post("/analyze-project", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Start analysis in background
|
||||
autoModeService.analyzeProject(projectPath).catch((error) => {
|
||||
console.error(`[AutoMode] Project analysis error:`, error);
|
||||
});
|
||||
|
||||
res.json({ success: true, message: "Project analysis started" });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Follow up on a feature
|
||||
router.post("/follow-up-feature", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId, prompt, imagePaths } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
prompt: string;
|
||||
imagePaths?: string[];
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId || !prompt) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "projectPath, featureId, and prompt are required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Start follow-up in background
|
||||
autoModeService
|
||||
.followUpFeature(projectPath, featureId, prompt, imagePaths)
|
||||
.catch((error) => {
|
||||
console.error(`[AutoMode] Follow up feature ${featureId} error:`, error);
|
||||
});
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Commit feature changes
|
||||
router.post("/commit-feature", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const commitHash = await autoModeService.commitFeature(projectPath, featureId);
|
||||
res.json({ success: true, commitHash });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
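An illustrative client sketch (not taken from the repository) for the auto-mode endpoints above. It assumes the router is mounted at /api/auto-mode on a local server with global fetch available; the mount point and port are not shown in this diff.

// Hypothetical base URL; adjust host, port, and mount path to your deployment.
const AUTO_MODE_BASE = "http://localhost:3000/api/auto-mode";

async function runFeature(projectPath: string, featureId: string): Promise<void> {
  // The server starts execution in the background and replies immediately.
  await fetch(`${AUTO_MODE_BASE}/run-feature`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ projectPath, featureId, useWorktrees: true }),
  });

  // Poll the (POST) status endpoint; the exact fields come from
  // AutoModeService.getStatus(), which is defined elsewhere in the repo.
  const res = await fetch(`${AUTO_MODE_BASE}/status`, { method: "POST" });
  console.log(await res.json());
}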
apps/server/src/routes/features.ts (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
/**
|
||||
* Features routes - HTTP API for feature management
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { FeatureLoader, type Feature } from "../services/feature-loader.js";
|
||||
import { addAllowedPath } from "../lib/security.js";
|
||||
|
||||
export function createFeaturesRoutes(featureLoader: FeatureLoader): Router {
|
||||
const router = Router();
|
||||
|
||||
// List all features for a project
|
||||
router.post("/list", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Add project path to allowed paths
|
||||
addAllowedPath(projectPath);
|
||||
|
||||
const features = await featureLoader.getAll(projectPath);
|
||||
res.json({ success: true, features });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get a single feature
|
||||
router.post("/get", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const feature = await featureLoader.get(projectPath, featureId);
|
||||
if (!feature) {
|
||||
res.status(404).json({ success: false, error: "Feature not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true, feature });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Create a new feature
|
||||
router.post("/create", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, feature } = req.body as {
|
||||
projectPath: string;
|
||||
feature: Partial<Feature>;
|
||||
};
|
||||
|
||||
if (!projectPath || !feature) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and feature are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Add project path to allowed paths
|
||||
addAllowedPath(projectPath);
|
||||
|
||||
const created = await featureLoader.create(projectPath, feature);
|
||||
res.json({ success: true, feature: created });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Update a feature
|
||||
router.post("/update", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId, updates } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
updates: Partial<Feature>;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId || !updates) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "projectPath, featureId, and updates are required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const updated = await featureLoader.update(projectPath, featureId, updates);
|
||||
res.json({ success: true, feature: updated });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Delete a feature
|
||||
router.post("/delete", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const success = await featureLoader.delete(projectPath, featureId);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get agent output for a feature
|
||||
router.post("/agent-output", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId are required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const content = await featureLoader.getAgentOutput(projectPath, featureId);
|
||||
res.json({ success: true, content });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
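A minimal client sketch for the feature-management endpoints above, assuming the router is mounted at /api/features (the mount point is outside this excerpt). The feature field names used here are illustrative; the real Feature shape lives in services/feature-loader.ts, which is not shown.

const FEATURES_BASE = "http://localhost:3000/api/features"; // assumed base URL

async function createAndListFeatures(projectPath: string): Promise<void> {
  // Create a feature; "title" and "description" are placeholder field names.
  await fetch(`${FEATURES_BASE}/create`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      projectPath,
      feature: { title: "Add login form", description: "Email + password" },
    }),
  });

  // List every feature for the project.
  const res = await fetch(`${FEATURES_BASE}/list`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ projectPath }),
  });
  const { features } = await res.json();
  console.log(features);
}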
apps/server/src/routes/fs.ts (new file, 702 lines)
@@ -0,0 +1,702 @@
|
||||
/**
|
||||
* File system routes
|
||||
* Provides REST API equivalents for Electron IPC file operations
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import fs from "fs/promises";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import { validatePath, addAllowedPath, isPathAllowed } from "../lib/security.js";
|
||||
import type { EventEmitter } from "../lib/events.js";
|
||||
|
||||
export function createFsRoutes(_events: EventEmitter): Router {
|
||||
const router = Router();
|
||||
|
||||
// Read file
|
||||
router.post("/read", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filePath } = req.body as { filePath: string };
|
||||
|
||||
if (!filePath) {
|
||||
res.status(400).json({ success: false, error: "filePath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = validatePath(filePath);
|
||||
const content = await fs.readFile(resolvedPath, "utf-8");
|
||||
|
||||
res.json({ success: true, content });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Write file
|
||||
router.post("/write", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filePath, content } = req.body as {
|
||||
filePath: string;
|
||||
content: string;
|
||||
};
|
||||
|
||||
if (!filePath) {
|
||||
res.status(400).json({ success: false, error: "filePath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = validatePath(filePath);
|
||||
|
||||
// Ensure parent directory exists
|
||||
await fs.mkdir(path.dirname(resolvedPath), { recursive: true });
|
||||
await fs.writeFile(resolvedPath, content, "utf-8");
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Create directory
|
||||
router.post("/mkdir", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { dirPath } = req.body as { dirPath: string };
|
||||
|
||||
if (!dirPath) {
|
||||
res.status(400).json({ success: false, error: "dirPath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = validatePath(dirPath);
|
||||
await fs.mkdir(resolvedPath, { recursive: true });
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Read directory
|
||||
router.post("/readdir", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { dirPath } = req.body as { dirPath: string };
|
||||
|
||||
if (!dirPath) {
|
||||
res.status(400).json({ success: false, error: "dirPath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = validatePath(dirPath);
|
||||
const entries = await fs.readdir(resolvedPath, { withFileTypes: true });
|
||||
|
||||
const result = entries.map((entry) => ({
|
||||
name: entry.name,
|
||||
isDirectory: entry.isDirectory(),
|
||||
isFile: entry.isFile(),
|
||||
}));
|
||||
|
||||
res.json({ success: true, entries: result });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Check if file/directory exists
|
||||
router.post("/exists", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filePath } = req.body as { filePath: string };
|
||||
|
||||
if (!filePath) {
|
||||
res.status(400).json({ success: false, error: "filePath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// For exists, we check but don't require the path to be pre-allowed
|
||||
// This allows the UI to validate user-entered paths
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
|
||||
try {
|
||||
await fs.access(resolvedPath);
|
||||
res.json({ success: true, exists: true });
|
||||
} catch {
|
||||
res.json({ success: true, exists: false });
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get file stats
|
||||
router.post("/stat", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filePath } = req.body as { filePath: string };
|
||||
|
||||
if (!filePath) {
|
||||
res.status(400).json({ success: false, error: "filePath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = validatePath(filePath);
|
||||
const stats = await fs.stat(resolvedPath);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
stats: {
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile(),
|
||||
size: stats.size,
|
||||
mtime: stats.mtime,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Delete file
|
||||
router.post("/delete", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filePath } = req.body as { filePath: string };
|
||||
|
||||
if (!filePath) {
|
||||
res.status(400).json({ success: false, error: "filePath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = validatePath(filePath);
|
||||
await fs.rm(resolvedPath, { recursive: true });
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Validate and add path to allowed list
|
||||
// This is the web equivalent of dialog:openDirectory
|
||||
router.post("/validate-path", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filePath } = req.body as { filePath: string };
|
||||
|
||||
if (!filePath) {
|
||||
res.status(400).json({ success: false, error: "filePath is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
|
||||
// Check if path exists
|
||||
try {
|
||||
const stats = await fs.stat(resolvedPath);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
res.status(400).json({ success: false, error: "Path is not a directory" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Add to allowed paths
|
||||
addAllowedPath(resolvedPath);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
path: resolvedPath,
|
||||
isAllowed: isPathAllowed(resolvedPath),
|
||||
});
|
||||
} catch {
|
||||
res.status(400).json({ success: false, error: "Path does not exist" });
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Resolve directory path from directory name and file structure
|
||||
// Used when browser file picker only provides directory name (not full path)
|
||||
router.post("/resolve-directory", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { directoryName, sampleFiles, fileCount } = req.body as {
|
||||
directoryName: string;
|
||||
sampleFiles?: string[];
|
||||
fileCount?: number;
|
||||
};
|
||||
|
||||
if (!directoryName) {
|
||||
res.status(400).json({ success: false, error: "directoryName is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// If directoryName looks like an absolute path, try validating it directly
|
||||
if (path.isAbsolute(directoryName) || directoryName.includes(path.sep)) {
|
||||
try {
|
||||
const resolvedPath = path.resolve(directoryName);
|
||||
const stats = await fs.stat(resolvedPath);
|
||||
if (stats.isDirectory()) {
|
||||
addAllowedPath(resolvedPath);
|
||||
return res.json({
|
||||
success: true,
|
||||
path: resolvedPath,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Not a valid absolute path, continue to search
|
||||
}
|
||||
}
|
||||
|
||||
// Search for directory in common locations
|
||||
const searchPaths: string[] = [
|
||||
process.cwd(), // Current working directory
|
||||
process.env.HOME || process.env.USERPROFILE || "", // User home
|
||||
path.join(process.env.HOME || process.env.USERPROFILE || "", "Documents"),
|
||||
path.join(process.env.HOME || process.env.USERPROFILE || "", "Desktop"),
|
||||
// Common project locations
|
||||
path.join(process.env.HOME || process.env.USERPROFILE || "", "Projects"),
|
||||
].filter(Boolean);
|
||||
|
||||
// Also check parent of current working directory
|
||||
try {
|
||||
const parentDir = path.dirname(process.cwd());
|
||||
if (!searchPaths.includes(parentDir)) {
|
||||
searchPaths.push(parentDir);
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
|
||||
// Search for directory matching the name and file structure
|
||||
for (const searchPath of searchPaths) {
|
||||
try {
|
||||
const candidatePath = path.join(searchPath, directoryName);
|
||||
const stats = await fs.stat(candidatePath);
|
||||
|
||||
if (stats.isDirectory()) {
|
||||
// Verify it matches by checking for sample files
|
||||
if (sampleFiles && sampleFiles.length > 0) {
|
||||
let matches = 0;
|
||||
for (const sampleFile of sampleFiles.slice(0, 5)) {
|
||||
// Remove directory name prefix from sample file path
|
||||
const relativeFile = sampleFile.startsWith(directoryName + "/")
|
||||
? sampleFile.substring(directoryName.length + 1)
|
||||
: sampleFile.split("/").slice(1).join("/") || sampleFile.split("/").pop() || sampleFile;
|
||||
|
||||
try {
|
||||
const filePath = path.join(candidatePath, relativeFile);
|
||||
await fs.access(filePath);
|
||||
matches++;
|
||||
} catch {
|
||||
// File doesn't exist, continue checking
|
||||
}
|
||||
}
|
||||
|
||||
// If at least one file matches, consider it a match
|
||||
if (matches === 0 && sampleFiles.length > 0) {
|
||||
continue; // Try next candidate
|
||||
}
|
||||
}
|
||||
|
||||
// Found matching directory
|
||||
addAllowedPath(candidatePath);
|
||||
return res.json({
|
||||
success: true,
|
||||
path: candidatePath,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Directory doesn't exist at this location, continue searching
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Directory not found
|
||||
res.status(404).json({
|
||||
success: false,
|
||||
error: `Directory "${directoryName}" not found in common locations. Please ensure the directory exists.`,
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Save image to .automaker/images directory
|
||||
router.post("/save-image", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { data, filename, mimeType, projectPath } = req.body as {
|
||||
data: string;
|
||||
filename: string;
|
||||
mimeType: string;
|
||||
projectPath: string;
|
||||
};
|
||||
|
||||
if (!data || !filename || !projectPath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "data, filename, and projectPath are required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Create .automaker/images directory if it doesn't exist
|
||||
const imagesDir = path.join(projectPath, ".automaker", "images");
|
||||
await fs.mkdir(imagesDir, { recursive: true });
|
||||
|
||||
// Decode base64 data (remove data URL prefix if present)
|
||||
const base64Data = data.replace(/^data:image\/\w+;base64,/, "");
|
||||
const buffer = Buffer.from(base64Data, "base64");
|
||||
|
||||
// Generate unique filename with timestamp
|
||||
const timestamp = Date.now();
|
||||
const ext = path.extname(filename) || ".png";
|
||||
const baseName = path.basename(filename, ext);
|
||||
const uniqueFilename = `${baseName}-${timestamp}${ext}`;
|
||||
const filePath = path.join(imagesDir, uniqueFilename);
|
||||
|
||||
// Write file
|
||||
await fs.writeFile(filePath, buffer);
|
||||
|
||||
// Add project path to allowed paths if not already
|
||||
addAllowedPath(projectPath);
|
||||
|
||||
res.json({ success: true, path: filePath });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Browse directories - for file browser UI
|
||||
router.post("/browse", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { dirPath } = req.body as { dirPath?: string };
|
||||
|
||||
// Default to home directory if no path provided
|
||||
const targetPath = dirPath ? path.resolve(dirPath) : os.homedir();
|
||||
|
||||
// Detect available drives on Windows
|
||||
const detectDrives = async (): Promise<string[]> => {
|
||||
if (os.platform() !== "win32") {
|
||||
return [];
|
||||
}
|
||||
|
||||
const drives: string[] = [];
|
||||
const letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
|
||||
for (const letter of letters) {
|
||||
const drivePath = `${letter}:\\`;
|
||||
try {
|
||||
await fs.access(drivePath);
|
||||
drives.push(drivePath);
|
||||
} catch {
|
||||
// Drive doesn't exist, skip it
|
||||
}
|
||||
}
|
||||
|
||||
return drives;
|
||||
};
|
||||
|
||||
try {
|
||||
const stats = await fs.stat(targetPath);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
res.status(400).json({ success: false, error: "Path is not a directory" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Read directory contents
|
||||
const entries = await fs.readdir(targetPath, { withFileTypes: true });
|
||||
|
||||
// Filter for directories only and add parent directory option
|
||||
const directories = entries
|
||||
.filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
|
||||
.map((entry) => ({
|
||||
name: entry.name,
|
||||
path: path.join(targetPath, entry.name),
|
||||
}))
|
||||
.sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
// Get parent directory
|
||||
const parentPath = path.dirname(targetPath);
|
||||
const hasParent = parentPath !== targetPath;
|
||||
|
||||
// Get available drives
|
||||
const drives = await detectDrives();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
currentPath: targetPath,
|
||||
parentPath: hasParent ? parentPath : null,
|
||||
directories,
|
||||
drives,
|
||||
});
|
||||
} catch (error) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : "Failed to read directory",
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Serve image files
|
||||
router.get("/image", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { path: imagePath, projectPath } = req.query as {
|
||||
path?: string;
|
||||
projectPath?: string;
|
||||
};
|
||||
|
||||
if (!imagePath) {
|
||||
res.status(400).json({ success: false, error: "path is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Resolve full path
|
||||
const fullPath = path.isAbsolute(imagePath)
|
||||
? imagePath
|
||||
: projectPath
|
||||
? path.join(projectPath, imagePath)
|
||||
: imagePath;
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(fullPath);
|
||||
} catch {
|
||||
res.status(404).json({ success: false, error: "Image not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Read the file
|
||||
const buffer = await fs.readFile(fullPath);
|
||||
|
||||
// Determine MIME type from extension
|
||||
const ext = path.extname(fullPath).toLowerCase();
|
||||
const mimeTypes: Record<string, string> = {
|
||||
".png": "image/png",
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
".gif": "image/gif",
|
||||
".webp": "image/webp",
|
||||
".svg": "image/svg+xml",
|
||||
".bmp": "image/bmp",
|
||||
};
|
||||
|
||||
res.setHeader("Content-Type", mimeTypes[ext] || "application/octet-stream");
|
||||
res.setHeader("Cache-Control", "public, max-age=3600");
|
||||
res.send(buffer);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Save board background image to .automaker/board directory
|
||||
router.post("/save-board-background", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { data, filename, mimeType, projectPath } = req.body as {
|
||||
data: string;
|
||||
filename: string;
|
||||
mimeType: string;
|
||||
projectPath: string;
|
||||
};
|
||||
|
||||
if (!data || !filename || !projectPath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "data, filename, and projectPath are required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Create .automaker/board directory if it doesn't exist
|
||||
const boardDir = path.join(projectPath, ".automaker", "board");
|
||||
await fs.mkdir(boardDir, { recursive: true });
|
||||
|
||||
// Decode base64 data (remove data URL prefix if present)
|
||||
const base64Data = data.replace(/^data:image\/\w+;base64,/, "");
|
||||
const buffer = Buffer.from(base64Data, "base64");
|
||||
|
||||
// Use a fixed filename for the board background (overwrite previous)
|
||||
const ext = path.extname(filename) || ".png";
|
||||
const uniqueFilename = `background${ext}`;
|
||||
const filePath = path.join(boardDir, uniqueFilename);
|
||||
|
||||
// Write file
|
||||
await fs.writeFile(filePath, buffer);
|
||||
|
||||
// Add project path to allowed paths if not already
|
||||
addAllowedPath(projectPath);
|
||||
|
||||
// Return the relative path for storage
|
||||
const relativePath = `.automaker/board/${uniqueFilename}`;
|
||||
res.json({ success: true, path: relativePath });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Delete board background image
|
||||
router.post("/delete-board-background", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "projectPath is required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const boardDir = path.join(projectPath, ".automaker", "board");
|
||||
|
||||
try {
|
||||
// Try to remove all files in the board directory
|
||||
const files = await fs.readdir(boardDir);
|
||||
for (const file of files) {
|
||||
if (file.startsWith("background")) {
|
||||
await fs.unlink(path.join(boardDir, file));
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Directory may not exist, that's fine
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Browse directories for file picker
|
||||
// SECURITY: Restricted to home directory, allowed paths, and drive roots on Windows
|
||||
router.post("/browse", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { dirPath } = req.body as { dirPath?: string };
|
||||
const homeDir = os.homedir();
|
||||
|
||||
// Detect available drives on Windows
|
||||
const detectDrives = async (): Promise<string[]> => {
|
||||
if (os.platform() !== "win32") {
|
||||
return [];
|
||||
}
|
||||
|
||||
const drives: string[] = [];
|
||||
const letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
|
||||
for (const letter of letters) {
|
||||
const drivePath = `${letter}:\\`;
|
||||
try {
|
||||
await fs.access(drivePath);
|
||||
drives.push(drivePath);
|
||||
} catch {
|
||||
// Drive doesn't exist, skip it
|
||||
}
|
||||
}
|
||||
|
||||
return drives;
|
||||
};
|
||||
|
||||
// Check if a path is safe to browse
|
||||
const isSafePath = (targetPath: string): boolean => {
|
||||
const resolved = path.resolve(targetPath);
|
||||
const normalizedHome = path.resolve(homeDir);
|
||||
|
||||
// Allow browsing within home directory
|
||||
if (resolved === normalizedHome || resolved.startsWith(normalizedHome + path.sep)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Allow browsing already-allowed paths
|
||||
if (isPathAllowed(resolved)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// On Windows, allow drive roots for initial navigation
|
||||
if (os.platform() === "win32") {
|
||||
const driveRootMatch = /^[A-Z]:\\$/i.test(resolved);
|
||||
if (driveRootMatch) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// On Unix, allow root for initial navigation (but only list, not read files)
|
||||
if (os.platform() !== "win32" && resolved === "/") {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
// Default to home directory if no path provided
|
||||
const targetPath = dirPath ? path.resolve(dirPath) : homeDir;
|
||||
|
||||
// Security check: validate the path is safe to browse
|
||||
if (!isSafePath(targetPath)) {
|
||||
res.status(403).json({
|
||||
success: false,
|
||||
error: "Access denied: browsing is restricted to your home directory and allowed project paths",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const stats = await fs.stat(targetPath);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
res.status(400).json({ success: false, error: "Path is not a directory" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Read directory contents
|
||||
const entries = await fs.readdir(targetPath, { withFileTypes: true });
|
||||
|
||||
// Filter for directories only and exclude hidden directories
|
||||
const directories = entries
|
||||
.filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
|
||||
.map((entry) => ({
|
||||
name: entry.name,
|
||||
path: path.join(targetPath, entry.name),
|
||||
}))
|
||||
.sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
// Get parent directory (only if parent is also safe to browse)
|
||||
const parentPath = path.dirname(targetPath);
|
||||
const hasParent = parentPath !== targetPath && isSafePath(parentPath);
|
||||
|
||||
// Get available drives on Windows
|
||||
const drives = await detectDrives();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
currentPath: targetPath,
|
||||
parentPath: hasParent ? parentPath : null,
|
||||
directories,
|
||||
drives,
|
||||
});
|
||||
} catch (error) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : "Failed to read directory",
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
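A minimal client sketch for the file-system endpoints above, assuming an /api/fs mount (not shown here). Most routes validate paths against an allow list in lib/security.ts, so the project directory is registered first via /validate-path.

const FS_BASE = "http://localhost:3000/api/fs"; // assumed base URL

async function readProjectFile(projectDir: string, relativeFile: string): Promise<string> {
  // Allow-list the directory first; /read and /write validate paths against that list.
  await fetch(`${FS_BASE}/validate-path`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ filePath: projectDir }),
  });

  const res = await fetch(`${FS_BASE}/read`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ filePath: `${projectDir}/${relativeFile}` }),
  });
  const body = await res.json();
  if (!body.success) throw new Error(body.error);
  return body.content;
}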
apps/server/src/routes/git.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
|
||||
/**
|
||||
* Git routes - HTTP API for git operations (non-worktree)
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
export function createGitRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
// Get diffs for the main project
|
||||
router.post("/diffs", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath required" });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { stdout: diff } = await execAsync("git diff HEAD", {
|
||||
cwd: projectPath,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
});
|
||||
const { stdout: status } = await execAsync("git status --porcelain", {
|
||||
cwd: projectPath,
|
||||
});
|
||||
|
||||
const files = status
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((line) => {
|
||||
const statusChar = line[0];
|
||||
const filePath = line.slice(3);
|
||||
const statusMap: Record<string, string> = {
|
||||
M: "Modified",
|
||||
A: "Added",
|
||||
D: "Deleted",
|
||||
R: "Renamed",
|
||||
C: "Copied",
|
||||
U: "Updated",
|
||||
"?": "Untracked",
|
||||
};
|
||||
return {
|
||||
status: statusChar,
|
||||
path: filePath,
|
||||
statusText: statusMap[statusChar] || "Unknown",
|
||||
};
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
diff,
|
||||
files,
|
||||
hasChanges: files.length > 0,
|
||||
});
|
||||
} catch {
|
||||
res.json({ success: true, diff: "", files: [], hasChanges: false });
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get diff for a specific file
|
||||
router.post("/file-diff", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, filePath } = req.body as {
|
||||
projectPath: string;
|
||||
filePath: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !filePath) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and filePath required" });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const { stdout: diff } = await execAsync(`git diff HEAD -- "${filePath}"`, {
|
||||
cwd: projectPath,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
});
|
||||
|
||||
res.json({ success: true, diff, filePath });
|
||||
} catch {
|
||||
res.json({ success: true, diff: "", filePath });
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
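A minimal client sketch for the git diff endpoints above, assuming an /api/git mount (not shown in this excerpt):

const GIT_BASE = "http://localhost:3000/api/git"; // assumed base URL

async function printChangedFiles(projectPath: string): Promise<void> {
  const res = await fetch(`${GIT_BASE}/diffs`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ projectPath }),
  });
  const { files, hasChanges } = await res.json();

  if (!hasChanges) {
    console.log("Working tree clean");
    return;
  }
  for (const file of files) {
    // Each entry carries the porcelain status character plus a readable label.
    console.log(`${file.statusText}: ${file.path}`);
  }
}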
apps/server/src/routes/health.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
/**
 * Health check routes
 */

import { Router } from "express";
import { getAuthStatus } from "../lib/auth.js";

export function createHealthRoutes(): Router {
  const router = Router();

  // Basic health check
  router.get("/", (_req, res) => {
    res.json({
      status: "ok",
      timestamp: new Date().toISOString(),
      version: process.env.npm_package_version || "0.1.0",
    });
  });

  // Detailed health check
  router.get("/detailed", (_req, res) => {
    res.json({
      status: "ok",
      timestamp: new Date().toISOString(),
      version: process.env.npm_package_version || "0.1.0",
      uptime: process.uptime(),
      memory: process.memoryUsage(),
      dataDir: process.env.DATA_DIR || "./data",
      auth: getAuthStatus(),
      env: {
        nodeVersion: process.version,
        platform: process.platform,
        arch: process.arch,
      },
    });
  });

  return router;
}
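A quick probe for the health endpoints above, assuming they are mounted at /api/health (the mount point is configured elsewhere):

const HEALTH_BASE = "http://localhost:3000/api/health"; // assumed base URL

async function checkServer(): Promise<void> {
  const res = await fetch(`${HEALTH_BASE}/detailed`);
  const detailed = await res.json();
  // The auth field comes from getAuthStatus() in lib/auth.ts, which is not shown here.
  console.log(`status=${detailed.status}, uptime=${Math.round(detailed.uptime)}s`);
}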
apps/server/src/routes/models.ts (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* Models routes - HTTP API for model providers and availability
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
|
||||
interface ModelDefinition {
|
||||
id: string;
|
||||
name: string;
|
||||
provider: string;
|
||||
contextWindow: number;
|
||||
maxOutputTokens: number;
|
||||
supportsVision: boolean;
|
||||
supportsTools: boolean;
|
||||
}
|
||||
|
||||
interface ProviderStatus {
|
||||
available: boolean;
|
||||
hasApiKey: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export function createModelsRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
// Get available models
|
||||
router.get("/available", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const models: ModelDefinition[] = [
|
||||
{
|
||||
id: "claude-opus-4-5-20251101",
|
||||
name: "Claude Opus 4.5",
|
||||
provider: "anthropic",
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 16384,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "claude-sonnet-4-20250514",
|
||||
name: "Claude Sonnet 4",
|
||||
provider: "anthropic",
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 16384,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "claude-3-5-sonnet-20241022",
|
||||
name: "Claude 3.5 Sonnet",
|
||||
provider: "anthropic",
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 8192,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "claude-3-5-haiku-20241022",
|
||||
name: "Claude 3.5 Haiku",
|
||||
provider: "anthropic",
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 8192,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "gpt-4o",
|
||||
name: "GPT-4o",
|
||||
provider: "openai",
|
||||
contextWindow: 128000,
|
||||
maxOutputTokens: 16384,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "gpt-4o-mini",
|
||||
name: "GPT-4o Mini",
|
||||
provider: "openai",
|
||||
contextWindow: 128000,
|
||||
maxOutputTokens: 16384,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "o1",
|
||||
name: "o1",
|
||||
provider: "openai",
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 100000,
|
||||
supportsVision: true,
|
||||
supportsTools: false,
|
||||
},
|
||||
];
|
||||
|
||||
res.json({ success: true, models });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Check provider status
|
||||
router.get("/providers", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const providers: Record<string, ProviderStatus> = {
|
||||
anthropic: {
|
||||
available: !!process.env.ANTHROPIC_API_KEY,
|
||||
hasApiKey: !!process.env.ANTHROPIC_API_KEY,
|
||||
},
|
||||
openai: {
|
||||
available: !!process.env.OPENAI_API_KEY,
|
||||
hasApiKey: !!process.env.OPENAI_API_KEY,
|
||||
},
|
||||
google: {
|
||||
available: !!process.env.GOOGLE_API_KEY,
|
||||
hasApiKey: !!process.env.GOOGLE_API_KEY,
|
||||
},
|
||||
};
|
||||
|
||||
res.json({ success: true, providers });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
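A minimal sketch of consuming the model registry above, assuming an /api/models mount (not shown here). The interface mirrors a subset of the ModelDefinition fields returned by /available.

const MODELS_BASE = "http://localhost:3000/api/models"; // assumed base URL

interface ModelInfo {
  id: string;
  name: string;
  provider: string;
  supportsVision: boolean;
  supportsTools: boolean;
}

async function listVisionModels(): Promise<ModelInfo[]> {
  const { models } = (await (await fetch(`${MODELS_BASE}/available`)).json()) as {
    models: ModelInfo[];
  };
  return models.filter((m) => m.supportsVision);
}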
apps/server/src/routes/running-agents.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
/**
 * Running Agents routes - HTTP API for tracking active agent executions
 */

import { Router, type Request, type Response } from "express";
import type { AutoModeService } from "../services/auto-mode-service.js";

export function createRunningAgentsRoutes(autoModeService: AutoModeService): Router {
  const router = Router();

  // Get all running agents
  router.get("/", async (_req: Request, res: Response) => {
    try {
      const runningAgents = autoModeService.getRunningAgents();
      const status = autoModeService.getStatus();

      res.json({
        success: true,
        runningAgents,
        totalCount: runningAgents.length,
        autoLoopRunning: status.autoLoopRunning,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}
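A polling sketch for the running-agents endpoint above, assuming it is mounted at /api/running-agents (not shown in this excerpt):

const AGENTS_URL = "http://localhost:3000/api/running-agents"; // assumed URL

async function watchAgents(intervalMs = 5000): Promise<void> {
  // Poll until no agents remain and the auto loop has stopped.
  for (;;) {
    const { runningAgents, totalCount, autoLoopRunning } = await (await fetch(AGENTS_URL)).json();
    console.log(`${totalCount} agent(s) running, autoLoop=${autoLoopRunning}`, runningAgents);
    if (totalCount === 0 && !autoLoopRunning) break;
    await new Promise((r) => setTimeout(r, intervalMs));
  }
}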
apps/server/src/routes/sessions.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
/**
|
||||
* Sessions routes - HTTP API for session management
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { AgentService } from "../services/agent-service.js";
|
||||
|
||||
export function createSessionsRoutes(agentService: AgentService): Router {
|
||||
const router = Router();
|
||||
|
||||
// List all sessions
|
||||
router.get("/", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const includeArchived = req.query.includeArchived === "true";
|
||||
const sessionsRaw = await agentService.listSessions(includeArchived);
|
||||
|
||||
// Transform to match frontend SessionListItem interface
|
||||
const sessions = await Promise.all(
|
||||
sessionsRaw.map(async (s) => {
|
||||
const messages = await agentService.loadSession(s.id);
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
const preview = lastMessage?.content?.slice(0, 100) || "";
|
||||
|
||||
return {
|
||||
id: s.id,
|
||||
name: s.name,
|
||||
projectPath: s.projectPath || s.workingDirectory,
|
||||
workingDirectory: s.workingDirectory,
|
||||
createdAt: s.createdAt,
|
||||
updatedAt: s.updatedAt,
|
||||
isArchived: s.archived || false,
|
||||
tags: s.tags || [],
|
||||
messageCount: messages.length,
|
||||
preview,
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
res.json({ success: true, sessions });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Create a new session
|
||||
router.post("/", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { name, projectPath, workingDirectory } = req.body as {
|
||||
name: string;
|
||||
projectPath?: string;
|
||||
workingDirectory?: string;
|
||||
};
|
||||
|
||||
if (!name) {
|
||||
res.status(400).json({ success: false, error: "name is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const session = await agentService.createSession(
|
||||
name,
|
||||
projectPath,
|
||||
workingDirectory
|
||||
);
|
||||
res.json({ success: true, session });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Update a session
|
||||
router.put("/:sessionId", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
const { name, tags } = req.body as {
|
||||
name?: string;
|
||||
tags?: string[];
|
||||
};
|
||||
|
||||
const session = await agentService.updateSession(sessionId, { name, tags });
|
||||
if (!session) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true, session });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Archive a session
|
||||
router.post("/:sessionId/archive", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
const success = await agentService.archiveSession(sessionId);
|
||||
|
||||
if (!success) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Unarchive a session
|
||||
router.post("/:sessionId/unarchive", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
const success = await agentService.unarchiveSession(sessionId);
|
||||
|
||||
if (!success) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Delete a session
|
||||
router.delete("/:sessionId", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
const success = await agentService.deleteSession(sessionId);
|
||||
|
||||
if (!success) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
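A lifecycle sketch for the session endpoints above, assuming an /api/sessions mount (not shown here). The created session is assumed to expose an id field, as the list route suggests.

const SESSIONS_BASE = "http://localhost:3000/api/sessions"; // assumed base URL
const JSON_HEADERS = { "Content-Type": "application/json" };

async function sessionLifecycle(projectPath: string): Promise<void> {
  // Create a session
  const created = await (
    await fetch(SESSIONS_BASE, {
      method: "POST",
      headers: JSON_HEADERS,
      body: JSON.stringify({ name: "Planning chat", projectPath }),
    })
  ).json();
  const id = created.session.id;

  // Rename and tag it
  await fetch(`${SESSIONS_BASE}/${id}`, {
    method: "PUT",
    headers: JSON_HEADERS,
    body: JSON.stringify({ name: "Planning chat (done)", tags: ["planning"] }),
  });

  // Archive, then delete
  await fetch(`${SESSIONS_BASE}/${id}/archive`, { method: "POST" });
  await fetch(`${SESSIONS_BASE}/${id}`, { method: "DELETE" });
}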
apps/server/src/routes/setup.ts (new file, 521 lines)
@@ -0,0 +1,521 @@
|
||||
/**
|
||||
* Setup routes - HTTP API for CLI detection, API keys, and platform info
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
// Storage for API keys (in-memory cache)
|
||||
const apiKeys: Record<string, string> = {};
|
||||
|
||||
// Helper to persist API keys to .env file
|
||||
async function persistApiKeyToEnv(key: string, value: string): Promise<void> {
|
||||
const envPath = path.join(process.cwd(), ".env");
|
||||
|
||||
try {
|
||||
let envContent = "";
|
||||
try {
|
||||
envContent = await fs.readFile(envPath, "utf-8");
|
||||
} catch {
|
||||
// .env file doesn't exist, we'll create it
|
||||
}
|
||||
|
||||
// Parse existing env content
|
||||
const lines = envContent.split("\n");
|
||||
const keyRegex = new RegExp(`^${key}=`);
|
||||
let found = false;
|
||||
const newLines = lines.map((line) => {
|
||||
if (keyRegex.test(line)) {
|
||||
found = true;
|
||||
return `${key}=${value}`;
|
||||
}
|
||||
return line;
|
||||
});
|
||||
|
||||
if (!found) {
|
||||
// Add the key at the end
|
||||
newLines.push(`${key}=${value}`);
|
||||
}
|
||||
|
||||
await fs.writeFile(envPath, newLines.join("\n"));
|
||||
console.log(`[Setup] Persisted ${key} to .env file`);
|
||||
} catch (error) {
|
||||
console.error(`[Setup] Failed to persist ${key} to .env:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export function createSetupRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
// Get Claude CLI status
|
||||
router.get("/claude-status", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
let installed = false;
|
||||
let version = "";
|
||||
let cliPath = "";
|
||||
let method = "none";
|
||||
|
||||
// Try to find Claude CLI
|
||||
try {
|
||||
const { stdout } = await execAsync("which claude || where claude 2>/dev/null");
|
||||
cliPath = stdout.trim();
|
||||
installed = true;
|
||||
method = "path";
|
||||
|
||||
// Get version
|
||||
try {
|
||||
const { stdout: versionOut } = await execAsync("claude --version");
|
||||
version = versionOut.trim();
|
||||
} catch {
|
||||
// Version command might not be available
|
||||
}
|
||||
} catch {
|
||||
// Not in PATH, try common locations
|
||||
const commonPaths = [
|
||||
path.join(os.homedir(), ".local", "bin", "claude"),
|
||||
path.join(os.homedir(), ".claude", "local", "claude"),
|
||||
"/usr/local/bin/claude",
|
||||
path.join(os.homedir(), ".npm-global", "bin", "claude"),
|
||||
];
|
||||
|
||||
for (const p of commonPaths) {
|
||||
try {
|
||||
await fs.access(p);
|
||||
cliPath = p;
|
||||
installed = true;
|
||||
method = "local";
|
||||
|
||||
// Get version from this path
|
||||
try {
|
||||
const { stdout: versionOut } = await execAsync(`"${p}" --version`);
|
||||
version = versionOut.trim();
|
||||
} catch {
|
||||
// Version command might not be available
|
||||
}
|
||||
break;
|
||||
} catch {
|
||||
// Not found at this path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check authentication - detect all possible auth methods
|
||||
// Note: apiKeys.anthropic_oauth_token stores OAuth tokens from subscription auth
|
||||
// apiKeys.anthropic stores direct API keys for pay-per-use
|
||||
let auth = {
|
||||
authenticated: false,
|
||||
method: "none" as string,
|
||||
hasCredentialsFile: false,
|
||||
hasToken: false,
|
||||
hasStoredOAuthToken: !!apiKeys.anthropic_oauth_token,
|
||||
hasStoredApiKey: !!apiKeys.anthropic,
|
||||
hasEnvApiKey: !!process.env.ANTHROPIC_API_KEY,
|
||||
hasEnvOAuthToken: !!process.env.CLAUDE_CODE_OAUTH_TOKEN,
|
||||
// Additional fields for detailed status
|
||||
oauthTokenValid: false,
|
||||
apiKeyValid: false,
|
||||
hasCliAuth: false,
|
||||
hasRecentActivity: false,
|
||||
};
|
||||
|
||||
const claudeDir = path.join(os.homedir(), ".claude");
|
||||
|
||||
// Check for recent Claude CLI activity - indicates working authentication
|
||||
// The stats-cache.json file is only populated when the CLI is working properly
|
||||
const statsCachePath = path.join(claudeDir, "stats-cache.json");
|
||||
try {
|
||||
const statsContent = await fs.readFile(statsCachePath, "utf-8");
|
||||
const stats = JSON.parse(statsContent);
|
||||
|
||||
// Check if there's any activity (which means the CLI is authenticated and working)
|
||||
if (stats.dailyActivity && stats.dailyActivity.length > 0) {
|
||||
auth.hasRecentActivity = true;
|
||||
auth.hasCliAuth = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "cli_authenticated";
|
||||
}
|
||||
} catch {
|
||||
// Stats file doesn't exist or is invalid
|
||||
}
|
||||
|
||||
// Check for settings.json - indicates CLI has been set up
|
||||
const settingsPath = path.join(claudeDir, "settings.json");
|
||||
try {
|
||||
await fs.access(settingsPath);
|
||||
// If settings exist but no activity, CLI might be set up but not authenticated
|
||||
if (!auth.hasCliAuth) {
|
||||
// Try to check for other indicators of auth
|
||||
const sessionsDir = path.join(claudeDir, "projects");
|
||||
try {
|
||||
const sessions = await fs.readdir(sessionsDir);
|
||||
if (sessions.length > 0) {
|
||||
auth.hasCliAuth = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "cli_authenticated";
|
||||
}
|
||||
} catch {
|
||||
// Sessions directory doesn't exist
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Settings file doesn't exist
|
||||
}
|
||||
|
||||
// Check for credentials file (OAuth tokens from claude login) - legacy/alternative auth
|
||||
const credentialsPath = path.join(claudeDir, "credentials.json");
|
||||
try {
|
||||
const credentialsContent = await fs.readFile(credentialsPath, "utf-8");
|
||||
const credentials = JSON.parse(credentialsContent);
|
||||
auth.hasCredentialsFile = true;
|
||||
|
||||
// Check what type of token is in credentials
|
||||
if (credentials.oauth_token || credentials.access_token) {
|
||||
auth.hasStoredOAuthToken = true;
|
||||
auth.oauthTokenValid = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "oauth_token"; // Stored OAuth token from credentials file
|
||||
} else if (credentials.api_key) {
|
||||
auth.apiKeyValid = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "api_key"; // Stored API key in credentials file
|
||||
}
|
||||
} catch {
|
||||
// No credentials file or invalid format
|
||||
}
|
||||
|
||||
// Environment variables override stored credentials (higher priority)
|
||||
if (auth.hasEnvOAuthToken) {
|
||||
auth.authenticated = true;
|
||||
auth.oauthTokenValid = true;
|
||||
auth.method = "oauth_token_env"; // OAuth token from CLAUDE_CODE_OAUTH_TOKEN env var
|
||||
} else if (auth.hasEnvApiKey) {
|
||||
auth.authenticated = true;
|
||||
auth.apiKeyValid = true;
|
||||
auth.method = "api_key_env"; // API key from ANTHROPIC_API_KEY env var
|
||||
}
|
||||
|
||||
// In-memory stored OAuth token (from setup wizard - subscription auth)
|
||||
      if (!auth.authenticated && apiKeys.anthropic_oauth_token) {
        auth.authenticated = true;
        auth.oauthTokenValid = true;
        auth.method = "oauth_token"; // Stored OAuth token from setup wizard
      }

      // In-memory stored API key (from settings UI - pay-per-use)
      if (!auth.authenticated && apiKeys.anthropic) {
        auth.authenticated = true;
        auth.apiKeyValid = true;
        auth.method = "api_key"; // Manually stored API key
      }

      res.json({
        success: true,
        status: installed ? "installed" : "not_installed",
        installed,
        method,
        version,
        path: cliPath,
        auth,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Get Codex CLI status
  router.get("/codex-status", async (_req: Request, res: Response) => {
    try {
      let installed = false;
      let version = "";
      let cliPath = "";
      let method = "none";

      // Try to find Codex CLI
      try {
        const { stdout } = await execAsync("which codex || where codex 2>/dev/null");
        cliPath = stdout.trim();
        installed = true;
        method = "path";

        try {
          const { stdout: versionOut } = await execAsync("codex --version");
          version = versionOut.trim();
        } catch {
          // Version command might not be available
        }
      } catch {
        // Not found
      }

      // Check for OpenAI/Codex authentication
      let auth = {
        authenticated: false,
        method: "none" as string,
        hasAuthFile: false,
        hasEnvKey: !!process.env.OPENAI_API_KEY,
        hasStoredApiKey: !!apiKeys.openai,
        hasEnvApiKey: !!process.env.OPENAI_API_KEY,
        // Additional fields for subscription/account detection
        hasSubscription: false,
        cliLoggedIn: false,
      };

      // Check for OpenAI CLI auth file (~/.codex/auth.json or similar)
      const codexAuthPaths = [
        path.join(os.homedir(), ".codex", "auth.json"),
        path.join(os.homedir(), ".openai", "credentials"),
        path.join(os.homedir(), ".config", "openai", "credentials.json"),
      ];

      for (const authPath of codexAuthPaths) {
        try {
          const authContent = await fs.readFile(authPath, "utf-8");
          const authData = JSON.parse(authContent);
          auth.hasAuthFile = true;

          // Check for subscription/tokens
          if (authData.subscription || authData.plan || authData.account_type) {
            auth.hasSubscription = true;
            auth.authenticated = true;
            auth.method = "subscription"; // Codex subscription (Plus/Team)
          } else if (authData.access_token || authData.api_key) {
            auth.cliLoggedIn = true;
            auth.authenticated = true;
            auth.method = "cli_verified"; // CLI logged in with account
          }
          break;
        } catch {
          // Auth file not found at this path
        }
      }

      // Environment variable has highest priority
      if (auth.hasEnvApiKey) {
        auth.authenticated = true;
        auth.method = "env"; // OPENAI_API_KEY environment variable
      }

      // In-memory stored API key (from settings UI)
      if (!auth.authenticated && apiKeys.openai) {
        auth.authenticated = true;
        auth.method = "api_key"; // Manually stored API key
      }

      res.json({
        success: true,
        status: installed ? "installed" : "not_installed",
        method,
        version,
        path: cliPath,
        auth,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Install Claude CLI
  router.post("/install-claude", async (_req: Request, res: Response) => {
    try {
      // In web mode, we can't install CLIs directly
      // Return instructions instead
      res.json({
        success: false,
        error:
          "CLI installation requires terminal access. Please install manually using: npm install -g @anthropic-ai/claude-code",
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Install Codex CLI
  router.post("/install-codex", async (_req: Request, res: Response) => {
    try {
      res.json({
        success: false,
        error:
          "CLI installation requires terminal access. Please install manually using: npm install -g @openai/codex",
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Auth Claude
  router.post("/auth-claude", async (_req: Request, res: Response) => {
    try {
      res.json({
        success: true,
        requiresManualAuth: true,
        command: "claude login",
        message: "Please run 'claude login' in your terminal to authenticate",
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Auth Codex
  router.post("/auth-codex", async (req: Request, res: Response) => {
    try {
      const { apiKey } = req.body as { apiKey?: string };

      if (apiKey) {
        apiKeys.openai = apiKey;
        process.env.OPENAI_API_KEY = apiKey;
        res.json({ success: true });
      } else {
        res.json({
          success: true,
          requiresManualAuth: true,
          command: "codex auth login",
        });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Store API key
  router.post("/store-api-key", async (req: Request, res: Response) => {
    try {
      const { provider, apiKey } = req.body as { provider: string; apiKey: string };

      if (!provider || !apiKey) {
        res.status(400).json({ success: false, error: "provider and apiKey required" });
        return;
      }

      apiKeys[provider] = apiKey;

      // Also set as environment variable and persist to .env
      // IMPORTANT: OAuth tokens and API keys must be stored separately
      // - OAuth tokens (subscription auth) -> CLAUDE_CODE_OAUTH_TOKEN
      // - API keys (pay-per-use) -> ANTHROPIC_API_KEY
      if (provider === "anthropic_oauth_token") {
        // OAuth token from claude setup-token (subscription-based auth)
        process.env.CLAUDE_CODE_OAUTH_TOKEN = apiKey;
        await persistApiKeyToEnv("CLAUDE_CODE_OAUTH_TOKEN", apiKey);
        console.log("[Setup] Stored OAuth token as CLAUDE_CODE_OAUTH_TOKEN");
      } else if (provider === "anthropic") {
        // Direct API key (pay-per-use)
        process.env.ANTHROPIC_API_KEY = apiKey;
        await persistApiKeyToEnv("ANTHROPIC_API_KEY", apiKey);
        console.log("[Setup] Stored API key as ANTHROPIC_API_KEY");
      } else if (provider === "openai") {
        process.env.OPENAI_API_KEY = apiKey;
        await persistApiKeyToEnv("OPENAI_API_KEY", apiKey);
      } else if (provider === "google") {
        process.env.GOOGLE_API_KEY = apiKey;
        await persistApiKeyToEnv("GOOGLE_API_KEY", apiKey);
      }

      res.json({ success: true });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Get API keys status
  router.get("/api-keys", async (_req: Request, res: Response) => {
    try {
      res.json({
        success: true,
        hasAnthropicKey: !!apiKeys.anthropic || !!process.env.ANTHROPIC_API_KEY,
        hasOpenAIKey: !!apiKeys.openai || !!process.env.OPENAI_API_KEY,
        hasGoogleKey: !!apiKeys.google || !!process.env.GOOGLE_API_KEY,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Configure Codex MCP
  router.post("/configure-codex-mcp", async (req: Request, res: Response) => {
    try {
      const { projectPath } = req.body as { projectPath: string };

      if (!projectPath) {
        res.status(400).json({ success: false, error: "projectPath required" });
        return;
      }

      // Create .codex directory and config
      const codexDir = path.join(projectPath, ".codex");
      await fs.mkdir(codexDir, { recursive: true });

      const configPath = path.join(codexDir, "config.toml");
      const config = `# Codex configuration
[mcp]
enabled = true
`;
      await fs.writeFile(configPath, config);

      res.json({ success: true, configPath });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Get platform info
  router.get("/platform", async (_req: Request, res: Response) => {
    try {
      const platform = os.platform();
      res.json({
        success: true,
        platform,
        arch: os.arch(),
        homeDir: os.homedir(),
        isWindows: platform === "win32",
        isMac: platform === "darwin",
        isLinux: platform === "linux",
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Test OpenAI connection
  router.post("/test-openai", async (req: Request, res: Response) => {
    try {
      const { apiKey } = req.body as { apiKey?: string };
      const key = apiKey || apiKeys.openai || process.env.OPENAI_API_KEY;

      if (!key) {
        res.json({ success: false, error: "No OpenAI API key provided" });
        return;
      }

      // Simple test - just verify the key format
      if (!key.startsWith("sk-")) {
        res.json({ success: false, error: "Invalid OpenAI API key format" });
        return;
      }

      res.json({ success: true, message: "API key format is valid" });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}
apps/server/src/routes/spec-regeneration.ts (new file, 597 lines)
@@ -0,0 +1,597 @@
/**
|
||||
* Spec Regeneration routes - HTTP API for AI-powered spec generation
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import type { EventEmitter } from "../lib/events.js";
|
||||
|
||||
let isRunning = false;
|
||||
let currentAbortController: AbortController | null = null;
|
||||
|
||||
// Helper to log authentication status
|
||||
function logAuthStatus(context: string): void {
|
||||
const hasOAuthToken = !!process.env.CLAUDE_CODE_OAUTH_TOKEN;
|
||||
const hasApiKey = !!process.env.ANTHROPIC_API_KEY;
|
||||
|
||||
console.log(`[SpecRegeneration] ${context} - Auth Status:`);
|
||||
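// Only the first 20 characters of each credential are echoed below, so full tokens never end up in the logs.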
console.log(`[SpecRegeneration] CLAUDE_CODE_OAUTH_TOKEN: ${hasOAuthToken ? 'SET (' + process.env.CLAUDE_CODE_OAUTH_TOKEN?.substring(0, 20) + '...)' : 'NOT SET'}`);
|
||||
console.log(`[SpecRegeneration] ANTHROPIC_API_KEY: ${hasApiKey ? 'SET (' + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + '...)' : 'NOT SET'}`);
|
||||
|
||||
if (!hasOAuthToken && !hasApiKey) {
|
||||
console.error(`[SpecRegeneration] ⚠️ WARNING: No authentication configured! SDK will fail.`);
|
||||
}
|
||||
}
|
||||
|
||||
export function createSpecRegenerationRoutes(events: EventEmitter): Router {
|
||||
const router = Router();
|
||||
|
||||
// Create project spec from overview
|
||||
router.post("/create", async (req: Request, res: Response) => {
|
||||
console.log("[SpecRegeneration] ========== /create endpoint called ==========");
|
||||
console.log("[SpecRegeneration] Request body:", JSON.stringify(req.body, null, 2));
|
||||
|
||||
try {
|
||||
const { projectPath, projectOverview, generateFeatures } = req.body as {
|
||||
projectPath: string;
|
||||
projectOverview: string;
|
||||
generateFeatures?: boolean;
|
||||
};
|
||||
|
||||
console.log(`[SpecRegeneration] Parsed params:`);
|
||||
console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
|
||||
console.log(`[SpecRegeneration] projectOverview length: ${projectOverview?.length || 0} chars`);
|
||||
console.log(`[SpecRegeneration] generateFeatures: ${generateFeatures}`);
|
||||
|
||||
if (!projectPath || !projectOverview) {
|
||||
console.error("[SpecRegeneration] Missing required parameters");
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "projectPath and projectOverview required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (isRunning) {
|
||||
console.warn("[SpecRegeneration] Generation already running, rejecting request");
|
||||
res.json({ success: false, error: "Spec generation already running" });
|
||||
return;
|
||||
}
|
||||
|
||||
logAuthStatus("Before starting generation");
|
||||
|
||||
isRunning = true;
|
||||
currentAbortController = new AbortController();
|
||||
console.log("[SpecRegeneration] Starting background generation task...");
|
||||
|
||||
// Start generation in background
|
||||
generateSpec(
|
||||
projectPath,
|
||||
projectOverview,
|
||||
events,
|
||||
currentAbortController,
|
||||
generateFeatures
|
||||
)
|
||||
.catch((error) => {
|
||||
console.error("[SpecRegeneration] ❌ Generation failed with error:");
|
||||
console.error("[SpecRegeneration] Error name:", error?.name);
|
||||
console.error("[SpecRegeneration] Error message:", error?.message);
|
||||
console.error("[SpecRegeneration] Error stack:", error?.stack);
|
||||
console.error("[SpecRegeneration] Full error object:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "spec_error",
|
||||
error: error.message || String(error),
|
||||
});
|
||||
})
|
||||
.finally(() => {
|
||||
console.log("[SpecRegeneration] Generation task finished (success or error)");
|
||||
isRunning = false;
|
||||
currentAbortController = null;
|
||||
});
|
||||
|
||||
console.log("[SpecRegeneration] Returning success response (generation running in background)");
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error("[SpecRegeneration] ❌ Route handler exception:");
|
||||
console.error("[SpecRegeneration] Error:", error);
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Generate from project definition
|
||||
router.post("/generate", async (req: Request, res: Response) => {
|
||||
console.log("[SpecRegeneration] ========== /generate endpoint called ==========");
|
||||
console.log("[SpecRegeneration] Request body:", JSON.stringify(req.body, null, 2));
|
||||
|
||||
try {
|
||||
const { projectPath, projectDefinition } = req.body as {
|
||||
projectPath: string;
|
||||
projectDefinition: string;
|
||||
};
|
||||
|
||||
console.log(`[SpecRegeneration] Parsed params:`);
|
||||
console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
|
||||
console.log(`[SpecRegeneration] projectDefinition length: ${projectDefinition?.length || 0} chars`);
|
||||
|
||||
if (!projectPath || !projectDefinition) {
|
||||
console.error("[SpecRegeneration] Missing required parameters");
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "projectPath and projectDefinition required",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (isRunning) {
|
||||
console.warn("[SpecRegeneration] Generation already running, rejecting request");
|
||||
res.json({ success: false, error: "Spec generation already running" });
|
||||
return;
|
||||
}
|
||||
|
||||
logAuthStatus("Before starting generation");
|
||||
|
||||
isRunning = true;
|
||||
currentAbortController = new AbortController();
|
||||
console.log("[SpecRegeneration] Starting background generation task...");
|
||||
|
||||
generateSpec(
|
||||
projectPath,
|
||||
projectDefinition,
|
||||
events,
|
||||
currentAbortController,
|
||||
false
|
||||
)
|
||||
.catch((error) => {
|
||||
console.error("[SpecRegeneration] ❌ Generation failed with error:");
|
||||
console.error("[SpecRegeneration] Error name:", error?.name);
|
||||
console.error("[SpecRegeneration] Error message:", error?.message);
|
||||
console.error("[SpecRegeneration] Error stack:", error?.stack);
|
||||
console.error("[SpecRegeneration] Full error object:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "spec_error",
|
||||
error: error.message || String(error),
|
||||
});
|
||||
})
|
||||
.finally(() => {
|
||||
console.log("[SpecRegeneration] Generation task finished (success or error)");
|
||||
isRunning = false;
|
||||
currentAbortController = null;
|
||||
});
|
||||
|
||||
console.log("[SpecRegeneration] Returning success response (generation running in background)");
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error("[SpecRegeneration] ❌ Route handler exception:");
|
||||
console.error("[SpecRegeneration] Error:", error);
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Generate features from existing spec
|
||||
router.post("/generate-features", async (req: Request, res: Response) => {
|
||||
console.log("[SpecRegeneration] ========== /generate-features endpoint called ==========");
|
||||
console.log("[SpecRegeneration] Request body:", JSON.stringify(req.body, null, 2));
|
||||
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
|
||||
|
||||
if (!projectPath) {
|
||||
console.error("[SpecRegeneration] Missing projectPath parameter");
|
||||
res.status(400).json({ success: false, error: "projectPath required" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (isRunning) {
|
||||
console.warn("[SpecRegeneration] Generation already running, rejecting request");
|
||||
res.json({ success: false, error: "Generation already running" });
|
||||
return;
|
||||
}
|
||||
|
||||
logAuthStatus("Before starting feature generation");
|
||||
|
||||
isRunning = true;
|
||||
currentAbortController = new AbortController();
|
||||
console.log("[SpecRegeneration] Starting background feature generation task...");
|
||||
|
||||
generateFeaturesFromSpec(projectPath, events, currentAbortController)
|
||||
.catch((error) => {
|
||||
console.error("[SpecRegeneration] ❌ Feature generation failed with error:");
|
||||
console.error("[SpecRegeneration] Error name:", error?.name);
|
||||
console.error("[SpecRegeneration] Error message:", error?.message);
|
||||
console.error("[SpecRegeneration] Error stack:", error?.stack);
|
||||
console.error("[SpecRegeneration] Full error object:", JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "features_error",
|
||||
error: error.message || String(error),
|
||||
});
|
||||
})
|
||||
.finally(() => {
|
||||
console.log("[SpecRegeneration] Feature generation task finished (success or error)");
|
||||
isRunning = false;
|
||||
currentAbortController = null;
|
||||
});
|
||||
|
||||
console.log("[SpecRegeneration] Returning success response (generation running in background)");
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error("[SpecRegeneration] ❌ Route handler exception:");
|
||||
console.error("[SpecRegeneration] Error:", error);
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Stop generation
|
||||
router.post("/stop", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
if (currentAbortController) {
|
||||
currentAbortController.abort();
|
||||
}
|
||||
isRunning = false;
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get status
|
||||
router.get("/status", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
res.json({ success: true, isRunning });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
async function generateSpec(
|
||||
projectPath: string,
|
||||
projectOverview: string,
|
||||
events: EventEmitter,
|
||||
abortController: AbortController,
|
||||
generateFeatures?: boolean
|
||||
) {
|
||||
console.log("[SpecRegeneration] ========== generateSpec() started ==========");
|
||||
console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
|
||||
console.log(`[SpecRegeneration] projectOverview length: ${projectOverview.length} chars`);
|
||||
console.log(`[SpecRegeneration] generateFeatures: ${generateFeatures}`);
|
||||
|
||||
const prompt = `You are helping to define a software project specification.
|
||||
|
||||
Project Overview:
|
||||
${projectOverview}
|
||||
|
||||
Based on this overview, analyze the project and create a comprehensive specification that includes:
|
||||
|
||||
1. **Project Summary** - Brief description of what the project does
|
||||
2. **Core Features** - Main functionality the project needs
|
||||
3. **Technical Stack** - Recommended technologies and frameworks
|
||||
4. **Architecture** - High-level system design
|
||||
5. **Data Models** - Key entities and their relationships
|
||||
6. **API Design** - Main endpoints/interfaces needed
|
||||
7. **User Experience** - Key user flows and interactions
|
||||
|
||||
${generateFeatures ? `
|
||||
Also generate a list of features to implement. For each feature provide:
|
||||
- ID (lowercase-hyphenated)
|
||||
- Title
|
||||
- Description
|
||||
- Priority (1=high, 2=medium, 3=low)
|
||||
- Estimated complexity (simple, moderate, complex)
|
||||
` : ""}
|
||||
|
||||
Format your response as markdown. Be specific and actionable.`;
|
||||
|
||||
console.log(`[SpecRegeneration] Prompt length: ${prompt.length} chars`);
|
||||
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "spec_progress",
|
||||
content: "Starting spec generation...\n",
|
||||
});
|
||||
|
||||
const options: Options = {
|
||||
model: "claude-opus-4-5-20251101",
|
||||
maxTurns: 10,
|
||||
cwd: projectPath,
|
||||
allowedTools: ["Read", "Glob", "Grep"],
|
||||
permissionMode: "acceptEdits",
|
||||
abortController,
|
||||
};
|
||||
|
||||
console.log("[SpecRegeneration] SDK Options:", JSON.stringify(options, null, 2));
|
||||
console.log("[SpecRegeneration] Calling Claude Agent SDK query()...");
|
||||
|
||||
// Log auth status right before the SDK call
|
||||
logAuthStatus("Right before SDK query()");
|
||||
|
||||
let stream;
|
||||
try {
|
||||
stream = query({ prompt, options });
|
||||
console.log("[SpecRegeneration] query() returned stream successfully");
|
||||
} catch (queryError) {
|
||||
console.error("[SpecRegeneration] ❌ query() threw an exception:");
|
||||
console.error("[SpecRegeneration] Error:", queryError);
|
||||
throw queryError;
|
||||
}
|
||||
|
||||
let responseText = "";
|
||||
let messageCount = 0;
|
||||
|
||||
console.log("[SpecRegeneration] Starting to iterate over stream...");
|
||||
|
||||
try {
|
||||
for await (const msg of stream) {
|
||||
messageCount++;
|
||||
console.log(`[SpecRegeneration] Stream message #${messageCount}:`, JSON.stringify({ type: msg.type, subtype: (msg as any).subtype }, null, 2));
|
||||
|
||||
if (msg.type === "assistant" && msg.message.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === "text") {
|
||||
responseText = block.text;
|
||||
console.log(`[SpecRegeneration] Text block received (${block.text.length} chars)`);
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "spec_progress",
|
||||
content: block.text,
|
||||
});
|
||||
} else if (block.type === "tool_use") {
|
||||
console.log(`[SpecRegeneration] Tool use: ${block.name}`);
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "spec_tool",
|
||||
tool: block.name,
|
||||
input: block.input,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (msg.type === "result" && (msg as any).subtype === "success") {
|
||||
console.log("[SpecRegeneration] Received success result");
|
||||
responseText = (msg as any).result || responseText;
|
||||
} else if (msg.type === "error") {
|
||||
console.error("[SpecRegeneration] ❌ Received error message from stream:");
|
||||
console.error("[SpecRegeneration] Error message:", JSON.stringify(msg, null, 2));
|
||||
}
|
||||
}
|
||||
} catch (streamError) {
|
||||
console.error("[SpecRegeneration] ❌ Error while iterating stream:");
|
||||
console.error("[SpecRegeneration] Stream error:", streamError);
|
||||
throw streamError;
|
||||
}
|
||||
|
||||
console.log(`[SpecRegeneration] Stream iteration complete. Total messages: ${messageCount}`);
|
||||
console.log(`[SpecRegeneration] Response text length: ${responseText.length} chars`);
|
||||
|
||||
// Save spec
|
||||
const specDir = path.join(projectPath, ".automaker");
|
||||
const specPath = path.join(specDir, "app_spec.txt");
|
||||
|
||||
console.log(`[SpecRegeneration] Saving spec to: ${specPath}`);
|
||||
|
||||
await fs.mkdir(specDir, { recursive: true });
|
||||
await fs.writeFile(specPath, responseText);
|
||||
|
||||
console.log("[SpecRegeneration] Spec saved successfully");
|
||||
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "spec_complete",
|
||||
specPath,
|
||||
content: responseText,
|
||||
});
|
||||
|
||||
// If generate features was requested, parse and create them
|
||||
if (generateFeatures) {
|
||||
console.log("[SpecRegeneration] Starting feature generation...");
|
||||
await parseAndCreateFeatures(projectPath, responseText, events);
|
||||
}
|
||||
|
||||
console.log("[SpecRegeneration] ========== generateSpec() completed ==========");
|
||||
}
|
||||
|
||||
async function generateFeaturesFromSpec(
|
||||
projectPath: string,
|
||||
events: EventEmitter,
|
||||
abortController: AbortController
|
||||
) {
|
||||
console.log("[SpecRegeneration] ========== generateFeaturesFromSpec() started ==========");
|
||||
console.log(`[SpecRegeneration] projectPath: ${projectPath}`);
|
||||
|
||||
// Read existing spec
|
||||
const specPath = path.join(projectPath, ".automaker", "app_spec.txt");
|
||||
let spec: string;
|
||||
|
||||
console.log(`[SpecRegeneration] Reading spec from: ${specPath}`);
|
||||
|
||||
try {
|
||||
spec = await fs.readFile(specPath, "utf-8");
|
||||
console.log(`[SpecRegeneration] Spec loaded successfully (${spec.length} chars)`);
|
||||
} catch (readError) {
|
||||
console.error("[SpecRegeneration] ❌ Failed to read spec file:", readError);
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "features_error",
|
||||
error: "No project spec found. Generate spec first.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const prompt = `Based on this project specification:
|
||||
|
||||
${spec}
|
||||
|
||||
Generate a prioritized list of implementable features. For each feature provide:
|
||||
|
||||
1. **id**: A unique lowercase-hyphenated identifier
|
||||
2. **title**: Short descriptive title
|
||||
3. **description**: What this feature does (2-3 sentences)
|
||||
4. **priority**: 1 (high), 2 (medium), or 3 (low)
|
||||
5. **complexity**: "simple", "moderate", or "complex"
|
||||
6. **dependencies**: Array of feature IDs this depends on (can be empty)
|
||||
|
||||
Format as JSON:
|
||||
{
|
||||
"features": [
|
||||
{
|
||||
"id": "feature-id",
|
||||
"title": "Feature Title",
|
||||
"description": "What it does",
|
||||
"priority": 1,
|
||||
"complexity": "moderate",
|
||||
"dependencies": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Generate 5-15 features that build on each other logically.`;
|
||||
|
||||
console.log(`[SpecRegeneration] Prompt length: ${prompt.length} chars`);
|
||||
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "features_progress",
|
||||
content: "Analyzing spec and generating features...\n",
|
||||
});
|
||||
|
||||
const options: Options = {
|
||||
model: "claude-sonnet-4-20250514",
|
||||
maxTurns: 5,
|
||||
cwd: projectPath,
|
||||
allowedTools: ["Read", "Glob"],
|
||||
permissionMode: "acceptEdits",
|
||||
abortController,
|
||||
};
|
||||
|
||||
console.log("[SpecRegeneration] SDK Options:", JSON.stringify(options, null, 2));
|
||||
console.log("[SpecRegeneration] Calling Claude Agent SDK query() for features...");
|
||||
|
||||
logAuthStatus("Right before SDK query() for features");
|
||||
|
||||
let stream;
|
||||
try {
|
||||
stream = query({ prompt, options });
|
||||
console.log("[SpecRegeneration] query() returned stream successfully");
|
||||
} catch (queryError) {
|
||||
console.error("[SpecRegeneration] ❌ query() threw an exception:");
|
||||
console.error("[SpecRegeneration] Error:", queryError);
|
||||
throw queryError;
|
||||
}
|
||||
|
||||
let responseText = "";
|
||||
let messageCount = 0;
|
||||
|
||||
console.log("[SpecRegeneration] Starting to iterate over feature stream...");
|
||||
|
||||
try {
|
||||
for await (const msg of stream) {
|
||||
messageCount++;
|
||||
console.log(`[SpecRegeneration] Feature stream message #${messageCount}:`, JSON.stringify({ type: msg.type, subtype: (msg as any).subtype }, null, 2));
|
||||
|
||||
if (msg.type === "assistant" && msg.message.content) {
|
||||
for (const block of msg.message.content) {
|
||||
if (block.type === "text") {
|
||||
responseText = block.text;
|
||||
console.log(`[SpecRegeneration] Feature text block received (${block.text.length} chars)`);
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "features_progress",
|
||||
content: block.text,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (msg.type === "result" && (msg as any).subtype === "success") {
|
||||
console.log("[SpecRegeneration] Received success result for features");
|
||||
responseText = (msg as any).result || responseText;
|
||||
} else if (msg.type === "error") {
|
||||
console.error("[SpecRegeneration] ❌ Received error message from feature stream:");
|
||||
console.error("[SpecRegeneration] Error message:", JSON.stringify(msg, null, 2));
|
||||
}
|
||||
}
|
||||
} catch (streamError) {
|
||||
console.error("[SpecRegeneration] ❌ Error while iterating feature stream:");
|
||||
console.error("[SpecRegeneration] Stream error:", streamError);
|
||||
throw streamError;
|
||||
}
|
||||
|
||||
console.log(`[SpecRegeneration] Feature stream complete. Total messages: ${messageCount}`);
|
||||
console.log(`[SpecRegeneration] Feature response length: ${responseText.length} chars`);
|
||||
|
||||
await parseAndCreateFeatures(projectPath, responseText, events);
|
||||
|
||||
console.log("[SpecRegeneration] ========== generateFeaturesFromSpec() completed ==========");
|
||||
}
|
||||
|
||||
async function parseAndCreateFeatures(
|
||||
projectPath: string,
|
||||
content: string,
|
||||
events: EventEmitter
|
||||
) {
|
||||
console.log("[SpecRegeneration] ========== parseAndCreateFeatures() started ==========");
|
||||
console.log(`[SpecRegeneration] Content length: ${content.length} chars`);
|
||||
|
||||
try {
|
||||
// Extract JSON from response
|
||||
console.log("[SpecRegeneration] Extracting JSON from response...");
|
||||
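// Greedy match: grabs everything from the first "{" to the last "}" in the response, as long as a "features" key appears somewhere in between.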
const jsonMatch = content.match(/\{[\s\S]*"features"[\s\S]*\}/);
|
||||
if (!jsonMatch) {
|
||||
console.error("[SpecRegeneration] ❌ No valid JSON found in response");
|
||||
console.error("[SpecRegeneration] Content preview:", content.substring(0, 500));
|
||||
throw new Error("No valid JSON found in response");
|
||||
}
|
||||
|
||||
console.log(`[SpecRegeneration] JSON match found (${jsonMatch[0].length} chars)`);
|
||||
|
||||
const parsed = JSON.parse(jsonMatch[0]);
|
||||
console.log(`[SpecRegeneration] Parsed ${parsed.features?.length || 0} features`);
|
||||
|
||||
const featuresDir = path.join(projectPath, ".automaker", "features");
|
||||
await fs.mkdir(featuresDir, { recursive: true });
|
||||
|
||||
const createdFeatures: Array<{ id: string; title: string }> = [];
|
||||
|
||||
for (const feature of parsed.features) {
|
||||
console.log(`[SpecRegeneration] Creating feature: ${feature.id}`);
|
||||
const featureDir = path.join(featuresDir, feature.id);
|
||||
await fs.mkdir(featureDir, { recursive: true });
|
||||
|
||||
const featureData = {
|
||||
id: feature.id,
|
||||
title: feature.title,
|
||||
description: feature.description,
|
||||
status: "backlog", // Features go to backlog - user must manually start them
|
||||
priority: feature.priority || 2,
|
||||
complexity: feature.complexity || "moderate",
|
||||
dependencies: feature.dependencies || [],
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(featureDir, "feature.json"),
|
||||
JSON.stringify(featureData, null, 2)
|
||||
);
|
||||
|
||||
createdFeatures.push({ id: feature.id, title: feature.title });
|
||||
}
|
||||
|
||||
console.log(`[SpecRegeneration] ✓ Created ${createdFeatures.length} features successfully`);
|
||||
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "features_complete",
|
||||
features: createdFeatures,
|
||||
count: createdFeatures.length,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("[SpecRegeneration] ❌ parseAndCreateFeatures() failed:");
|
||||
console.error("[SpecRegeneration] Error:", error);
|
||||
events.emit("spec-regeneration:event", {
|
||||
type: "features_error",
|
||||
error: (error as Error).message,
|
||||
});
|
||||
}
|
||||
|
||||
console.log("[SpecRegeneration] ========== parseAndCreateFeatures() completed ==========");
|
||||
}
|
||||
apps/server/src/routes/suggestions.ts (new file, 192 lines)
@@ -0,0 +1,192 @@
/**
 * Suggestions routes - HTTP API for AI-powered feature suggestions
 */

import { Router, type Request, type Response } from "express";
import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
import type { EventEmitter } from "../lib/events.js";

let isRunning = false;
let currentAbortController: AbortController | null = null;

export function createSuggestionsRoutes(events: EventEmitter): Router {
  const router = Router();

  // Generate suggestions
  router.post("/generate", async (req: Request, res: Response) => {
    try {
      const { projectPath, suggestionType = "features" } = req.body as {
        projectPath: string;
        suggestionType?: string;
      };

      if (!projectPath) {
        res.status(400).json({ success: false, error: "projectPath required" });
        return;
      }

      if (isRunning) {
        res.json({ success: false, error: "Suggestions generation is already running" });
        return;
      }

      isRunning = true;
      currentAbortController = new AbortController();

      // Start generation in background
      generateSuggestions(projectPath, suggestionType, events, currentAbortController)
        .catch((error) => {
          console.error("[Suggestions] Error:", error);
          events.emit("suggestions:event", {
            type: "suggestions_error",
            error: error.message,
          });
        })
        .finally(() => {
          isRunning = false;
          currentAbortController = null;
        });

      res.json({ success: true });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Stop suggestions generation
  router.post("/stop", async (_req: Request, res: Response) => {
    try {
      if (currentAbortController) {
        currentAbortController.abort();
      }
      isRunning = false;
      res.json({ success: true });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Get status
  router.get("/status", async (_req: Request, res: Response) => {
    try {
      res.json({ success: true, isRunning });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}

async function generateSuggestions(
  projectPath: string,
  suggestionType: string,
  events: EventEmitter,
  abortController: AbortController
) {
  const typePrompts: Record<string, string> = {
    features: "Analyze this project and suggest new features that would add value.",
    refactoring: "Analyze this project and identify refactoring opportunities.",
    security: "Analyze this project for security vulnerabilities and suggest fixes.",
    performance: "Analyze this project for performance issues and suggest optimizations.",
  };

  const prompt = `${typePrompts[suggestionType] || typePrompts.features}

Look at the codebase and provide 3-5 concrete suggestions.

For each suggestion, provide:
1. A category (e.g., "User Experience", "Security", "Performance")
2. A clear description of what to implement
3. Concrete steps to implement it
4. Priority (1=high, 2=medium, 3=low)
5. Brief reasoning for why this would help

Format your response as JSON:
{
  "suggestions": [
    {
      "id": "suggestion-123",
      "category": "Category",
      "description": "What to implement",
      "steps": ["Step 1", "Step 2"],
      "priority": 1,
      "reasoning": "Why this helps"
    }
  ]
}`;

  events.emit("suggestions:event", {
    type: "suggestions_progress",
    content: `Starting ${suggestionType} analysis...\n`,
  });

  const options: Options = {
    model: "claude-opus-4-5-20251101",
    maxTurns: 5,
    cwd: projectPath,
    allowedTools: ["Read", "Glob", "Grep"],
    permissionMode: "acceptEdits",
    abortController,
  };

  const stream = query({ prompt, options });
  let responseText = "";

  for await (const msg of stream) {
    if (msg.type === "assistant" && msg.message.content) {
      for (const block of msg.message.content) {
        if (block.type === "text") {
          responseText = block.text;
          events.emit("suggestions:event", {
            type: "suggestions_progress",
            content: block.text,
          });
        } else if (block.type === "tool_use") {
          events.emit("suggestions:event", {
            type: "suggestions_tool",
            tool: block.name,
            input: block.input,
          });
        }
      }
    } else if (msg.type === "result" && msg.subtype === "success") {
      responseText = msg.result || responseText;
    }
  }

  // Parse suggestions from response
  try {
    const jsonMatch = responseText.match(/\{[\s\S]*"suggestions"[\s\S]*\}/);
    if (jsonMatch) {
      const parsed = JSON.parse(jsonMatch[0]);
      events.emit("suggestions:event", {
        type: "suggestions_complete",
        suggestions: parsed.suggestions.map((s: Record<string, unknown>, i: number) => ({
          ...s,
          id: s.id || `suggestion-${Date.now()}-${i}`,
        })),
      });
    } else {
      throw new Error("No valid JSON found in response");
    }
  } catch (error) {
    // Return generic suggestions if parsing fails
    events.emit("suggestions:event", {
      type: "suggestions_complete",
      suggestions: [
        {
          id: `suggestion-${Date.now()}-0`,
          category: "Analysis",
          description: "Review the AI analysis output for insights",
          steps: ["Review the generated analysis"],
          priority: 1,
          reasoning: "The AI provided analysis but suggestions need manual review",
        },
      ],
    });
  }
}
apps/server/src/routes/templates.ts (new file, 181 lines)
@@ -0,0 +1,181 @@
/**
 * Templates routes
 * Provides API for cloning GitHub starter templates
 */

import { Router, type Request, type Response } from "express";
import { spawn } from "child_process";
import path from "path";
import fs from "fs/promises";
import { addAllowedPath } from "../lib/security.js";

export function createTemplatesRoutes(): Router {
  const router = Router();

  /**
   * Clone a GitHub template to a new project directory
   * POST /api/templates/clone
   * Body: { repoUrl: string, projectName: string, parentDir: string }
   */
  router.post("/clone", async (req: Request, res: Response) => {
    try {
      const { repoUrl, projectName, parentDir } = req.body as {
        repoUrl: string;
        projectName: string;
        parentDir: string;
      };

      // Validate inputs
      if (!repoUrl || !projectName || !parentDir) {
        res.status(400).json({
          success: false,
          error: "repoUrl, projectName, and parentDir are required",
        });
        return;
      }

      // Validate repo URL is a valid GitHub URL
      const githubUrlPattern = /^https:\/\/github\.com\/[\w-]+\/[\w.-]+$/;
      if (!githubUrlPattern.test(repoUrl)) {
        res.status(400).json({
          success: false,
          error: "Invalid GitHub repository URL",
        });
        return;
      }

      // Sanitize project name (allow alphanumeric, dash, underscore)
      const sanitizedName = projectName.replace(/[^a-zA-Z0-9-_]/g, "-");
      if (sanitizedName !== projectName) {
        console.log(
          `[Templates] Sanitized project name: ${projectName} -> ${sanitizedName}`
        );
      }

      // Build full project path
      const projectPath = path.join(parentDir, sanitizedName);

      const resolvedParent = path.resolve(parentDir);
      const resolvedProject = path.resolve(projectPath);
      const relativePath = path.relative(resolvedParent, resolvedProject);
      if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
        return res.status(400).json({
          success: false,
          error: "Invalid project name; potential path traversal attempt.",
        });
      }

      // Check if directory already exists
      try {
        await fs.access(projectPath);
        res.status(400).json({
          success: false,
          error: `Directory "${sanitizedName}" already exists in ${parentDir}`,
        });
        return;
      } catch {
        // Directory doesn't exist, which is what we want
      }

      // Ensure parent directory exists
      try {
        await fs.mkdir(parentDir, { recursive: true });
      } catch (error) {
        console.error("[Templates] Failed to create parent directory:", error);
        res.status(500).json({
          success: false,
          error: "Failed to create parent directory",
        });
        return;
      }

      console.log(`[Templates] Cloning ${repoUrl} to ${projectPath}`);

      // Clone the repository
      const cloneResult = await new Promise<{
        success: boolean;
        error?: string;
      }>((resolve) => {
        const gitProcess = spawn("git", ["clone", repoUrl, projectPath], {
          cwd: parentDir,
        });

        let stderr = "";

        gitProcess.stderr.on("data", (data) => {
          stderr += data.toString();
        });

        gitProcess.on("close", (code) => {
          if (code === 0) {
            resolve({ success: true });
          } else {
            resolve({
              success: false,
              error: stderr || `Git clone failed with code ${code}`,
            });
          }
        });

        gitProcess.on("error", (error) => {
          resolve({
            success: false,
            error: `Failed to spawn git: ${error.message}`,
          });
        });
      });

      if (!cloneResult.success) {
        res.status(500).json({
          success: false,
          error: cloneResult.error || "Failed to clone repository",
        });
        return;
      }

      // Remove .git directory to start fresh
      try {
        const gitDir = path.join(projectPath, ".git");
        await fs.rm(gitDir, { recursive: true, force: true });
        console.log("[Templates] Removed .git directory");
      } catch (error) {
        console.warn("[Templates] Could not remove .git directory:", error);
        // Continue anyway - not critical
      }

      // Initialize a fresh git repository
      await new Promise<void>((resolve) => {
        const gitInit = spawn("git", ["init"], {
          cwd: projectPath,
        });

        gitInit.on("close", () => {
          console.log("[Templates] Initialized fresh git repository");
          resolve();
        });

        gitInit.on("error", () => {
          console.warn("[Templates] Could not initialize git");
          resolve();
        });
      });

      // Add to allowed paths
      addAllowedPath(projectPath);

      console.log(`[Templates] Successfully cloned template to ${projectPath}`);

      res.json({
        success: true,
        projectPath,
        projectName: sanitizedName,
      });
    } catch (error) {
      console.error("[Templates] Clone error:", error);
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}
apps/server/src/routes/workspace.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
/**
 * Workspace routes
 * Provides API endpoints for workspace directory management
 */

import { Router, type Request, type Response } from "express";
import fs from "fs/promises";
import path from "path";
import { addAllowedPath } from "../lib/security.js";

export function createWorkspaceRoutes(): Router {
  const router = Router();

  // Get workspace configuration status
  router.get("/config", async (_req: Request, res: Response) => {
    try {
      const workspaceDir = process.env.WORKSPACE_DIR;

      if (!workspaceDir) {
        res.json({
          success: true,
          configured: false,
        });
        return;
      }

      // Check if the directory exists
      try {
        const stats = await fs.stat(workspaceDir);
        if (!stats.isDirectory()) {
          res.json({
            success: true,
            configured: false,
            error: "WORKSPACE_DIR is not a valid directory",
          });
          return;
        }

        // Add workspace dir to allowed paths
        addAllowedPath(workspaceDir);

        res.json({
          success: true,
          configured: true,
          workspaceDir,
        });
      } catch {
        res.json({
          success: true,
          configured: false,
          error: "WORKSPACE_DIR path does not exist",
        });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // List directories in workspace
  router.get("/directories", async (_req: Request, res: Response) => {
    try {
      const workspaceDir = process.env.WORKSPACE_DIR;

      if (!workspaceDir) {
        res.status(400).json({
          success: false,
          error: "WORKSPACE_DIR is not configured",
        });
        return;
      }

      // Check if directory exists
      try {
        await fs.stat(workspaceDir);
      } catch {
        res.status(400).json({
          success: false,
          error: "WORKSPACE_DIR path does not exist",
        });
        return;
      }

      // Add workspace dir to allowed paths
      addAllowedPath(workspaceDir);

      // Read directory contents
      const entries = await fs.readdir(workspaceDir, { withFileTypes: true });

      // Filter to directories only and map to result format
      const directories = entries
        .filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
        .map((entry) => ({
          name: entry.name,
          path: path.join(workspaceDir, entry.name),
        }))
        .sort((a, b) => a.name.localeCompare(b.name));

      // Add each directory to allowed paths
      directories.forEach((dir) => addAllowedPath(dir.path));

      res.json({
        success: true,
        directories,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}
apps/server/src/routes/worktree.ts (new file, 355 lines)
@@ -0,0 +1,355 @@
/**
|
||||
* Worktree routes - HTTP API for git worktree operations
|
||||
*/
|
||||
|
||||
import { Router, type Request, type Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
export function createWorktreeRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
// Check if a path is a git repo
|
||||
async function isGitRepo(repoPath: string): Promise<boolean> {
|
||||
try {
|
||||
await execAsync("git rev-parse --is-inside-work-tree", { cwd: repoPath });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Get worktree info
|
||||
router.post("/info", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if worktree exists
|
||||
const worktreePath = path.join(projectPath, ".automaker", "worktrees", featureId);
|
||||
try {
|
||||
await fs.access(worktreePath);
|
||||
const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
res.json({
|
||||
success: true,
|
||||
worktreePath,
|
||||
branchName: stdout.trim(),
|
||||
});
|
||||
} catch {
|
||||
res.json({ success: true, worktreePath: null, branchName: null });
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get worktree status
|
||||
router.post("/status", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const worktreePath = path.join(projectPath, ".automaker", "worktrees", featureId);
|
||||
|
||||
try {
|
||||
await fs.access(worktreePath);
|
||||
const { stdout: status } = await execAsync("git status --porcelain", {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const files = status
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((line) => line.slice(3));
|
||||
const { stdout: diffStat } = await execAsync("git diff --stat", {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const { stdout: logOutput } = await execAsync(
|
||||
'git log --oneline -5 --format="%h %s"',
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
modifiedFiles: files.length,
|
||||
files,
|
||||
diffStat: diffStat.trim(),
|
||||
recentCommits: logOutput.trim().split("\n").filter(Boolean),
|
||||
});
|
||||
} catch {
|
||||
res.json({
|
||||
success: true,
|
||||
modifiedFiles: 0,
|
||||
files: [],
|
||||
diffStat: "",
|
||||
recentCommits: [],
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// List all worktrees
|
||||
router.post("/list", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath required" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!(await isGitRepo(projectPath))) {
|
||||
res.json({ success: true, worktrees: [] });
|
||||
return;
|
||||
}
|
||||
|
||||
const { stdout } = await execAsync("git worktree list --porcelain", {
|
||||
cwd: projectPath,
|
||||
});
|
||||
|
||||
const worktrees: Array<{ path: string; branch: string }> = [];
|
||||
const lines = stdout.split("\n");
|
||||
let current: { path?: string; branch?: string } = {};
|
||||
|
||||
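// "git worktree list --porcelain" separates entries with blank lines, so an empty line below marks the end of one worktree record.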
for (const line of lines) {
|
||||
if (line.startsWith("worktree ")) {
|
||||
current.path = line.slice(9);
|
||||
} else if (line.startsWith("branch ")) {
|
||||
current.branch = line.slice(7).replace("refs/heads/", "");
|
||||
} else if (line === "") {
|
||||
if (current.path && current.branch) {
|
||||
worktrees.push({ path: current.path, branch: current.branch });
|
||||
}
|
||||
current = {};
|
||||
}
|
||||
}
|
||||
|
||||
res.json({ success: true, worktrees });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error";
|
||||
res.status(500).json({ success: false, error: message });
|
||||
}
|
||||
});
|
||||
|
||||
// Get diffs for a worktree
|
||||
router.post("/diffs", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { projectPath, featureId } = req.body as {
|
||||
projectPath: string;
|
||||
featureId: string;
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and featureId required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const worktreePath = path.join(projectPath, ".automaker", "worktrees", featureId);
|
||||
|
||||
try {
|
||||
await fs.access(worktreePath);
|
||||
const { stdout: diff } = await execAsync("git diff HEAD", {
|
||||
cwd: worktreePath,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
});
|
||||
const { stdout: status } = await execAsync("git status --porcelain", {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
const files = status
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((line) => {
|
||||
const statusChar = line[0];
|
||||
const filePath = line.slice(3);
|
||||
const statusMap: Record<string, string> = {
|
||||
M: "Modified",
|
||||
A: "Added",
|
||||
D: "Deleted",
|
||||
R: "Renamed",
|
||||
C: "Copied",
|
||||
U: "Updated",
|
||||
"?": "Untracked",
|
||||
};
|
||||
return {
|
||||
status: statusChar,
|
||||
path: filePath,
|
||||
statusText: statusMap[statusChar] || "Unknown",
|
||||
};
|
||||
});
|
||||
|
||||
res.json({
|
||||
          success: true,
          diff,
          files,
          hasChanges: files.length > 0,
        });
      } catch {
        res.json({ success: true, diff: "", files: [], hasChanges: false });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Get diff for a specific file
  router.post("/file-diff", async (req: Request, res: Response) => {
    try {
      const { projectPath, featureId, filePath } = req.body as {
        projectPath: string;
        featureId: string;
        filePath: string;
      };

      if (!projectPath || !featureId || !filePath) {
        res.status(400).json({
          success: false,
          error: "projectPath, featureId, and filePath required",
        });
        return;
      }

      const worktreePath = path.join(projectPath, ".automaker", "worktrees", featureId);

      try {
        await fs.access(worktreePath);
        const { stdout: diff } = await execAsync(`git diff HEAD -- "${filePath}"`, {
          cwd: worktreePath,
          maxBuffer: 10 * 1024 * 1024,
        });

        res.json({ success: true, diff, filePath });
      } catch {
        res.json({ success: true, diff: "", filePath });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Revert feature (remove worktree)
  router.post("/revert", async (req: Request, res: Response) => {
    try {
      const { projectPath, featureId } = req.body as {
        projectPath: string;
        featureId: string;
      };

      if (!projectPath || !featureId) {
        res
          .status(400)
          .json({ success: false, error: "projectPath and featureId required" });
        return;
      }

      const worktreePath = path.join(projectPath, ".automaker", "worktrees", featureId);

      try {
        // Remove worktree
        await execAsync(`git worktree remove "${worktreePath}" --force`, {
          cwd: projectPath,
        });
        // Delete branch
        await execAsync(`git branch -D feature/${featureId}`, { cwd: projectPath });

        res.json({ success: true, removedPath: worktreePath });
      } catch (error) {
        // Worktree might not exist
        res.json({ success: true, removedPath: null });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  // Merge feature (merge worktree branch into main)
  router.post("/merge", async (req: Request, res: Response) => {
    try {
      const { projectPath, featureId, options } = req.body as {
        projectPath: string;
        featureId: string;
        options?: { squash?: boolean; message?: string };
      };

      if (!projectPath || !featureId) {
        res
          .status(400)
          .json({ success: false, error: "projectPath and featureId required" });
        return;
      }

      const branchName = `feature/${featureId}`;
      const worktreePath = path.join(projectPath, ".automaker", "worktrees", featureId);

      // Get current branch
      const { stdout: currentBranch } = await execAsync(
        "git rev-parse --abbrev-ref HEAD",
        { cwd: projectPath }
      );

      // Merge the feature branch
      const mergeCmd = options?.squash
        ? `git merge --squash ${branchName}`
        : `git merge ${branchName} -m "${options?.message || `Merge ${branchName}`}"`;

      await execAsync(mergeCmd, { cwd: projectPath });

      // If squash merge, need to commit
      if (options?.squash) {
        await execAsync(
          `git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`,
          { cwd: projectPath }
        );
      }

      // Clean up worktree and branch
      try {
        await execAsync(`git worktree remove "${worktreePath}" --force`, {
          cwd: projectPath,
        });
        await execAsync(`git branch -D ${branchName}`, { cwd: projectPath });
      } catch {
        // Cleanup errors are non-fatal
      }

      res.json({ success: true, mergedBranch: branchName });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      res.status(500).json({ success: false, error: message });
    }
  });

  return router;
}
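For context, a minimal sketch of how a client might call the /merge route above. The base URL and the mount path (/api/worktree) are assumptions for illustration; neither is shown in this diff.

// Hypothetical client call; base URL and router mount path are assumptions.
async function mergeFeature(projectPath: string, featureId: string): Promise<string> {
  const res = await fetch("http://localhost:3008/api/worktree/merge", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      projectPath,
      featureId,
      options: { squash: true, message: `Merge feature/${featureId}` },
    }),
  });
  const body = (await res.json()) as { success: boolean; mergedBranch?: string; error?: string };
  if (!body.success) throw new Error(body.error ?? "Merge failed");
  return body.mergedBranch ?? "";
}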
@@ -1,35 +1,72 @@
const { query, AbortError } = require("@anthropic-ai/claude-agent-sdk");
const path = require("path");
const fs = require("fs/promises");

/**
 * Agent Service - Runs Claude agents in the Electron main process
 * This service survives Next.js restarts and maintains conversation state
 * Agent Service - Runs Claude agents via the Claude Agent SDK
 * Manages conversation sessions and streams responses via WebSocket
 */
class AgentService {
  constructor() {
    this.sessions = new Map(); // sessionId -> { messages, isRunning, abortController }
    this.stateDir = null; // Will be set when app is ready

import { query, AbortError, type Options } from "@anthropic-ai/claude-agent-sdk";
import path from "path";
import fs from "fs/promises";
import type { EventEmitter } from "../lib/events.js";

interface Message {
  id: string;
  role: "user" | "assistant";
  content: string;
  images?: Array<{
    data: string;
    mimeType: string;
    filename: string;
  }>;
  timestamp: string;
  isError?: boolean;
}

interface Session {
  messages: Message[];
  isRunning: boolean;
  abortController: AbortController | null;
  workingDirectory: string;
}

interface SessionMetadata {
  id: string;
  name: string;
  projectPath?: string;
  workingDirectory: string;
  createdAt: string;
  updatedAt: string;
  archived?: boolean;
  tags?: string[];
}

export class AgentService {
  private sessions = new Map<string, Session>();
  private stateDir: string;
  private metadataFile: string;
  private events: EventEmitter;

  constructor(dataDir: string, events: EventEmitter) {
    this.stateDir = path.join(dataDir, "agent-sessions");
    this.metadataFile = path.join(dataDir, "sessions-metadata.json");
    this.events = events;
  }

  /**
   * Initialize the service with app data directory
   */
  async initialize(appDataPath) {
    this.stateDir = path.join(appDataPath, "agent-sessions");
    this.metadataFile = path.join(appDataPath, "sessions-metadata.json");
  async initialize(): Promise<void> {
    await fs.mkdir(this.stateDir, { recursive: true });
  }

  /**
   * Start or resume a conversation
   */
  async startConversation({ sessionId, workingDirectory }) {

    // Initialize session if it doesn't exist
  async startConversation({
    sessionId,
    workingDirectory,
  }: {
    sessionId: string;
    workingDirectory?: string;
  }) {
    if (!this.sessions.has(sessionId)) {
      const messages = await this.loadSession(sessionId);

      this.sessions.set(sessionId, {
        messages,
        isRunning: false,
@@ -38,7 +75,7 @@ class AgentService {
      });
    }

    const session = this.sessions.get(sessionId);
    const session = this.sessions.get(sessionId)!;
    return {
      success: true,
      messages: session.messages,
@@ -54,7 +91,11 @@ class AgentService {
    message,
    workingDirectory,
    imagePaths,
    sendToRenderer,
  }: {
    sessionId: string;
    message: string;
    workingDirectory?: string;
    imagePaths?: string[];
  }) {
    const session = this.sessions.get(sessionId);
    if (!session) {
@@ -65,20 +106,15 @@ class AgentService {
      throw new Error("Agent is already processing a message");
    }

    // Read images from temp files and convert to base64 for storage
    const images = [];
    // Read images and convert to base64
    const images: Message["images"] = [];
    if (imagePaths && imagePaths.length > 0) {
      const fs = require("fs/promises");
      const path = require("path");

      for (const imagePath of imagePaths) {
        try {
          const imageBuffer = await fs.readFile(imagePath);
          const base64Data = imageBuffer.toString("base64");

          // Determine media type from file extension
          const ext = path.extname(imagePath).toLowerCase();
          const mimeTypeMap = {
          const mimeTypeMap: Record<string, string> = {
            ".jpg": "image/jpeg",
            ".jpeg": "image/jpeg",
            ".png": "image/png",
@@ -92,21 +128,14 @@ class AgentService {
            mimeType: mediaType,
            filename: path.basename(imagePath),
          });

          console.log(
            `[AgentService] Loaded image from ${imagePath} for storage`
          );
        } catch (error) {
          console.error(
            `[AgentService] Failed to load image from ${imagePath}:`,
            error
          );
          console.error(`[AgentService] Failed to load image ${imagePath}:`, error);
        }
      }
    }

    // Add user message to conversation with base64 images
    const userMessage = {
    // Add user message
    const userMessage: Message = {
      id: this.generateId(),
      role: "user",
      content: message,
@@ -118,19 +147,16 @@ class AgentService {
    session.isRunning = true;
    session.abortController = new AbortController();

    // Send initial user message to renderer
    sendToRenderer({
    // Emit user message event
    this.emitAgentEvent(sessionId, {
      type: "message",
      message: userMessage,
    });

    // Save state with base64 images
    await this.saveSession(sessionId, session.messages);

    try {
      // Configure Claude Agent SDK options
      const options = {
        // model: "claude-sonnet-4-20250514",
      const options: Options = {
        model: "claude-opus-4-5-20251101",
        systemPrompt: this.getSystemPrompt(),
        maxTurns: 20,
@@ -150,32 +176,26 @@ class AgentService {
          enabled: true,
          autoAllowBashIfSandboxed: true,
        },
        abortController: session.abortController,
        abortController: session.abortController!,
      };

      // Build prompt content with text and images
      let promptContent = message;
      // Build prompt content
      let promptContent: string | Array<{ type: string; text?: string; source?: object }> =
        message;

      // If there are images, create a content array
      if (imagePaths && imagePaths.length > 0) {
        const contentBlocks = [];
        const contentBlocks: Array<{ type: string; text?: string; source?: object }> = [];

        // Add text block
        if (message && message.trim()) {
          contentBlocks.push({
            type: "text",
            text: message,
          });
          contentBlocks.push({ type: "text", text: message });
        }

        // Add image blocks
        const fs = require("fs");
        for (const imagePath of imagePaths) {
          try {
            const imageBuffer = fs.readFileSync(imagePath);
            const imageBuffer = await fs.readFile(imagePath);
            const base64Data = imageBuffer.toString("base64");
            const ext = path.extname(imagePath).toLowerCase();
            const mimeTypeMap = {
            const mimeTypeMap: Record<string, string> = {
              ".jpg": "image/jpeg",
              ".jpeg": "image/jpeg",
              ".png": "image/png",
@@ -193,30 +213,23 @@ class AgentService {
              },
            });
          } catch (error) {
            console.error(
              `[AgentService] Failed to load image ${imagePath}:`,
              error
            );
            console.error(`[AgentService] Failed to load image ${imagePath}:`, error);
          }
        }

        // Use content blocks if we have images
        if (
          contentBlocks.length > 1 ||
          (contentBlocks.length === 1 && contentBlocks[0].type === "image")
        ) {
        if (contentBlocks.length > 1 || contentBlocks[0]?.type === "image") {
          promptContent = contentBlocks;
        }
      }

      // Build payload for the SDK
      // Build payload
      const promptPayload = Array.isArray(promptContent)
        ? (async function* () {
            yield {
              type: "user",
              type: "user" as const,
              session_id: "",
              message: {
                role: "user",
                role: "user" as const,
                content: promptContent,
              },
              parent_tool_use_id: null,
@@ -224,14 +237,12 @@ class AgentService {
          })()
        : promptContent;

      // Send the query via the SDK (conversation state handled by the SDK)
      const stream = query({ prompt: promptPayload, options });

      let currentAssistantMessage = null;
      let currentAssistantMessage: Message | null = null;
      let responseText = "";
      const toolUses = [];
      const toolUses: Array<{ name: string; input: unknown }> = [];

      // Stream responses from the SDK
      for await (const msg of stream) {
        if (msg.type === "assistant") {
          if (msg.message.content) {
@@ -239,7 +250,6 @@ class AgentService {
              if (block.type === "text") {
                responseText += block.text;

                // Create or update assistant message
                if (!currentAssistantMessage) {
                  currentAssistantMessage = {
                    id: this.generateId(),
@@ -252,8 +262,7 @@ class AgentService {
                  currentAssistantMessage.content = responseText;
                }

                // Stream to renderer
                sendToRenderer({
                this.emitAgentEvent(sessionId, {
                  type: "stream",
                  messageId: currentAssistantMessage.id,
                  content: responseText,
@@ -266,8 +275,7 @@ class AgentService {
                };
                toolUses.push(toolUse);

                // Send tool use notification
                sendToRenderer({
                this.emitAgentEvent(sessionId, {
                  type: "tool_use",
                  tool: toolUse,
                });
@@ -276,15 +284,13 @@ class AgentService {
          }
        } else if (msg.type === "result") {
          if (msg.subtype === "success" && msg.result) {
            // Use the final result
            if (currentAssistantMessage) {
              currentAssistantMessage.content = msg.result;
              responseText = msg.result;
            }
          }

          // Send completion
          sendToRenderer({
          this.emitAgentEvent(sessionId, {
            type: "complete",
            messageId: currentAssistantMessage?.id,
            content: responseText,
@@ -293,7 +299,6 @@ class AgentService {
        }
      }

      // Save final state
      await this.saveSession(sessionId, session.messages);

      session.isRunning = false;
@@ -304,8 +309,7 @@ class AgentService {
        message: currentAssistantMessage,
      };
    } catch (error) {
      if (error instanceof AbortError || error?.name === "AbortError") {
        // Query aborted
      if (error instanceof AbortError || (error as Error)?.name === "AbortError") {
        session.isRunning = false;
        session.abortController = null;
        return { success: false, aborted: true };
@@ -316,11 +320,10 @@ class AgentService {
      session.isRunning = false;
      session.abortController = null;

      // Add error message
      const errorMessage = {
      const errorMessage: Message = {
        id: this.generateId(),
        role: "assistant",
        content: `Error: ${error.message}`,
        content: `Error: ${(error as Error).message}`,
        timestamp: new Date().toISOString(),
        isError: true,
      };
@@ -328,9 +331,9 @@ class AgentService {
      session.messages.push(errorMessage);
      await this.saveSession(sessionId, session.messages);

      sendToRenderer({
      this.emitAgentEvent(sessionId, {
        type: "error",
        error: error.message,
        error: (error as Error).message,
        message: errorMessage,
      });

@@ -341,7 +344,7 @@ class AgentService {
  /**
   * Get conversation history
   */
  getHistory(sessionId) {
  getHistory(sessionId: string) {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return { success: false, error: "Session not found" };
@@ -357,7 +360,7 @@ class AgentService {
  /**
   * Stop current agent execution
   */
  async stopExecution(sessionId) {
  async stopExecution(sessionId: string) {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return { success: false, error: "Session not found" };
@@ -375,7 +378,7 @@ class AgentService {
  /**
   * Clear conversation history
   */
  async clearSession(sessionId) {
  async clearSession(sessionId: string) {
    const session = this.sessions.get(sessionId);
    if (session) {
      session.messages = [];
@@ -386,56 +389,140 @@ class AgentService {
    return { success: true };
  }

  /**
   * Load session from disk
   */
  async loadSession(sessionId) {
    if (!this.stateDir) return [];
  // Session management

  async loadSession(sessionId: string): Promise<Message[]> {
    const sessionFile = path.join(this.stateDir, `${sessionId}.json`);

    try {
      const data = await fs.readFile(sessionFile, "utf-8");
      const parsed = JSON.parse(data);
      console.log(
        `[AgentService] Loaded ${parsed.length} messages for ${sessionId}`
      );
      return parsed;
    } catch (error) {
      // Session doesn't exist yet
      return JSON.parse(data);
    } catch {
      return [];
    }
  }

  /**
   * Save session to disk
   */
  async saveSession(sessionId, messages) {
    if (!this.stateDir) return;

  async saveSession(sessionId: string, messages: Message[]): Promise<void> {
    const sessionFile = path.join(this.stateDir, `${sessionId}.json`);

    try {
      await fs.writeFile(
        sessionFile,
        JSON.stringify(messages, null, 2),
        "utf-8"
      );
      console.log(
        `[AgentService] Saved ${messages.length} messages for ${sessionId}`
      );

      // Update timestamp
      await fs.writeFile(sessionFile, JSON.stringify(messages, null, 2), "utf-8");
      await this.updateSessionTimestamp(sessionId);
    } catch (error) {
      console.error("[AgentService] Failed to save session:", error);
    }
  }

  /**
   * Get system prompt
   */
  getSystemPrompt() {
  async loadMetadata(): Promise<Record<string, SessionMetadata>> {
    try {
      const data = await fs.readFile(this.metadataFile, "utf-8");
      return JSON.parse(data);
    } catch {
      return {};
    }
  }

  async saveMetadata(metadata: Record<string, SessionMetadata>): Promise<void> {
    await fs.writeFile(this.metadataFile, JSON.stringify(metadata, null, 2), "utf-8");
  }

  async updateSessionTimestamp(sessionId: string): Promise<void> {
    const metadata = await this.loadMetadata();
    if (metadata[sessionId]) {
      metadata[sessionId].updatedAt = new Date().toISOString();
      await this.saveMetadata(metadata);
    }
  }

  async listSessions(includeArchived = false): Promise<SessionMetadata[]> {
    const metadata = await this.loadMetadata();
    let sessions = Object.values(metadata);

    if (!includeArchived) {
      sessions = sessions.filter((s) => !s.archived);
    }

    return sessions.sort(
      (a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime()
    );
  }

  async createSession(
    name: string,
    projectPath?: string,
    workingDirectory?: string
  ): Promise<SessionMetadata> {
    const sessionId = this.generateId();
    const metadata = await this.loadMetadata();

    const session: SessionMetadata = {
      id: sessionId,
      name,
      projectPath,
      workingDirectory: workingDirectory || projectPath || process.cwd(),
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    };

    metadata[sessionId] = session;
    await this.saveMetadata(metadata);

    return session;
  }

  async updateSession(
    sessionId: string,
    updates: Partial<SessionMetadata>
  ): Promise<SessionMetadata | null> {
    const metadata = await this.loadMetadata();
    if (!metadata[sessionId]) return null;

    metadata[sessionId] = {
      ...metadata[sessionId],
      ...updates,
      updatedAt: new Date().toISOString(),
    };

    await this.saveMetadata(metadata);
    return metadata[sessionId];
  }

  async archiveSession(sessionId: string): Promise<boolean> {
    const result = await this.updateSession(sessionId, { archived: true });
    return result !== null;
  }

  async unarchiveSession(sessionId: string): Promise<boolean> {
    const result = await this.updateSession(sessionId, { archived: false });
    return result !== null;
  }

  async deleteSession(sessionId: string): Promise<boolean> {
    const metadata = await this.loadMetadata();
    if (!metadata[sessionId]) return false;

    delete metadata[sessionId];
    await this.saveMetadata(metadata);

    // Delete session file
    try {
      const sessionFile = path.join(this.stateDir, `${sessionId}.json`);
      await fs.unlink(sessionFile);
    } catch {
      // File may not exist
    }

    // Clear from memory
    this.sessions.delete(sessionId);

    return true;
  }

  private emitAgentEvent(sessionId: string, data: Record<string, unknown>): void {
    this.events.emit("agent:stream", { sessionId, ...data });
  }

  private getSystemPrompt(): string {
    return `You are an AI assistant helping users build software. You are part of the Automaker application,
which is designed to help developers plan, design, and implement software projects autonomously.

@@ -466,217 +553,10 @@ You have full access to the codebase and can:
- Edit existing files
- Run bash commands
- Search for code patterns
- Execute tests and builds

IMPORTANT: When making file changes, be aware that the Next.js development server may restart.
This is normal and expected. Your conversation state is preserved across these restarts.`;
- Execute tests and builds`;
  }

  /**
   * Generate unique ID
   */
  generateId() {
  private generateId(): string {
    return `msg_${Date.now()}_${Math.random().toString(36).substring(2, 11)}`;
  }

  // ============================================================================
  // Session Management
  // ============================================================================

  /**
   * Load all session metadata
   */
  async loadMetadata() {
    if (!this.metadataFile) return {};

    try {
      const data = await fs.readFile(this.metadataFile, "utf-8");
      return JSON.parse(data);
    } catch (error) {
      return {};
    }
  }

  /**
   * Save session metadata
   */
  async saveMetadata(metadata) {
    if (!this.metadataFile) return;

    try {
      await fs.writeFile(
        this.metadataFile,
        JSON.stringify(metadata, null, 2),
        "utf-8"
      );
    } catch (error) {
      console.error("[AgentService] Failed to save metadata:", error);
    }
  }

  /**
   * List all sessions
   */
  async listSessions({ includeArchived = false } = {}) {
    const metadata = await this.loadMetadata();
    const sessions = [];

    for (const [sessionId, meta] of Object.entries(metadata)) {
      if (!includeArchived && meta.isArchived) continue;

      const messages = await this.loadSession(sessionId);
      const lastMessage = messages[messages.length - 1];

      sessions.push({
        id: sessionId,
        name: meta.name || sessionId,
        projectPath: meta.projectPath || "",
        createdAt: meta.createdAt,
        updatedAt: meta.updatedAt,
        messageCount: messages.length,
        isArchived: meta.isArchived || false,
        tags: meta.tags || [],
        preview: lastMessage?.content.substring(0, 100) || "",
      });
    }

    // Sort by most recently updated
    sessions.sort((a, b) => new Date(b.updatedAt) - new Date(a.updatedAt));

    return sessions;
  }

  /**
   * Create a new session
   */
  async createSession({ name, projectPath, workingDirectory }) {
    const sessionId = `session_${Date.now()}_${Math.random()
      .toString(36)
      .substring(2, 11)}`;

    const metadata = await this.loadMetadata();
    metadata[sessionId] = {
      name,
      projectPath,
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
      isArchived: false,
      tags: [],
    };

    await this.saveMetadata(metadata);

    this.sessions.set(sessionId, {
      messages: [],
      isRunning: false,
      abortController: null,
      workingDirectory: workingDirectory || projectPath,
    });

    await this.saveSession(sessionId, []);

    return {
      success: true,
      sessionId,
      session: metadata[sessionId],
    };
  }

  /**
   * Update session metadata
   */
  async updateSession({ sessionId, name, tags }) {
    const metadata = await this.loadMetadata();

    if (!metadata[sessionId]) {
      return { success: false, error: "Session not found" };
    }

    if (name !== undefined) metadata[sessionId].name = name;
    if (tags !== undefined) metadata[sessionId].tags = tags;
    metadata[sessionId].updatedAt = new Date().toISOString();

    await this.saveMetadata(metadata);

    return { success: true };
  }

  /**
   * Archive a session
   */
  async archiveSession(sessionId) {
    const metadata = await this.loadMetadata();

    if (!metadata[sessionId]) {
      return { success: false, error: "Session not found" };
    }

    metadata[sessionId].isArchived = true;
    metadata[sessionId].updatedAt = new Date().toISOString();

    await this.saveMetadata(metadata);

    return { success: true };
  }

  /**
   * Unarchive a session
   */
  async unarchiveSession(sessionId) {
    const metadata = await this.loadMetadata();

    if (!metadata[sessionId]) {
      return { success: false, error: "Session not found" };
    }

    metadata[sessionId].isArchived = false;
    metadata[sessionId].updatedAt = new Date().toISOString();

    await this.saveMetadata(metadata);

    return { success: true };
  }

  /**
   * Delete a session permanently
   */
  async deleteSession(sessionId) {
    const metadata = await this.loadMetadata();

    if (!metadata[sessionId]) {
      return { success: false, error: "Session not found" };
    }

    // Remove from metadata
    delete metadata[sessionId];
    await this.saveMetadata(metadata);

    // Remove from memory
    this.sessions.delete(sessionId);

    // Delete session file
    const sessionFile = path.join(this.stateDir, `${sessionId}.json`);
    try {
      await fs.unlink(sessionFile);
    } catch (error) {
      console.warn("[AgentService] Failed to delete session file:", error);
    }

    return { success: true };
  }

  /**
   * Update session metadata when messages change
   */
  async updateSessionTimestamp(sessionId) {
    const metadata = await this.loadMetadata();

    if (metadata[sessionId]) {
      metadata[sessionId].updatedAt = new Date().toISOString();
      await this.saveMetadata(metadata);
    }
  }
}

// Export singleton instance
module.exports = new AgentService();
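For context, a minimal sketch of how the refactored TypeScript AgentService might be constructed and observed. The EventEmitter import path and its on() method are assumptions based on the type import in the diff; the data directory and file paths are illustrative.

// Hypothetical wiring; the event emitter shape and paths are assumptions.
import { AgentService } from "./services/agent-service.js";
import { EventEmitter } from "./lib/events.js";

async function bootstrapAgent(dataDir: string): Promise<AgentService> {
  const events = new EventEmitter();

  // Streamed agent events (message, stream, tool_use, complete, error) all arrive on one channel.
  events.on("agent:stream", (payload: { sessionId: string } & Record<string, unknown>) => {
    console.log(`[agent:${payload.sessionId}]`, payload.type);
  });

  const service = new AgentService(dataDir, events);
  await service.initialize();

  const session = await service.createSession("Scratch session", process.cwd());
  await service.startConversation({
    sessionId: session.id,
    workingDirectory: session.workingDirectory,
  });

  return service;
}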
Some files were not shown because too many files have changed in this diff.