Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-30 22:32:04 +00:00)

Compare commits: claude/add...feature/cl (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | b88c940a36 | |
| | 10b49bd3b4 | |
| | 53298106e9 | |
.github/workflows/release.yml (vendored, 21 changed lines)
@@ -62,9 +62,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: macos-builds
path: |
apps/ui/release/*.dmg
apps/ui/release/*.zip
path: apps/ui/release/*.{dmg,zip}
retention-days: 30

- name: Upload Windows artifacts
@@ -80,10 +78,7 @@
uses: actions/upload-artifact@v4
with:
name: linux-builds
path: |
apps/ui/release/*.AppImage
apps/ui/release/*.deb
apps/ui/release/*.rpm
path: apps/ui/release/*.{AppImage,deb,rpm}
retention-days: 30

upload:
@@ -114,14 +109,8 @@
uses: softprops/action-gh-release@v2
with:
files: |
artifacts/macos-builds/*.dmg
artifacts/macos-builds/*.zip
artifacts/macos-builds/*.blockmap
artifacts/windows-builds/*.exe
artifacts/windows-builds/*.blockmap
artifacts/linux-builds/*.AppImage
artifacts/linux-builds/*.deb
artifacts/linux-builds/*.rpm
artifacts/linux-builds/*.blockmap
artifacts/macos-builds/*.{dmg,zip,blockmap}
artifacts/windows-builds/*.{exe,blockmap}
artifacts/linux-builds/*.{AppImage,deb,rpm,blockmap}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -28,7 +28,6 @@ COPY libs/platform/package*.json ./libs/platform/
COPY libs/model-resolver/package*.json ./libs/model-resolver/
COPY libs/dependency-resolver/package*.json ./libs/dependency-resolver/
COPY libs/git-utils/package*.json ./libs/git-utils/
COPY libs/spec-parser/package*.json ./libs/spec-parser/

# Copy scripts (needed by npm workspace)
COPY scripts ./scripts
SECURITY_TODO.md (300 changed lines)
@@ -1,300 +0,0 @@
# Security Audit Findings - v0.13.0rc Branch

**Date:** $(date)
**Audit Type:** Git diff security review against v0.13.0rc branch
**Status:** ⚠️ Security vulnerabilities found - fixes required before release

## Executive Summary

No intentionally malicious code was detected in the changes. However, several **critical security vulnerabilities** were identified that could allow command injection attacks. These must be fixed before release.

---

## 🔴 Critical Security Issues

### 1. Command Injection in Merge Handler

**File:** `apps/server/src/routes/worktree/routes/merge.ts`
**Lines:** 43, 54, 65-66, 93
**Severity:** CRITICAL

**Issue:**
User-controlled inputs (`branchName`, `mergeTo`, `options?.message`) are directly interpolated into shell commands without validation, allowing command injection attacks.

**Vulnerable Code:**

```typescript
// Line 43 - branchName not validated
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });

// Line 54 - mergeTo not validated
await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath });

// Lines 65-66 - branchName and message not validated
const mergeCmd = options?.squash
  ? `git merge --squash ${branchName}`
  : `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`;

// Line 93 - message not sanitized
await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, {
  cwd: projectPath,
});
```

**Attack Vector:**
An attacker could inject shell commands via branch names or commit messages:

- Branch name: `main; rm -rf /`
- Commit message: `"; malicious_command; "`

**Fix Required:**

1. Validate `branchName` and `mergeTo` using `isValidBranchName()` before use
2. Sanitize commit messages or use `execGitCommand` with proper escaping
3. Replace `execAsync` template literals with `execGitCommand` array-based calls (see the sketch below)

**Note:** `isValidBranchName` is imported, but it is only used after the deletion step (line 119), not before the `execAsync` calls above.
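
A minimal sketch of the intended shape of the fix (the repo's `execGitCommand` signature isn't shown in this diff, so Node's promisified `execFile` stands in for it; the `mergeBranch` wrapper and the inline validator are illustrative, not the actual handler): validate the user-supplied refs first, then invoke git with array arguments so no shell ever parses them.

```typescript
import { execFile } from 'child_process';
import { promisify } from 'util';

const execFileAsync = promisify(execFile);

// Stand-in for the repo's isValidBranchName(); the real validator should be used.
const isValidBranchName = (name: string): boolean =>
  /^[A-Za-z0-9._/-]+$/.test(name) && !name.startsWith('-') && !name.includes('..');

async function mergeBranch(
  projectPath: string,
  branchName: string,
  mergeTo: string,
  options?: { squash?: boolean; message?: string }
): Promise<void> {
  // 1. Reject suspicious refs before any process is spawned.
  if (!isValidBranchName(branchName) || !isValidBranchName(mergeTo)) {
    throw new Error('Invalid branch name');
  }

  // 2. Array arguments: no shell is involved, so metacharacters in the
  //    branch name or commit message are passed through literally.
  await execFileAsync('git', ['rev-parse', '--verify', branchName], { cwd: projectPath });
  await execFileAsync('git', ['rev-parse', '--verify', mergeTo], { cwd: projectPath });

  const message = options?.message || `Merge ${branchName} into ${mergeTo}`;
  const mergeArgs = options?.squash
    ? ['merge', '--squash', branchName]
    : ['merge', branchName, '-m', message];
  await execFileAsync('git', mergeArgs, { cwd: projectPath });
}
```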

---

### 2. Command Injection in Push Handler

**File:** `apps/server/src/routes/worktree/routes/push.ts`
**Lines:** 44, 49
**Severity:** CRITICAL

**Issue:**
The user-controlled `remote` parameter and `branchName` are directly interpolated into shell commands without validation.

**Vulnerable Code:**

```typescript
// Line 38 - remote defaults to 'origin' but not validated
const targetRemote = remote || 'origin';

// Lines 44, 49 - targetRemote and branchName not validated
await execAsync(`git push -u ${targetRemote} ${branchName} ${forceFlag}`, {
  cwd: worktreePath,
});
await execAsync(`git push --set-upstream ${targetRemote} ${branchName} ${forceFlag}`, {
  cwd: worktreePath,
});
```

**Attack Vector:**
An attacker could inject commands via the remote name:

- Remote: `origin; malicious_command; #`

**Fix Required:**

1. Validate `targetRemote` parameter (alphanumeric + `-`, `_` only)
2. Validate `branchName` before use (even though it comes from git output)
3. Use `execGitCommand` with array arguments instead of template literals (see the sketch below)
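
A sketch of how the remote validation could look (the handler shape, the `pushBranch` name, and the validator are assumptions for illustration; only `execFile` from Node's `child_process` is a known API here):

```typescript
import { execFile } from 'child_process';
import { promisify } from 'util';

const execFileAsync = promisify(execFile);

// Provided by the repo (used in merge.ts); declared here for the sketch.
declare function isValidBranchName(name: string): boolean;

// Illustrative validator: typical remote names only, never option-like.
function isValidRemoteName(remote: string): boolean {
  return /^[A-Za-z0-9._-]+$/.test(remote) && !remote.startsWith('-');
}

async function pushBranch(
  worktreePath: string,
  branchName: string,
  remote?: string,
  force = false
): Promise<void> {
  const targetRemote = remote || 'origin';
  if (!isValidRemoteName(targetRemote) || !isValidBranchName(branchName)) {
    throw new Error('Invalid remote or branch name');
  }

  // Array arguments instead of a template literal: the force flag is derived
  // from a boolean rather than interpolated as a free-form string.
  const args = ['push', '--set-upstream', targetRemote, branchName];
  if (force) args.push('--force'); // or '--force-with-lease' for a safer default
  await execFileAsync('git', args, { cwd: worktreePath });
}
```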

---

### 3. Unsafe Environment Variable Export in Shell Script

**File:** `start-automaker.sh`
**Lines:** 5068, 5085
**Severity:** CRITICAL

**Issue:**
Unsafe parsing and export of `.env` file contents using `xargs`, without proper handling of special characters.

**Vulnerable Code:**

```bash
export $(grep -v '^#' .env | xargs)
```

**Attack Vector:**
If the `.env` file contains malicious content with spaces, special characters, or code, it could be executed:

- `.env` entry: `VAR="value; malicious_command"`
- Could lead to code execution during startup

**Fix Required:**
Replace it with a safer parsing method:

```bash
# Safer approach
set -a
source <(grep -v '^#' .env | sed 's/^/export /')
set +a

# Or even safer - validate each line
while IFS= read -r line; do
  [[ "$line" =~ ^[[:space:]]*# ]] && continue
  [[ -z "$line" ]] && continue
  if [[ "$line" =~ ^([A-Za-z_][A-Za-z0-9_]*)=(.*)$ ]]; then
    export "${BASH_REMATCH[1]}"="${BASH_REMATCH[2]}"
  fi
done < .env
```

---

## 🟡 Moderate Security Concerns

### 4. Inconsistent Use of Secure Command Execution

**Issue:**
The codebase has an `execGitCommand()` function available (which uses array arguments and is safer), but it is not used consistently; some places still call `execAsync` with template literals.

**Files Affected:**

- `apps/server/src/routes/worktree/routes/merge.ts`
- `apps/server/src/routes/worktree/routes/push.ts`

**Recommendation:**

- Audit all `execAsync` calls with template literals
- Replace with `execGitCommand` where possible
- Document when `execAsync` is acceptable (only with fully validated inputs)

---

### 5. Missing Input Validation

**Issues:**

1. `targetRemote` in `push.ts` defaults to 'origin' but isn't validated
2. Commit messages in `merge.ts` aren't sanitized before use in shell commands
3. `worktreePath` validation relies on middleware but should be double-checked

**Recommendation:**

- Add validation functions for remote names
- Sanitize commit messages (remove shell metacharacters); see the sketch below
- Add defensive validation even when middleware exists
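
For the commit-message bullet, a `sanitizeCommitMessage()` helper (named after the action item below; the implementation is an assumption) could look like this. If commands are executed with array arguments, shell metacharacters in the message are already harmless, so the sanitizer mainly strips control characters and bounds the length:

```typescript
// Illustrative sanitizer, assuming the message is later passed as its own
// argv entry after "-m" (no shell involved).
function sanitizeCommitMessage(message: string, maxLength = 2000): string {
  const cleaned = message
    .replace(/[\u0000-\u0008\u000B-\u001F\u007F]/g, '') // drop control chars except \t and \n
    .trim()
    .slice(0, maxLength);
  if (cleaned.length === 0) {
    throw new Error('Commit message is empty after sanitization');
  }
  return cleaned;
}
```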

---

## ✅ Positive Security Findings

1. **No Hardcoded Credentials:** No API keys, passwords, or tokens found in the diff
2. **No Data Exfiltration:** No suspicious network requests or data transmission patterns
3. **No Backdoors:** No hidden functionality or unauthorized access patterns detected
4. **Safe Command Execution:** `execGitCommand` function properly uses array arguments in some places
5. **Environment Variable Handling:** `init-script-service.ts` properly sanitizes environment variables (lines 194-220)

---

## 📋 Action Items

### Immediate (Before Release)

- [ ] **Fix command injection in `merge.ts`**
  - [ ] Validate `branchName` with `isValidBranchName()` before line 43
  - [ ] Validate `mergeTo` with `isValidBranchName()` before line 54
  - [ ] Sanitize commit messages or use `execGitCommand` for merge commands
  - [ ] Replace `execAsync` template literals with `execGitCommand` array calls

- [ ] **Fix command injection in `push.ts`**
  - [ ] Add validation function for remote names
  - [ ] Validate `targetRemote` before use
  - [ ] Validate `branchName` before use (defensive programming)
  - [ ] Replace `execAsync` template literals with `execGitCommand`

- [ ] **Fix shell script security issue**
  - [ ] Replace unsafe `export $(grep ... | xargs)` with safer parsing
  - [ ] Add validation for `.env` file contents
  - [ ] Test with edge cases (spaces, special chars, quotes)

### Short-term (Next Sprint)

- [ ] **Audit all `execAsync` calls**
  - [ ] Create inventory of all `execAsync` calls with template literals
  - [ ] Replace with `execGitCommand` where possible
  - [ ] Document exceptions and why they're safe

- [ ] **Add input validation utilities**
  - [ ] Create `isValidRemoteName()` function
  - [ ] Create `sanitizeCommitMessage()` function
  - [ ] Add validation for all user-controlled inputs

- [ ] **Security testing**
  - [ ] Add unit tests for command injection prevention
  - [ ] Add integration tests with malicious inputs
  - [ ] Test shell script with malicious `.env` files

### Long-term (Security Hardening)

- [ ] **Code review process**
  - [ ] Add security checklist for PR reviews
  - [ ] Require security review for shell command execution changes
  - [ ] Add automated security scanning

- [ ] **Documentation**
  - [ ] Document secure coding practices for shell commands
  - [ ] Create security guidelines for contributors
  - [ ] Add security section to CONTRIBUTING.md

---

## 🔍 Testing Recommendations

### Command Injection Tests

```typescript
// Test cases for merge.ts
describe('merge handler security', () => {
  it('should reject branch names with shell metacharacters', () => {
    // Test: branchName = "main; rm -rf /"
    // Expected: Validation error, command not executed
  });

  it('should sanitize commit messages', () => {
    // Test: message = '"; malicious_command; "'
    // Expected: Sanitized or rejected
  });
});

// Test cases for push.ts
describe('push handler security', () => {
  it('should reject remote names with shell metacharacters', () => {
    // Test: remote = "origin; malicious_command; #"
    // Expected: Validation error, command not executed
  });
});
```

### Shell Script Tests

```bash
# Test with malicious .env content
echo 'VAR="value; echo PWNED"' > test.env
# Expected: Should not execute the command

# Test with spaces in values
echo 'VAR="value with spaces"' > test.env
# Expected: Should handle correctly

# Test with special characters
echo 'VAR="value\$with\$dollars"' > test.env
# Expected: Should handle correctly
```

---

## 📚 References

- [OWASP Command Injection](https://owasp.org/www-community/attacks/Command_Injection)
- [Node.js Child Process Security](https://nodejs.org/api/child_process.html#child_process_security_concerns)
- [Shell Script Security Best Practices](https://mywiki.wooledge.org/BashGuide/Practices)

---

## Notes

- All findings are based on code diff analysis
- No runtime testing was performed
- Assumes attacker has access to API endpoints (authenticated or unauthenticated)
- Fixes should be tested thoroughly before deployment

---

**Last Updated:** $(date)
**Next Review:** After fixes are implemented
TODO.md (8 changed lines)
@@ -2,14 +2,6 @@
|
||||
|
||||
- Setting the default model does not seem to work.
|
||||
|
||||
# Performance (completed)
|
||||
|
||||
- [x] Graph performance mode for large graphs (compact nodes/edges + visible-only rendering)
|
||||
- [x] Render containment on heavy scroll regions (kanban columns, chat history)
|
||||
- [x] Reduce blur/shadow effects when lists get large
|
||||
- [x] React Query tuning for heavy datasets (less refetch on focus/reconnect)
|
||||
- [x] DnD/list rendering optimizations (virtualized kanban + memoized card sections)
|
||||
|
||||
# UX
|
||||
|
||||
- Consolidate all models into a single place in the settings instead of spreading them across AI profiles and other settings
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@automaker/server",
|
||||
"version": "0.13.0",
|
||||
"version": "0.12.0",
|
||||
"description": "Backend server for Automaker - provides API for both web and Electron modes",
|
||||
"author": "AutoMaker Team",
|
||||
"license": "SEE LICENSE IN LICENSE",
|
||||
@@ -40,8 +40,7 @@
|
||||
"express": "5.2.1",
|
||||
"morgan": "1.10.1",
|
||||
"node-pty": "1.1.0-beta41",
|
||||
"ws": "8.18.3",
|
||||
"yaml": "2.7.0"
|
||||
"ws": "8.18.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cookie": "0.6.0",
|
||||
|
||||
@@ -43,6 +43,7 @@ import { createEnhancePromptRoutes } from './routes/enhance-prompt/index.js';
|
||||
import { createWorktreeRoutes } from './routes/worktree/index.js';
|
||||
import { createGitRoutes } from './routes/git/index.js';
|
||||
import { createSetupRoutes } from './routes/setup/index.js';
|
||||
import { createSuggestionsRoutes } from './routes/suggestions/index.js';
|
||||
import { createModelsRoutes } from './routes/models/index.js';
|
||||
import { createRunningAgentsRoutes } from './routes/running-agents/index.js';
|
||||
import { createWorkspaceRoutes } from './routes/workspace/index.js';
|
||||
@@ -82,9 +83,6 @@ import { createNotificationsRoutes } from './routes/notifications/index.js';
|
||||
import { getNotificationService } from './services/notification-service.js';
|
||||
import { createEventHistoryRoutes } from './routes/event-history/index.js';
|
||||
import { getEventHistoryService } from './services/event-history-service.js';
|
||||
import { getTestRunnerService } from './services/test-runner-service.js';
|
||||
import { createProviderUsageRoutes } from './routes/provider-usage/index.js';
|
||||
import { ProviderUsageTracker } from './services/provider-usage-tracker.js';
|
||||
|
||||
// Load environment variables
|
||||
dotenv.config();
|
||||
@@ -238,7 +236,6 @@ const codexModelCacheService = new CodexModelCacheService(DATA_DIR, codexAppServ
|
||||
const codexUsageService = new CodexUsageService(codexAppServerService);
|
||||
const mcpTestService = new MCPTestService(settingsService);
|
||||
const ideationService = new IdeationService(events, settingsService, featureLoader);
|
||||
const providerUsageTracker = new ProviderUsageTracker(codexUsageService);
|
||||
|
||||
// Initialize DevServerService with event emitter for real-time log streaming
|
||||
const devServerService = getDevServerService();
|
||||
@@ -251,12 +248,8 @@ notificationService.setEventEmitter(events);
|
||||
// Initialize Event History Service
|
||||
const eventHistoryService = getEventHistoryService();
|
||||
|
||||
// Initialize Test Runner Service with event emitter for real-time test output streaming
|
||||
const testRunnerService = getTestRunnerService();
|
||||
testRunnerService.setEventEmitter(events);
|
||||
|
||||
// Initialize Event Hook Service for custom event triggers (with history storage)
|
||||
eventHookService.initialize(events, settingsService, eventHistoryService, featureLoader);
|
||||
eventHookService.initialize(events, settingsService, eventHistoryService);
|
||||
|
||||
// Initialize services
|
||||
(async () => {
|
||||
@@ -333,6 +326,7 @@ app.use('/api/auto-mode', createAutoModeRoutes(autoModeService));
|
||||
app.use('/api/enhance-prompt', createEnhancePromptRoutes(settingsService));
|
||||
app.use('/api/worktree', createWorktreeRoutes(events, settingsService));
|
||||
app.use('/api/git', createGitRoutes());
|
||||
app.use('/api/suggestions', createSuggestionsRoutes(events, settingsService));
|
||||
app.use('/api/models', createModelsRoutes());
|
||||
app.use('/api/spec-regeneration', createSpecRegenerationRoutes(events, settingsService));
|
||||
app.use('/api/running-agents', createRunningAgentsRoutes(autoModeService));
|
||||
@@ -350,7 +344,6 @@ app.use('/api/pipeline', createPipelineRoutes(pipelineService));
|
||||
app.use('/api/ideation', createIdeationRoutes(events, ideationService, featureLoader));
|
||||
app.use('/api/notifications', createNotificationsRoutes(notificationService));
|
||||
app.use('/api/event-history', createEventHistoryRoutes(eventHistoryService, settingsService));
|
||||
app.use('/api/provider-usage', createProviderUsageRoutes(providerUsageTracker));
|
||||
|
||||
// Create HTTP server
|
||||
const server = createServer(app);
|
||||
|
||||
@@ -23,13 +23,6 @@ const SESSION_COOKIE_NAME = 'automaker_session';
|
||||
const SESSION_MAX_AGE_MS = 30 * 24 * 60 * 60 * 1000; // 30 days
|
||||
const WS_TOKEN_MAX_AGE_MS = 5 * 60 * 1000; // 5 minutes for WebSocket connection tokens
|
||||
|
||||
/**
|
||||
* Check if an environment variable is set to 'true'
|
||||
*/
|
||||
function isEnvTrue(envVar: string | undefined): boolean {
|
||||
return envVar === 'true';
|
||||
}
|
||||
|
||||
// Session store - persisted to file for survival across server restarts
|
||||
const validSessions = new Map<string, { createdAt: number; expiresAt: number }>();
|
||||
|
||||
@@ -141,8 +134,8 @@ const API_KEY = ensureApiKey();
|
||||
const BOX_CONTENT_WIDTH = 67;
|
||||
|
||||
// Print API key to console for web mode users (unless suppressed for production logging)
|
||||
if (!isEnvTrue(process.env.AUTOMAKER_HIDE_API_KEY)) {
|
||||
const autoLoginEnabled = isEnvTrue(process.env.AUTOMAKER_AUTO_LOGIN);
|
||||
if (process.env.AUTOMAKER_HIDE_API_KEY !== 'true') {
|
||||
const autoLoginEnabled = process.env.AUTOMAKER_AUTO_LOGIN === 'true';
|
||||
const autoLoginStatus = autoLoginEnabled ? 'enabled (auto-login active)' : 'disabled';
|
||||
|
||||
// Build box lines with exact padding
|
||||
@@ -382,12 +375,6 @@ function checkAuthentication(
|
||||
* 5. Session cookie (for web mode)
|
||||
*/
|
||||
export function authMiddleware(req: Request, res: Response, next: NextFunction): void {
|
||||
// Allow disabling auth for local/trusted networks
|
||||
if (isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH)) {
|
||||
next();
|
||||
return;
|
||||
}
|
||||
|
||||
const result = checkAuthentication(
|
||||
req.headers as Record<string, string | string[] | undefined>,
|
||||
req.query as Record<string, string | undefined>,
|
||||
@@ -433,10 +420,9 @@ export function isAuthEnabled(): boolean {
|
||||
* Get authentication status for health endpoint
|
||||
*/
|
||||
export function getAuthStatus(): { enabled: boolean; method: string } {
|
||||
const disabled = isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH);
|
||||
return {
|
||||
enabled: !disabled,
|
||||
method: disabled ? 'disabled' : 'api_key_or_session',
|
||||
enabled: true,
|
||||
method: 'api_key_or_session',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -444,7 +430,6 @@ export function getAuthStatus(): { enabled: boolean; method: string } {
|
||||
* Check if a request is authenticated (for status endpoint)
|
||||
*/
|
||||
export function isRequestAuthenticated(req: Request): boolean {
|
||||
if (isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH)) return true;
|
||||
const result = checkAuthentication(
|
||||
req.headers as Record<string, string | string[] | undefined>,
|
||||
req.query as Record<string, string | undefined>,
|
||||
@@ -462,6 +447,5 @@ export function checkRawAuthentication(
|
||||
query: Record<string, string | undefined>,
|
||||
cookies: Record<string, string | undefined>
|
||||
): boolean {
|
||||
if (isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH)) return true;
|
||||
return checkAuthentication(headers, query, cookies).authenticated;
|
||||
}
|
||||
|
||||
@@ -10,12 +10,7 @@ import type {
|
||||
McpServerConfig,
|
||||
PromptCustomization,
|
||||
ClaudeApiProfile,
|
||||
ClaudeCompatibleProvider,
|
||||
PhaseModelKey,
|
||||
PhaseModelEntry,
|
||||
Credentials,
|
||||
} from '@automaker/types';
|
||||
import { DEFAULT_PHASE_MODELS } from '@automaker/types';
|
||||
import {
|
||||
mergeAutoModePrompts,
|
||||
mergeAgentPrompts,
|
||||
@@ -365,22 +360,16 @@ export interface ActiveClaudeApiProfileResult {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the active Claude API profile and credentials from settings.
|
||||
* Checks project settings first for per-project overrides, then falls back to global settings.
|
||||
* Get the active Claude API profile and credentials from global settings.
|
||||
* Returns both the profile and credentials for resolving 'credentials' apiKeySource.
|
||||
*
|
||||
* @deprecated Use getProviderById and getPhaseModelWithOverrides instead for the new provider system.
|
||||
* This function is kept for backward compatibility during migration.
|
||||
*
|
||||
* @param settingsService - Optional settings service instance
|
||||
* @param logPrefix - Prefix for log messages (e.g., '[AgentService]')
|
||||
* @param projectPath - Optional project path for per-project override
|
||||
* @returns Promise resolving to object with profile and credentials
|
||||
*/
|
||||
export async function getActiveClaudeApiProfile(
|
||||
settingsService?: SettingsService | null,
|
||||
logPrefix = '[SettingsHelper]',
|
||||
projectPath?: string
|
||||
logPrefix = '[SettingsHelper]'
|
||||
): Promise<ActiveClaudeApiProfileResult> {
|
||||
if (!settingsService) {
|
||||
return { profile: undefined, credentials: undefined };
|
||||
@@ -390,30 +379,10 @@ export async function getActiveClaudeApiProfile(
|
||||
const globalSettings = await settingsService.getGlobalSettings();
|
||||
const credentials = await settingsService.getCredentials();
|
||||
const profiles = globalSettings.claudeApiProfiles || [];
|
||||
|
||||
// Check for project-level override first
|
||||
let activeProfileId: string | null | undefined;
|
||||
let isProjectOverride = false;
|
||||
|
||||
if (projectPath) {
|
||||
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
||||
// undefined = use global, null = explicit no profile, string = specific profile
|
||||
if (projectSettings.activeClaudeApiProfileId !== undefined) {
|
||||
activeProfileId = projectSettings.activeClaudeApiProfileId;
|
||||
isProjectOverride = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to global if project doesn't specify
|
||||
if (activeProfileId === undefined && !isProjectOverride) {
|
||||
activeProfileId = globalSettings.activeClaudeApiProfileId;
|
||||
}
|
||||
const activeProfileId = globalSettings.activeClaudeApiProfileId;
|
||||
|
||||
// No active profile selected - use direct Anthropic API
|
||||
if (!activeProfileId) {
|
||||
if (isProjectOverride && activeProfileId === null) {
|
||||
logger.info(`${logPrefix} Project explicitly using Direct Anthropic API`);
|
||||
}
|
||||
return { profile: undefined, credentials };
|
||||
}
|
||||
|
||||
@@ -421,8 +390,7 @@ export async function getActiveClaudeApiProfile(
|
||||
const activeProfile = profiles.find((p) => p.id === activeProfileId);
|
||||
|
||||
if (activeProfile) {
|
||||
const overrideSuffix = isProjectOverride ? ' (project override)' : '';
|
||||
logger.info(`${logPrefix} Using Claude API profile: ${activeProfile.name}${overrideSuffix}`);
|
||||
logger.info(`${logPrefix} Using Claude API profile: ${activeProfile.name}`);
|
||||
return { profile: activeProfile, credentials };
|
||||
} else {
|
||||
logger.warn(
|
||||
@@ -435,296 +403,3 @@ export async function getActiveClaudeApiProfile(
|
||||
return { profile: undefined, credentials: undefined };
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// New Provider System Helpers
|
||||
// ============================================================================
|
||||
|
||||
/** Result from getProviderById */
|
||||
export interface ProviderByIdResult {
|
||||
/** The provider, or undefined if not found */
|
||||
provider: ClaudeCompatibleProvider | undefined;
|
||||
/** Credentials for resolving 'credentials' apiKeySource */
|
||||
credentials: Credentials | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a ClaudeCompatibleProvider by its ID.
|
||||
* Returns the provider configuration and credentials for API key resolution.
|
||||
*
|
||||
* @param providerId - The provider ID to look up
|
||||
* @param settingsService - Settings service instance
|
||||
* @param logPrefix - Prefix for log messages
|
||||
* @returns Promise resolving to object with provider and credentials
|
||||
*/
|
||||
export async function getProviderById(
|
||||
providerId: string,
|
||||
settingsService: SettingsService,
|
||||
logPrefix = '[SettingsHelper]'
|
||||
): Promise<ProviderByIdResult> {
|
||||
try {
|
||||
const globalSettings = await settingsService.getGlobalSettings();
|
||||
const credentials = await settingsService.getCredentials();
|
||||
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||
|
||||
const provider = providers.find((p) => p.id === providerId);
|
||||
|
||||
if (provider) {
|
||||
if (provider.enabled === false) {
|
||||
logger.warn(`${logPrefix} Provider "${provider.name}" (${providerId}) is disabled`);
|
||||
} else {
|
||||
logger.debug(`${logPrefix} Found provider: ${provider.name}`);
|
||||
}
|
||||
return { provider, credentials };
|
||||
} else {
|
||||
logger.warn(`${logPrefix} Provider not found: ${providerId}`);
|
||||
return { provider: undefined, credentials };
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`${logPrefix} Failed to load provider by ID:`, error);
|
||||
return { provider: undefined, credentials: undefined };
|
||||
}
|
||||
}
|
||||
|
||||
/** Result from getPhaseModelWithOverrides */
|
||||
export interface PhaseModelWithOverridesResult {
|
||||
/** The resolved phase model entry */
|
||||
phaseModel: PhaseModelEntry;
|
||||
/** Whether a project override was applied */
|
||||
isProjectOverride: boolean;
|
||||
/** The provider if providerId is set and found */
|
||||
provider: ClaudeCompatibleProvider | undefined;
|
||||
/** Credentials for API key resolution */
|
||||
credentials: Credentials | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the phase model configuration for a specific phase, applying project overrides if available.
|
||||
* Also resolves the provider if the phase model has a providerId.
|
||||
*
|
||||
* @param phase - The phase key (e.g., 'enhancementModel', 'specGenerationModel')
|
||||
* @param settingsService - Optional settings service instance (returns defaults if undefined)
|
||||
* @param projectPath - Optional project path for checking overrides
|
||||
* @param logPrefix - Prefix for log messages
|
||||
* @returns Promise resolving to phase model with provider info
|
||||
*/
|
||||
export async function getPhaseModelWithOverrides(
|
||||
phase: PhaseModelKey,
|
||||
settingsService?: SettingsService | null,
|
||||
projectPath?: string,
|
||||
logPrefix = '[SettingsHelper]'
|
||||
): Promise<PhaseModelWithOverridesResult> {
|
||||
// Handle undefined settingsService gracefully
|
||||
if (!settingsService) {
|
||||
logger.info(`${logPrefix} SettingsService not available, using default for ${phase}`);
|
||||
return {
|
||||
phaseModel: DEFAULT_PHASE_MODELS[phase] || { model: 'sonnet' },
|
||||
isProjectOverride: false,
|
||||
provider: undefined,
|
||||
credentials: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const globalSettings = await settingsService.getGlobalSettings();
|
||||
const credentials = await settingsService.getCredentials();
|
||||
const globalPhaseModels = globalSettings.phaseModels || {};
|
||||
|
||||
// Start with global phase model
|
||||
let phaseModel = globalPhaseModels[phase];
|
||||
let isProjectOverride = false;
|
||||
|
||||
// Check for project override
|
||||
if (projectPath) {
|
||||
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
||||
const projectOverrides = projectSettings.phaseModelOverrides || {};
|
||||
|
||||
if (projectOverrides[phase]) {
|
||||
phaseModel = projectOverrides[phase];
|
||||
isProjectOverride = true;
|
||||
logger.debug(`${logPrefix} Using project override for ${phase}`);
|
||||
}
|
||||
}
|
||||
|
||||
// If no phase model found, use per-phase default
|
||||
if (!phaseModel) {
|
||||
phaseModel = DEFAULT_PHASE_MODELS[phase] || { model: 'sonnet' };
|
||||
logger.debug(`${logPrefix} No ${phase} configured, using default: ${phaseModel.model}`);
|
||||
}
|
||||
|
||||
// Resolve provider if providerId is set
|
||||
let provider: ClaudeCompatibleProvider | undefined;
|
||||
if (phaseModel.providerId) {
|
||||
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||
provider = providers.find((p) => p.id === phaseModel.providerId);
|
||||
|
||||
if (provider) {
|
||||
if (provider.enabled === false) {
|
||||
logger.warn(
|
||||
`${logPrefix} Provider "${provider.name}" for ${phase} is disabled, falling back to direct API`
|
||||
);
|
||||
provider = undefined;
|
||||
} else {
|
||||
logger.debug(`${logPrefix} Using provider "${provider.name}" for ${phase}`);
|
||||
}
|
||||
} else {
|
||||
logger.warn(
|
||||
`${logPrefix} Provider ${phaseModel.providerId} not found for ${phase}, falling back to direct API`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
phaseModel,
|
||||
isProjectOverride,
|
||||
provider,
|
||||
credentials,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${logPrefix} Failed to get phase model with overrides:`, error);
|
||||
// Return a safe default
|
||||
return {
|
||||
phaseModel: { model: 'sonnet' },
|
||||
isProjectOverride: false,
|
||||
provider: undefined,
|
||||
credentials: undefined,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** Result from getProviderByModelId */
|
||||
export interface ProviderByModelIdResult {
|
||||
/** The provider that contains this model, or undefined if not found */
|
||||
provider: ClaudeCompatibleProvider | undefined;
|
||||
/** The model configuration if found */
|
||||
modelConfig: import('@automaker/types').ProviderModel | undefined;
|
||||
/** Credentials for API key resolution */
|
||||
credentials: Credentials | undefined;
|
||||
/** The resolved Claude model ID to use for API calls (from mapsToClaudeModel) */
|
||||
resolvedModel: string | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a ClaudeCompatibleProvider by one of its model IDs.
|
||||
* Searches through all enabled providers to find one that contains the specified model.
|
||||
* This is useful when you have a model string from the UI but need the provider config.
|
||||
*
|
||||
* Also resolves the `mapsToClaudeModel` field to get the actual Claude model ID to use
|
||||
* when calling the API (e.g., "GLM-4.5-Air" -> "claude-haiku-4-5").
|
||||
*
|
||||
* @param modelId - The model ID to search for (e.g., "GLM-4.7", "MiniMax-M2.1")
|
||||
* @param settingsService - Settings service instance
|
||||
* @param logPrefix - Prefix for log messages
|
||||
* @returns Promise resolving to object with provider, model config, credentials, and resolved model
|
||||
*/
|
||||
export async function getProviderByModelId(
|
||||
modelId: string,
|
||||
settingsService: SettingsService,
|
||||
logPrefix = '[SettingsHelper]'
|
||||
): Promise<ProviderByModelIdResult> {
|
||||
try {
|
||||
const globalSettings = await settingsService.getGlobalSettings();
|
||||
const credentials = await settingsService.getCredentials();
|
||||
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||
|
||||
// Search through all enabled providers for this model
|
||||
for (const provider of providers) {
|
||||
// Skip disabled providers
|
||||
if (provider.enabled === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this provider has the model
|
||||
const modelConfig = provider.models?.find(
|
||||
(m) => m.id === modelId || m.id.toLowerCase() === modelId.toLowerCase()
|
||||
);
|
||||
|
||||
if (modelConfig) {
|
||||
logger.info(`${logPrefix} Found model "${modelId}" in provider "${provider.name}"`);
|
||||
|
||||
// Resolve the mapped Claude model if specified
|
||||
let resolvedModel: string | undefined;
|
||||
if (modelConfig.mapsToClaudeModel) {
|
||||
// Import resolveModelString to convert alias to full model ID
|
||||
const { resolveModelString } = await import('@automaker/model-resolver');
|
||||
resolvedModel = resolveModelString(modelConfig.mapsToClaudeModel);
|
||||
logger.info(
|
||||
`${logPrefix} Model "${modelId}" maps to Claude model "${modelConfig.mapsToClaudeModel}" -> "${resolvedModel}"`
|
||||
);
|
||||
}
|
||||
|
||||
return { provider, modelConfig, credentials, resolvedModel };
|
||||
}
|
||||
}
|
||||
|
||||
// Model not found in any provider
|
||||
logger.debug(`${logPrefix} Model "${modelId}" not found in any provider`);
|
||||
return {
|
||||
provider: undefined,
|
||||
modelConfig: undefined,
|
||||
credentials: undefined,
|
||||
resolvedModel: undefined,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${logPrefix} Failed to find provider by model ID:`, error);
|
||||
return {
|
||||
provider: undefined,
|
||||
modelConfig: undefined,
|
||||
credentials: undefined,
|
||||
resolvedModel: undefined,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all enabled provider models for use in model dropdowns.
|
||||
* Returns models from all enabled ClaudeCompatibleProviders.
|
||||
*
|
||||
* @param settingsService - Settings service instance
|
||||
* @param logPrefix - Prefix for log messages
|
||||
* @returns Promise resolving to array of provider models with their provider info
|
||||
*/
|
||||
export async function getAllProviderModels(
|
||||
settingsService: SettingsService,
|
||||
logPrefix = '[SettingsHelper]'
|
||||
): Promise<
|
||||
Array<{
|
||||
providerId: string;
|
||||
providerName: string;
|
||||
model: import('@automaker/types').ProviderModel;
|
||||
}>
|
||||
> {
|
||||
try {
|
||||
const globalSettings = await settingsService.getGlobalSettings();
|
||||
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||
|
||||
const allModels: Array<{
|
||||
providerId: string;
|
||||
providerName: string;
|
||||
model: import('@automaker/types').ProviderModel;
|
||||
}> = [];
|
||||
|
||||
for (const provider of providers) {
|
||||
// Skip disabled providers
|
||||
if (provider.enabled === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const model of provider.models || []) {
|
||||
allModels.push({
|
||||
providerId: provider.id,
|
||||
providerName: provider.name,
|
||||
model,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`${logPrefix} Found ${allModels.length} models from ${providers.length} providers`
|
||||
);
|
||||
return allModels;
|
||||
} catch (error) {
|
||||
logger.error(`${logPrefix} Failed to get all provider models:`, error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,17 +14,8 @@ import {
|
||||
getThinkingTokenBudget,
|
||||
validateBareModelId,
|
||||
type ClaudeApiProfile,
|
||||
type ClaudeCompatibleProvider,
|
||||
type Credentials,
|
||||
} from '@automaker/types';
|
||||
|
||||
/**
|
||||
* ProviderConfig - Union type for provider configuration
|
||||
*
|
||||
* Accepts either the legacy ClaudeApiProfile or new ClaudeCompatibleProvider.
|
||||
* Both share the same connection settings structure.
|
||||
*/
|
||||
type ProviderConfig = ClaudeApiProfile | ClaudeCompatibleProvider;
|
||||
import type {
|
||||
ExecuteOptions,
|
||||
ProviderMessage,
|
||||
@@ -60,48 +51,34 @@ const ALLOWED_ENV_VARS = [
|
||||
// System vars are always passed from process.env regardless of profile
|
||||
const SYSTEM_ENV_VARS = ['PATH', 'HOME', 'SHELL', 'TERM', 'USER', 'LANG', 'LC_ALL'];
|
||||
|
||||
/**
|
||||
* Check if the config is a ClaudeCompatibleProvider (new system)
|
||||
* by checking for the 'models' array property
|
||||
*/
|
||||
function isClaudeCompatibleProvider(config: ProviderConfig): config is ClaudeCompatibleProvider {
|
||||
return 'models' in config && Array.isArray(config.models);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build environment for the SDK with only explicitly allowed variables.
|
||||
* When a provider/profile is provided, uses its configuration (clean switch - don't inherit from process.env).
|
||||
* When no provider is provided, uses direct Anthropic API settings from process.env.
|
||||
* When a profile is provided, uses profile configuration (clean switch - don't inherit from process.env).
|
||||
* When no profile is provided, uses direct Anthropic API settings from process.env.
|
||||
*
|
||||
* Supports both:
|
||||
* - ClaudeCompatibleProvider (new system with models[] array)
|
||||
* - ClaudeApiProfile (legacy system with modelMappings)
|
||||
*
|
||||
* @param providerConfig - Optional provider configuration for alternative endpoint
|
||||
* @param profile - Optional Claude API profile for alternative endpoint configuration
|
||||
* @param credentials - Optional credentials object for resolving 'credentials' apiKeySource
|
||||
*/
|
||||
function buildEnv(
|
||||
providerConfig?: ProviderConfig,
|
||||
profile?: ClaudeApiProfile,
|
||||
credentials?: Credentials
|
||||
): Record<string, string | undefined> {
|
||||
const env: Record<string, string | undefined> = {};
|
||||
|
||||
if (providerConfig) {
|
||||
// Use provider configuration (clean switch - don't inherit non-system vars from process.env)
|
||||
logger.debug('[buildEnv] Using provider configuration:', {
|
||||
name: providerConfig.name,
|
||||
baseUrl: providerConfig.baseUrl,
|
||||
apiKeySource: providerConfig.apiKeySource ?? 'inline',
|
||||
isNewProvider: isClaudeCompatibleProvider(providerConfig),
|
||||
if (profile) {
|
||||
// Use profile configuration (clean switch - don't inherit non-system vars from process.env)
|
||||
logger.debug('Building environment from Claude API profile:', {
|
||||
name: profile.name,
|
||||
apiKeySource: profile.apiKeySource ?? 'inline',
|
||||
});
|
||||
|
||||
// Resolve API key based on source strategy
|
||||
let apiKey: string | undefined;
|
||||
const source = providerConfig.apiKeySource ?? 'inline'; // Default to inline for backwards compat
|
||||
const source = profile.apiKeySource ?? 'inline'; // Default to inline for backwards compat
|
||||
|
||||
switch (source) {
|
||||
case 'inline':
|
||||
apiKey = providerConfig.apiKey;
|
||||
apiKey = profile.apiKey;
|
||||
break;
|
||||
case 'env':
|
||||
apiKey = process.env.ANTHROPIC_API_KEY;
|
||||
@@ -113,55 +90,46 @@ function buildEnv(
|
||||
|
||||
// Warn if no API key found
|
||||
if (!apiKey) {
|
||||
logger.warn(`No API key found for provider "${providerConfig.name}" with source "${source}"`);
|
||||
logger.warn(`No API key found for profile "${profile.name}" with source "${source}"`);
|
||||
}
|
||||
|
||||
// Authentication
|
||||
if (providerConfig.useAuthToken) {
|
||||
if (profile.useAuthToken) {
|
||||
env['ANTHROPIC_AUTH_TOKEN'] = apiKey;
|
||||
} else {
|
||||
env['ANTHROPIC_API_KEY'] = apiKey;
|
||||
}
|
||||
|
||||
// Endpoint configuration
|
||||
env['ANTHROPIC_BASE_URL'] = providerConfig.baseUrl;
|
||||
logger.debug(`[buildEnv] Set ANTHROPIC_BASE_URL to: ${providerConfig.baseUrl}`);
|
||||
env['ANTHROPIC_BASE_URL'] = profile.baseUrl;
|
||||
|
||||
if (providerConfig.timeoutMs) {
|
||||
env['API_TIMEOUT_MS'] = String(providerConfig.timeoutMs);
|
||||
if (profile.timeoutMs) {
|
||||
env['API_TIMEOUT_MS'] = String(profile.timeoutMs);
|
||||
}
|
||||
|
||||
// Model mappings - only for legacy ClaudeApiProfile
|
||||
// For ClaudeCompatibleProvider, the model is passed directly (no mapping needed)
|
||||
if (!isClaudeCompatibleProvider(providerConfig) && providerConfig.modelMappings) {
|
||||
if (providerConfig.modelMappings.haiku) {
|
||||
env['ANTHROPIC_DEFAULT_HAIKU_MODEL'] = providerConfig.modelMappings.haiku;
|
||||
}
|
||||
if (providerConfig.modelMappings.sonnet) {
|
||||
env['ANTHROPIC_DEFAULT_SONNET_MODEL'] = providerConfig.modelMappings.sonnet;
|
||||
}
|
||||
if (providerConfig.modelMappings.opus) {
|
||||
env['ANTHROPIC_DEFAULT_OPUS_MODEL'] = providerConfig.modelMappings.opus;
|
||||
}
|
||||
// Model mappings
|
||||
if (profile.modelMappings?.haiku) {
|
||||
env['ANTHROPIC_DEFAULT_HAIKU_MODEL'] = profile.modelMappings.haiku;
|
||||
}
|
||||
if (profile.modelMappings?.sonnet) {
|
||||
env['ANTHROPIC_DEFAULT_SONNET_MODEL'] = profile.modelMappings.sonnet;
|
||||
}
|
||||
if (profile.modelMappings?.opus) {
|
||||
env['ANTHROPIC_DEFAULT_OPUS_MODEL'] = profile.modelMappings.opus;
|
||||
}
|
||||
|
||||
// Traffic control
|
||||
if (providerConfig.disableNonessentialTraffic) {
|
||||
if (profile.disableNonessentialTraffic) {
|
||||
env['CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC'] = '1';
|
||||
}
|
||||
} else {
|
||||
// Use direct Anthropic API - pass through credentials or environment variables
|
||||
// This supports:
|
||||
// 1. API Key mode: ANTHROPIC_API_KEY from credentials (UI settings) or env
|
||||
// Use direct Anthropic API - two modes:
|
||||
// 1. API Key mode: ANTHROPIC_API_KEY from credentials/env
|
||||
// 2. Claude Max plan: Uses CLI OAuth auth (SDK handles this automatically)
|
||||
// 3. Custom endpoints via ANTHROPIC_BASE_URL env var (backward compatibility)
|
||||
//
|
||||
// Priority: credentials file (UI settings) -> environment variable
|
||||
// Note: Only auth and endpoint vars are passed. Model mappings and traffic
|
||||
// control are NOT passed (those require a profile for explicit configuration).
|
||||
if (credentials?.apiKeys?.anthropic) {
|
||||
env['ANTHROPIC_API_KEY'] = credentials.apiKeys.anthropic;
|
||||
} else if (process.env.ANTHROPIC_API_KEY) {
|
||||
// IMPORTANT: Do NOT set any profile vars (base URL, model mappings, etc.)
|
||||
// This ensures clean switching - only pass through what's in process.env
|
||||
if (process.env.ANTHROPIC_API_KEY) {
|
||||
env['ANTHROPIC_API_KEY'] = process.env.ANTHROPIC_API_KEY;
|
||||
}
|
||||
// If using Claude Max plan via CLI auth, the SDK handles auth automatically
|
||||
@@ -170,10 +138,9 @@ function buildEnv(
|
||||
if (process.env.ANTHROPIC_AUTH_TOKEN) {
|
||||
env['ANTHROPIC_AUTH_TOKEN'] = process.env.ANTHROPIC_AUTH_TOKEN;
|
||||
}
|
||||
// Pass through ANTHROPIC_BASE_URL if set in environment (backward compatibility)
|
||||
if (process.env.ANTHROPIC_BASE_URL) {
|
||||
env['ANTHROPIC_BASE_URL'] = process.env.ANTHROPIC_BASE_URL;
|
||||
}
|
||||
// Do NOT set ANTHROPIC_BASE_URL - let SDK use default Anthropic endpoint
|
||||
// Do NOT set model mappings - use standard Claude model names
|
||||
// Do NOT set CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC
|
||||
}
|
||||
|
||||
// Always add system vars from process.env
|
||||
@@ -211,14 +178,9 @@ export class ClaudeProvider extends BaseProvider {
|
||||
sdkSessionId,
|
||||
thinkingLevel,
|
||||
claudeApiProfile,
|
||||
claudeCompatibleProvider,
|
||||
credentials,
|
||||
} = options;
|
||||
|
||||
// Determine which provider config to use
|
||||
// claudeCompatibleProvider takes precedence over claudeApiProfile
|
||||
const providerConfig = claudeCompatibleProvider || claudeApiProfile;
|
||||
|
||||
// Convert thinking level to token budget
|
||||
const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel);
|
||||
|
||||
@@ -229,9 +191,9 @@ export class ClaudeProvider extends BaseProvider {
|
||||
maxTurns,
|
||||
cwd,
|
||||
// Pass only explicitly allowed environment variables to SDK
|
||||
// When a provider is active, uses provider settings (clean switch)
|
||||
// When no provider, uses direct Anthropic API (from process.env or CLI OAuth)
|
||||
env: buildEnv(providerConfig, credentials),
|
||||
// When a profile is active, uses profile settings (clean switch)
|
||||
// When no profile, uses direct Anthropic API (from process.env or CLI OAuth)
|
||||
env: buildEnv(claudeApiProfile, credentials),
|
||||
// Pass through allowedTools if provided by caller (decided by sdk-options.ts)
|
||||
...(allowedTools && { allowedTools }),
|
||||
// AUTONOMOUS MODE: Always bypass permissions for fully autonomous operation
|
||||
@@ -276,18 +238,6 @@ export class ClaudeProvider extends BaseProvider {
|
||||
promptPayload = prompt;
|
||||
}
|
||||
|
||||
// Log the environment being passed to the SDK for debugging
|
||||
const envForSdk = sdkOptions.env as Record<string, string | undefined>;
|
||||
logger.debug('[ClaudeProvider] SDK Configuration:', {
|
||||
model: sdkOptions.model,
|
||||
baseUrl: envForSdk?.['ANTHROPIC_BASE_URL'] || '(default Anthropic API)',
|
||||
hasApiKey: !!envForSdk?.['ANTHROPIC_API_KEY'],
|
||||
hasAuthToken: !!envForSdk?.['ANTHROPIC_AUTH_TOKEN'],
|
||||
providerName: providerConfig?.name || '(direct Anthropic)',
|
||||
maxTurns: sdkOptions.maxTurns,
|
||||
maxThinkingTokens: sdkOptions.maxThinkingTokens,
|
||||
});
|
||||
|
||||
// Execute via Claude Agent SDK
|
||||
try {
|
||||
const stream = query({ prompt: promptPayload, options: sdkOptions });
|
||||
|
||||
@@ -337,11 +337,10 @@ export class CursorProvider extends CliProvider {
|
||||
'--stream-partial-output' // Real-time streaming
|
||||
);
|
||||
|
||||
// In read-only mode, use --mode ask for Q&A style (no tools)
|
||||
// Otherwise, add --force to allow file edits
|
||||
if (options.readOnly) {
|
||||
cliArgs.push('--mode', 'ask');
|
||||
} else {
|
||||
// Only add --force if NOT in read-only mode
|
||||
// Without --force, Cursor CLI suggests changes but doesn't apply them
|
||||
// With --force, Cursor CLI can actually edit files
|
||||
if (!options.readOnly) {
|
||||
cliArgs.push('--force');
|
||||
}
|
||||
|
||||
@@ -673,13 +672,10 @@ export class CursorProvider extends CliProvider {
|
||||
);
|
||||
}
|
||||
|
||||
// Embed system prompt into user prompt (Cursor CLI doesn't support separate system messages)
|
||||
const effectiveOptions = this.embedSystemPromptIntoPrompt(options);
|
||||
|
||||
// Extract prompt text to pass via stdin (avoids shell escaping issues)
|
||||
const promptText = this.extractPromptText(effectiveOptions);
|
||||
const promptText = this.extractPromptText(options);
|
||||
|
||||
const cliArgs = this.buildCliArgs(effectiveOptions);
|
||||
const cliArgs = this.buildCliArgs(options);
|
||||
const subprocessOptions = this.buildSubprocessOptions(options, cliArgs);
|
||||
|
||||
// Pass prompt via stdin to avoid shell interpretation of special characters
|
||||
|
||||
@@ -1,815 +0,0 @@
|
||||
/**
|
||||
* Gemini Provider - Executes queries using the Gemini CLI
|
||||
*
|
||||
* Extends CliProvider with Gemini-specific:
|
||||
* - Event normalization for Gemini's JSONL streaming format
|
||||
* - Google account and API key authentication support
|
||||
* - Thinking level configuration
|
||||
*
|
||||
* Based on https://github.com/google-gemini/gemini-cli
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { CliProvider, type CliSpawnConfig, type CliErrorInfo } from './cli-provider.js';
|
||||
import type {
|
||||
ProviderConfig,
|
||||
ExecuteOptions,
|
||||
ProviderMessage,
|
||||
InstallationStatus,
|
||||
ModelDefinition,
|
||||
ContentBlock,
|
||||
} from './types.js';
|
||||
import { validateBareModelId } from '@automaker/types';
|
||||
import { GEMINI_MODEL_MAP, type GeminiAuthStatus } from '@automaker/types';
|
||||
import { createLogger, isAbortError } from '@automaker/utils';
|
||||
import { spawnJSONLProcess } from '@automaker/platform';
|
||||
|
||||
// Create logger for this module
|
||||
const logger = createLogger('GeminiProvider');
|
||||
|
||||
// =============================================================================
|
||||
// Gemini Stream Event Types
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Base event structure from Gemini CLI --output-format stream-json
|
||||
*
|
||||
* Actual CLI output format:
|
||||
* {"type":"init","timestamp":"...","session_id":"...","model":"..."}
|
||||
* {"type":"message","timestamp":"...","role":"user","content":"..."}
|
||||
* {"type":"message","timestamp":"...","role":"assistant","content":"...","delta":true}
|
||||
* {"type":"tool_use","timestamp":"...","tool_name":"...","tool_id":"...","parameters":{...}}
|
||||
* {"type":"tool_result","timestamp":"...","tool_id":"...","status":"success","output":"..."}
|
||||
* {"type":"result","timestamp":"...","status":"success","stats":{...}}
|
||||
*/
|
||||
interface GeminiStreamEvent {
|
||||
type: 'init' | 'message' | 'tool_use' | 'tool_result' | 'result' | 'error';
|
||||
timestamp?: string;
|
||||
session_id?: string;
|
||||
}
|
||||
|
||||
interface GeminiInitEvent extends GeminiStreamEvent {
|
||||
type: 'init';
|
||||
session_id: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
interface GeminiMessageEvent extends GeminiStreamEvent {
|
||||
type: 'message';
|
||||
role: 'user' | 'assistant';
|
||||
content: string;
|
||||
delta?: boolean;
|
||||
session_id?: string;
|
||||
}
|
||||
|
||||
interface GeminiToolUseEvent extends GeminiStreamEvent {
|
||||
type: 'tool_use';
|
||||
tool_id: string;
|
||||
tool_name: string;
|
||||
parameters: Record<string, unknown>;
|
||||
session_id?: string;
|
||||
}
|
||||
|
||||
interface GeminiToolResultEvent extends GeminiStreamEvent {
|
||||
type: 'tool_result';
|
||||
tool_id: string;
|
||||
status: 'success' | 'error';
|
||||
output: string;
|
||||
session_id?: string;
|
||||
}
|
||||
|
||||
interface GeminiResultEvent extends GeminiStreamEvent {
|
||||
type: 'result';
|
||||
status: 'success' | 'error';
|
||||
stats?: {
|
||||
total_tokens?: number;
|
||||
input_tokens?: number;
|
||||
output_tokens?: number;
|
||||
cached?: number;
|
||||
input?: number;
|
||||
duration_ms?: number;
|
||||
tool_calls?: number;
|
||||
};
|
||||
error?: string;
|
||||
session_id?: string;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Error Codes
|
||||
// =============================================================================
|
||||
|
||||
export enum GeminiErrorCode {
|
||||
NOT_INSTALLED = 'GEMINI_NOT_INSTALLED',
|
||||
NOT_AUTHENTICATED = 'GEMINI_NOT_AUTHENTICATED',
|
||||
RATE_LIMITED = 'GEMINI_RATE_LIMITED',
|
||||
MODEL_UNAVAILABLE = 'GEMINI_MODEL_UNAVAILABLE',
|
||||
NETWORK_ERROR = 'GEMINI_NETWORK_ERROR',
|
||||
PROCESS_CRASHED = 'GEMINI_PROCESS_CRASHED',
|
||||
TIMEOUT = 'GEMINI_TIMEOUT',
|
||||
UNKNOWN = 'GEMINI_UNKNOWN_ERROR',
|
||||
}
|
||||
|
||||
export interface GeminiError extends Error {
|
||||
code: GeminiErrorCode;
|
||||
recoverable: boolean;
|
||||
suggestion?: string;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Tool Name Normalization
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Gemini CLI tool name to standard tool name mapping
|
||||
* This allows the UI to properly categorize and display Gemini tool calls
|
||||
*/
|
||||
const GEMINI_TOOL_NAME_MAP: Record<string, string> = {
|
||||
write_todos: 'TodoWrite',
|
||||
read_file: 'Read',
|
||||
read_many_files: 'Read',
|
||||
replace: 'Edit',
|
||||
write_file: 'Write',
|
||||
run_shell_command: 'Bash',
|
||||
search_file_content: 'Grep',
|
||||
glob: 'Glob',
|
||||
list_directory: 'Ls',
|
||||
web_fetch: 'WebFetch',
|
||||
google_web_search: 'WebSearch',
|
||||
};
|
||||
|
||||
/**
|
||||
* Normalize Gemini tool names to standard tool names
|
||||
*/
|
||||
function normalizeGeminiToolName(geminiToolName: string): string {
|
||||
return GEMINI_TOOL_NAME_MAP[geminiToolName] || geminiToolName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize Gemini tool input parameters to standard format
|
||||
*
|
||||
* Gemini `write_todos` format:
|
||||
* {"todos": [{"description": "Task text", "status": "pending|in_progress|completed|cancelled"}]}
|
||||
*
|
||||
* Claude `TodoWrite` format:
|
||||
* {"todos": [{"content": "Task text", "status": "pending|in_progress|completed", "activeForm": "..."}]}
|
||||
*/
|
||||
function normalizeGeminiToolInput(
|
||||
toolName: string,
|
||||
input: Record<string, unknown>
|
||||
): Record<string, unknown> {
|
||||
// Normalize write_todos: map 'description' to 'content', handle 'cancelled' status
|
||||
if (toolName === 'write_todos' && Array.isArray(input.todos)) {
|
||||
return {
|
||||
todos: input.todos.map((todo: { description?: string; status?: string }) => ({
|
||||
content: todo.description || '',
|
||||
// Map 'cancelled' to 'completed' since Claude doesn't have cancelled status
|
||||
status: todo.status === 'cancelled' ? 'completed' : todo.status,
|
||||
// Use description as activeForm since Gemini doesn't have it
|
||||
activeForm: todo.description || '',
|
||||
})),
|
||||
};
|
||||
}
|
||||
return input;
|
||||
}
|
||||
|
||||
/**
|
||||
* GeminiProvider - Integrates Gemini CLI as an AI provider
|
||||
*
|
||||
* Features:
|
||||
* - Google account OAuth login support
|
||||
* - API key authentication (GEMINI_API_KEY)
|
||||
* - Vertex AI support
|
||||
* - Thinking level configuration
|
||||
* - Streaming JSON output
|
||||
*/
|
||||
export class GeminiProvider extends CliProvider {
|
||||
constructor(config: ProviderConfig = {}) {
|
||||
super(config);
|
||||
// Trigger CLI detection on construction
|
||||
this.ensureCliDetected();
|
||||
}
|
||||
|
||||
// ==========================================================================
|
||||
// CliProvider Abstract Method Implementations
|
||||
// ==========================================================================
|
||||
|
||||
getName(): string {
|
||||
return 'gemini';
|
||||
}
|
||||
|
||||
getCliName(): string {
|
||||
return 'gemini';
|
||||
}
|
||||
|
||||
getSpawnConfig(): CliSpawnConfig {
|
||||
return {
|
||||
windowsStrategy: 'npx', // Gemini CLI can be run via npx
|
||||
npxPackage: '@google/gemini-cli', // Official Google Gemini CLI package
|
||||
commonPaths: {
|
||||
linux: [
|
||||
path.join(os.homedir(), '.local/bin/gemini'),
|
||||
'/usr/local/bin/gemini',
|
||||
path.join(os.homedir(), '.npm-global/bin/gemini'),
|
||||
],
|
||||
darwin: [
|
||||
path.join(os.homedir(), '.local/bin/gemini'),
|
||||
'/usr/local/bin/gemini',
|
||||
'/opt/homebrew/bin/gemini',
|
||||
path.join(os.homedir(), '.npm-global/bin/gemini'),
|
||||
],
|
||||
win32: [
|
||||
path.join(os.homedir(), 'AppData', 'Roaming', 'npm', 'gemini.cmd'),
|
||||
path.join(os.homedir(), '.npm-global', 'gemini.cmd'),
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract prompt text from ExecuteOptions
|
||||
*/
|
||||
private extractPromptText(options: ExecuteOptions): string {
|
||||
if (typeof options.prompt === 'string') {
|
||||
return options.prompt;
|
||||
} else if (Array.isArray(options.prompt)) {
|
||||
return options.prompt
|
||||
.filter((p) => p.type === 'text' && p.text)
|
||||
.map((p) => p.text)
|
||||
.join('\n');
|
||||
} else {
|
||||
throw new Error('Invalid prompt format');
|
||||
}
|
||||
}
|
||||
|
||||
buildCliArgs(options: ExecuteOptions): string[] {
|
||||
// Model comes in stripped of provider prefix (e.g., '2.5-flash' from 'gemini-2.5-flash')
|
||||
// We need to add 'gemini-' back since it's part of the actual CLI model name
|
||||
const bareModel = options.model || '2.5-flash';
|
||||
const cliArgs: string[] = [];
|
||||
|
||||
// Streaming JSON output format for real-time updates
|
||||
cliArgs.push('--output-format', 'stream-json');
|
||||
|
||||
// Model selection - Gemini CLI expects full model names like "gemini-2.5-flash"
|
||||
// Unlike Cursor CLI where 'cursor-' is just a routing prefix, for Gemini CLI
|
||||
// the 'gemini-' is part of the actual model name Google expects
|
||||
if (bareModel && bareModel !== 'auto') {
|
||||
// Add gemini- prefix if not already present (handles edge cases)
|
||||
const cliModel = bareModel.startsWith('gemini-') ? bareModel : `gemini-${bareModel}`;
|
||||
cliArgs.push('--model', cliModel);
|
||||
}
|
||||
|
||||
// Disable sandbox mode for faster execution (sandbox adds overhead)
|
||||
cliArgs.push('--sandbox', 'false');
|
||||
|
||||
// YOLO mode for automatic approval (required for non-interactive use)
|
||||
// Use explicit approval-mode for clearer semantics
|
||||
cliArgs.push('--approval-mode', 'yolo');
|
||||
|
||||
// Explicitly include the working directory in allowed workspace directories
|
||||
// This ensures Gemini CLI allows file operations in the project directory,
|
||||
// even if it has a different workspace cached from a previous session
|
||||
if (options.cwd) {
|
||||
cliArgs.push('--include-directories', options.cwd);
|
||||
}
|
||||
|
||||
// Note: Gemini CLI doesn't have a --thinking-level flag.
|
||||
// Thinking capabilities are determined by the model selection (e.g., gemini-2.5-pro).
|
||||
// The model handles thinking internally based on the task complexity.
|
||||
|
||||
// The prompt will be passed as the last positional argument
|
||||
// We'll append it in executeQuery after extracting the text
|
||||
|
||||
return cliArgs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Gemini event to AutoMaker ProviderMessage format
|
||||
*/
|
||||
normalizeEvent(event: unknown): ProviderMessage | null {
|
||||
const geminiEvent = event as GeminiStreamEvent;
|
||||
|
||||
switch (geminiEvent.type) {
|
||||
case 'init': {
|
||||
// Init event - capture session but don't yield a message
|
||||
const initEvent = geminiEvent as GeminiInitEvent;
|
||||
logger.debug(
|
||||
`Gemini init event: session=${initEvent.session_id}, model=${initEvent.model}`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
case 'message': {
|
||||
const messageEvent = geminiEvent as GeminiMessageEvent;
|
||||
|
||||
// Skip user messages - already handled by caller
|
||||
if (messageEvent.role === 'user') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Handle assistant messages
|
||||
if (messageEvent.role === 'assistant') {
|
||||
return {
|
||||
type: 'assistant',
|
||||
session_id: messageEvent.session_id,
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: [{ type: 'text', text: messageEvent.content }],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
case 'tool_use': {
|
||||
const toolEvent = geminiEvent as GeminiToolUseEvent;
|
||||
const normalizedName = normalizeGeminiToolName(toolEvent.tool_name);
|
||||
const normalizedInput = normalizeGeminiToolInput(
|
||||
toolEvent.tool_name,
|
||||
toolEvent.parameters as Record<string, unknown>
|
||||
);
|
||||
|
||||
return {
|
||||
type: 'assistant',
|
||||
session_id: toolEvent.session_id,
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: [
|
||||
{
|
||||
type: 'tool_use',
|
||||
name: normalizedName,
|
||||
tool_use_id: toolEvent.tool_id,
|
||||
input: normalizedInput,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
case 'tool_result': {
|
||||
const toolResultEvent = geminiEvent as GeminiToolResultEvent;
|
||||
// If tool result is an error, prefix with error indicator
|
||||
const content =
|
||||
toolResultEvent.status === 'error'
|
||||
? `[ERROR] ${toolResultEvent.output}`
|
||||
: toolResultEvent.output;
|
||||
return {
|
||||
type: 'assistant',
|
||||
session_id: toolResultEvent.session_id,
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: [
|
||||
{
|
||||
type: 'tool_result',
|
||||
tool_use_id: toolResultEvent.tool_id,
|
||||
content,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
case 'result': {
|
||||
const resultEvent = geminiEvent as GeminiResultEvent;
|
||||
|
||||
if (resultEvent.status === 'error') {
|
||||
return {
|
||||
type: 'error',
|
||||
session_id: resultEvent.session_id,
|
||||
error: resultEvent.error || 'Unknown error',
|
||||
};
|
||||
}
|
||||
|
||||
// Success result - include stats for logging
|
||||
logger.debug(
|
||||
`Gemini result: status=${resultEvent.status}, tokens=${resultEvent.stats?.total_tokens}`
|
||||
);
|
||||
return {
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
session_id: resultEvent.session_id,
|
||||
};
|
||||
}
|
||||
|
||||
case 'error': {
|
||||
const errorEvent = geminiEvent as GeminiResultEvent;
|
||||
return {
|
||||
type: 'error',
|
||||
session_id: errorEvent.session_id,
|
||||
error: errorEvent.error || 'Unknown error',
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
logger.debug(`Unknown Gemini event type: ${geminiEvent.type}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==========================================================================
|
||||
// CliProvider Overrides
|
||||
// ==========================================================================
|
||||
|
||||
/**
|
||||
* Override error mapping for Gemini-specific error codes
|
||||
*/
|
||||
protected mapError(stderr: string, exitCode: number | null): CliErrorInfo {
|
||||
const lower = stderr.toLowerCase();
|
||||
|
||||
if (
|
||||
lower.includes('not authenticated') ||
|
||||
lower.includes('please log in') ||
|
||||
lower.includes('unauthorized') ||
|
||||
lower.includes('login required') ||
|
||||
lower.includes('error authenticating') ||
|
||||
lower.includes('loadcodeassist') ||
|
||||
(lower.includes('econnrefused') && lower.includes('8888'))
|
||||
) {
|
||||
return {
|
||||
code: GeminiErrorCode.NOT_AUTHENTICATED,
|
||||
message: 'Gemini CLI is not authenticated',
|
||||
recoverable: true,
|
||||
suggestion:
|
||||
'Run "gemini" interactively to log in, or set GEMINI_API_KEY environment variable',
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
lower.includes('rate limit') ||
|
||||
lower.includes('too many requests') ||
|
||||
lower.includes('429') ||
|
||||
lower.includes('quota exceeded')
|
||||
) {
|
||||
return {
|
||||
code: GeminiErrorCode.RATE_LIMITED,
|
||||
message: 'Gemini API rate limit exceeded',
|
||||
recoverable: true,
|
||||
suggestion: 'Wait a few minutes and try again. Free tier: 60 req/min, 1000 req/day',
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
lower.includes('model not available') ||
|
||||
lower.includes('invalid model') ||
|
||||
lower.includes('unknown model') ||
|
||||
lower.includes('modelnotfounderror') ||
|
||||
lower.includes('model not found') ||
|
||||
(lower.includes('not found') && lower.includes('404'))
|
||||
) {
|
||||
return {
|
||||
code: GeminiErrorCode.MODEL_UNAVAILABLE,
|
||||
message: 'Requested model is not available',
|
||||
recoverable: true,
|
||||
suggestion: 'Try using "gemini-2.5-flash" or select a different model',
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
lower.includes('network') ||
|
||||
lower.includes('connection') ||
|
||||
lower.includes('econnrefused') ||
|
||||
lower.includes('timeout')
|
||||
) {
|
||||
return {
|
||||
code: GeminiErrorCode.NETWORK_ERROR,
|
||||
message: 'Network connection error',
|
||||
recoverable: true,
|
||||
suggestion: 'Check your internet connection and try again',
|
||||
};
|
||||
}
|
||||
|
||||
if (exitCode === 137 || lower.includes('killed') || lower.includes('sigterm')) {
|
||||
return {
|
||||
code: GeminiErrorCode.PROCESS_CRASHED,
|
||||
message: 'Gemini CLI process was terminated',
|
||||
recoverable: true,
|
||||
suggestion: 'The process may have run out of memory. Try a simpler task.',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
code: GeminiErrorCode.UNKNOWN,
|
||||
message: stderr || `Gemini CLI exited with code ${exitCode}`,
|
||||
recoverable: false,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Override install instructions for Gemini-specific guidance
|
||||
*/
|
||||
protected getInstallInstructions(): string {
|
||||
return 'Install with: npm install -g @google/gemini-cli (or visit https://github.com/google-gemini/gemini-cli)';
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a prompt using Gemini CLI with streaming
|
||||
*/
|
||||
async *executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage> {
|
||||
this.ensureCliDetected();
|
||||
|
||||
// Validate that model doesn't have a provider prefix
|
||||
validateBareModelId(options.model, 'GeminiProvider');
|
||||
|
||||
if (!this.cliPath) {
|
||||
throw this.createError(
|
||||
GeminiErrorCode.NOT_INSTALLED,
|
||||
'Gemini CLI is not installed',
|
||||
true,
|
||||
this.getInstallInstructions()
|
||||
);
|
||||
}
|
||||
|
||||
// Extract prompt text to pass as positional argument
|
||||
const promptText = this.extractPromptText(options);
|
||||
|
||||
// Build CLI args and append the prompt as the last positional argument
|
||||
const cliArgs = this.buildCliArgs(options);
|
||||
cliArgs.push(promptText); // Gemini CLI uses positional args for the prompt
|
||||
|
||||
const subprocessOptions = this.buildSubprocessOptions(options, cliArgs);
|
||||
|
||||
let sessionId: string | undefined;
|
||||
|
||||
logger.debug(`GeminiProvider.executeQuery called with model: "${options.model}"`);
|
||||
|
||||
try {
|
||||
for await (const rawEvent of spawnJSONLProcess(subprocessOptions)) {
|
||||
const event = rawEvent as GeminiStreamEvent;
|
||||
|
||||
// Capture session ID from init event
|
||||
if (event.type === 'init') {
|
||||
const initEvent = event as GeminiInitEvent;
|
||||
sessionId = initEvent.session_id;
|
||||
logger.debug(`Session started: ${sessionId}, model: ${initEvent.model}`);
|
||||
}
|
||||
|
||||
// Normalize and yield the event
|
||||
const normalized = this.normalizeEvent(event);
|
||||
if (normalized) {
|
||||
if (!normalized.session_id && sessionId) {
|
||||
normalized.session_id = sessionId;
|
||||
}
|
||||
yield normalized;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (isAbortError(error)) {
|
||||
logger.debug('Query aborted');
|
||||
return;
|
||||
}
|
||||
|
||||
// Map CLI errors to GeminiError
|
||||
if (error instanceof Error && 'stderr' in error) {
|
||||
const errorInfo = this.mapError(
|
||||
(error as { stderr?: string }).stderr || error.message,
|
||||
(error as { exitCode?: number | null }).exitCode ?? null
|
||||
);
|
||||
throw this.createError(
|
||||
errorInfo.code as GeminiErrorCode,
|
||||
errorInfo.message,
|
||||
errorInfo.recoverable,
|
||||
errorInfo.suggestion
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// ==========================================================================
|
||||
// Gemini-Specific Methods
|
||||
// ==========================================================================
|
||||
|
||||
/**
|
||||
* Create a GeminiError with details
|
||||
*/
|
||||
private createError(
|
||||
code: GeminiErrorCode,
|
||||
message: string,
|
||||
recoverable: boolean = false,
|
||||
suggestion?: string
|
||||
): GeminiError {
|
||||
const error = new Error(message) as GeminiError;
|
||||
error.code = code;
|
||||
error.recoverable = recoverable;
|
||||
error.suggestion = suggestion;
|
||||
error.name = 'GeminiError';
|
||||
return error;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Gemini CLI version
|
||||
*/
|
||||
async getVersion(): Promise<string | null> {
|
||||
this.ensureCliDetected();
|
||||
if (!this.cliPath) return null;
|
||||
|
||||
try {
|
||||
const result = execSync(`"${this.cliPath}" --version`, {
|
||||
encoding: 'utf8',
|
||||
timeout: 5000,
|
||||
stdio: 'pipe',
|
||||
}).trim();
|
||||
return result;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check authentication status
|
||||
*
|
||||
* Uses a fast credential check approach:
|
||||
* 1. Check for GEMINI_API_KEY environment variable
|
||||
* 2. Check for Google Cloud credentials
|
||||
* 3. Check for Gemini settings file with stored credentials
|
||||
* 4. Quick CLI auth test with --help (fast, doesn't make API calls)
|
||||
*/
|
||||
async checkAuth(): Promise<GeminiAuthStatus> {
|
||||
this.ensureCliDetected();
|
||||
if (!this.cliPath) {
|
||||
logger.debug('checkAuth: CLI not found');
|
||||
return { authenticated: false, method: 'none' };
|
||||
}
|
||||
|
||||
logger.debug('checkAuth: Starting credential check');
|
||||
|
||||
// Determine the likely auth method based on environment
|
||||
const hasApiKey = !!process.env.GEMINI_API_KEY;
|
||||
const hasEnvApiKey = hasApiKey;
|
||||
const hasVertexAi = !!(
|
||||
process.env.GOOGLE_APPLICATION_CREDENTIALS || process.env.GOOGLE_CLOUD_PROJECT
|
||||
);
|
||||
|
||||
logger.debug(`checkAuth: hasApiKey=${hasApiKey}, hasVertexAi=${hasVertexAi}`);
|
||||
|
||||
// Check for Gemini credentials file (~/.gemini/settings.json)
|
||||
const geminiConfigDir = path.join(os.homedir(), '.gemini');
|
||||
const settingsPath = path.join(geminiConfigDir, 'settings.json');
|
||||
let hasCredentialsFile = false;
|
||||
let authType: string | null = null;
|
||||
|
||||
try {
|
||||
await fs.access(settingsPath);
|
||||
logger.debug(`checkAuth: Found settings file at ${settingsPath}`);
|
||||
try {
|
||||
const content = await fs.readFile(settingsPath, 'utf8');
|
||||
const settings = JSON.parse(content);
|
||||
|
||||
// Auth config is at security.auth.selectedType (e.g., "oauth-personal", "oauth-adc", "api-key")
|
||||
const selectedType = settings?.security?.auth?.selectedType;
|
||||
if (selectedType) {
|
||||
hasCredentialsFile = true;
|
||||
authType = selectedType;
|
||||
logger.debug(`checkAuth: Settings file has auth config, selectedType=${selectedType}`);
|
||||
} else {
|
||||
logger.debug(`checkAuth: Settings file found but no auth type configured`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.debug(`checkAuth: Failed to parse settings file: ${e}`);
|
||||
}
|
||||
} catch {
|
||||
logger.debug('checkAuth: No settings file found');
|
||||
}
|
||||
|
||||
// If we have an API key, we're authenticated
|
||||
if (hasApiKey) {
|
||||
logger.debug('checkAuth: Using API key authentication');
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'api_key',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
};
|
||||
}
|
||||
|
||||
// If we have Vertex AI credentials, we're authenticated
|
||||
if (hasVertexAi) {
|
||||
logger.debug('checkAuth: Using Vertex AI authentication');
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'vertex_ai',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if settings file indicates configured authentication
|
||||
if (hasCredentialsFile && authType) {
|
||||
// OAuth types: "oauth-personal", "oauth-adc"
|
||||
// API key type: "api-key"
|
||||
// Code assist: "code-assist" (requires IDE integration)
|
||||
if (authType.startsWith('oauth')) {
|
||||
logger.debug(`checkAuth: OAuth authentication configured (${authType})`);
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'google_login',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
};
|
||||
}
|
||||
|
||||
if (authType === 'api-key') {
|
||||
logger.debug('checkAuth: API key authentication configured in settings');
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'api_key',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
};
|
||||
}
|
||||
|
||||
if (authType === 'code-assist' || authType === 'codeassist') {
|
||||
logger.debug('checkAuth: Code Assist auth configured but requires local server');
|
||||
return {
|
||||
authenticated: false,
|
||||
method: 'google_login',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
error:
|
||||
'Code Assist authentication requires IDE integration. Please use "gemini" CLI to log in with a different method, or set GEMINI_API_KEY.',
|
||||
};
|
||||
}
|
||||
|
||||
// Unknown auth type but something is configured
|
||||
logger.debug(`checkAuth: Unknown auth type configured: ${authType}`);
|
||||
return {
|
||||
authenticated: true,
|
||||
method: 'google_login',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
};
|
||||
}
|
||||
|
||||
// No credentials found
|
||||
logger.debug('checkAuth: No valid credentials found');
|
||||
return {
|
||||
authenticated: false,
|
||||
method: 'none',
|
||||
hasApiKey,
|
||||
hasEnvApiKey,
|
||||
hasCredentialsFile,
|
||||
error:
|
||||
'No authentication configured. Run "gemini" interactively to log in, or set GEMINI_API_KEY.',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect installation status (required by BaseProvider)
|
||||
*/
|
||||
async detectInstallation(): Promise<InstallationStatus> {
|
||||
const installed = await this.isInstalled();
|
||||
const version = installed ? await this.getVersion() : undefined;
|
||||
const auth = await this.checkAuth();
|
||||
|
||||
return {
|
||||
installed,
|
||||
version: version || undefined,
|
||||
path: this.cliPath || undefined,
|
||||
method: 'cli',
|
||||
hasApiKey: !!process.env.GEMINI_API_KEY,
|
||||
authenticated: auth.authenticated,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the detected CLI path (public accessor for status endpoints)
|
||||
*/
|
||||
getCliPath(): string | null {
|
||||
this.ensureCliDetected();
|
||||
return this.cliPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available Gemini models
|
||||
*/
|
||||
getAvailableModels(): ModelDefinition[] {
|
||||
return Object.entries(GEMINI_MODEL_MAP).map(([id, config]) => ({
|
||||
id, // Full model ID with gemini- prefix (e.g., 'gemini-2.5-flash')
|
||||
name: config.label,
|
||||
modelString: id, // Same as id - CLI uses the full model name
|
||||
provider: 'gemini',
|
||||
description: config.description,
|
||||
supportsTools: true,
|
||||
supportsVision: config.supportsVision,
|
||||
contextWindow: config.contextWindow,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a feature is supported
|
||||
*/
|
||||
supportsFeature(feature: string): boolean {
|
||||
const supported = ['tools', 'text', 'streaming', 'vision', 'thinking'];
|
||||
return supported.includes(feature);
|
||||
}
|
||||
}
|
||||
@@ -16,16 +16,6 @@ export type {
|
||||
ProviderMessage,
|
||||
InstallationStatus,
|
||||
ModelDefinition,
|
||||
AgentDefinition,
|
||||
ReasoningEffort,
|
||||
SystemPromptPreset,
|
||||
ConversationMessage,
|
||||
ContentBlock,
|
||||
ValidationResult,
|
||||
McpServerConfig,
|
||||
McpStdioServerConfig,
|
||||
McpSSEServerConfig,
|
||||
McpHttpServerConfig,
|
||||
} from './types.js';
|
||||
|
||||
// Claude provider
|
||||
|
||||
@@ -25,6 +25,7 @@ import type {
|
||||
InstallationStatus,
|
||||
ContentBlock,
|
||||
} from '@automaker/types';
|
||||
import { stripProviderPrefix } from '@automaker/types';
|
||||
import { type SubprocessOptions, getOpenCodeAuthIndicators } from '@automaker/platform';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
@@ -327,18 +328,10 @@ export class OpencodeProvider extends CliProvider {
|
||||
args.push('--format', 'json');
|
||||
|
||||
// Handle model selection
|
||||
// Convert canonical prefix format (opencode-xxx) to CLI slash format (opencode/xxx)
|
||||
// OpenCode CLI expects provider/model format (e.g., 'opencode/big-model')
|
||||
// Strip 'opencode-' prefix if present, OpenCode uses format like 'anthropic/claude-sonnet-4-5'
|
||||
if (options.model) {
|
||||
// Strip opencode- prefix if present, then ensure slash format
|
||||
const model = options.model.startsWith('opencode-')
|
||||
? options.model.slice('opencode-'.length)
|
||||
: options.model;
|
||||
|
||||
// If model has slash, it's already provider/model format; otherwise prepend opencode/
|
||||
const cliModel = model.includes('/') ? model : `opencode/${model}`;
|
||||
|
||||
args.push('--model', cliModel);
|
||||
const model = stripProviderPrefix(options.model);
|
||||
args.push('--model', model);
|
||||
}
|
||||
|
||||
// Note: OpenCode reads from stdin automatically when input is piped
|
||||
@@ -1042,7 +1035,7 @@ export class OpencodeProvider extends CliProvider {
|
||||
'lm studio': 'lmstudio',
|
||||
lmstudio: 'lmstudio',
|
||||
opencode: 'opencode',
|
||||
'z.ai coding plan': 'zai-coding-plan',
|
||||
'z.ai coding plan': 'z-ai',
|
||||
'z.ai': 'z-ai',
|
||||
};
|
||||
|
||||
|
||||
@@ -7,13 +7,7 @@
|
||||
|
||||
import { BaseProvider } from './base-provider.js';
|
||||
import type { InstallationStatus, ModelDefinition } from './types.js';
|
||||
import {
|
||||
isCursorModel,
|
||||
isCodexModel,
|
||||
isOpencodeModel,
|
||||
isGeminiModel,
|
||||
type ModelProvider,
|
||||
} from '@automaker/types';
|
||||
import { isCursorModel, isCodexModel, isOpencodeModel, type ModelProvider } from '@automaker/types';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -22,7 +16,6 @@ const DISCONNECTED_MARKERS: Record<string, string> = {
|
||||
codex: '.codex-disconnected',
|
||||
cursor: '.cursor-disconnected',
|
||||
opencode: '.opencode-disconnected',
|
||||
gemini: '.gemini-disconnected',
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -246,8 +239,8 @@ export class ProviderFactory {
|
||||
model.modelString === modelId ||
|
||||
model.id.endsWith(`-${modelId}`) ||
|
||||
model.modelString.endsWith(`-${modelId}`) ||
|
||||
model.modelString === modelId.replace(/^(claude|cursor|codex|gemini)-/, '') ||
|
||||
model.modelString === modelId.replace(/-(claude|cursor|codex|gemini)$/, '')
|
||||
model.modelString === modelId.replace(/^(claude|cursor|codex)-/, '') ||
|
||||
model.modelString === modelId.replace(/-(claude|cursor|codex)$/, '')
|
||||
) {
|
||||
return model.supportsVision ?? true;
|
||||
}
|
||||
@@ -274,7 +267,6 @@ import { ClaudeProvider } from './claude-provider.js';
|
||||
import { CursorProvider } from './cursor-provider.js';
|
||||
import { CodexProvider } from './codex-provider.js';
|
||||
import { OpencodeProvider } from './opencode-provider.js';
|
||||
import { GeminiProvider } from './gemini-provider.js';
|
||||
|
||||
// Register Claude provider
|
||||
registerProvider('claude', {
|
||||
@@ -309,11 +301,3 @@ registerProvider('opencode', {
|
||||
canHandleModel: (model: string) => isOpencodeModel(model),
|
||||
priority: 3, // Between codex (5) and claude (0)
|
||||
});
|
||||
|
||||
// Register Gemini provider
|
||||
registerProvider('gemini', {
|
||||
factory: () => new GeminiProvider(),
|
||||
aliases: ['google'],
|
||||
canHandleModel: (model: string) => isGeminiModel(model),
|
||||
priority: 4, // Between opencode (3) and codex (5)
|
||||
});
|
||||
|
||||
@@ -21,7 +21,6 @@ import type {
|
||||
ThinkingLevel,
|
||||
ReasoningEffort,
|
||||
ClaudeApiProfile,
|
||||
ClaudeCompatibleProvider,
|
||||
Credentials,
|
||||
} from '@automaker/types';
|
||||
import { stripProviderPrefix } from '@automaker/types';
|
||||
@@ -57,17 +56,9 @@ export interface SimpleQueryOptions {
|
||||
readOnly?: boolean;
|
||||
/** Setting sources for CLAUDE.md loading */
|
||||
settingSources?: Array<'user' | 'project' | 'local'>;
|
||||
/**
|
||||
* Active Claude API profile for alternative endpoint configuration
|
||||
* @deprecated Use claudeCompatibleProvider instead
|
||||
*/
|
||||
/** Active Claude API profile for alternative endpoint configuration */
|
||||
claudeApiProfile?: ClaudeApiProfile;
|
||||
/**
|
||||
* Claude-compatible provider for alternative endpoint configuration.
|
||||
* Takes precedence over claudeApiProfile if both are set.
|
||||
*/
|
||||
claudeCompatibleProvider?: ClaudeCompatibleProvider;
|
||||
/** Credentials for resolving 'credentials' apiKeySource in Claude API profiles/providers */
|
||||
/** Credentials for resolving 'credentials' apiKeySource in Claude API profiles */
|
||||
credentials?: Credentials;
|
||||
}
|
||||
|
||||
@@ -140,8 +131,7 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
|
||||
reasoningEffort: options.reasoningEffort,
|
||||
readOnly: options.readOnly,
|
||||
settingSources: options.settingSources,
|
||||
claudeApiProfile: options.claudeApiProfile, // Legacy: Pass active Claude API profile for alternative endpoint configuration
|
||||
claudeCompatibleProvider: options.claudeCompatibleProvider, // New: Pass Claude-compatible provider (takes precedence)
|
||||
claudeApiProfile: options.claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials: options.credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
};
|
||||
|
||||
@@ -225,8 +215,7 @@ export async function streamingQuery(options: StreamingQueryOptions): Promise<Si
|
||||
reasoningEffort: options.reasoningEffort,
|
||||
readOnly: options.readOnly,
|
||||
settingSources: options.settingSources,
|
||||
claudeApiProfile: options.claudeApiProfile, // Legacy: Pass active Claude API profile for alternative endpoint configuration
|
||||
claudeCompatibleProvider: options.claudeCompatibleProvider, // New: Pass Claude-compatible provider (takes precedence)
|
||||
claudeApiProfile: options.claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials: options.credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
};
|
||||
|
||||
|
||||
@@ -19,7 +19,4 @@ export type {
|
||||
InstallationStatus,
|
||||
ValidationResult,
|
||||
ModelDefinition,
|
||||
AgentDefinition,
|
||||
ReasoningEffort,
|
||||
SystemPromptPreset,
|
||||
} from '@automaker/types';
|
||||
|
||||
@@ -17,7 +17,7 @@ import type { SettingsService } from '../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
getPhaseModelWithOverrides,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../lib/settings-helpers.js';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
|
||||
@@ -119,26 +119,19 @@ Generate ${featureCount} NEW features that build on each other logically. Rememb
|
||||
'[FeatureGeneration]'
|
||||
);
|
||||
|
||||
// Get model from phase settings with provider info
|
||||
const {
|
||||
phaseModel: phaseModelEntry,
|
||||
provider,
|
||||
credentials,
|
||||
} = settingsService
|
||||
? await getPhaseModelWithOverrides(
|
||||
'featureGenerationModel',
|
||||
settingsService,
|
||||
projectPath,
|
||||
'[FeatureGeneration]'
|
||||
)
|
||||
: {
|
||||
phaseModel: DEFAULT_PHASE_MODELS.featureGenerationModel,
|
||||
provider: undefined,
|
||||
credentials: undefined,
|
||||
};
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.featureGenerationModel || DEFAULT_PHASE_MODELS.featureGenerationModel;
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info('Using model:', model, provider ? `via provider: ${provider.name}` : 'direct API');
|
||||
logger.info('Using model:', model);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[FeatureGeneration]'
|
||||
);
|
||||
|
||||
// Use streamingQuery with event callbacks
|
||||
const result = await streamingQuery({
|
||||
@@ -151,7 +144,7 @@ Generate ${featureCount} NEW features that build on each other logically. Rememb
|
||||
thinkingLevel,
|
||||
readOnly: true, // Feature generation only reads code, doesn't write
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
onText: (text) => {
|
||||
logger.debug(`Feature text block received (${text.length} chars)`);
|
||||
|
||||
@@ -19,7 +19,7 @@ import type { SettingsService } from '../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
getPhaseModelWithOverrides,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('SpecRegeneration');
|
||||
@@ -96,26 +96,19 @@ ${prompts.appSpec.structuredSpecInstructions}`;
|
||||
'[SpecRegeneration]'
|
||||
);
|
||||
|
||||
// Get model from phase settings with provider info
|
||||
const {
|
||||
phaseModel: phaseModelEntry,
|
||||
provider,
|
||||
credentials,
|
||||
} = settingsService
|
||||
? await getPhaseModelWithOverrides(
|
||||
'specGenerationModel',
|
||||
settingsService,
|
||||
projectPath,
|
||||
'[SpecRegeneration]'
|
||||
)
|
||||
: {
|
||||
phaseModel: DEFAULT_PHASE_MODELS.specGenerationModel,
|
||||
provider: undefined,
|
||||
credentials: undefined,
|
||||
};
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.specGenerationModel || DEFAULT_PHASE_MODELS.specGenerationModel;
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info('Using model:', model, provider ? `via provider: ${provider.name}` : 'direct API');
|
||||
logger.info('Using model:', model);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[SpecRegeneration]'
|
||||
);
|
||||
|
||||
let responseText = '';
|
||||
let structuredOutput: SpecOutput | null = null;
|
||||
@@ -149,7 +142,7 @@ Your entire response should be valid JSON starting with { and ending with }. No
|
||||
thinkingLevel,
|
||||
readOnly: true, // Spec generation only reads code, we write the spec ourselves
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
outputFormat: useStructuredOutput
|
||||
? {
|
||||
|
||||
@@ -17,7 +17,7 @@ import { getAppSpecPath } from '@automaker/platform';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPhaseModelWithOverrides,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../lib/settings-helpers.js';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
import {
|
||||
@@ -155,26 +155,16 @@ export async function syncSpec(
|
||||
'[SpecSync]'
|
||||
);
|
||||
|
||||
// Get model from phase settings with provider info
|
||||
const {
|
||||
phaseModel: phaseModelEntry,
|
||||
provider,
|
||||
credentials,
|
||||
} = settingsService
|
||||
? await getPhaseModelWithOverrides(
|
||||
'specGenerationModel',
|
||||
settingsService,
|
||||
projectPath,
|
||||
'[SpecSync]'
|
||||
)
|
||||
: {
|
||||
phaseModel: DEFAULT_PHASE_MODELS.specGenerationModel,
|
||||
provider: undefined,
|
||||
credentials: undefined,
|
||||
};
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.specGenerationModel || DEFAULT_PHASE_MODELS.specGenerationModel;
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info('Using model:', model, provider ? `via provider: ${provider.name}` : 'direct API');
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[SpecSync]'
|
||||
);
|
||||
|
||||
// Use AI to analyze tech stack
|
||||
const techAnalysisPrompt = `Analyze this project and return ONLY a JSON object with the current technology stack.
|
||||
@@ -204,7 +194,7 @@ Return ONLY this JSON format, no other text:
|
||||
thinkingLevel,
|
||||
readOnly: true,
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
onText: (text) => {
|
||||
logger.debug(`Tech analysis text: ${text.substring(0, 100)}`);
|
||||
|
||||
@@ -26,24 +26,6 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check per-worktree capacity before starting
|
||||
const capacity = await autoModeService.checkWorktreeCapacity(projectPath, featureId);
|
||||
if (!capacity.hasCapacity) {
|
||||
const worktreeDesc = capacity.branchName
|
||||
? `worktree "${capacity.branchName}"`
|
||||
: 'main worktree';
|
||||
res.status(429).json({
|
||||
success: false,
|
||||
error: `Agent limit reached for ${worktreeDesc} (${capacity.currentAgents}/${capacity.maxAgents}). Wait for running tasks to complete or increase the limit.`,
|
||||
details: {
|
||||
currentAgents: capacity.currentAgents,
|
||||
maxAgents: capacity.maxAgents,
|
||||
branchName: capacity.branchName,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Start execution in background
|
||||
// executeFeature derives workDir from feature.branchName
|
||||
autoModeService
|
||||
|
||||
@@ -12,9 +12,8 @@ const logger = createLogger('AutoMode');
|
||||
export function createStartHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, branchName, maxConcurrency } = req.body as {
|
||||
const { projectPath, maxConcurrency } = req.body as {
|
||||
projectPath: string;
|
||||
branchName?: string | null;
|
||||
maxConcurrency?: number;
|
||||
};
|
||||
|
||||
@@ -26,38 +25,26 @@ export function createStartHandler(autoModeService: AutoModeService) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Normalize branchName: undefined becomes null
|
||||
const normalizedBranchName = branchName ?? null;
|
||||
const worktreeDesc = normalizedBranchName
|
||||
? `worktree ${normalizedBranchName}`
|
||||
: 'main worktree';
|
||||
|
||||
// Check if already running
|
||||
if (autoModeService.isAutoLoopRunningForProject(projectPath, normalizedBranchName)) {
|
||||
if (autoModeService.isAutoLoopRunningForProject(projectPath)) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Auto mode is already running for ${worktreeDesc}`,
|
||||
message: 'Auto mode is already running for this project',
|
||||
alreadyRunning: true,
|
||||
branchName: normalizedBranchName,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Start the auto loop for this project/worktree
|
||||
const resolvedMaxConcurrency = await autoModeService.startAutoLoopForProject(
|
||||
projectPath,
|
||||
normalizedBranchName,
|
||||
maxConcurrency
|
||||
);
|
||||
// Start the auto loop for this project
|
||||
await autoModeService.startAutoLoopForProject(projectPath, maxConcurrency ?? 3);
|
||||
|
||||
logger.info(
|
||||
`Started auto loop for ${worktreeDesc} in project: ${projectPath} with maxConcurrency: ${resolvedMaxConcurrency}`
|
||||
`Started auto loop for project: ${projectPath} with maxConcurrency: ${maxConcurrency ?? 3}`
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Auto mode started with max ${resolvedMaxConcurrency} concurrent features`,
|
||||
branchName: normalizedBranchName,
|
||||
message: `Auto mode started with max ${maxConcurrency ?? 3} concurrent features`,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Start auto mode failed');
|
||||
|
||||
@@ -12,19 +12,11 @@ import { getErrorMessage, logError } from '../common.js';
|
||||
export function createStatusHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, branchName } = req.body as {
|
||||
projectPath?: string;
|
||||
branchName?: string | null;
|
||||
};
|
||||
const { projectPath } = req.body as { projectPath?: string };
|
||||
|
||||
// If projectPath is provided, return per-project/worktree status
|
||||
// If projectPath is provided, return per-project status
|
||||
if (projectPath) {
|
||||
// Normalize branchName: undefined becomes null
|
||||
const normalizedBranchName = branchName ?? null;
|
||||
const projectStatus = autoModeService.getStatusForProject(
|
||||
projectPath,
|
||||
normalizedBranchName
|
||||
);
|
||||
const projectStatus = autoModeService.getStatusForProject(projectPath);
|
||||
res.json({
|
||||
success: true,
|
||||
isRunning: projectStatus.runningCount > 0,
|
||||
@@ -33,7 +25,6 @@ export function createStatusHandler(autoModeService: AutoModeService) {
|
||||
runningCount: projectStatus.runningCount,
|
||||
maxConcurrency: projectStatus.maxConcurrency,
|
||||
projectPath,
|
||||
branchName: normalizedBranchName,
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -41,12 +32,10 @@ export function createStatusHandler(autoModeService: AutoModeService) {
|
||||
// Fall back to global status for backward compatibility
|
||||
const status = autoModeService.getStatus();
|
||||
const activeProjects = autoModeService.getActiveAutoLoopProjects();
|
||||
const activeWorktrees = autoModeService.getActiveAutoLoopWorktrees();
|
||||
res.json({
|
||||
success: true,
|
||||
...status,
|
||||
activeAutoLoopProjects: activeProjects,
|
||||
activeAutoLoopWorktrees: activeWorktrees,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Get status failed');
|
||||
|
||||
@@ -12,9 +12,8 @@ const logger = createLogger('AutoMode');
|
||||
export function createStopHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, branchName } = req.body as {
|
||||
const { projectPath } = req.body as {
|
||||
projectPath: string;
|
||||
branchName?: string | null;
|
||||
};
|
||||
|
||||
if (!projectPath) {
|
||||
@@ -25,38 +24,27 @@ export function createStopHandler(autoModeService: AutoModeService) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Normalize branchName: undefined becomes null
|
||||
const normalizedBranchName = branchName ?? null;
|
||||
const worktreeDesc = normalizedBranchName
|
||||
? `worktree ${normalizedBranchName}`
|
||||
: 'main worktree';
|
||||
|
||||
// Check if running
|
||||
if (!autoModeService.isAutoLoopRunningForProject(projectPath, normalizedBranchName)) {
|
||||
if (!autoModeService.isAutoLoopRunningForProject(projectPath)) {
|
||||
res.json({
|
||||
success: true,
|
||||
message: `Auto mode is not running for ${worktreeDesc}`,
|
||||
message: 'Auto mode is not running for this project',
|
||||
wasRunning: false,
|
||||
branchName: normalizedBranchName,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Stop the auto loop for this project/worktree
|
||||
const runningCount = await autoModeService.stopAutoLoopForProject(
|
||||
projectPath,
|
||||
normalizedBranchName
|
||||
);
|
||||
// Stop the auto loop for this project
|
||||
const runningCount = await autoModeService.stopAutoLoopForProject(projectPath);
|
||||
|
||||
logger.info(
|
||||
`Stopped auto loop for ${worktreeDesc} in project: ${projectPath}, ${runningCount} features still running`
|
||||
`Stopped auto loop for project: ${projectPath}, ${runningCount} features still running`
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Auto mode stopped',
|
||||
runningFeaturesCount: runningCount,
|
||||
branchName: normalizedBranchName,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Stop auto mode failed');
|
||||
|
||||
@@ -28,7 +28,7 @@ import type { SettingsService } from '../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
getPhaseModelWithOverrides,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../lib/settings-helpers.js';
|
||||
|
||||
const featureLoader = new FeatureLoader();
|
||||
@@ -121,42 +121,18 @@ export async function generateBacklogPlan(
|
||||
content: 'Generating plan with AI...',
|
||||
});
|
||||
|
||||
// Get the model to use from settings or provided override with provider info
|
||||
// Get the model to use from settings or provided override
|
||||
let effectiveModel = model;
|
||||
let thinkingLevel: ThinkingLevel | undefined;
|
||||
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||
let credentials: import('@automaker/types').Credentials | undefined;
|
||||
|
||||
if (effectiveModel) {
|
||||
// Use explicit override - resolve model alias and get credentials
|
||||
const resolved = resolvePhaseModel({ model: effectiveModel });
|
||||
effectiveModel = resolved.model;
|
||||
thinkingLevel = resolved.thinkingLevel;
|
||||
credentials = await settingsService?.getCredentials();
|
||||
} else if (settingsService) {
|
||||
// Use settings-based model with provider info
|
||||
const phaseResult = await getPhaseModelWithOverrides(
|
||||
'backlogPlanningModel',
|
||||
settingsService,
|
||||
projectPath,
|
||||
'[BacklogPlan]'
|
||||
);
|
||||
const resolved = resolvePhaseModel(phaseResult.phaseModel);
|
||||
effectiveModel = resolved.model;
|
||||
thinkingLevel = resolved.thinkingLevel;
|
||||
claudeCompatibleProvider = phaseResult.provider;
|
||||
credentials = phaseResult.credentials;
|
||||
} else {
|
||||
// Fallback to defaults
|
||||
const resolved = resolvePhaseModel(DEFAULT_PHASE_MODELS.backlogPlanningModel);
|
||||
if (!effectiveModel) {
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.backlogPlanningModel || DEFAULT_PHASE_MODELS.backlogPlanningModel;
|
||||
const resolved = resolvePhaseModel(phaseModelEntry);
|
||||
effectiveModel = resolved.model;
|
||||
thinkingLevel = resolved.thinkingLevel;
|
||||
}
|
||||
logger.info(
|
||||
'[BacklogPlan] Using model:',
|
||||
effectiveModel,
|
||||
claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
|
||||
);
|
||||
logger.info('[BacklogPlan] Using model:', effectiveModel);
|
||||
|
||||
const provider = ProviderFactory.getProviderForModel(effectiveModel);
|
||||
// Strip provider prefix - providers expect bare model IDs
|
||||
@@ -189,6 +165,12 @@ ${userPrompt}`;
|
||||
finalSystemPrompt = undefined; // System prompt is now embedded in the user prompt
|
||||
}
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[BacklogPlan]'
|
||||
);
|
||||
|
||||
// Execute the query
|
||||
const stream = provider.executeQuery({
|
||||
prompt: finalPrompt,
|
||||
@@ -201,7 +183,7 @@ ${userPrompt}`;
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project'] : undefined,
|
||||
readOnly: true, // Plan generation only generates text, doesn't write files
|
||||
thinkingLevel, // Pass thinking level for extended thinking
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
});
|
||||
|
||||
|
||||
@@ -85,9 +85,8 @@ export function createApplyHandler() {
|
||||
if (!change.feature) continue;
|
||||
|
||||
try {
|
||||
// Create the new feature - use the AI-generated ID if provided
|
||||
// Create the new feature
|
||||
const newFeature = await featureLoader.create(projectPath, {
|
||||
id: change.feature.id, // Use descriptive ID from AI if provided
|
||||
title: change.feature.title,
|
||||
description: change.feature.description || '',
|
||||
category: change.feature.category || 'Uncategorized',
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { DEFAULT_PHASE_MODELS } from '@automaker/types';
|
||||
import { PathNotAllowedError } from '@automaker/platform';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||
@@ -21,7 +22,7 @@ import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
getPhaseModelWithOverrides,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('DescribeFile');
|
||||
@@ -155,22 +156,20 @@ ${contentToAnalyze}`;
|
||||
'[DescribeFile]'
|
||||
);
|
||||
|
||||
// Get model from phase settings with provider info
|
||||
const {
|
||||
phaseModel: phaseModelEntry,
|
||||
provider,
|
||||
credentials,
|
||||
} = await getPhaseModelWithOverrides(
|
||||
'fileDescriptionModel',
|
||||
settingsService,
|
||||
cwd,
|
||||
'[DescribeFile]'
|
||||
);
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
logger.info(`Raw phaseModels from settings:`, JSON.stringify(settings?.phaseModels, null, 2));
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.fileDescriptionModel || DEFAULT_PHASE_MODELS.fileDescriptionModel;
|
||||
logger.info(`fileDescriptionModel entry:`, JSON.stringify(phaseModelEntry));
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info(
|
||||
`Resolved model: ${model}, thinkingLevel: ${thinkingLevel}`,
|
||||
provider ? `via provider: ${provider.name}` : 'direct API'
|
||||
logger.info(`Resolved model: ${model}, thinkingLevel: ${thinkingLevel}`);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[DescribeFile]'
|
||||
);
|
||||
|
||||
// Use simpleQuery - provider abstraction handles routing to correct provider
|
||||
@@ -183,7 +182,7 @@ ${contentToAnalyze}`;
|
||||
thinkingLevel,
|
||||
readOnly: true, // File description only reads, doesn't write
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
});
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { createLogger, readImageAsBase64 } from '@automaker/utils';
|
||||
import { isCursorModel } from '@automaker/types';
|
||||
import { DEFAULT_PHASE_MODELS, isCursorModel } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||
import * as secureFs from '../../../lib/secure-fs.js';
|
||||
@@ -22,7 +22,7 @@ import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
getPhaseModelWithOverrides,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('DescribeImage');
|
||||
@@ -274,27 +274,23 @@ export function createDescribeImageHandler(
|
||||
'[DescribeImage]'
|
||||
);
|
||||
|
||||
// Get model from phase settings with provider info
|
||||
const {
|
||||
phaseModel: phaseModelEntry,
|
||||
provider,
|
||||
credentials,
|
||||
} = await getPhaseModelWithOverrides(
|
||||
'imageDescriptionModel',
|
||||
settingsService,
|
||||
cwd,
|
||||
'[DescribeImage]'
|
||||
);
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.imageDescriptionModel || DEFAULT_PHASE_MODELS.imageDescriptionModel;
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Using model: ${model}`,
|
||||
provider ? `via provider: ${provider.name}` : 'direct API'
|
||||
);
|
||||
logger.info(`[${requestId}] Using model: ${model}`);
|
||||
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(settingsService, '[DescribeImage]');
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[DescribeImage]'
|
||||
);
|
||||
|
||||
// Build the instruction text from centralized prompts
|
||||
const instructionText = prompts.contextDescription.describeImagePrompt;
|
||||
|
||||
@@ -336,7 +332,7 @@ export function createDescribeImageHandler(
|
||||
thinkingLevel,
|
||||
readOnly: true, // Image description only reads, doesn't write
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
});
|
||||
|
||||
|
||||
@@ -12,7 +12,10 @@ import { resolveModelString } from '@automaker/model-resolver';
|
||||
import { CLAUDE_MODEL_MAP, type ThinkingLevel } from '@automaker/types';
|
||||
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getPromptCustomization, getProviderByModelId } from '../../../lib/settings-helpers.js';
|
||||
import {
|
||||
getPromptCustomization,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../../lib/settings-helpers.js';
|
||||
import {
|
||||
buildUserPrompt,
|
||||
isValidEnhancementMode,
|
||||
@@ -33,8 +36,6 @@ interface EnhanceRequestBody {
|
||||
model?: string;
|
||||
/** Optional thinking level for Claude models */
|
||||
thinkingLevel?: ThinkingLevel;
|
||||
/** Optional project path for per-project Claude API profile */
|
||||
projectPath?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -64,7 +65,7 @@ export function createEnhanceHandler(
|
||||
): (req: Request, res: Response) => Promise<void> {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { originalText, enhancementMode, model, thinkingLevel, projectPath } =
|
||||
const { originalText, enhancementMode, model, thinkingLevel } =
|
||||
req.body as EnhanceRequestBody;
|
||||
|
||||
// Validate required fields
|
||||
@@ -123,35 +124,17 @@ export function createEnhanceHandler(
|
||||
// Build the user prompt with few-shot examples
|
||||
const userPrompt = buildUserPrompt(validMode, trimmedText, true);
|
||||
|
||||
// Check if the model is a provider model (like "GLM-4.5-Air")
|
||||
// If so, get the provider config and resolved Claude model
|
||||
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||
let providerResolvedModel: string | undefined;
|
||||
let credentials = await settingsService?.getCredentials();
|
||||
|
||||
if (model && settingsService) {
|
||||
const providerResult = await getProviderByModelId(
|
||||
model,
|
||||
settingsService,
|
||||
'[EnhancePrompt]'
|
||||
);
|
||||
if (providerResult.provider) {
|
||||
claudeCompatibleProvider = providerResult.provider;
|
||||
providerResolvedModel = providerResult.resolvedModel;
|
||||
credentials = providerResult.credentials;
|
||||
logger.info(
|
||||
`Using provider "${providerResult.provider.name}" for model "${model}"` +
|
||||
(providerResolvedModel ? ` -> resolved to "${providerResolvedModel}"` : '')
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve the model - use provider resolved model, passed model, or default to sonnet
|
||||
const resolvedModel =
|
||||
providerResolvedModel || resolveModelString(model, CLAUDE_MODEL_MAP.sonnet);
|
||||
// Resolve the model - use the passed model, default to sonnet for quality
|
||||
const resolvedModel = resolveModelString(model, CLAUDE_MODEL_MAP.sonnet);
|
||||
|
||||
logger.debug(`Using model: ${resolvedModel}`);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[EnhancePrompt]'
|
||||
);
|
||||
|
||||
// Use simpleQuery - provider abstraction handles routing to correct provider
|
||||
// The system prompt is combined with user prompt since some providers
|
||||
// don't have a separate system prompt concept
|
||||
@@ -163,8 +146,8 @@ export function createEnhanceHandler(
|
||||
allowedTools: [],
|
||||
thinkingLevel,
|
||||
readOnly: true, // Prompt enhancement only generates text, doesn't write files
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||
});
|
||||
|
||||
const enhancedText = result.text;
|
||||
|
||||
@@ -16,8 +16,6 @@ import { createBulkDeleteHandler } from './routes/bulk-delete.js';
|
||||
import { createDeleteHandler } from './routes/delete.js';
|
||||
import { createAgentOutputHandler, createRawOutputHandler } from './routes/agent-output.js';
|
||||
import { createGenerateTitleHandler } from './routes/generate-title.js';
|
||||
import { createExportHandler } from './routes/export.js';
|
||||
import { createImportHandler, createConflictCheckHandler } from './routes/import.js';
|
||||
|
||||
export function createFeaturesRoutes(
|
||||
featureLoader: FeatureLoader,
|
||||
@@ -48,13 +46,6 @@ export function createFeaturesRoutes(
|
||||
router.post('/agent-output', createAgentOutputHandler(featureLoader));
|
||||
router.post('/raw-output', createRawOutputHandler(featureLoader));
|
||||
router.post('/generate-title', createGenerateTitleHandler(settingsService));
|
||||
router.post('/export', validatePathParams('projectPath'), createExportHandler(featureLoader));
|
||||
router.post('/import', validatePathParams('projectPath'), createImportHandler(featureLoader));
|
||||
router.post(
|
||||
'/check-conflicts',
|
||||
validatePathParams('projectPath'),
|
||||
createConflictCheckHandler(featureLoader)
|
||||
);
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
/**
|
||||
* POST /export endpoint - Export features to JSON or YAML format
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import {
|
||||
getFeatureExportService,
|
||||
type ExportFormat,
|
||||
type BulkExportOptions,
|
||||
} from '../../../services/feature-export-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
interface ExportRequest {
|
||||
projectPath: string;
|
||||
/** Feature IDs to export. If empty/undefined, exports all features */
|
||||
featureIds?: string[];
|
||||
/** Export format: 'json' or 'yaml' */
|
||||
format?: ExportFormat;
|
||||
/** Whether to include description history */
|
||||
includeHistory?: boolean;
|
||||
/** Whether to include plan spec */
|
||||
includePlanSpec?: boolean;
|
||||
/** Filter by category */
|
||||
category?: string;
|
||||
/** Filter by status */
|
||||
status?: string;
|
||||
/** Pretty print output */
|
||||
prettyPrint?: boolean;
|
||||
/** Optional metadata to include */
|
||||
metadata?: {
|
||||
projectName?: string;
|
||||
projectPath?: string;
|
||||
branch?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
}
|
||||
|
||||
export function createExportHandler(featureLoader: FeatureLoader) {
|
||||
const exportService = getFeatureExportService();
|
||||
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const {
|
||||
projectPath,
|
||||
featureIds,
|
||||
format = 'json',
|
||||
includeHistory = true,
|
||||
includePlanSpec = true,
|
||||
category,
|
||||
status,
|
||||
prettyPrint = true,
|
||||
metadata,
|
||||
} = req.body as ExportRequest;
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate format
|
||||
if (format !== 'json' && format !== 'yaml') {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'format must be "json" or "yaml"',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const options: BulkExportOptions = {
|
||||
format,
|
||||
includeHistory,
|
||||
includePlanSpec,
|
||||
category,
|
||||
status,
|
||||
featureIds,
|
||||
prettyPrint,
|
||||
metadata,
|
||||
};
|
||||
|
||||
const exportData = await exportService.exportFeatures(projectPath, options);
|
||||
|
||||
// Return the export data as a string in the response
|
||||
res.json({
|
||||
success: true,
|
||||
data: exportData,
|
||||
format,
|
||||
contentType: format === 'json' ? 'application/json' : 'application/x-yaml',
|
||||
filename: `features-export.${format === 'json' ? 'json' : 'yaml'}`,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Export features failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -10,13 +10,15 @@ import { createLogger } from '@automaker/utils';
import { CLAUDE_MODEL_MAP } from '@automaker/model-resolver';
import { simpleQuery } from '../../../providers/simple-query-service.js';
import type { SettingsService } from '../../../services/settings-service.js';
import { getPromptCustomization } from '../../../lib/settings-helpers.js';
import {
  getPromptCustomization,
  getActiveClaudeApiProfile,
} from '../../../lib/settings-helpers.js';

const logger = createLogger('GenerateTitle');

interface GenerateTitleRequestBody {
  description: string;
  projectPath?: string;
}

interface GenerateTitleSuccessResponse {
@@ -34,7 +36,7 @@ export function createGenerateTitleHandler(
): (req: Request, res: Response) => Promise<void> {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { description, projectPath } = req.body as GenerateTitleRequestBody;
      const { description } = req.body as GenerateTitleRequestBody;

      if (!description || typeof description !== 'string') {
        const response: GenerateTitleErrorResponse = {
@@ -61,8 +63,11 @@ export function createGenerateTitleHandler(
      const prompts = await getPromptCustomization(settingsService, '[GenerateTitle]');
      const systemPrompt = prompts.titleGeneration.systemPrompt;

      // Get credentials for API calls (uses hardcoded haiku model, no phase setting)
      const credentials = await settingsService?.getCredentials();
      // Get active Claude API profile for alternative endpoint configuration
      const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
        settingsService,
        '[GenerateTitle]'
      );

      const userPrompt = `Generate a concise title for this feature:\n\n${trimmedDescription}`;

@@ -73,6 +78,7 @@ export function createGenerateTitleHandler(
        cwd: process.cwd(),
        maxTurns: 1,
        allowedTools: [],
        claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
        credentials, // Pass credentials for resolving 'credentials' apiKeySource
      });

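The same replacement appears in both the enhance and generate-title handlers above: resolve the active profile once, then thread the profile and credentials into the query call. Condensed, the calling pattern is roughly the following (a minimal sketch; option names are copied from the hunks above, but the full simpleQuery option list and signature are abridged and inferred from this diff, not a confirmed API):

// Sketch only: names come from the hunks above; the simpleQuery signature is assumed.
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
  settingsService,
  '[GenerateTitle]'
);
const result = await simpleQuery({
  prompt: userPrompt,
  maxTurns: 1,
  allowedTools: [],
  claudeApiProfile, // alternative endpoint configuration
  credentials,      // resolves the 'credentials' apiKeySource
});
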
@@ -1,210 +0,0 @@
|
||||
/**
|
||||
* POST /import endpoint - Import features from JSON or YAML format
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import type { FeatureImportResult, Feature, FeatureExport } from '@automaker/types';
|
||||
import { getFeatureExportService } from '../../../services/feature-export-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
interface ImportRequest {
|
||||
projectPath: string;
|
||||
/** Raw JSON or YAML string containing feature data */
|
||||
data: string;
|
||||
/** Whether to overwrite existing features with same ID */
|
||||
overwrite?: boolean;
|
||||
/** Whether to preserve branch info from imported features */
|
||||
preserveBranchInfo?: boolean;
|
||||
/** Optional category to assign to all imported features */
|
||||
targetCategory?: string;
|
||||
}
|
||||
|
||||
interface ConflictCheckRequest {
|
||||
projectPath: string;
|
||||
/** Raw JSON or YAML string containing feature data */
|
||||
data: string;
|
||||
}
|
||||
|
||||
interface ConflictInfo {
|
||||
featureId: string;
|
||||
title?: string;
|
||||
existingTitle?: string;
|
||||
hasConflict: boolean;
|
||||
}
|
||||
|
||||
export function createImportHandler(featureLoader: FeatureLoader) {
|
||||
const exportService = getFeatureExportService();
|
||||
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const {
|
||||
projectPath,
|
||||
data,
|
||||
overwrite = false,
|
||||
preserveBranchInfo = false,
|
||||
targetCategory,
|
||||
} = req.body as ImportRequest;
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
res.status(400).json({ success: false, error: 'data is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Detect format and parse the data
|
||||
const format = exportService.detectFormat(data);
|
||||
if (!format) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid data format. Expected valid JSON or YAML.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const parsed = exportService.parseImportData(data);
|
||||
if (!parsed) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Failed to parse import data. Ensure it is valid JSON or YAML.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine if this is a single feature or bulk import
|
||||
const isBulkImport =
|
||||
'features' in parsed && Array.isArray((parsed as { features: unknown }).features);
|
||||
|
||||
let results: FeatureImportResult[];
|
||||
|
||||
if (isBulkImport) {
|
||||
// Bulk import
|
||||
results = await exportService.importFeatures(projectPath, data, {
|
||||
overwrite,
|
||||
preserveBranchInfo,
|
||||
targetCategory,
|
||||
});
|
||||
} else {
|
||||
// Single feature import - we know it's not a bulk export at this point
|
||||
// It must be either a Feature or FeatureExport
|
||||
const singleData = parsed as Feature | FeatureExport;
|
||||
|
||||
const result = await exportService.importFeature(projectPath, {
|
||||
data: singleData,
|
||||
overwrite,
|
||||
preserveBranchInfo,
|
||||
targetCategory,
|
||||
});
|
||||
results = [result];
|
||||
}
|
||||
|
||||
const successCount = results.filter((r) => r.success).length;
|
||||
const failureCount = results.filter((r) => !r.success).length;
|
||||
const allSuccessful = failureCount === 0;
|
||||
|
||||
res.json({
|
||||
success: allSuccessful,
|
||||
importedCount: successCount,
|
||||
failedCount: failureCount,
|
||||
results,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Import features failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create handler for checking conflicts before import
|
||||
*/
|
||||
export function createConflictCheckHandler(featureLoader: FeatureLoader) {
|
||||
const exportService = getFeatureExportService();
|
||||
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, data } = req.body as ConflictCheckRequest;
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
res.status(400).json({ success: false, error: 'data is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse the import data
|
||||
const format = exportService.detectFormat(data);
|
||||
if (!format) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid data format. Expected valid JSON or YAML.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const parsed = exportService.parseImportData(data);
|
||||
if (!parsed) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Failed to parse import data.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract features from the data using type guards
|
||||
let featuresToCheck: Array<{ id: string; title?: string }> = [];
|
||||
|
||||
if (exportService.isBulkExport(parsed)) {
|
||||
// Bulk export format
|
||||
featuresToCheck = parsed.features.map((f) => ({
|
||||
id: f.feature.id,
|
||||
title: f.feature.title,
|
||||
}));
|
||||
} else if (exportService.isFeatureExport(parsed)) {
|
||||
// Single FeatureExport format
|
||||
featuresToCheck = [
|
||||
{
|
||||
id: parsed.feature.id,
|
||||
title: parsed.feature.title,
|
||||
},
|
||||
];
|
||||
} else if (exportService.isRawFeature(parsed)) {
|
||||
// Raw Feature format
|
||||
featuresToCheck = [{ id: parsed.id, title: parsed.title }];
|
||||
}
|
||||
|
||||
// Check each feature for conflicts in parallel
|
||||
const conflicts: ConflictInfo[] = await Promise.all(
|
||||
featuresToCheck.map(async (feature) => {
|
||||
const existing = await featureLoader.get(projectPath, feature.id);
|
||||
return {
|
||||
featureId: feature.id,
|
||||
title: feature.title,
|
||||
existingTitle: existing?.title,
|
||||
hasConflict: !!existing,
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
const hasConflicts = conflicts.some((c) => c.hasConflict);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
hasConflicts,
|
||||
conflicts,
|
||||
totalFeatures: featuresToCheck.length,
|
||||
conflictCount: conflicts.filter((c) => c.hasConflict).length,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Conflict check failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -37,7 +37,7 @@ import {
|
||||
import {
|
||||
getPromptCustomization,
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getProviderByModelId,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../../lib/settings-helpers.js';
|
||||
import {
|
||||
trySetValidationRunning,
|
||||
@@ -167,33 +167,18 @@ ${basePrompt}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the model is a provider model (like "GLM-4.5-Air")
|
||||
// If so, get the provider config and resolved Claude model
|
||||
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||
let providerResolvedModel: string | undefined;
|
||||
let credentials = await settingsService?.getCredentials();
|
||||
logger.info(`Using model: ${model}`);
|
||||
|
||||
if (settingsService) {
|
||||
const providerResult = await getProviderByModelId(model, settingsService, '[ValidateIssue]');
|
||||
if (providerResult.provider) {
|
||||
claudeCompatibleProvider = providerResult.provider;
|
||||
providerResolvedModel = providerResult.resolvedModel;
|
||||
credentials = providerResult.credentials;
|
||||
logger.info(
|
||||
`Using provider "${providerResult.provider.name}" for model "${model}"` +
|
||||
(providerResolvedModel ? ` -> resolved to "${providerResolvedModel}"` : '')
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Use provider resolved model if available, otherwise use original model
|
||||
const effectiveModel = providerResolvedModel || (model as string);
|
||||
logger.info(`Using model: ${effectiveModel}`);
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[IssueValidation]'
|
||||
);
|
||||
|
||||
// Use streamingQuery with event callbacks
|
||||
const result = await streamingQuery({
|
||||
prompt: finalPrompt,
|
||||
model: effectiveModel,
|
||||
model: model as string,
|
||||
cwd: projectPath,
|
||||
systemPrompt: useStructuredOutput ? issueValidationSystemPrompt : undefined,
|
||||
abortController,
|
||||
@@ -201,7 +186,7 @@ ${basePrompt}`;
|
||||
reasoningEffort: effectiveReasoningEffort,
|
||||
readOnly: true, // Issue validation only reads code, doesn't write
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
outputFormat: useStructuredOutput
|
||||
? {
|
||||
|
||||
@@ -1,143 +0,0 @@
|
||||
/**
|
||||
* Provider Usage Routes
|
||||
*
|
||||
* API endpoints for fetching usage data from all AI providers.
|
||||
*
|
||||
* Endpoints:
|
||||
* - GET /api/provider-usage - Get usage for all enabled providers
|
||||
* - GET /api/provider-usage/:providerId - Get usage for a specific provider
|
||||
* - GET /api/provider-usage/availability - Check availability of all providers
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { UsageProviderId } from '@automaker/types';
|
||||
import { ProviderUsageTracker } from '../../services/provider-usage-tracker.js';
|
||||
|
||||
const logger = createLogger('ProviderUsageRoutes');
|
||||
|
||||
// Valid provider IDs
|
||||
const VALID_PROVIDER_IDS: UsageProviderId[] = [
|
||||
'claude',
|
||||
'codex',
|
||||
'cursor',
|
||||
'gemini',
|
||||
'copilot',
|
||||
'opencode',
|
||||
'minimax',
|
||||
'glm',
|
||||
];
|
||||
|
||||
export function createProviderUsageRoutes(tracker: ProviderUsageTracker): Router {
|
||||
const router = Router();
|
||||
|
||||
/**
|
||||
* GET /api/provider-usage
|
||||
* Fetch usage for all enabled providers
|
||||
*/
|
||||
router.get('/', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const forceRefresh = req.query.refresh === 'true';
|
||||
const usage = await tracker.fetchAllUsage(forceRefresh);
|
||||
res.json({
|
||||
success: true,
|
||||
data: usage,
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error('Error fetching all provider usage:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/provider-usage/availability
|
||||
* Check which providers are available
|
||||
*/
|
||||
router.get('/availability', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const availability = await tracker.checkAvailability();
|
||||
res.json({
|
||||
success: true,
|
||||
data: availability,
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error('Error checking provider availability:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/provider-usage/:providerId
|
||||
* Fetch usage for a specific provider
|
||||
*/
|
||||
router.get('/:providerId', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const providerId = req.params.providerId as UsageProviderId;
|
||||
|
||||
// Validate provider ID
|
||||
if (!VALID_PROVIDER_IDS.includes(providerId)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Invalid provider ID: ${providerId}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if provider is enabled
|
||||
if (!tracker.isProviderEnabled(providerId)) {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
data: {
|
||||
providerId,
|
||||
providerName: providerId,
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
error: 'Provider is disabled',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const forceRefresh = req.query.refresh === 'true';
|
||||
const usage = await tracker.fetchProviderUsage(providerId, forceRefresh);
|
||||
|
||||
if (!usage) {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
data: {
|
||||
providerId,
|
||||
providerName: providerId,
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
error: 'Failed to fetch usage data',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: usage,
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error(`Error fetching usage for ${req.params.providerId}:`, error);
|
||||
|
||||
// Return 200 with error in data to avoid triggering logout
|
||||
res.status(200).json({
|
||||
success: false,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
@@ -24,9 +24,6 @@ import { createDeauthCursorHandler } from './routes/deauth-cursor.js';
import { createAuthOpencodeHandler } from './routes/auth-opencode.js';
import { createDeauthOpencodeHandler } from './routes/deauth-opencode.js';
import { createOpencodeStatusHandler } from './routes/opencode-status.js';
import { createGeminiStatusHandler } from './routes/gemini-status.js';
import { createAuthGeminiHandler } from './routes/auth-gemini.js';
import { createDeauthGeminiHandler } from './routes/deauth-gemini.js';
import {
  createGetOpencodeModelsHandler,
  createRefreshOpencodeModelsHandler,
@@ -75,11 +72,6 @@ export function createSetupRoutes(): Router {
  router.post('/auth-opencode', createAuthOpencodeHandler());
  router.post('/deauth-opencode', createDeauthOpencodeHandler());

  // Gemini CLI routes
  router.get('/gemini-status', createGeminiStatusHandler());
  router.post('/auth-gemini', createAuthGeminiHandler());
  router.post('/deauth-gemini', createDeauthGeminiHandler());

  // OpenCode Dynamic Model Discovery routes
  router.get('/opencode/models', createGetOpencodeModelsHandler());
  router.post('/opencode/models/refresh', createRefreshOpencodeModelsHandler());

@@ -1,42 +0,0 @@
|
||||
/**
|
||||
* POST /auth-gemini endpoint - Connect Gemini CLI to the app
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
const DISCONNECTED_MARKER_FILE = '.gemini-disconnected';
|
||||
|
||||
/**
|
||||
* Creates handler for POST /api/setup/auth-gemini
|
||||
* Removes the disconnection marker to allow Gemini CLI to be used
|
||||
*/
|
||||
export function createAuthGeminiHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const projectRoot = process.cwd();
|
||||
const automakerDir = path.join(projectRoot, '.automaker');
|
||||
const markerPath = path.join(automakerDir, DISCONNECTED_MARKER_FILE);
|
||||
|
||||
// Remove the disconnection marker if it exists
|
||||
try {
|
||||
await fs.unlink(markerPath);
|
||||
} catch {
|
||||
// File doesn't exist, nothing to remove
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Gemini CLI connected to app',
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Auth Gemini failed');
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
/**
|
||||
* POST /deauth-gemini endpoint - Disconnect Gemini CLI from the app
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
const DISCONNECTED_MARKER_FILE = '.gemini-disconnected';
|
||||
|
||||
/**
|
||||
* Creates handler for POST /api/setup/deauth-gemini
|
||||
* Creates a marker file to disconnect Gemini CLI from the app
|
||||
*/
|
||||
export function createDeauthGeminiHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const projectRoot = process.cwd();
|
||||
const automakerDir = path.join(projectRoot, '.automaker');
|
||||
|
||||
// Ensure .automaker directory exists
|
||||
await fs.mkdir(automakerDir, { recursive: true });
|
||||
|
||||
const markerPath = path.join(automakerDir, DISCONNECTED_MARKER_FILE);
|
||||
|
||||
// Create the disconnection marker
|
||||
await fs.writeFile(markerPath, 'Gemini CLI disconnected from app');
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Gemini CLI disconnected from app',
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Deauth Gemini failed');
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -1,79 +0,0 @@
|
||||
/**
|
||||
* GET /gemini-status endpoint - Get Gemini CLI installation and auth status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { GeminiProvider } from '../../../providers/gemini-provider.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
const DISCONNECTED_MARKER_FILE = '.gemini-disconnected';
|
||||
|
||||
async function isGeminiDisconnectedFromApp(): Promise<boolean> {
|
||||
try {
|
||||
const projectRoot = process.cwd();
|
||||
const markerPath = path.join(projectRoot, '.automaker', DISCONNECTED_MARKER_FILE);
|
||||
await fs.access(markerPath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates handler for GET /api/setup/gemini-status
|
||||
* Returns Gemini CLI installation and authentication status
|
||||
*/
|
||||
export function createGeminiStatusHandler() {
|
||||
const installCommand = 'npm install -g @google/gemini-cli';
|
||||
const loginCommand = 'gemini';
|
||||
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
// Check if user has manually disconnected from the app
|
||||
if (await isGeminiDisconnectedFromApp()) {
|
||||
res.json({
|
||||
success: true,
|
||||
installed: true,
|
||||
version: null,
|
||||
path: null,
|
||||
auth: {
|
||||
authenticated: false,
|
||||
method: 'none',
|
||||
hasApiKey: false,
|
||||
},
|
||||
installCommand,
|
||||
loginCommand,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const provider = new GeminiProvider();
|
||||
const status = await provider.detectInstallation();
|
||||
const auth = await provider.checkAuth();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
installed: status.installed,
|
||||
version: status.version || null,
|
||||
path: status.path || null,
|
||||
auth: {
|
||||
authenticated: auth.authenticated,
|
||||
method: auth.method,
|
||||
hasApiKey: auth.hasApiKey || false,
|
||||
hasEnvApiKey: auth.hasEnvApiKey || false,
|
||||
error: auth.error,
|
||||
},
|
||||
installCommand,
|
||||
loginCommand,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Get Gemini status failed');
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
apps/server/src/routes/suggestions/common.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
/**
 * Common utilities and state for suggestions routes
 */

import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

const logger = createLogger('Suggestions');

// Shared state for tracking generation status - private
let isRunning = false;
let currentAbortController: AbortController | null = null;

/**
 * Get the current running state
 */
export function getSuggestionsStatus(): {
  isRunning: boolean;
  currentAbortController: AbortController | null;
} {
  return { isRunning, currentAbortController };
}

/**
 * Set the running state and abort controller
 */
export function setRunningState(running: boolean, controller: AbortController | null = null): void {
  isRunning = running;
  currentAbortController = controller;
}

// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
export const logError = createLogError(logger);
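The module keeps a single in-process flag, so only one suggestions run can be active per server process. A route handler guards on it roughly like this (a condensed sketch of the pattern used by routes/generate.ts and routes/stop.ts further down in this diff):

// Sketch of the guard pattern; see routes/generate.ts and routes/stop.ts below.
const { isRunning } = getSuggestionsStatus();
if (isRunning) {
  res.json({ success: false, error: 'Suggestions generation is already running' });
  return;
}
const abortController = new AbortController();
setRunningState(true, abortController);
try {
  // ... kick off generation in the background ...
} finally {
  setRunningState(false, null);
}
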
apps/server/src/routes/suggestions/generate-suggestions.ts (new file, 308 lines)
@@ -0,0 +1,308 @@
|
||||
/**
|
||||
* Business logic for generating suggestions
|
||||
*
|
||||
* Model is configurable via phaseModels.suggestionsModel in settings
|
||||
* (AI Suggestions in the UI). Supports both Claude and Cursor models.
|
||||
*/
|
||||
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { DEFAULT_PHASE_MODELS, isCursorModel, type ThinkingLevel } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { extractJsonWithArray } from '../../lib/json-extractor.js';
|
||||
import { streamingQuery } from '../../providers/simple-query-service.js';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
import { getAppSpecPath } from '@automaker/platform';
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import {
|
||||
getAutoLoadClaudeMdSetting,
|
||||
getPromptCustomization,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('Suggestions');
|
||||
|
||||
/**
|
||||
* Extract implemented features from app_spec.txt XML content
|
||||
*
|
||||
* Note: This uses regex-based parsing which is sufficient for our controlled
|
||||
* XML structure. If more complex XML parsing is needed in the future, consider
|
||||
* using a library like 'fast-xml-parser' or 'xml2js'.
|
||||
*/
|
||||
function extractImplementedFeatures(specContent: string): string[] {
|
||||
const features: string[] = [];
|
||||
|
||||
// Match <implemented_features>...</implemented_features> section
|
||||
const implementedMatch = specContent.match(
|
||||
/<implemented_features>([\s\S]*?)<\/implemented_features>/
|
||||
);
|
||||
|
||||
if (implementedMatch) {
|
||||
const implementedSection = implementedMatch[1];
|
||||
|
||||
// Extract feature names from <name>...</name> tags using matchAll
|
||||
const nameRegex = /<name>(.*?)<\/name>/g;
|
||||
const matches = implementedSection.matchAll(nameRegex);
|
||||
|
||||
for (const match of matches) {
|
||||
features.push(match[1].trim());
|
||||
}
|
||||
}
|
||||
|
||||
return features;
|
||||
}
|
||||
|
||||
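For example, given a spec fragment shaped like the one below, the regex pass returns only the trimmed feature names (the sample XML is illustrative, not taken from a real app_spec.txt):

// Illustrative input/output for extractImplementedFeatures (sample data, not from the repo).
const sample = `
<implemented_features>
  <feature><name> User login </name></feature>
  <feature><name>Dark mode</name></feature>
</implemented_features>`;
extractImplementedFeatures(sample); // => ['User login', 'Dark mode']
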
/**
|
||||
* Load existing context (app spec and backlog features) to avoid duplicates
|
||||
*/
|
||||
async function loadExistingContext(projectPath: string): Promise<string> {
|
||||
let context = '';
|
||||
|
||||
// 1. Read app_spec.txt for implemented features
|
||||
try {
|
||||
const appSpecPath = getAppSpecPath(projectPath);
|
||||
const specContent = (await secureFs.readFile(appSpecPath, 'utf-8')) as string;
|
||||
|
||||
if (specContent && specContent.trim().length > 0) {
|
||||
const implementedFeatures = extractImplementedFeatures(specContent);
|
||||
|
||||
if (implementedFeatures.length > 0) {
|
||||
context += '\n\n=== ALREADY IMPLEMENTED FEATURES ===\n';
|
||||
context += 'These features are already implemented in the codebase:\n';
|
||||
context += implementedFeatures.map((feature) => `- ${feature}`).join('\n') + '\n';
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// app_spec.txt doesn't exist or can't be read - that's okay
|
||||
logger.debug('No app_spec.txt found or error reading it:', error);
|
||||
}
|
||||
|
||||
// 2. Load existing features from backlog
|
||||
try {
|
||||
const featureLoader = new FeatureLoader();
|
||||
const features = await featureLoader.getAll(projectPath);
|
||||
|
||||
if (features.length > 0) {
|
||||
context += '\n\n=== EXISTING FEATURES IN BACKLOG ===\n';
|
||||
context += 'These features are already planned or in progress:\n';
|
||||
context +=
|
||||
features
|
||||
.map((feature) => {
|
||||
const status = feature.status || 'pending';
|
||||
const title = feature.title || feature.description?.substring(0, 50) || 'Untitled';
|
||||
return `- ${title} (${status})`;
|
||||
})
|
||||
.join('\n') + '\n';
|
||||
}
|
||||
} catch (error) {
|
||||
// Features directory doesn't exist or can't be read - that's okay
|
||||
logger.debug('No features found or error loading them:', error);
|
||||
}
|
||||
|
||||
return context;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON Schema for suggestions output
|
||||
*/
|
||||
const suggestionsSchema = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
suggestions: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
category: { type: 'string' },
|
||||
description: { type: 'string' },
|
||||
priority: {
|
||||
type: 'number',
|
||||
minimum: 1,
|
||||
maximum: 3,
|
||||
},
|
||||
reasoning: { type: 'string' },
|
||||
},
|
||||
required: ['category', 'description', 'priority', 'reasoning'],
|
||||
},
|
||||
},
|
||||
},
|
||||
required: ['suggestions'],
|
||||
additionalProperties: false,
|
||||
};
|
||||
|
||||
export async function generateSuggestions(
|
||||
projectPath: string,
|
||||
suggestionType: string,
|
||||
events: EventEmitter,
|
||||
abortController: AbortController,
|
||||
settingsService?: SettingsService,
|
||||
modelOverride?: string,
|
||||
thinkingLevelOverride?: ThinkingLevel
|
||||
): Promise<void> {
|
||||
// Get customized prompts from settings
|
||||
const prompts = await getPromptCustomization(settingsService, '[Suggestions]');
|
||||
|
||||
// Map suggestion types to their prompts
|
||||
const typePrompts: Record<string, string> = {
|
||||
features: prompts.suggestions.featuresPrompt,
|
||||
refactoring: prompts.suggestions.refactoringPrompt,
|
||||
security: prompts.suggestions.securityPrompt,
|
||||
performance: prompts.suggestions.performancePrompt,
|
||||
};
|
||||
|
||||
// Load existing context to avoid duplicates
|
||||
const existingContext = await loadExistingContext(projectPath);
|
||||
|
||||
const prompt = `${typePrompts[suggestionType] || typePrompts.features}
|
||||
${existingContext}
|
||||
|
||||
${existingContext ? '\nIMPORTANT: Do NOT suggest features that are already implemented or already in the backlog above. Focus on NEW ideas that complement what already exists.\n' : ''}
|
||||
${prompts.suggestions.baseTemplate}`;
|
||||
|
||||
// Don't send initial message - let the agent output speak for itself
|
||||
// The first agent message will be captured as an info entry
|
||||
|
||||
// Load autoLoadClaudeMd setting
|
||||
const autoLoadClaudeMd = await getAutoLoadClaudeMdSetting(
|
||||
projectPath,
|
||||
settingsService,
|
||||
'[Suggestions]'
|
||||
);
|
||||
|
||||
// Get model from phase settings (AI Suggestions = suggestionsModel)
|
||||
// Use override if provided, otherwise fall back to settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
let model: string;
|
||||
let thinkingLevel: ThinkingLevel | undefined;
|
||||
|
||||
if (modelOverride) {
|
||||
// Use explicit override - resolve the model string
|
||||
const resolved = resolvePhaseModel({
|
||||
model: modelOverride,
|
||||
thinkingLevel: thinkingLevelOverride,
|
||||
});
|
||||
model = resolved.model;
|
||||
thinkingLevel = resolved.thinkingLevel;
|
||||
} else {
|
||||
// Use settings-based model
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.suggestionsModel || DEFAULT_PHASE_MODELS.suggestionsModel;
|
||||
const resolved = resolvePhaseModel(phaseModelEntry);
|
||||
model = resolved.model;
|
||||
thinkingLevel = resolved.thinkingLevel;
|
||||
}
|
||||
|
||||
logger.info('[Suggestions] Using model:', model);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[Suggestions]'
|
||||
);
|
||||
|
||||
let responseText = '';
|
||||
|
||||
// Determine if we should use structured output (Claude supports it, Cursor doesn't)
|
||||
const useStructuredOutput = !isCursorModel(model);
|
||||
|
||||
// Build the final prompt - for Cursor, include JSON schema instructions
|
||||
let finalPrompt = prompt;
|
||||
if (!useStructuredOutput) {
|
||||
finalPrompt = `${prompt}
|
||||
|
||||
CRITICAL INSTRUCTIONS:
|
||||
1. DO NOT write any files. Return the JSON in your response only.
|
||||
2. After analyzing the project, respond with ONLY a JSON object - no explanations, no markdown, just raw JSON.
|
||||
3. The JSON must match this exact schema:
|
||||
|
||||
${JSON.stringify(suggestionsSchema, null, 2)}
|
||||
|
||||
Your entire response should be valid JSON starting with { and ending with }. No text before or after.`;
|
||||
}
|
||||
|
||||
// Use streamingQuery with event callbacks
|
||||
const result = await streamingQuery({
|
||||
prompt: finalPrompt,
|
||||
model,
|
||||
cwd: projectPath,
|
||||
maxTurns: 250,
|
||||
allowedTools: ['Read', 'Glob', 'Grep'],
|
||||
abortController,
|
||||
thinkingLevel,
|
||||
readOnly: true, // Suggestions only reads code, doesn't write
|
||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
outputFormat: useStructuredOutput
|
||||
? {
|
||||
type: 'json_schema',
|
||||
schema: suggestionsSchema,
|
||||
}
|
||||
: undefined,
|
||||
onText: (text) => {
|
||||
responseText += text;
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_progress',
|
||||
content: text,
|
||||
});
|
||||
},
|
||||
onToolUse: (tool, input) => {
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_tool',
|
||||
tool,
|
||||
input,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
// Use structured output if available, otherwise fall back to parsing text
|
||||
try {
|
||||
let structuredOutput: { suggestions: Array<Record<string, unknown>> } | null = null;
|
||||
|
||||
if (result.structured_output) {
|
||||
structuredOutput = result.structured_output as {
|
||||
suggestions: Array<Record<string, unknown>>;
|
||||
};
|
||||
logger.debug('Received structured output:', structuredOutput);
|
||||
} else if (responseText) {
|
||||
// Fallback: try to parse from text using shared extraction utility
|
||||
logger.warn('No structured output received, attempting to parse from text');
|
||||
structuredOutput = extractJsonWithArray<{ suggestions: Array<Record<string, unknown>> }>(
|
||||
responseText,
|
||||
'suggestions',
|
||||
{ logger }
|
||||
);
|
||||
}
|
||||
|
||||
if (structuredOutput && structuredOutput.suggestions) {
|
||||
// Use structured output directly
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_complete',
|
||||
suggestions: structuredOutput.suggestions.map((s: Record<string, unknown>, i: number) => ({
|
||||
...s,
|
||||
id: s.id || `suggestion-${Date.now()}-${i}`,
|
||||
})),
|
||||
});
|
||||
} else {
|
||||
throw new Error('No valid JSON found in response');
|
||||
}
|
||||
} catch (error) {
|
||||
// Log the parsing error for debugging
|
||||
logger.error('Failed to parse suggestions JSON from AI response:', error);
|
||||
// Return generic suggestions if parsing fails
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_complete',
|
||||
suggestions: [
|
||||
{
|
||||
id: `suggestion-${Date.now()}-0`,
|
||||
category: 'Analysis',
|
||||
description: 'Review the AI analysis output for insights',
|
||||
priority: 1,
|
||||
reasoning: 'The AI provided analysis but suggestions need manual review',
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
}
|
||||
apps/server/src/routes/suggestions/index.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
/**
 * Suggestions routes - HTTP API for AI-powered feature suggestions
 */

import { Router } from 'express';
import type { EventEmitter } from '../../lib/events.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createGenerateHandler } from './routes/generate.js';
import { createStopHandler } from './routes/stop.js';
import { createStatusHandler } from './routes/status.js';
import type { SettingsService } from '../../services/settings-service.js';

export function createSuggestionsRoutes(
  events: EventEmitter,
  settingsService?: SettingsService
): Router {
  const router = Router();

  router.post(
    '/generate',
    validatePathParams('projectPath'),
    createGenerateHandler(events, settingsService)
  );
  router.post('/stop', createStopHandler());
  router.get('/status', createStatusHandler());

  return router;
}
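Assuming the router is mounted under an /api/suggestions prefix (the mount point is not shown in this diff), a client round-trip looks roughly like:

// Sketch only: the /api/suggestions prefix is an assumption, not shown in this diff.
await fetch('/api/suggestions/generate', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ projectPath, suggestionType: 'features' }),
});
const { isRunning } = await (await fetch('/api/suggestions/status')).json();
await fetch('/api/suggestions/stop', { method: 'POST' }); // aborts the in-flight run
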
apps/server/src/routes/suggestions/routes/generate.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
/**
|
||||
* POST /generate endpoint - Generate suggestions
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { EventEmitter } from '../../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { ThinkingLevel } from '@automaker/types';
|
||||
import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
|
||||
import { generateSuggestions } from '../generate-suggestions.js';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
|
||||
const logger = createLogger('Suggestions');
|
||||
|
||||
export function createGenerateHandler(events: EventEmitter, settingsService?: SettingsService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const {
|
||||
projectPath,
|
||||
suggestionType = 'features',
|
||||
model,
|
||||
thinkingLevel,
|
||||
} = req.body as {
|
||||
projectPath: string;
|
||||
suggestionType?: string;
|
||||
model?: string;
|
||||
thinkingLevel?: ThinkingLevel;
|
||||
};
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const { isRunning } = getSuggestionsStatus();
|
||||
if (isRunning) {
|
||||
res.json({
|
||||
success: false,
|
||||
error: 'Suggestions generation is already running',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
setRunningState(true);
|
||||
const abortController = new AbortController();
|
||||
setRunningState(true, abortController);
|
||||
|
||||
// Start generation in background
|
||||
generateSuggestions(
|
||||
projectPath,
|
||||
suggestionType,
|
||||
events,
|
||||
abortController,
|
||||
settingsService,
|
||||
model,
|
||||
thinkingLevel
|
||||
)
|
||||
.catch((error) => {
|
||||
logError(error, 'Generate suggestions failed (background)');
|
||||
events.emit('suggestions:event', {
|
||||
type: 'suggestions_error',
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
})
|
||||
.finally(() => {
|
||||
setRunningState(false, null);
|
||||
});
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, 'Generate suggestions failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
apps/server/src/routes/suggestions/routes/status.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
/**
 * GET /status endpoint - Get suggestions generation status
 */

import type { Request, Response } from 'express';
import { getSuggestionsStatus, getErrorMessage, logError } from '../common.js';

export function createStatusHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
    try {
      const { isRunning } = getSuggestionsStatus();
      res.json({ success: true, isRunning });
    } catch (error) {
      logError(error, 'Get status failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
apps/server/src/routes/suggestions/routes/stop.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
/**
 * POST /stop endpoint - Stop suggestions generation
 */

import type { Request, Response } from 'express';
import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';

export function createStopHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
    try {
      const { currentAbortController } = getSuggestionsStatus();
      if (currentAbortController) {
        currentAbortController.abort();
      }
      setRunningState(false, null);
      res.json({ success: true });
    } catch (error) {
      logError(error, 'Stop suggestions failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

@@ -42,18 +42,12 @@ import { createStartDevHandler } from './routes/start-dev.js';
|
||||
import { createStopDevHandler } from './routes/stop-dev.js';
|
||||
import { createListDevServersHandler } from './routes/list-dev-servers.js';
|
||||
import { createGetDevServerLogsHandler } from './routes/dev-server-logs.js';
|
||||
import { createStartTestsHandler } from './routes/start-tests.js';
|
||||
import { createStopTestsHandler } from './routes/stop-tests.js';
|
||||
import { createGetTestLogsHandler } from './routes/test-logs.js';
|
||||
import {
|
||||
createGetInitScriptHandler,
|
||||
createPutInitScriptHandler,
|
||||
createDeleteInitScriptHandler,
|
||||
createRunInitScriptHandler,
|
||||
} from './routes/init-script.js';
|
||||
import { createDiscardChangesHandler } from './routes/discard-changes.js';
|
||||
import { createListRemotesHandler } from './routes/list-remotes.js';
|
||||
import { createAddRemoteHandler } from './routes/add-remote.js';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
|
||||
export function createWorktreeRoutes(
|
||||
@@ -134,7 +128,7 @@ export function createWorktreeRoutes(
|
||||
router.post(
|
||||
'/start-dev',
|
||||
validatePathParams('projectPath', 'worktreePath'),
|
||||
createStartDevHandler(settingsService)
|
||||
createStartDevHandler()
|
||||
);
|
||||
router.post('/stop-dev', createStopDevHandler());
|
||||
router.post('/list-dev-servers', createListDevServersHandler());
|
||||
@@ -144,15 +138,6 @@ export function createWorktreeRoutes(
|
||||
createGetDevServerLogsHandler()
|
||||
);
|
||||
|
||||
// Test runner routes
|
||||
router.post(
|
||||
'/start-tests',
|
||||
validatePathParams('worktreePath', 'projectPath?'),
|
||||
createStartTestsHandler(settingsService)
|
||||
);
|
||||
router.post('/stop-tests', createStopTestsHandler());
|
||||
router.get('/test-logs', validatePathParams('worktreePath?'), createGetTestLogsHandler());
|
||||
|
||||
// Init script routes
|
||||
router.get('/init-script', createGetInitScriptHandler());
|
||||
router.put('/init-script', validatePathParams('projectPath'), createPutInitScriptHandler());
|
||||
@@ -163,29 +148,5 @@ export function createWorktreeRoutes(
|
||||
createRunInitScriptHandler(events)
|
||||
);
|
||||
|
||||
// Discard changes route
|
||||
router.post(
|
||||
'/discard-changes',
|
||||
validatePathParams('worktreePath'),
|
||||
requireGitRepoOnly,
|
||||
createDiscardChangesHandler()
|
||||
);
|
||||
|
||||
// List remotes route
|
||||
router.post(
|
||||
'/list-remotes',
|
||||
validatePathParams('worktreePath'),
|
||||
requireValidWorktree,
|
||||
createListRemotesHandler()
|
||||
);
|
||||
|
||||
// Add remote route
|
||||
router.post(
|
||||
'/add-remote',
|
||||
validatePathParams('worktreePath'),
|
||||
requireGitRepoOnly,
|
||||
createAddRemoteHandler()
|
||||
);
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -1,166 +0,0 @@
|
||||
/**
|
||||
* POST /add-remote endpoint - Add a new remote to a git repository
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidWorktree middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logWorktreeError } from '../common.js';
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
/** Maximum allowed length for remote names */
|
||||
const MAX_REMOTE_NAME_LENGTH = 250;
|
||||
|
||||
/** Maximum allowed length for remote URLs */
|
||||
const MAX_REMOTE_URL_LENGTH = 2048;
|
||||
|
||||
/** Timeout for git fetch operations (30 seconds) */
|
||||
const FETCH_TIMEOUT_MS = 30000;
|
||||
|
||||
/**
|
||||
* Validate remote name - must be alphanumeric with dashes/underscores
|
||||
* Git remote names have similar restrictions to branch names
|
||||
*/
|
||||
function isValidRemoteName(name: string): boolean {
|
||||
// Remote names should be alphanumeric, may contain dashes, underscores, periods
|
||||
// Cannot start with a dash or period, cannot be empty
|
||||
if (!name || name.length === 0 || name.length > MAX_REMOTE_NAME_LENGTH) {
|
||||
return false;
|
||||
}
|
||||
return /^[a-zA-Z0-9][a-zA-Z0-9._-]*$/.test(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate remote URL - basic validation for git remote URLs
|
||||
* Supports HTTPS, SSH, and git:// protocols
|
||||
*/
|
||||
function isValidRemoteUrl(url: string): boolean {
|
||||
if (!url || url.length === 0 || url.length > MAX_REMOTE_URL_LENGTH) {
|
||||
return false;
|
||||
}
|
||||
// Support common git URL formats:
|
||||
// - https://github.com/user/repo.git
|
||||
// - git@github.com:user/repo.git
|
||||
// - git://github.com/user/repo.git
|
||||
// - ssh://git@github.com/user/repo.git
|
||||
const httpsPattern = /^https?:\/\/.+/;
|
||||
const sshPattern = /^[a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+:.+/;
|
||||
const gitProtocolPattern = /^git:\/\/.+/;
|
||||
const sshProtocolPattern = /^ssh:\/\/.+/;
|
||||
|
||||
return (
|
||||
httpsPattern.test(url) ||
|
||||
sshPattern.test(url) ||
|
||||
gitProtocolPattern.test(url) ||
|
||||
sshProtocolPattern.test(url)
|
||||
);
|
||||
}
|
||||
|
||||
export function createAddRemoteHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, remoteName, remoteUrl } = req.body as {
|
||||
worktreePath: string;
|
||||
remoteName: string;
|
||||
remoteUrl: string;
|
||||
};
|
||||
|
||||
// Validate required fields
|
||||
const requiredFields = { worktreePath, remoteName, remoteUrl };
|
||||
for (const [key, value] of Object.entries(requiredFields)) {
|
||||
if (!value) {
|
||||
res.status(400).json({ success: false, error: `${key} required` });
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Validate remote name
|
||||
if (!isValidRemoteName(remoteName)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error:
|
||||
'Invalid remote name. Must start with alphanumeric character and contain only letters, numbers, dashes, underscores, or periods.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate remote URL
|
||||
if (!isValidRemoteUrl(remoteUrl)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid remote URL. Must be a valid git URL (HTTPS, SSH, or git:// protocol).',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if remote already exists
|
||||
try {
|
||||
const { stdout: existingRemotes } = await execFileAsync('git', ['remote'], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const remoteNames = existingRemotes
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((r) => r.trim());
|
||||
if (remoteNames.includes(remoteName)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Remote '${remoteName}' already exists`,
|
||||
code: 'REMOTE_EXISTS',
|
||||
});
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
// If git remote fails, continue with adding the remote. Log for debugging.
|
||||
logWorktreeError(
|
||||
error,
|
||||
'Checking for existing remotes failed, proceeding to add.',
|
||||
worktreePath
|
||||
);
|
||||
}
|
||||
|
||||
// Add the remote using execFile with array arguments to prevent command injection
|
||||
await execFileAsync('git', ['remote', 'add', remoteName, remoteUrl], {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
// Optionally fetch from the new remote to get its branches
|
||||
let fetchSucceeded = false;
|
||||
try {
|
||||
await execFileAsync('git', ['fetch', remoteName, '--quiet'], {
|
||||
cwd: worktreePath,
|
||||
timeout: FETCH_TIMEOUT_MS,
|
||||
});
|
||||
fetchSucceeded = true;
|
||||
} catch (fetchError) {
|
||||
// Fetch failed (maybe offline or invalid URL), but remote was added successfully
|
||||
logWorktreeError(
|
||||
fetchError,
|
||||
`Fetch from new remote '${remoteName}' failed (remote added successfully)`,
|
||||
worktreePath
|
||||
);
|
||||
fetchSucceeded = false;
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
remoteName,
|
||||
remoteUrl,
|
||||
fetched: fetchSucceeded,
|
||||
message: fetchSucceeded
|
||||
? `Successfully added remote '${remoteName}' and fetched its branches`
|
||||
: `Successfully added remote '${remoteName}' (fetch failed - you may need to fetch manually)`,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const worktreePath = req.body?.worktreePath;
|
||||
logWorktreeError(error, 'Add remote failed', worktreePath);
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -39,10 +39,7 @@ export function createDiffsHandler() {
    }

    // Git worktrees are stored in project directory
    // Sanitize featureId the same way it's sanitized when creating worktrees
    // (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
    const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
    const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
    const worktreePath = path.join(projectPath, '.worktrees', featureId);

    try {
      // Check if worktree exists

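The sanitization mirrors what create.ts applies to branch names, so a feature id maps to the same worktree directory name on both sides; for instance (illustrative value):

// Illustrative: any character outside [a-zA-Z0-9_-] becomes '-'.
'feature/add auth!'.replace(/[^a-zA-Z0-9_-]/g, '-'); // => 'feature-add-auth-'
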
@@ -1,112 +0,0 @@
|
||||
/**
|
||||
* POST /discard-changes endpoint - Discard all uncommitted changes in a worktree
|
||||
*
|
||||
* This performs a destructive operation that:
|
||||
* 1. Resets staged changes (git reset HEAD)
|
||||
* 2. Discards modified tracked files (git checkout .)
|
||||
* 3. Removes untracked files and directories (git clean -fd)
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo) is handled by
|
||||
* the requireGitRepoOnly middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
export function createDiscardChangesHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath } = req.body as {
|
||||
worktreePath: string;
|
||||
};
|
||||
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for uncommitted changes first
|
||||
const { stdout: status } = await execAsync('git status --porcelain', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
if (!status.trim()) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
discarded: false,
|
||||
message: 'No changes to discard',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Count the files that will be affected
|
||||
const lines = status.trim().split('\n').filter(Boolean);
|
||||
const fileCount = lines.length;
|
||||
|
||||
// Get branch name before discarding
|
||||
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
// Discard all changes:
|
||||
// 1. Reset any staged changes
|
||||
await execAsync('git reset HEAD', { cwd: worktreePath }).catch(() => {
|
||||
// Ignore errors - might fail if there's nothing staged
|
||||
});
|
||||
|
||||
// 2. Discard changes in tracked files
|
||||
await execAsync('git checkout .', { cwd: worktreePath }).catch(() => {
|
||||
// Ignore errors - might fail if there are no tracked changes
|
||||
});
|
||||
|
||||
// 3. Remove untracked files and directories
|
||||
await execAsync('git clean -fd', { cwd: worktreePath }).catch(() => {
|
||||
// Ignore errors - might fail if there are no untracked files
|
||||
});
|
||||
|
||||
// Verify all changes were discarded
|
||||
const { stdout: finalStatus } = await execAsync('git status --porcelain', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
if (finalStatus.trim()) {
|
||||
// Some changes couldn't be discarded (possibly ignored files or permission issues)
|
||||
const remainingCount = finalStatus.trim().split('\n').filter(Boolean).length;
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
discarded: true,
|
||||
filesDiscarded: fileCount - remainingCount,
|
||||
filesRemaining: remainingCount,
|
||||
branch: branchName,
|
||||
message: `Discarded ${fileCount - remainingCount} files, ${remainingCount} files could not be removed`,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
discarded: true,
|
||||
filesDiscarded: fileCount,
|
||||
filesRemaining: 0,
|
||||
branch: branchName,
|
||||
message: `Discarded ${fileCount} ${fileCount === 1 ? 'file' : 'files'}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, 'Discard changes failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -37,10 +37,7 @@ export function createFileDiffHandler() {
    }

    // Git worktrees are stored in project directory
    // Sanitize featureId the same way it's sanitized when creating worktrees
    // (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
    const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
    const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
    const worktreePath = path.join(projectPath, '.worktrees', featureId);

    try {
      await secureFs.access(worktreePath);

@@ -11,13 +11,13 @@ import { promisify } from 'util';
|
||||
import { existsSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { isCursorModel, stripProviderPrefix } from '@automaker/types';
|
||||
import { DEFAULT_PHASE_MODELS, isCursorModel, stripProviderPrefix } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { mergeCommitMessagePrompts } from '@automaker/prompts';
|
||||
import { ProviderFactory } from '../../../providers/provider-factory.js';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { getPhaseModelWithOverrides } from '../../../lib/settings-helpers.js';
|
||||
import { getActiveClaudeApiProfile } from '../../../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('GenerateCommitMessage');
|
||||
const execAsync = promisify(exec);
|
||||
@@ -157,29 +157,25 @@ export function createGenerateCommitMessageHandler(
|
||||
|
||||
const userPrompt = `Generate a commit message for these changes:\n\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
|
||||
|
||||
// Get model from phase settings with provider info
|
||||
const {
|
||||
phaseModel: phaseModelEntry,
|
||||
provider: claudeCompatibleProvider,
|
||||
credentials,
|
||||
} = await getPhaseModelWithOverrides(
|
||||
'commitMessageModel',
|
||||
settingsService,
|
||||
worktreePath,
|
||||
'[GenerateCommitMessage]'
|
||||
);
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
// Get model from phase settings
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.commitMessageModel || DEFAULT_PHASE_MODELS.commitMessageModel;
|
||||
const { model } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
logger.info(
|
||||
`Using model for commit message: ${model}`,
|
||||
claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
|
||||
);
|
||||
logger.info(`Using model for commit message: ${model}`);
|
||||
|
||||
// Get the effective system prompt (custom or default)
|
||||
const systemPrompt = await getSystemPrompt(settingsService);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
settingsService,
|
||||
'[GenerateCommitMessage]'
|
||||
);
|
||||
|
||||
// Get provider for the model type
|
||||
const aiProvider = ProviderFactory.getProviderForModel(model);
|
||||
const provider = ProviderFactory.getProviderForModel(model);
|
||||
const bareModel = stripProviderPrefix(model);
|
||||
|
||||
// For Cursor models, combine prompts since Cursor doesn't support systemPrompt separation
|
||||
@@ -188,10 +184,10 @@ export function createGenerateCommitMessageHandler(
|
||||
: userPrompt;
|
||||
const effectiveSystemPrompt = isCursorModel(model) ? undefined : systemPrompt;
|
||||
|
||||
logger.info(`Using ${aiProvider.getName()} provider for model: ${model}`);
|
||||
logger.info(`Using ${provider.getName()} provider for model: ${model}`);
|
||||
|
||||
let responseText = '';
|
||||
const stream = aiProvider.executeQuery({
|
||||
const stream = provider.executeQuery({
|
||||
prompt: effectivePrompt,
|
||||
model: bareModel,
|
||||
cwd: worktreePath,
|
||||
@@ -199,8 +195,7 @@ export function createGenerateCommitMessageHandler(
|
||||
maxTurns: 1,
|
||||
allowedTools: [],
|
||||
readOnly: true,
|
||||
thinkingLevel, // Pass thinking level for extended thinking support
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
});
|
||||
|
||||
|
||||
@@ -28,10 +28,7 @@ export function createInfoHandler() {
|
||||
}
|
||||
|
||||
// Check if worktree exists (git worktrees are stored in project directory)
|
||||
// Sanitize featureId the same way it's sanitized when creating worktrees
|
||||
// (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
|
||||
const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
|
||||
const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
|
||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
||||
try {
|
||||
await secureFs.access(worktreePath);
|
||||
const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
|
||||
@@ -110,22 +110,9 @@ export function createListBranchesHandler() {
|
||||
}
|
||||
}
|
||||
|
||||
// Check if any remotes are configured for this repository
|
||||
let hasAnyRemotes = false;
|
||||
try {
|
||||
const { stdout: remotesOutput } = await execAsync('git remote', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
hasAnyRemotes = remotesOutput.trim().length > 0;
|
||||
} catch {
|
||||
// If git remote fails, assume no remotes
|
||||
hasAnyRemotes = false;
|
||||
}
|
||||
|
||||
// Get ahead/behind count for current branch and check if remote branch exists
|
||||
// Get ahead/behind count for current branch
|
||||
let aheadCount = 0;
|
||||
let behindCount = 0;
|
||||
let hasRemoteBranch = false;
|
||||
try {
|
||||
// First check if there's a remote tracking branch
|
||||
const { stdout: upstreamOutput } = await execAsync(
|
||||
@@ -134,7 +121,6 @@ export function createListBranchesHandler() {
|
||||
);
|
||||
|
||||
if (upstreamOutput.trim()) {
|
||||
hasRemoteBranch = true;
|
||||
const { stdout: aheadBehindOutput } = await execAsync(
|
||||
`git rev-list --left-right --count ${currentBranch}@{upstream}...HEAD`,
|
||||
{ cwd: worktreePath }
|
||||
@@ -144,18 +130,7 @@ export function createListBranchesHandler() {
|
||||
behindCount = behind || 0;
|
||||
}
|
||||
} catch {
|
||||
// No upstream branch set - check if the branch exists on any remote
|
||||
try {
|
||||
// Check if there's a matching branch on origin (most common remote)
|
||||
const { stdout: remoteBranchOutput } = await execAsync(
|
||||
`git ls-remote --heads origin ${currentBranch}`,
|
||||
{ cwd: worktreePath, timeout: 5000 }
|
||||
);
|
||||
hasRemoteBranch = remoteBranchOutput.trim().length > 0;
|
||||
} catch {
|
||||
// No remote branch found or origin doesn't exist
|
||||
hasRemoteBranch = false;
|
||||
}
|
||||
// No upstream branch set, that's okay
|
||||
}
|
||||
|
||||
res.json({
|
||||
@@ -165,8 +140,6 @@ export function createListBranchesHandler() {
|
||||
branches,
|
||||
aheadCount,
|
||||
behindCount,
|
||||
hasRemoteBranch,
|
||||
hasAnyRemotes,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,127 +0,0 @@
|
||||
/**
|
||||
* POST /list-remotes endpoint - List all remotes and their branches
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidWorktree middleware in index.ts
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logWorktreeError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
interface RemoteBranch {
|
||||
name: string;
|
||||
fullRef: string;
|
||||
}
|
||||
|
||||
interface RemoteInfo {
|
||||
name: string;
|
||||
url: string;
|
||||
branches: RemoteBranch[];
|
||||
}
|
||||
|
||||
export function createListRemotesHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath } = req.body as {
|
||||
worktreePath: string;
|
||||
};
|
||||
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Get list of remotes
|
||||
const { stdout: remotesOutput } = await execAsync('git remote -v', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
// Parse remotes (each remote appears twice - once for fetch, once for push)
|
||||
const remotesSet = new Map<string, string>();
|
||||
remotesOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => line.trim())
|
||||
.forEach((line) => {
|
||||
const match = line.match(/^(\S+)\s+(\S+)\s+\(fetch\)$/);
|
||||
if (match) {
|
||||
remotesSet.set(match[1], match[2]);
|
||||
}
|
||||
});
|
||||
|
||||
// Fetch latest from all remotes (silently, don't fail if offline)
|
||||
try {
|
||||
await execAsync('git fetch --all --quiet', {
|
||||
cwd: worktreePath,
|
||||
timeout: 15000, // 15 second timeout
|
||||
});
|
||||
} catch {
|
||||
// Ignore fetch errors - we'll use cached remote refs
|
||||
}
|
||||
|
||||
// Get all remote branches
|
||||
const { stdout: remoteBranchesOutput } = await execAsync(
|
||||
'git branch -r --format="%(refname:short)"',
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
|
||||
// Group branches by remote
|
||||
const remotesBranches = new Map<string, RemoteBranch[]>();
|
||||
remotesSet.forEach((_, remoteName) => {
|
||||
remotesBranches.set(remoteName, []);
|
||||
});
|
||||
|
||||
remoteBranchesOutput
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => line.trim())
|
||||
.forEach((line) => {
|
||||
const cleanLine = line.trim().replace(/^['"]|['"]$/g, '');
|
||||
// Skip HEAD pointers like "origin/HEAD"
|
||||
if (cleanLine.includes('/HEAD')) return;
|
||||
|
||||
// Parse remote name from branch ref (e.g., "origin/main" -> "origin")
|
||||
const slashIndex = cleanLine.indexOf('/');
|
||||
if (slashIndex === -1) return;
|
||||
|
||||
const remoteName = cleanLine.substring(0, slashIndex);
|
||||
const branchName = cleanLine.substring(slashIndex + 1);
|
||||
|
||||
if (remotesBranches.has(remoteName)) {
|
||||
remotesBranches.get(remoteName)!.push({
|
||||
name: branchName,
|
||||
fullRef: cleanLine,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Build final result
|
||||
const remotes: RemoteInfo[] = [];
|
||||
remotesSet.forEach((url, name) => {
|
||||
remotes.push({
|
||||
name,
|
||||
url,
|
||||
branches: remotesBranches.get(name) || [],
|
||||
});
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
remotes,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const worktreePath = req.body?.worktreePath;
|
||||
logWorktreeError(error, 'List remotes failed', worktreePath);
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -1,7 +1,5 @@
|
||||
/**
|
||||
* POST /merge endpoint - Merge feature (merge worktree branch into a target branch)
|
||||
*
|
||||
* Allows merging a worktree branch into any target branch (defaults to 'main').
|
||||
* POST /merge endpoint - Merge feature (merge worktree branch into main)
|
||||
*
|
||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||
* the requireValidProject middleware in index.ts
|
||||
@@ -10,21 +8,18 @@
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
const logger = createLogger('Worktree');
|
||||
|
||||
export function createMergeHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, branchName, worktreePath, targetBranch, options } = req.body as {
|
||||
const { projectPath, branchName, worktreePath, options } = req.body as {
|
||||
projectPath: string;
|
||||
branchName: string;
|
||||
worktreePath: string;
|
||||
targetBranch?: string; // Branch to merge into (defaults to 'main')
|
||||
options?: { squash?: boolean; message?: string; deleteWorktreeAndBranch?: boolean };
|
||||
options?: { squash?: boolean; message?: string };
|
||||
};
|
||||
|
||||
if (!projectPath || !branchName || !worktreePath) {
|
||||
@@ -35,10 +30,7 @@ export function createMergeHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine the target branch (default to 'main')
|
||||
const mergeTo = targetBranch || 'main';
|
||||
|
||||
// Validate source branch exists
|
||||
// Validate branch exists
|
||||
try {
|
||||
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
|
||||
} catch {
|
||||
@@ -49,44 +41,12 @@ export function createMergeHandler() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate target branch exists
|
||||
try {
|
||||
await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath });
|
||||
} catch {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Target branch "${mergeTo}" does not exist`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Merge the feature branch into the target branch
|
||||
// Merge the feature branch
|
||||
const mergeCmd = options?.squash
|
||||
? `git merge --squash ${branchName}`
|
||||
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`;
|
||||
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName}`}"`;
|
||||
|
||||
try {
|
||||
await execAsync(mergeCmd, { cwd: projectPath });
|
||||
} catch (mergeError: unknown) {
|
||||
// Check if this is a merge conflict
|
||||
const err = mergeError as { stdout?: string; stderr?: string; message?: string };
|
||||
const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
|
||||
const hasConflicts =
|
||||
output.includes('CONFLICT') || output.includes('Automatic merge failed');
|
||||
|
||||
if (hasConflicts) {
|
||||
// Return conflict-specific error message that frontend can detect
|
||||
res.status(409).json({
|
||||
success: false,
|
||||
error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. Please resolve conflicts manually.`,
|
||||
hasConflicts: true,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Re-throw non-conflict errors to be handled by outer catch
|
||||
throw mergeError;
|
||||
}
|
||||
await execAsync(mergeCmd, { cwd: projectPath });
|
||||
|
||||
// If squash merge, need to commit
|
||||
if (options?.squash) {
|
||||
@@ -95,46 +55,17 @@ export function createMergeHandler() {
|
||||
});
|
||||
}
|
||||
|
||||
// Optionally delete the worktree and branch after merging
|
||||
let worktreeDeleted = false;
|
||||
let branchDeleted = false;
|
||||
|
||||
if (options?.deleteWorktreeAndBranch) {
|
||||
// Remove the worktree
|
||||
try {
|
||||
await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
|
||||
worktreeDeleted = true;
|
||||
} catch {
|
||||
// Try with prune if remove fails
|
||||
try {
|
||||
await execGitCommand(['worktree', 'prune'], projectPath);
|
||||
worktreeDeleted = true;
|
||||
} catch {
|
||||
logger.warn(`Failed to remove worktree: ${worktreePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the branch (but not main/master)
|
||||
if (branchName !== 'main' && branchName !== 'master') {
|
||||
if (!isValidBranchName(branchName)) {
|
||||
logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`);
|
||||
} else {
|
||||
try {
|
||||
await execGitCommand(['branch', '-D', branchName], projectPath);
|
||||
branchDeleted = true;
|
||||
} catch {
|
||||
logger.warn(`Failed to delete branch: ${branchName}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Clean up worktree and branch
|
||||
try {
|
||||
await execAsync(`git worktree remove "${worktreePath}" --force`, {
|
||||
cwd: projectPath,
|
||||
});
|
||||
await execAsync(`git branch -D ${branchName}`, { cwd: projectPath });
|
||||
} catch {
|
||||
// Cleanup errors are non-fatal
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
mergedBranch: branchName,
|
||||
targetBranch: mergeTo,
|
||||
deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined,
|
||||
});
|
||||
res.json({ success: true, mergedBranch: branchName });
|
||||
} catch (error) {
|
||||
logError(error, 'Merge worktree failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
|
||||
@@ -15,10 +15,9 @@ const execAsync = promisify(exec);
|
||||
export function createPushHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, force, remote } = req.body as {
|
||||
const { worktreePath, force } = req.body as {
|
||||
worktreePath: string;
|
||||
force?: boolean;
|
||||
remote?: string;
|
||||
};
|
||||
|
||||
if (!worktreePath) {
|
||||
@@ -35,18 +34,15 @@ export function createPushHandler() {
|
||||
});
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
// Use specified remote or default to 'origin'
|
||||
const targetRemote = remote || 'origin';
|
||||
|
||||
// Push the branch
|
||||
const forceFlag = force ? '--force' : '';
|
||||
try {
|
||||
await execAsync(`git push -u ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||
await execAsync(`git push -u origin ${branchName} ${forceFlag}`, {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
} catch {
|
||||
// Try setting upstream
|
||||
await execAsync(`git push --set-upstream ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||
await execAsync(`git push --set-upstream origin ${branchName} ${forceFlag}`, {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
}
|
||||
@@ -56,7 +52,7 @@ export function createPushHandler() {
|
||||
result: {
|
||||
branch: branchName,
|
||||
pushed: true,
|
||||
message: `Successfully pushed ${branchName} to ${targetRemote}`,
|
||||
message: `Successfully pushed ${branchName} to origin`,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,22 +1,16 @@
|
||||
/**
|
||||
* POST /start-dev endpoint - Start a dev server for a worktree
|
||||
*
|
||||
* Spins up a development server in the worktree directory on a unique port,
|
||||
* allowing preview of the worktree's changes without affecting the main dev server.
|
||||
*
|
||||
* If a custom devCommand is configured in project settings, it will be used.
|
||||
* Otherwise, auto-detection based on package manager (npm/yarn/pnpm/bun run dev) is used.
|
||||
* Spins up a development server (npm run dev) in the worktree directory
|
||||
* on a unique port, allowing preview of the worktree's changes without
|
||||
* affecting the main dev server.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getDevServerService } from '../../../services/dev-server-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
|
||||
const logger = createLogger('start-dev');
|
||||
|
||||
export function createStartDevHandler(settingsService?: SettingsService) {
|
||||
export function createStartDevHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, worktreePath } = req.body as {
|
||||
@@ -40,25 +34,8 @@ export function createStartDevHandler(settingsService?: SettingsService) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get custom dev command from project settings (if configured)
|
||||
let customCommand: string | undefined;
|
||||
if (settingsService) {
|
||||
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
||||
const devCommand = projectSettings?.devCommand?.trim();
|
||||
if (devCommand) {
|
||||
customCommand = devCommand;
|
||||
logger.debug(`Using custom dev command from project settings: ${customCommand}`);
|
||||
} else {
|
||||
logger.debug('No custom dev command configured, using auto-detection');
|
||||
}
|
||||
}
|
||||
|
||||
const devServerService = getDevServerService();
|
||||
const result = await devServerService.startDevServer(
|
||||
projectPath,
|
||||
worktreePath,
|
||||
customCommand
|
||||
);
|
||||
const result = await devServerService.startDevServer(projectPath, worktreePath);
|
||||
|
||||
if (result.success && result.result) {
|
||||
res.json({
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
/**
|
||||
* POST /start-tests endpoint - Start tests for a worktree
|
||||
*
|
||||
* Runs the test command configured in project settings.
|
||||
* If no testCommand is configured, returns an error.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { SettingsService } from '../../../services/settings-service.js';
|
||||
import { getTestRunnerService } from '../../../services/test-runner-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStartTestsHandler(settingsService?: SettingsService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const body = req.body;
|
||||
|
||||
// Validate request body
|
||||
if (!body || typeof body !== 'object') {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Request body must be an object',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const worktreePath = typeof body.worktreePath === 'string' ? body.worktreePath : undefined;
|
||||
const projectPath = typeof body.projectPath === 'string' ? body.projectPath : undefined;
|
||||
const testFile = typeof body.testFile === 'string' ? body.testFile : undefined;
|
||||
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'worktreePath is required and must be a string',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Get project settings to find the test command
|
||||
// Use projectPath if provided, otherwise use worktreePath
|
||||
const settingsPath = projectPath || worktreePath;
|
||||
|
||||
if (!settingsService) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Settings service not available',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const projectSettings = await settingsService.getProjectSettings(settingsPath);
|
||||
const testCommand = projectSettings?.testCommand;
|
||||
|
||||
if (!testCommand) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error:
|
||||
'No test command configured. Please configure a test command in Project Settings > Testing Configuration.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const testRunnerService = getTestRunnerService();
|
||||
const result = await testRunnerService.startTests(worktreePath, {
|
||||
command: testCommand,
|
||||
testFile,
|
||||
});
|
||||
|
||||
if (result.success && result.result) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
sessionId: result.result.sessionId,
|
||||
worktreePath: result.result.worktreePath,
|
||||
command: result.result.command,
|
||||
status: result.result.status,
|
||||
testFile: result.result.testFile,
|
||||
message: result.result.message,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: result.error || 'Failed to start tests',
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, 'Start tests failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -28,10 +28,7 @@ export function createStatusHandler() {
|
||||
}
|
||||
|
||||
// Git worktrees are stored in project directory
|
||||
// Sanitize featureId the same way it's sanitized when creating worktrees
|
||||
// (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
|
||||
const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
|
||||
const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
|
||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
||||
|
||||
try {
|
||||
await secureFs.access(worktreePath);
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
/**
|
||||
* POST /stop-tests endpoint - Stop a running test session
|
||||
*
|
||||
* Stops the test runner process for a specific session,
|
||||
* cancelling any ongoing tests and freeing up resources.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { getTestRunnerService } from '../../../services/test-runner-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStopTestsHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const body = req.body;
|
||||
|
||||
// Validate request body
|
||||
if (!body || typeof body !== 'object') {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Request body must be an object',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const sessionId = typeof body.sessionId === 'string' ? body.sessionId : undefined;
|
||||
|
||||
if (!sessionId) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'sessionId is required and must be a string',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const testRunnerService = getTestRunnerService();
|
||||
const result = await testRunnerService.stopTests(sessionId);
|
||||
|
||||
if (result.success && result.result) {
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
sessionId: result.result.sessionId,
|
||||
message: result.result.message,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: result.error || 'Failed to stop tests',
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, 'Stop tests failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -1,160 +0,0 @@
|
||||
/**
|
||||
* GET /test-logs endpoint - Get buffered logs for a test runner session
|
||||
*
|
||||
* Returns the scrollback buffer containing historical log output for a test run.
|
||||
* Used by clients to populate the log panel on initial connection
|
||||
* before subscribing to real-time updates via WebSocket.
|
||||
*
|
||||
* Query parameters:
|
||||
* - worktreePath: Path to the worktree (optional if sessionId provided)
|
||||
* - sessionId: Specific test session ID (optional, uses active session if not provided)
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import { getTestRunnerService } from '../../../services/test-runner-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
interface SessionInfo {
|
||||
sessionId: string;
|
||||
worktreePath?: string;
|
||||
command?: string;
|
||||
testFile?: string;
|
||||
exitCode?: number | null;
|
||||
}
|
||||
|
||||
interface OutputResult {
|
||||
sessionId: string;
|
||||
status: string;
|
||||
output: string;
|
||||
startedAt: string;
|
||||
finishedAt?: string | null;
|
||||
}
|
||||
|
||||
function buildLogsResponse(session: SessionInfo, output: OutputResult) {
|
||||
return {
|
||||
success: true,
|
||||
result: {
|
||||
sessionId: session.sessionId,
|
||||
worktreePath: session.worktreePath,
|
||||
command: session.command,
|
||||
status: output.status,
|
||||
testFile: session.testFile,
|
||||
logs: output.output,
|
||||
startedAt: output.startedAt,
|
||||
finishedAt: output.finishedAt,
|
||||
exitCode: session.exitCode ?? null,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function createGetTestLogsHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, sessionId } = req.query as {
|
||||
worktreePath?: string;
|
||||
sessionId?: string;
|
||||
};
|
||||
|
||||
const testRunnerService = getTestRunnerService();
|
||||
|
||||
// If sessionId is provided, get logs for that specific session
|
||||
if (sessionId) {
|
||||
const result = testRunnerService.getSessionOutput(sessionId);
|
||||
|
||||
if (result.success && result.result) {
|
||||
const session = testRunnerService.getSession(sessionId);
|
||||
res.json(
|
||||
buildLogsResponse(
|
||||
{
|
||||
sessionId: result.result.sessionId,
|
||||
worktreePath: session?.worktreePath,
|
||||
command: session?.command,
|
||||
testFile: session?.testFile,
|
||||
exitCode: session?.exitCode,
|
||||
},
|
||||
result.result
|
||||
)
|
||||
);
|
||||
} else {
|
||||
res.status(404).json({
|
||||
success: false,
|
||||
error: result.error || 'Failed to get test logs',
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// If worktreePath is provided, get logs for the active session
|
||||
if (worktreePath) {
|
||||
const activeSession = testRunnerService.getActiveSession(worktreePath);
|
||||
|
||||
if (activeSession) {
|
||||
const result = testRunnerService.getSessionOutput(activeSession.id);
|
||||
|
||||
if (result.success && result.result) {
|
||||
res.json(
|
||||
buildLogsResponse(
|
||||
{
|
||||
sessionId: activeSession.id,
|
||||
worktreePath: activeSession.worktreePath,
|
||||
command: activeSession.command,
|
||||
testFile: activeSession.testFile,
|
||||
exitCode: activeSession.exitCode,
|
||||
},
|
||||
result.result
|
||||
)
|
||||
);
|
||||
} else {
|
||||
res.status(404).json({
|
||||
success: false,
|
||||
error: result.error || 'Failed to get test logs',
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// No active session - check for most recent session for this worktree
|
||||
const sessions = testRunnerService.listSessions(worktreePath);
|
||||
if (sessions.result.sessions.length > 0) {
|
||||
// Get the most recent session (list is not sorted, so find it)
|
||||
const mostRecent = sessions.result.sessions.reduce((latest, current) => {
|
||||
const latestTime = new Date(latest.startedAt).getTime();
|
||||
const currentTime = new Date(current.startedAt).getTime();
|
||||
return currentTime > latestTime ? current : latest;
|
||||
});
|
||||
|
||||
const result = testRunnerService.getSessionOutput(mostRecent.sessionId);
|
||||
if (result.success && result.result) {
|
||||
res.json(
|
||||
buildLogsResponse(
|
||||
{
|
||||
sessionId: mostRecent.sessionId,
|
||||
worktreePath: mostRecent.worktreePath,
|
||||
command: mostRecent.command,
|
||||
testFile: mostRecent.testFile,
|
||||
exitCode: mostRecent.exitCode,
|
||||
},
|
||||
result.result
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
res.status(404).json({
|
||||
success: false,
|
||||
error: 'No test sessions found for this worktree',
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Neither sessionId nor worktreePath provided
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Either worktreePath or sessionId query parameter is required',
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Get test logs failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -29,7 +29,7 @@ import {
|
||||
getSkillsConfiguration,
|
||||
getSubagentsConfiguration,
|
||||
getCustomSubagents,
|
||||
getProviderByModelId,
|
||||
getActiveClaudeApiProfile,
|
||||
} from '../lib/settings-helpers.js';
|
||||
|
||||
interface Message {
|
||||
@@ -275,29 +275,11 @@ export class AgentService {
|
||||
? await getCustomSubagents(this.settingsService, effectiveWorkDir)
|
||||
: undefined;
|
||||
|
||||
// Get credentials for API calls
|
||||
const credentials = await this.settingsService?.getCredentials();
|
||||
|
||||
// Try to find a provider for the model (if it's a provider model like "GLM-4.7")
|
||||
// This allows users to select provider models in the Agent Runner UI
|
||||
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||
let providerResolvedModel: string | undefined;
|
||||
const requestedModel = model || session.model;
|
||||
if (requestedModel && this.settingsService) {
|
||||
const providerResult = await getProviderByModelId(
|
||||
requestedModel,
|
||||
this.settingsService,
|
||||
'[AgentService]'
|
||||
);
|
||||
if (providerResult.provider) {
|
||||
claudeCompatibleProvider = providerResult.provider;
|
||||
providerResolvedModel = providerResult.resolvedModel;
|
||||
this.logger.info(
|
||||
`[AgentService] Using provider "${providerResult.provider.name}" for model "${requestedModel}"` +
|
||||
(providerResolvedModel ? ` -> resolved to "${providerResolvedModel}"` : '')
|
||||
);
|
||||
}
|
||||
}
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
this.settingsService,
|
||||
'[AgentService]'
|
||||
);
|
||||
|
||||
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) and memory files
|
||||
// Use the user's message as task context for smart memory selection
|
||||
@@ -324,16 +306,10 @@ export class AgentService {
|
||||
// Use thinking level and reasoning effort from request, or fall back to session's stored values
|
||||
const effectiveThinkingLevel = thinkingLevel ?? session.thinkingLevel;
|
||||
const effectiveReasoningEffort = reasoningEffort ?? session.reasoningEffort;
|
||||
|
||||
// When using a provider model, use the resolved Claude model (from mapsToClaudeModel)
|
||||
// e.g., "GLM-4.5-Air" -> "claude-haiku-4-5"
|
||||
const modelForSdk = providerResolvedModel || model;
|
||||
const sessionModelForSdk = providerResolvedModel ? undefined : session.model;
|
||||
|
||||
const sdkOptions = createChatOptions({
|
||||
cwd: effectiveWorkDir,
|
||||
model: modelForSdk,
|
||||
sessionModel: sessionModelForSdk,
|
||||
model: model,
|
||||
sessionModel: session.model,
|
||||
systemPrompt: combinedSystemPrompt,
|
||||
abortController: session.abortController!,
|
||||
autoLoadClaudeMd,
|
||||
@@ -409,8 +385,8 @@ export class AgentService {
|
||||
agents: customSubagents, // Pass custom subagents for task delegation
|
||||
thinkingLevel: effectiveThinkingLevel, // Pass thinking level for Claude models
|
||||
reasoningEffort: effectiveReasoningEffort, // Pass reasoning effort for Codex models
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration (GLM, MiniMax, etc.)
|
||||
};
|
||||
|
||||
// Build prompt content with images
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -468,41 +468,10 @@ export class ClaudeUsageService {
|
||||
|
||||
/**
|
||||
* Strip ANSI escape codes from text
|
||||
* Handles CSI, OSC, and other common ANSI sequences
|
||||
*/
|
||||
private stripAnsiCodes(text: string): string {
|
||||
// First strip ANSI sequences (colors, etc) and handle CR
|
||||
// eslint-disable-next-line no-control-regex
|
||||
let clean = text
|
||||
// CSI sequences: ESC [ ... (letter or @)
|
||||
.replace(/\x1B\[[0-9;?]*[A-Za-z@]/g, '')
|
||||
// OSC sequences: ESC ] ... terminated by BEL, ST, or another ESC
|
||||
.replace(/\x1B\][^\x07\x1B]*(?:\x07|\x1B\\)?/g, '')
|
||||
// Other ESC sequences: ESC (letter)
|
||||
.replace(/\x1B[A-Za-z]/g, '')
|
||||
// Carriage returns: replace with newline to avoid concatenation
|
||||
.replace(/\r\n/g, '\n')
|
||||
.replace(/\r/g, '\n');
|
||||
|
||||
// Handle backspaces (\x08) by applying them
|
||||
// If we encounter a backspace, remove the character before it
|
||||
while (clean.includes('\x08')) {
|
||||
clean = clean.replace(/[^\x08]\x08/, '');
|
||||
clean = clean.replace(/^\x08+/, '');
|
||||
}
|
||||
|
||||
// Explicitly strip known "Synchronized Output" and "Window Title" garbage
|
||||
// even if ESC is missing (seen in some environments)
|
||||
clean = clean
|
||||
.replace(/\[\?2026[hl]/g, '') // CSI ? 2026 h/l
|
||||
.replace(/\]0;[^\x07]*\x07/g, '') // OSC 0; Title BEL
|
||||
.replace(/\]0;.*?(\[\?|$)/g, ''); // OSC 0; Title ... (unterminated or hit next sequence)
|
||||
|
||||
// Strip remaining non-printable control characters (except newline \n)
|
||||
// ASCII 0-8, 11-31, 127
|
||||
clean = clean.replace(/[\x00-\x08\x0B-\x1F\x7F]/g, '');
|
||||
|
||||
return clean;
|
||||
return text.replace(/\x1B\[[0-9;]*[A-Za-z]/g, '');
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -581,7 +550,7 @@ export class ClaudeUsageService {
|
||||
sectionLabel: string,
|
||||
type: string
|
||||
): { percentage: number; resetTime: string; resetText: string } {
|
||||
let percentage: number | null = null;
|
||||
let percentage = 0;
|
||||
let resetTime = this.getDefaultResetTime(type);
|
||||
let resetText = '';
|
||||
|
||||
@@ -595,7 +564,7 @@ export class ClaudeUsageService {
|
||||
}
|
||||
|
||||
if (sectionIndex === -1) {
|
||||
return { percentage: 0, resetTime, resetText };
|
||||
return { percentage, resetTime, resetText };
|
||||
}
|
||||
|
||||
// Look at the lines following the section header (within a window of 5 lines)
|
||||
@@ -603,8 +572,7 @@ export class ClaudeUsageService {
|
||||
|
||||
for (const line of searchWindow) {
|
||||
// Extract percentage - only take the first match (avoid picking up next section's data)
|
||||
// Use null to track "not found" since 0% is a valid percentage (100% left = 0% used)
|
||||
if (percentage === null) {
|
||||
if (percentage === 0) {
|
||||
const percentMatch = line.match(/(\d{1,3})\s*%\s*(left|used|remaining)/i);
|
||||
if (percentMatch) {
|
||||
const value = parseInt(percentMatch[1], 10);
|
||||
@@ -616,31 +584,18 @@ export class ClaudeUsageService {
|
||||
|
||||
// Extract reset time - only take the first match
|
||||
if (!resetText && line.toLowerCase().includes('reset')) {
|
||||
// Only extract the part starting from "Resets" (or "Reset") to avoid garbage prefixes
|
||||
const match = line.match(/(Resets?.*)$/i);
|
||||
// If regex fails despite 'includes', likely a complex string issues - verify match before using line
|
||||
// Only fallback to line if it's reasonably short/clean, otherwise skip it to avoid showing garbage
|
||||
if (match) {
|
||||
resetText = match[1];
|
||||
}
|
||||
resetText = line;
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the reset time if we found one
|
||||
if (resetText) {
|
||||
// Clean up resetText: remove percentage info if it was matched on the same line
|
||||
// e.g. "46%used Resets5:59pm" -> " Resets5:59pm"
|
||||
resetText = resetText.replace(/(\d{1,3})\s*%\s*(left|used|remaining)/i, '').trim();
|
||||
|
||||
// Ensure space after "Resets" if missing (e.g. "Resets5:59pm" -> "Resets 5:59pm")
|
||||
resetText = resetText.replace(/(resets?)(\d)/i, '$1 $2');
|
||||
|
||||
resetTime = this.parseResetTime(resetText, type);
|
||||
// Strip timezone like "(Asia/Dubai)" from the display text
|
||||
resetText = resetText.replace(/\s*\([A-Za-z_\/]+\)\s*$/, '').trim();
|
||||
}
|
||||
|
||||
return { percentage: percentage ?? 0, resetTime, resetText };
|
||||
return { percentage, resetTime, resetText };
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -669,7 +624,7 @@ export class ClaudeUsageService {
|
||||
}
|
||||
|
||||
// Try to parse simple time-only format: "Resets 11am" or "Resets 3pm"
|
||||
const simpleTimeMatch = text.match(/resets\s*(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i);
|
||||
const simpleTimeMatch = text.match(/resets\s+(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i);
|
||||
if (simpleTimeMatch) {
|
||||
let hours = parseInt(simpleTimeMatch[1], 10);
|
||||
const minutes = simpleTimeMatch[2] ? parseInt(simpleTimeMatch[2], 10) : 0;
|
||||
@@ -694,11 +649,8 @@ export class ClaudeUsageService {
|
||||
}
|
||||
|
||||
// Try to parse date format: "Resets Dec 22 at 8pm" or "Resets Jan 15, 3:30pm"
|
||||
// The regex explicitly matches only valid 3-letter month abbreviations to avoid
|
||||
// matching words like "Resets" when there's no space separator.
|
||||
// Optional "resets\s*" prefix handles cases with or without space after "Resets"
|
||||
const dateMatch = text.match(
|
||||
/(?:resets\s*)?(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+(\d{1,2})(?:\s+at\s+|\s*,?\s*)(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i
|
||||
/([A-Za-z]{3,})\s+(\d{1,2})(?:\s+at\s+|\s*,?\s*)(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i
|
||||
);
|
||||
if (dateMatch) {
|
||||
const monthName = dateMatch[1];
|
||||
|
||||
@@ -1,288 +0,0 @@
|
||||
/**
|
||||
* GitHub Copilot Usage Service
|
||||
*
|
||||
* Fetches usage data from GitHub's Copilot API using GitHub OAuth.
|
||||
* Based on CodexBar reference implementation.
|
||||
*
|
||||
* Authentication methods:
|
||||
* 1. GitHub CLI token (~/.config/gh/hosts.yml)
|
||||
* 2. GitHub OAuth device flow (stored in config)
|
||||
*
|
||||
* API Endpoints:
|
||||
* - GET https://api.github.com/copilot_internal/user - Quota and plan info
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { execSync } from 'child_process';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { CopilotProviderUsage, UsageWindow } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('CopilotUsage');
|
||||
|
||||
// GitHub API endpoint for Copilot
|
||||
const COPILOT_USER_ENDPOINT = 'https://api.github.com/copilot_internal/user';
|
||||
|
||||
interface CopilotQuotaSnapshot {
|
||||
percentageUsed?: number;
|
||||
percentageRemaining?: number;
|
||||
limit?: number;
|
||||
used?: number;
|
||||
}
|
||||
|
||||
interface CopilotUserResponse {
|
||||
copilotPlan?: string;
|
||||
copilot_plan?: string;
|
||||
quotaSnapshots?: {
|
||||
premiumInteractions?: CopilotQuotaSnapshot;
|
||||
chat?: CopilotQuotaSnapshot;
|
||||
};
|
||||
plan?: string;
|
||||
}
|
||||
|
||||
export class CopilotUsageService {
|
||||
private cachedToken: string | null = null;
|
||||
|
||||
/**
|
||||
* Check if GitHub Copilot credentials are available
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
const token = await this.getGitHubToken();
|
||||
return !!token;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get GitHub token from various sources
|
||||
*/
|
||||
private async getGitHubToken(): Promise<string | null> {
|
||||
if (this.cachedToken) {
|
||||
return this.cachedToken;
|
||||
}
|
||||
|
||||
// 1. Check environment variable
|
||||
if (process.env.GITHUB_TOKEN) {
|
||||
this.cachedToken = process.env.GITHUB_TOKEN;
|
||||
return this.cachedToken;
|
||||
}
|
||||
|
||||
// 2. Check GH_TOKEN (GitHub CLI uses this)
|
||||
if (process.env.GH_TOKEN) {
|
||||
this.cachedToken = process.env.GH_TOKEN;
|
||||
return this.cachedToken;
|
||||
}
|
||||
|
||||
// 3. Try to get token from GitHub CLI
|
||||
try {
|
||||
const token = execSync('gh auth token', {
|
||||
encoding: 'utf8',
|
||||
timeout: 5000,
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
}).trim();
|
||||
|
||||
if (token) {
|
||||
this.cachedToken = token;
|
||||
return this.cachedToken;
|
||||
}
|
||||
} catch {
|
||||
logger.debug('Failed to get token from gh CLI');
|
||||
}
|
||||
|
||||
// 4. Check GitHub CLI hosts.yml file
|
||||
const ghHostsPath = path.join(os.homedir(), '.config', 'gh', 'hosts.yml');
|
||||
if (fs.existsSync(ghHostsPath)) {
|
||||
try {
|
||||
const content = fs.readFileSync(ghHostsPath, 'utf8');
|
||||
// Simple YAML parsing for oauth_token
|
||||
const match = content.match(/oauth_token:\s*(.+)/);
|
||||
if (match) {
|
||||
this.cachedToken = match[1].trim();
|
||||
return this.cachedToken;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('Failed to read gh hosts.yml:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Check CodexBar config (for users who also use CodexBar)
|
||||
const codexbarConfigPath = path.join(os.homedir(), '.codexbar', 'config.json');
|
||||
if (fs.existsSync(codexbarConfigPath)) {
|
||||
try {
|
||||
const content = fs.readFileSync(codexbarConfigPath, 'utf8');
|
||||
const config = JSON.parse(content);
|
||||
if (config.github?.oauth_token) {
|
||||
this.cachedToken = config.github.oauth_token;
|
||||
return this.cachedToken;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('Failed to read CodexBar config:', error);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an authenticated request to GitHub Copilot API
|
||||
*/
|
||||
private async makeRequest<T>(url: string): Promise<T | null> {
|
||||
const token = await this.getGitHubToken();
|
||||
if (!token) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: 'application/json',
|
||||
'User-Agent': 'automaker/1.0',
|
||||
// Copilot-specific headers (from CodexBar reference)
|
||||
'Editor-Version': 'vscode/1.96.2',
|
||||
'Editor-Plugin-Version': 'copilot-chat/0.26.7',
|
||||
'X-Github-Api-Version': '2025-04-01',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 401 || response.status === 403) {
|
||||
// Clear cached token on auth failure
|
||||
this.cachedToken = null;
|
||||
logger.warn('GitHub Copilot API authentication failed');
|
||||
return null;
|
||||
}
|
||||
if (response.status === 404) {
|
||||
// User may not have Copilot access
|
||||
logger.info('GitHub Copilot not available for this user');
|
||||
return null;
|
||||
}
|
||||
logger.error(`GitHub Copilot API error: ${response.status} ${response.statusText}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return (await response.json()) as T;
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch from GitHub Copilot API:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch usage data from GitHub Copilot
|
||||
*/
|
||||
async fetchUsageData(): Promise<CopilotProviderUsage> {
|
||||
logger.info('[fetchUsageData] Starting GitHub Copilot usage fetch...');
|
||||
|
||||
const baseUsage: CopilotProviderUsage = {
|
||||
providerId: 'copilot',
|
||||
providerName: 'GitHub Copilot',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Check if token is available
|
||||
const hasToken = await this.getGitHubToken();
|
||||
if (!hasToken) {
|
||||
baseUsage.error = 'GitHub authentication not available';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
// Fetch Copilot user data
|
||||
const userResponse = await this.makeRequest<CopilotUserResponse>(COPILOT_USER_ENDPOINT);
|
||||
if (!userResponse) {
|
||||
baseUsage.error = 'Failed to fetch GitHub Copilot usage data';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
baseUsage.available = true;
|
||||
|
||||
// Parse quota snapshots
|
||||
const quotas = userResponse.quotaSnapshots;
|
||||
if (quotas) {
|
||||
// Premium interactions quota
|
||||
if (quotas.premiumInteractions) {
|
||||
const premium = quotas.premiumInteractions;
|
||||
const usedPercent =
|
||||
premium.percentageUsed !== undefined
|
||||
? premium.percentageUsed
|
||||
: premium.percentageRemaining !== undefined
|
||||
? 100 - premium.percentageRemaining
|
||||
: 0;
|
||||
|
||||
const premiumWindow: UsageWindow = {
|
||||
name: 'Premium Interactions',
|
||||
usedPercent,
|
||||
resetsAt: '', // GitHub doesn't provide reset time
|
||||
resetText: 'Resets monthly',
|
||||
limit: premium.limit,
|
||||
used: premium.used,
|
||||
};
|
||||
|
||||
baseUsage.primary = premiumWindow;
|
||||
baseUsage.premiumInteractions = premiumWindow;
|
||||
}
|
||||
|
||||
// Chat quota
|
||||
if (quotas.chat) {
|
||||
const chat = quotas.chat;
|
||||
const usedPercent =
|
||||
chat.percentageUsed !== undefined
|
||||
? chat.percentageUsed
|
||||
: chat.percentageRemaining !== undefined
|
||||
? 100 - chat.percentageRemaining
|
||||
: 0;
|
||||
|
||||
const chatWindow: UsageWindow = {
|
||||
name: 'Chat',
|
||||
usedPercent,
|
||||
resetsAt: '',
|
||||
resetText: 'Resets monthly',
|
||||
limit: chat.limit,
|
||||
used: chat.used,
|
||||
};
|
||||
|
||||
baseUsage.secondary = chatWindow;
|
||||
baseUsage.chatQuota = chatWindow;
|
||||
}
|
||||
}
|
||||
|
||||
// Parse plan type
|
||||
const planType = userResponse.copilotPlan || userResponse.copilot_plan || userResponse.plan;
|
||||
if (planType) {
|
||||
baseUsage.copilotPlan = planType;
|
||||
baseUsage.plan = {
|
||||
type: planType,
|
||||
displayName: this.formatPlanName(planType),
|
||||
isPaid: planType.toLowerCase() !== 'free',
|
||||
};
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[fetchUsageData] ✓ GitHub Copilot usage: Premium=${baseUsage.premiumInteractions?.usedPercent || 0}%, ` +
|
||||
`Chat=${baseUsage.chatQuota?.usedPercent || 0}%, Plan=${planType || 'unknown'}`
|
||||
);
|
||||
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format plan name for display
|
||||
*/
|
||||
private formatPlanName(plan: string): string {
|
||||
const planMap: Record<string, string> = {
|
||||
free: 'Free',
|
||||
individual: 'Individual',
|
||||
business: 'Business',
|
||||
enterprise: 'Enterprise',
|
||||
};
|
||||
return planMap[plan.toLowerCase()] || plan;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear cached token
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cachedToken = null;
|
||||
}
|
||||
}
|
||||
@@ -1,331 +0,0 @@
|
||||
/**
|
||||
* Cursor Usage Service
|
||||
*
|
||||
* Fetches usage data from Cursor's API using session cookies or access token.
|
||||
* Based on CodexBar reference implementation.
|
||||
*
|
||||
* Authentication methods (in priority order):
|
||||
* 1. Cached session cookie from browser import
|
||||
* 2. Access token from credentials file
|
||||
*
|
||||
* API Endpoints:
|
||||
* - GET https://cursor.com/api/usage-summary - Plan usage, on-demand, billing dates
|
||||
* - GET https://cursor.com/api/auth/me - User email and name
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { CursorProviderUsage, UsageWindow } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('CursorUsage');
|
||||
|
||||
// Cursor API endpoints
|
||||
const CURSOR_API_BASE = 'https://cursor.com/api';
|
||||
const USAGE_SUMMARY_ENDPOINT = `${CURSOR_API_BASE}/usage-summary`;
|
||||
const AUTH_ME_ENDPOINT = `${CURSOR_API_BASE}/auth/me`;
|
||||
|
||||
// Session cookie names used by Cursor
|
||||
const SESSION_COOKIE_NAMES = [
|
||||
'WorkosCursorSessionToken',
|
||||
'__Secure-next-auth.session-token',
|
||||
'next-auth.session-token',
|
||||
];
|
||||
|
||||
interface CursorUsageSummary {
|
||||
planUsage?: {
|
||||
percent: number;
|
||||
resetAt?: string;
|
||||
};
|
||||
onDemandUsage?: {
|
||||
percent: number;
|
||||
costUsd?: number;
|
||||
};
|
||||
billingCycleEnd?: string;
|
||||
plan?: string;
|
||||
}
|
||||
|
||||
interface CursorAuthMe {
|
||||
email?: string;
|
||||
name?: string;
|
||||
plan?: string;
|
||||
}
|
||||
|
||||
export class CursorUsageService {
|
||||
private cachedSessionCookie: string | null = null;
|
||||
private cachedAccessToken: string | null = null;
|
||||
|
||||
/**
|
||||
* Check if Cursor credentials are available
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
return await this.hasValidCredentials();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if we have valid Cursor credentials
|
||||
*/
|
||||
private async hasValidCredentials(): Promise<boolean> {
|
||||
const token = await this.getAccessToken();
|
||||
return !!token;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get access token from credentials file
|
||||
*/
|
||||
private async getAccessToken(): Promise<string | null> {
|
||||
if (this.cachedAccessToken) {
|
||||
return this.cachedAccessToken;
|
||||
}
|
||||
|
||||
// Check environment variable first
|
||||
if (process.env.CURSOR_ACCESS_TOKEN) {
|
||||
this.cachedAccessToken = process.env.CURSOR_ACCESS_TOKEN;
|
||||
return this.cachedAccessToken;
|
||||
}
|
||||
|
||||
// Check credentials files
|
||||
const credentialPaths = [
|
||||
path.join(os.homedir(), '.cursor', 'credentials.json'),
|
||||
path.join(os.homedir(), '.config', 'cursor', 'credentials.json'),
|
||||
];
|
||||
|
||||
for (const credPath of credentialPaths) {
|
||||
try {
|
||||
if (fs.existsSync(credPath)) {
|
||||
const content = fs.readFileSync(credPath, 'utf8');
|
||||
const creds = JSON.parse(content);
|
||||
if (creds.accessToken) {
|
||||
this.cachedAccessToken = creds.accessToken;
|
||||
return this.cachedAccessToken;
|
||||
}
|
||||
if (creds.token) {
|
||||
this.cachedAccessToken = creds.token;
|
||||
return this.cachedAccessToken;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug(`Failed to read credentials from ${credPath}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session cookie for API calls
|
||||
* Returns a cookie string like "WorkosCursorSessionToken=xxx"
|
||||
*/
|
||||
private async getSessionCookie(): Promise<string | null> {
|
||||
if (this.cachedSessionCookie) {
|
||||
return this.cachedSessionCookie;
|
||||
}
|
||||
|
||||
// Check for cookie in environment
|
||||
if (process.env.CURSOR_SESSION_COOKIE) {
|
||||
this.cachedSessionCookie = process.env.CURSOR_SESSION_COOKIE;
|
||||
return this.cachedSessionCookie;
|
||||
}
|
||||
|
||||
// Check for saved session file
|
||||
const sessionPath = path.join(os.homedir(), '.cursor', 'session.json');
|
||||
try {
|
||||
if (fs.existsSync(sessionPath)) {
|
||||
const content = fs.readFileSync(sessionPath, 'utf8');
|
||||
const session = JSON.parse(content);
|
||||
for (const cookieName of SESSION_COOKIE_NAMES) {
|
||||
if (session[cookieName]) {
|
||||
this.cachedSessionCookie = `${cookieName}=${session[cookieName]}`;
|
||||
return this.cachedSessionCookie;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('Failed to read session file:', error);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an authenticated request to Cursor API
|
||||
*/
|
||||
private async makeRequest<T>(url: string): Promise<T | null> {
|
||||
const headers: Record<string, string> = {
|
||||
Accept: 'application/json',
|
||||
      'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36',
    };

    // Try access token first
    const accessToken = await this.getAccessToken();
    if (accessToken) {
      headers['Authorization'] = `Bearer ${accessToken}`;
    }

    // Try session cookie as fallback
    const sessionCookie = await this.getSessionCookie();
    if (sessionCookie) {
      headers['Cookie'] = sessionCookie;
    }

    if (!accessToken && !sessionCookie) {
      logger.warn('No Cursor credentials available for API request');
      return null;
    }

    try {
      const response = await fetch(url, {
        method: 'GET',
        headers,
      });

      if (!response.ok) {
        if (response.status === 401 || response.status === 403) {
          // Clear cached credentials on auth failure
          this.cachedAccessToken = null;
          this.cachedSessionCookie = null;
          logger.warn('Cursor API authentication failed');
          return null;
        }
        logger.error(`Cursor API error: ${response.status} ${response.statusText}`);
        return null;
      }

      return (await response.json()) as T;
    } catch (error) {
      logger.error('Failed to fetch from Cursor API:', error);
      return null;
    }
  }

  /**
   * Fetch usage data from Cursor
   */
  async fetchUsageData(): Promise<CursorProviderUsage> {
    logger.info('[fetchUsageData] Starting Cursor usage fetch...');

    const baseUsage: CursorProviderUsage = {
      providerId: 'cursor',
      providerName: 'Cursor',
      available: false,
      lastUpdated: new Date().toISOString(),
    };

    // Check if credentials are available
    const hasCredentials = await this.hasValidCredentials();
    if (!hasCredentials) {
      baseUsage.error = 'Cursor credentials not available';
      return baseUsage;
    }

    // Fetch usage summary
    const usageSummary = await this.makeRequest<CursorUsageSummary>(USAGE_SUMMARY_ENDPOINT);
    if (!usageSummary) {
      baseUsage.error = 'Failed to fetch Cursor usage data';
      return baseUsage;
    }

    baseUsage.available = true;

    // Parse plan usage
    if (usageSummary.planUsage) {
      const planWindow: UsageWindow = {
        name: 'Plan Usage',
        usedPercent: usageSummary.planUsage.percent || 0,
        resetsAt: usageSummary.planUsage.resetAt || '',
        resetText: usageSummary.planUsage.resetAt
          ? this.formatResetTime(usageSummary.planUsage.resetAt)
          : '',
      };
      baseUsage.primary = planWindow;
      baseUsage.planUsage = planWindow;
    }

    // Parse on-demand usage
    if (usageSummary.onDemandUsage) {
      const onDemandWindow: UsageWindow = {
        name: 'On-Demand Usage',
        usedPercent: usageSummary.onDemandUsage.percent || 0,
        resetsAt: usageSummary.billingCycleEnd || '',
        resetText: usageSummary.billingCycleEnd
          ? this.formatResetTime(usageSummary.billingCycleEnd)
          : '',
      };
      baseUsage.secondary = onDemandWindow;
      baseUsage.onDemandUsage = onDemandWindow;

      if (usageSummary.onDemandUsage.costUsd !== undefined) {
        baseUsage.onDemandCostUsd = usageSummary.onDemandUsage.costUsd;
      }
    }

    // Parse billing cycle end
    if (usageSummary.billingCycleEnd) {
      baseUsage.billingCycleEnd = usageSummary.billingCycleEnd;
    }

    // Parse plan type
    if (usageSummary.plan) {
      baseUsage.plan = {
        type: usageSummary.plan,
        displayName: this.formatPlanName(usageSummary.plan),
        isPaid: usageSummary.plan.toLowerCase() !== 'free',
      };
    }

    logger.info(
      `[fetchUsageData] ✓ Cursor usage: Plan=${baseUsage.planUsage?.usedPercent || 0}%, ` +
        `OnDemand=${baseUsage.onDemandUsage?.usedPercent || 0}%`
    );

    return baseUsage;
  }

  /**
   * Format reset time as human-readable string
   */
  private formatResetTime(resetAt: string): string {
    try {
      const date = new Date(resetAt);
      const now = new Date();
      const diff = date.getTime() - now.getTime();

      if (diff < 0) return 'Expired';

      const hours = Math.floor(diff / 3600000);
      const days = Math.floor(hours / 24);

      if (days > 0) {
        return `Resets in ${days}d`;
      }
      if (hours > 0) {
        return `Resets in ${hours}h`;
      }
      return 'Resets soon';
    } catch {
      return '';
    }
  }

  /**
   * Format plan name for display
   */
  private formatPlanName(plan: string): string {
    const planMap: Record<string, string> = {
      free: 'Free',
      pro: 'Pro',
      business: 'Business',
      enterprise: 'Enterprise',
    };
    return planMap[plan.toLowerCase()] || plan;
  }

  /**
   * Clear cached credentials (useful for logout)
   */
  clearCache(): void {
    this.cachedAccessToken = null;
    this.cachedSessionCookie = null;
  }
}
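// Illustrative sketch (not part of the diff above): how a caller might render the
// CursorProviderUsage object that fetchUsageData() returns. It assumes the type is
// exported from @automaker/types, as the sibling provider usage types are; the function
// name is hypothetical. Field names (available, error, primary, secondary) mirror the
// code above.
import type { CursorProviderUsage } from '@automaker/types';

function describeCursorUsage(usage: CursorProviderUsage): string {
  if (!usage.available) {
    return `Cursor usage unavailable: ${usage.error ?? 'unknown error'}`;
  }
  // primary = plan window, secondary = on-demand window (see fetchUsageData above)
  const plan = usage.primary
    ? `${usage.primary.usedPercent}% of plan used (${usage.primary.resetText})`
    : 'no plan window reported';
  const onDemand = usage.secondary ? `, on-demand ${usage.secondary.usedPercent}%` : '';
  return `Cursor: ${plan}${onDemand}`;
}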
@@ -273,56 +273,12 @@ class DevServerService {
      }
    }

  /**
   * Parse a custom command string into cmd and args
   * Handles quoted strings with spaces (e.g., "my command" arg1 arg2)
   */
  private parseCustomCommand(command: string): { cmd: string; args: string[] } {
    const tokens: string[] = [];
    let current = '';
    let inQuote = false;
    let quoteChar = '';

    for (let i = 0; i < command.length; i++) {
      const char = command[i];

      if (inQuote) {
        if (char === quoteChar) {
          inQuote = false;
        } else {
          current += char;
        }
      } else if (char === '"' || char === "'") {
        inQuote = true;
        quoteChar = char;
      } else if (char === ' ') {
        if (current) {
          tokens.push(current);
          current = '';
        }
      } else {
        current += char;
      }
    }

    if (current) {
      tokens.push(current);
    }

    const [cmd, ...args] = tokens;
    return { cmd: cmd || '', args };
  }
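// Illustrative sketch (not part of the diff): the tokenization behaviour of
// parseCustomCommand() above. The standalone helper below only approximates the same
// result for simple inputs (it does not handle quotes opened mid-token the way the
// character loop above does); the input string is hypothetical.
function parseCommandLike(command: string): { cmd: string; args: string[] } {
  // Match quoted runs or bare tokens, then strip the surrounding quotes.
  const tokens = command.match(/"[^"]*"|'[^']*'|\S+/g) ?? [];
  const unquoted = tokens.map((t) => t.replace(/^["']|["']$/g, ''));
  const [cmd, ...args] = unquoted;
  return { cmd: cmd || '', args };
}

// For the example given in the doc comment above:
// parseCommandLike('"my command" arg1 arg2')
// -> { cmd: 'my command', args: ['arg1', 'arg2'] }
console.log(parseCommandLike('"my command" arg1 arg2'));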
  /**
   * Start a dev server for a worktree
   * @param projectPath - The project root path
   * @param worktreePath - The worktree directory path
   * @param customCommand - Optional custom command to run instead of auto-detected dev command
   */
  async startDevServer(
    projectPath: string,
    worktreePath: string,
    customCommand?: string
    worktreePath: string
  ): Promise<{
    success: boolean;
    result?: {
@@ -355,41 +311,22 @@ class DevServerService {
      };
    }

    // Determine the dev command to use
    let devCommand: { cmd: string; args: string[] };
    // Check for package.json
    const packageJsonPath = path.join(worktreePath, 'package.json');
    if (!(await this.fileExists(packageJsonPath))) {
      return {
        success: false,
        error: `No package.json found in: ${worktreePath}`,
      };
    }

    // Normalize custom command: trim whitespace and treat empty strings as undefined
    const normalizedCustomCommand = customCommand?.trim();

    if (normalizedCustomCommand) {
      // Use the provided custom command
      devCommand = this.parseCustomCommand(normalizedCustomCommand);
      if (!devCommand.cmd) {
        return {
          success: false,
          error: 'Invalid custom command: command cannot be empty',
        };
      }
      logger.debug(`Using custom command: ${normalizedCustomCommand}`);
    } else {
      // Check for package.json when auto-detecting
      const packageJsonPath = path.join(worktreePath, 'package.json');
      if (!(await this.fileExists(packageJsonPath))) {
        return {
          success: false,
          error: `No package.json found in: ${worktreePath}`,
        };
      }

      // Get dev command from package manager detection
      const detectedCommand = await this.getDevCommand(worktreePath);
      if (!detectedCommand) {
        return {
          success: false,
          error: `Could not determine dev command for: ${worktreePath}`,
        };
      }
      devCommand = detectedCommand;
    // Get dev command
    const devCommand = await this.getDevCommand(worktreePath);
    if (!devCommand) {
      return {
        success: false,
        error: `Could not determine dev command for: ${worktreePath}`,
      };
    }

    // Find available port
@@ -21,7 +21,6 @@ import { createLogger } from '@automaker/utils';
import type { EventEmitter } from '../lib/events.js';
import type { SettingsService } from './settings-service.js';
import type { EventHistoryService } from './event-history-service.js';
import type { FeatureLoader } from './feature-loader.js';
import type {
  EventHook,
  EventHookTrigger,
@@ -58,7 +57,6 @@ interface HookContext {
interface AutoModeEventPayload {
  type?: string;
  featureId?: string;
  featureName?: string;
  passes?: boolean;
  message?: string;
  error?: string;
@@ -85,22 +83,19 @@ export class EventHookService {
  private emitter: EventEmitter | null = null;
  private settingsService: SettingsService | null = null;
  private eventHistoryService: EventHistoryService | null = null;
  private featureLoader: FeatureLoader | null = null;
  private unsubscribe: (() => void) | null = null;

  /**
   * Initialize the service with event emitter, settings service, event history service, and feature loader
   * Initialize the service with event emitter, settings service, and event history service
   */
  initialize(
    emitter: EventEmitter,
    settingsService: SettingsService,
    eventHistoryService?: EventHistoryService,
    featureLoader?: FeatureLoader
    eventHistoryService?: EventHistoryService
  ): void {
    this.emitter = emitter;
    this.settingsService = settingsService;
    this.eventHistoryService = eventHistoryService || null;
    this.featureLoader = featureLoader || null;

    // Subscribe to events
    this.unsubscribe = emitter.subscribe((type, payload) => {
@@ -125,7 +120,6 @@ export class EventHookService {
    this.emitter = null;
    this.settingsService = null;
    this.eventHistoryService = null;
    this.featureLoader = null;
  }

  /**
@@ -155,23 +149,9 @@ export class EventHookService {

    if (!trigger) return;

    // Load feature name if we have featureId but no featureName
    let featureName: string | undefined = undefined;
    if (payload.featureId && payload.projectPath && this.featureLoader) {
      try {
        const feature = await this.featureLoader.get(payload.projectPath, payload.featureId);
        if (feature?.title) {
          featureName = feature.title;
        }
      } catch (error) {
        logger.warn(`Failed to load feature ${payload.featureId} for event hook:`, error);
      }
    }

    // Build context for variable substitution
    const context: HookContext = {
      featureId: payload.featureId,
      featureName: payload.featureName,
      projectPath: payload.projectPath,
      projectName: payload.projectPath ? this.extractProjectName(payload.projectPath) : undefined,
      error: payload.error || payload.message,
@@ -333,7 +313,6 @@ export class EventHookService {
      eventType: context.eventType,
      timestamp: context.timestamp,
      featureId: context.featureId,
      featureName: context.featureName,
      projectPath: context.projectPath,
      projectName: context.projectName,
      error: context.error,
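// Illustrative sketch (not part of the diff): the shape of the hook context assembled
// above. The interface and helper are hypothetical; extractProjectName() is approximated
// here by path.basename and the timestamp by new Date().toISOString() — the real
// implementations may differ. Field names mirror the HookContext construction in the hunk.
import * as path from 'path';

interface HookContextLike {
  eventType: string;
  timestamp: string;
  featureId?: string;
  featureName?: string;
  projectPath?: string;
  projectName?: string;
  error?: string;
}

function buildHookContext(
  eventType: string,
  payload: { featureId?: string; featureName?: string; projectPath?: string; error?: string; message?: string }
): HookContextLike {
  return {
    eventType,
    timestamp: new Date().toISOString(),
    featureId: payload.featureId,
    featureName: payload.featureName,
    projectPath: payload.projectPath,
    // Assumption: project name is derived from the last path segment.
    projectName: payload.projectPath ? path.basename(payload.projectPath) : undefined,
    error: payload.error || payload.message,
  };
}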
@@ -1,540 +0,0 @@
|
||||
/**
|
||||
* Feature Export Service - Handles exporting and importing features in JSON/YAML formats
|
||||
*
|
||||
* Provides functionality to:
|
||||
* - Export single features to JSON or YAML format
|
||||
* - Export multiple features (bulk export)
|
||||
* - Import features from JSON or YAML data
|
||||
* - Validate import data for compatibility
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { stringify as yamlStringify, parse as yamlParse } from 'yaml';
|
||||
import type { Feature, FeatureExport, FeatureImport, FeatureImportResult } from '@automaker/types';
|
||||
import { FeatureLoader } from './feature-loader.js';
|
||||
|
||||
const logger = createLogger('FeatureExportService');
|
||||
|
||||
/** Current export format version */
|
||||
export const FEATURE_EXPORT_VERSION = '1.0.0';
|
||||
|
||||
/** Supported export formats */
|
||||
export type ExportFormat = 'json' | 'yaml';
|
||||
|
||||
/** Options for exporting features */
|
||||
export interface ExportOptions {
|
||||
/** Format to export in (default: 'json') */
|
||||
format?: ExportFormat;
|
||||
/** Whether to include description history (default: true) */
|
||||
includeHistory?: boolean;
|
||||
/** Whether to include plan spec (default: true) */
|
||||
includePlanSpec?: boolean;
|
||||
/** Optional metadata to include */
|
||||
metadata?: {
|
||||
projectName?: string;
|
||||
projectPath?: string;
|
||||
branch?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
/** Who/what is performing the export */
|
||||
exportedBy?: string;
|
||||
/** Pretty print output (default: true) */
|
||||
prettyPrint?: boolean;
|
||||
}
|
||||
|
||||
/** Options for bulk export */
|
||||
export interface BulkExportOptions extends ExportOptions {
|
||||
/** Filter by category */
|
||||
category?: string;
|
||||
/** Filter by status */
|
||||
status?: string;
|
||||
/** Feature IDs to include (if not specified, exports all) */
|
||||
featureIds?: string[];
|
||||
}
|
||||
|
||||
/** Result of a bulk export */
|
||||
export interface BulkExportResult {
|
||||
/** Export format version */
|
||||
version: string;
|
||||
/** ISO date string when the export was created */
|
||||
exportedAt: string;
|
||||
/** Number of features exported */
|
||||
count: number;
|
||||
/** The exported features */
|
||||
features: FeatureExport[];
|
||||
/** Export metadata */
|
||||
metadata?: {
|
||||
projectName?: string;
|
||||
projectPath?: string;
|
||||
branch?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* FeatureExportService - Manages feature export and import operations
|
||||
*/
|
||||
export class FeatureExportService {
|
||||
private featureLoader: FeatureLoader;
|
||||
|
||||
constructor(featureLoader?: FeatureLoader) {
|
||||
this.featureLoader = featureLoader || new FeatureLoader();
|
||||
}
|
||||
|
||||
/**
|
||||
* Export a single feature to the specified format
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param featureId - ID of the feature to export
|
||||
* @param options - Export options
|
||||
* @returns Promise resolving to the exported feature string
|
||||
*/
|
||||
async exportFeature(
|
||||
projectPath: string,
|
||||
featureId: string,
|
||||
options: ExportOptions = {}
|
||||
): Promise<string> {
|
||||
const feature = await this.featureLoader.get(projectPath, featureId);
|
||||
if (!feature) {
|
||||
throw new Error(`Feature ${featureId} not found`);
|
||||
}
|
||||
|
||||
return this.exportFeatureData(feature, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Export feature data to the specified format (without fetching from disk)
|
||||
*
|
||||
* @param feature - The feature to export
|
||||
* @param options - Export options
|
||||
* @returns The exported feature string
|
||||
*/
|
||||
exportFeatureData(feature: Feature, options: ExportOptions = {}): string {
|
||||
const {
|
||||
format = 'json',
|
||||
includeHistory = true,
|
||||
includePlanSpec = true,
|
||||
metadata,
|
||||
exportedBy,
|
||||
prettyPrint = true,
|
||||
} = options;
|
||||
|
||||
// Prepare feature data, optionally excluding some fields
|
||||
const featureData = this.prepareFeatureForExport(feature, {
|
||||
includeHistory,
|
||||
includePlanSpec,
|
||||
});
|
||||
|
||||
const exportData: FeatureExport = {
|
||||
version: FEATURE_EXPORT_VERSION,
|
||||
feature: featureData,
|
||||
exportedAt: new Date().toISOString(),
|
||||
...(exportedBy ? { exportedBy } : {}),
|
||||
...(metadata ? { metadata } : {}),
|
||||
};
|
||||
|
||||
return this.serialize(exportData, format, prettyPrint);
|
||||
}
|
||||
|
||||
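// Illustrative usage sketch (not part of the diff): exporting a feature to YAML with the
// ExportOptions documented above. The import path and the feature object are hypothetical,
// and the cast acknowledges that the real Feature type has more fields than this sketch
// fills in.
import type { Feature } from '@automaker/types';
import { FeatureExportService } from './feature-export-service.js';

const exportService = new FeatureExportService();
const sampleFeature = {
  id: 'feature-123',
  title: 'Example feature',
  description: 'Hypothetical feature used for illustration',
  category: 'demo',
} as unknown as Feature;

const yamlText = exportService.exportFeatureData(sampleFeature, {
  format: 'yaml',
  includeHistory: false,
  exportedBy: 'docs-example',
});
console.log(yamlText);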
/**
|
||||
* Export multiple features to the specified format
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param options - Bulk export options
|
||||
* @returns Promise resolving to the exported features string
|
||||
*/
|
||||
async exportFeatures(projectPath: string, options: BulkExportOptions = {}): Promise<string> {
|
||||
const {
|
||||
format = 'json',
|
||||
category,
|
||||
status,
|
||||
featureIds,
|
||||
includeHistory = true,
|
||||
includePlanSpec = true,
|
||||
metadata,
|
||||
prettyPrint = true,
|
||||
} = options;
|
||||
|
||||
// Get all features
|
||||
let features = await this.featureLoader.getAll(projectPath);
|
||||
|
||||
// Apply filters
|
||||
if (featureIds && featureIds.length > 0) {
|
||||
const idSet = new Set(featureIds);
|
||||
features = features.filter((f) => idSet.has(f.id));
|
||||
}
|
||||
if (category) {
|
||||
features = features.filter((f) => f.category === category);
|
||||
}
|
||||
if (status) {
|
||||
features = features.filter((f) => f.status === status);
|
||||
}
|
||||
|
||||
// Generate timestamp once for consistent export time across all features
|
||||
const exportedAt = new Date().toISOString();
|
||||
|
||||
// Prepare feature exports
|
||||
const featureExports: FeatureExport[] = features.map((feature) => ({
|
||||
version: FEATURE_EXPORT_VERSION,
|
||||
feature: this.prepareFeatureForExport(feature, { includeHistory, includePlanSpec }),
|
||||
exportedAt,
|
||||
}));
|
||||
|
||||
const bulkExport: BulkExportResult = {
|
||||
version: FEATURE_EXPORT_VERSION,
|
||||
exportedAt,
|
||||
count: featureExports.length,
|
||||
features: featureExports,
|
||||
...(metadata ? { metadata } : {}),
|
||||
};
|
||||
|
||||
logger.info(`Exported ${featureExports.length} features from ${projectPath}`);
|
||||
|
||||
return this.serialize(bulkExport, format, prettyPrint);
|
||||
}
|
||||
|
||||
/**
|
||||
* Import a feature from JSON or YAML data
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param importData - Import configuration
|
||||
* @returns Promise resolving to the import result
|
||||
*/
|
||||
async importFeature(
|
||||
projectPath: string,
|
||||
importData: FeatureImport
|
||||
): Promise<FeatureImportResult> {
|
||||
const warnings: string[] = [];
|
||||
const errors: string[] = [];
|
||||
|
||||
try {
|
||||
// Extract feature from data (handle both raw Feature and wrapped FeatureExport)
|
||||
const feature = this.extractFeatureFromImport(importData.data);
|
||||
if (!feature) {
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: ['Invalid import data: could not extract feature'],
|
||||
};
|
||||
}
|
||||
|
||||
// Validate required fields
|
||||
const validationErrors = this.validateFeature(feature);
|
||||
if (validationErrors.length > 0) {
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: validationErrors,
|
||||
};
|
||||
}
|
||||
|
||||
// Determine the feature ID to use
|
||||
const featureId = importData.newId || feature.id || this.featureLoader.generateFeatureId();
|
||||
|
||||
// Check for existing feature
|
||||
const existingFeature = await this.featureLoader.get(projectPath, featureId);
|
||||
if (existingFeature && !importData.overwrite) {
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: [`Feature with ID ${featureId} already exists. Set overwrite: true to replace.`],
|
||||
};
|
||||
}
|
||||
|
||||
// Prepare feature for import
|
||||
const featureToImport: Feature = {
|
||||
...feature,
|
||||
id: featureId,
|
||||
// Optionally override category
|
||||
...(importData.targetCategory ? { category: importData.targetCategory } : {}),
|
||||
// Clear branch info if not preserving
|
||||
...(importData.preserveBranchInfo ? {} : { branchName: undefined }),
|
||||
};
|
||||
|
||||
// Clear runtime-specific fields that shouldn't be imported
|
||||
delete featureToImport.titleGenerating;
|
||||
delete featureToImport.error;
|
||||
|
||||
// Handle image paths - they won't be valid after import
|
||||
if (featureToImport.imagePaths && featureToImport.imagePaths.length > 0) {
|
||||
warnings.push(
|
||||
`Feature had ${featureToImport.imagePaths.length} image path(s) that were cleared during import. Images must be re-attached.`
|
||||
);
|
||||
featureToImport.imagePaths = [];
|
||||
}
|
||||
|
||||
// Handle text file paths - they won't be valid after import
|
||||
if (featureToImport.textFilePaths && featureToImport.textFilePaths.length > 0) {
|
||||
warnings.push(
|
||||
`Feature had ${featureToImport.textFilePaths.length} text file path(s) that were cleared during import. Files must be re-attached.`
|
||||
);
|
||||
featureToImport.textFilePaths = [];
|
||||
}
|
||||
|
||||
// Create or update the feature
|
||||
if (existingFeature) {
|
||||
await this.featureLoader.update(projectPath, featureId, featureToImport);
|
||||
logger.info(`Updated feature ${featureId} via import`);
|
||||
} else {
|
||||
await this.featureLoader.create(projectPath, featureToImport);
|
||||
logger.info(`Created feature ${featureId} via import`);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
featureId,
|
||||
importedAt: new Date().toISOString(),
|
||||
warnings: warnings.length > 0 ? warnings : undefined,
|
||||
wasOverwritten: !!existingFeature,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Failed to import feature:', error);
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: [`Import failed: ${error instanceof Error ? error.message : String(error)}`],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Import multiple features from JSON or YAML data
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param data - Raw JSON or YAML string, or parsed data
|
||||
* @param options - Import options applied to all features
|
||||
* @returns Promise resolving to array of import results
|
||||
*/
|
||||
async importFeatures(
|
||||
projectPath: string,
|
||||
data: string | BulkExportResult,
|
||||
options: Omit<FeatureImport, 'data'> = {}
|
||||
): Promise<FeatureImportResult[]> {
|
||||
let bulkData: BulkExportResult;
|
||||
|
||||
// Parse if string
|
||||
if (typeof data === 'string') {
|
||||
const parsed = this.parseImportData(data);
|
||||
if (!parsed || !this.isBulkExport(parsed)) {
|
||||
return [
|
||||
{
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: ['Invalid bulk import data: expected BulkExportResult format'],
|
||||
},
|
||||
];
|
||||
}
|
||||
bulkData = parsed as BulkExportResult;
|
||||
} else {
|
||||
bulkData = data;
|
||||
}
|
||||
|
||||
// Import each feature
|
||||
const results: FeatureImportResult[] = [];
|
||||
for (const featureExport of bulkData.features) {
|
||||
const result = await this.importFeature(projectPath, {
|
||||
data: featureExport,
|
||||
...options,
|
||||
});
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
const successCount = results.filter((r) => r.success).length;
|
||||
logger.info(`Bulk import complete: ${successCount}/${results.length} features imported`);
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse import data from JSON or YAML string
|
||||
*
|
||||
* @param data - Raw JSON or YAML string
|
||||
* @returns Parsed data or null if parsing fails
|
||||
*/
|
||||
parseImportData(data: string): Feature | FeatureExport | BulkExportResult | null {
|
||||
const trimmed = data.trim();
|
||||
|
||||
// Try JSON first
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
return JSON.parse(trimmed);
|
||||
} catch {
|
||||
// Fall through to YAML
|
||||
}
|
||||
}
|
||||
|
||||
// Try YAML
|
||||
try {
|
||||
return yamlParse(trimmed);
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse import data:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect the format of import data
|
||||
*
|
||||
* @param data - Raw string data
|
||||
* @returns Detected format or null if unknown
|
||||
*/
|
||||
detectFormat(data: string): ExportFormat | null {
|
||||
const trimmed = data.trim();
|
||||
|
||||
// JSON detection
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
JSON.parse(trimmed);
|
||||
return 'json';
|
||||
} catch {
|
||||
// Not valid JSON
|
||||
}
|
||||
}
|
||||
|
||||
// YAML detection (if it parses and wasn't JSON)
|
||||
try {
|
||||
yamlParse(trimmed);
|
||||
return 'yaml';
|
||||
} catch {
|
||||
// Not valid YAML either
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare a feature for export by optionally removing fields
|
||||
*/
|
||||
private prepareFeatureForExport(
|
||||
feature: Feature,
|
||||
options: { includeHistory?: boolean; includePlanSpec?: boolean }
|
||||
): Feature {
|
||||
const { includeHistory = true, includePlanSpec = true } = options;
|
||||
|
||||
// Clone to avoid modifying original
|
||||
const exported: Feature = { ...feature };
|
||||
|
||||
// Remove transient fields that shouldn't be exported
|
||||
delete exported.titleGenerating;
|
||||
delete exported.error;
|
||||
|
||||
// Optionally exclude history
|
||||
if (!includeHistory) {
|
||||
delete exported.descriptionHistory;
|
||||
}
|
||||
|
||||
// Optionally exclude plan spec
|
||||
if (!includePlanSpec) {
|
||||
delete exported.planSpec;
|
||||
}
|
||||
|
||||
return exported;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a Feature from import data (handles both raw and wrapped formats)
|
||||
*/
|
||||
private extractFeatureFromImport(data: Feature | FeatureExport): Feature | null {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check if it's a FeatureExport wrapper
|
||||
if ('version' in data && 'feature' in data && 'exportedAt' in data) {
|
||||
const exportData = data as FeatureExport;
|
||||
return exportData.feature;
|
||||
}
|
||||
|
||||
// Assume it's a raw Feature
|
||||
return data as Feature;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if parsed data is a bulk export
|
||||
*/
|
||||
isBulkExport(data: unknown): data is BulkExportResult {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const obj = data as Record<string, unknown>;
|
||||
return 'version' in obj && 'features' in obj && Array.isArray(obj.features);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if parsed data is a single FeatureExport
|
||||
*/
|
||||
isFeatureExport(data: unknown): data is FeatureExport {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const obj = data as Record<string, unknown>;
|
||||
return (
|
||||
'version' in obj &&
|
||||
'feature' in obj &&
|
||||
'exportedAt' in obj &&
|
||||
typeof obj.feature === 'object' &&
|
||||
obj.feature !== null &&
|
||||
'id' in (obj.feature as Record<string, unknown>)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if parsed data is a raw Feature
|
||||
*/
|
||||
isRawFeature(data: unknown): data is Feature {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const obj = data as Record<string, unknown>;
|
||||
// A raw feature has 'id' but not the 'version' + 'feature' wrapper of FeatureExport
|
||||
return 'id' in obj && !('feature' in obj && 'version' in obj);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a feature has required fields
|
||||
*/
|
||||
private validateFeature(feature: Feature): string[] {
|
||||
const errors: string[] = [];
|
||||
|
||||
if (!feature.description && !feature.title) {
|
||||
errors.push('Feature must have at least a title or description');
|
||||
}
|
||||
|
||||
if (!feature.category) {
|
||||
errors.push('Feature must have a category');
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize export data to string (handles both single feature and bulk exports)
|
||||
*/
|
||||
private serialize<T extends FeatureExport | BulkExportResult>(
|
||||
data: T,
|
||||
format: ExportFormat,
|
||||
prettyPrint: boolean
|
||||
): string {
|
||||
if (format === 'yaml') {
|
||||
return yamlStringify(data, {
|
||||
indent: 2,
|
||||
lineWidth: 120,
|
||||
});
|
||||
}
|
||||
|
||||
return prettyPrint ? JSON.stringify(data, null, 2) : JSON.stringify(data);
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
let featureExportServiceInstance: FeatureExportService | null = null;

/**
 * Get the singleton feature export service instance
 */
export function getFeatureExportService(): FeatureExportService {
  if (!featureExportServiceInstance) {
    featureExportServiceInstance = new FeatureExportService();
  }
  return featureExportServiceInstance;
}
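// Illustrative sketch (not part of the diff): round-tripping the import helpers above.
// detectFormat() distinguishes JSON from YAML, parseImportData() returns the parsed object,
// isBulkExport() picks the bulk path, and importFeature()/importFeatures() write the
// features. The import path and file path are hypothetical.
import { promises as fs } from 'fs';
import { getFeatureExportService } from './feature-export-service.js';

async function importFromFile(projectPath: string, filePath: string) {
  const service = getFeatureExportService();
  const raw = await fs.readFile(filePath, 'utf8');

  const format = service.detectFormat(raw); // 'json' | 'yaml' | null
  if (!format) {
    throw new Error(`Unrecognized import format in ${filePath}`);
  }

  const parsed = service.parseImportData(raw);
  if (!parsed) {
    throw new Error(`Could not parse import data from ${filePath}`);
  }
  if (service.isBulkExport(parsed)) {
    return service.importFeatures(projectPath, parsed, { overwrite: false });
  }
  return service.importFeature(projectPath, { data: parsed, overwrite: false });
}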
@@ -1,362 +0,0 @@
|
||||
/**
|
||||
* Gemini Usage Service
|
||||
*
|
||||
* Fetches usage data from Google's Gemini/Cloud Code API using OAuth credentials.
|
||||
* Based on CodexBar reference implementation.
|
||||
*
|
||||
* Authentication methods:
|
||||
* 1. OAuth credentials from ~/.gemini/oauth_creds.json
|
||||
* 2. API key (limited - only supports API calls, not quota info)
|
||||
*
|
||||
* API Endpoints:
|
||||
* - POST https://cloudcode-pa.googleapis.com/v1internal:retrieveUserQuota - Quota info
|
||||
* - POST https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist - Tier detection
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { GeminiProviderUsage, UsageWindow } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('GeminiUsage');
|
||||
|
||||
// Gemini API endpoints
|
||||
const QUOTA_ENDPOINT = 'https://cloudcode-pa.googleapis.com/v1internal:retrieveUserQuota';
|
||||
const CODE_ASSIST_ENDPOINT = 'https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist';
|
||||
const TOKEN_REFRESH_ENDPOINT = 'https://oauth2.googleapis.com/token';
|
||||
|
||||
// Gemini CLI client credentials (from Gemini CLI installation)
|
||||
// These are embedded in the Gemini CLI and are public
|
||||
const GEMINI_CLIENT_ID =
|
||||
'764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';
|
||||
const GEMINI_CLIENT_SECRET = 'd-FL95Q19q7MQmFpd7hHD0Ty';
|
||||
|
||||
interface GeminiOAuthCreds {
|
||||
access_token: string;
|
||||
refresh_token: string;
|
||||
id_token?: string;
|
||||
expiry_date: number;
|
||||
}
|
||||
|
||||
interface GeminiQuotaResponse {
|
||||
quotas?: Array<{
|
||||
remainingFraction: number;
|
||||
resetTime: string;
|
||||
modelId?: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface GeminiCodeAssistResponse {
|
||||
tier?: string;
|
||||
claims?: {
|
||||
hd?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export class GeminiUsageService {
|
||||
private cachedCreds: GeminiOAuthCreds | null = null;
|
||||
private settingsPath = path.join(os.homedir(), '.gemini', 'settings.json');
|
||||
private credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');
|
||||
|
||||
/**
|
||||
* Check if Gemini credentials are available
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
const creds = await this.getOAuthCreds();
|
||||
return !!creds;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authentication type from settings
|
||||
*/
|
||||
private getAuthType(): string | null {
|
||||
try {
|
||||
if (fs.existsSync(this.settingsPath)) {
|
||||
const content = fs.readFileSync(this.settingsPath, 'utf8');
|
||||
const settings = JSON.parse(content);
|
||||
return settings.auth_type || settings.authType || null;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('Failed to read Gemini settings:', error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get OAuth credentials from file
|
||||
*/
|
||||
private async getOAuthCreds(): Promise<GeminiOAuthCreds | null> {
|
||||
// Check auth type - only oauth-personal supports quota API
|
||||
const authType = this.getAuthType();
|
||||
if (authType && authType !== 'oauth-personal') {
|
||||
logger.debug(`Gemini auth type is ${authType}, not oauth-personal - quota API not available`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check cached credentials
|
||||
if (this.cachedCreds) {
|
||||
// Check if expired
|
||||
if (this.cachedCreds.expiry_date > Date.now()) {
|
||||
return this.cachedCreds;
|
||||
}
|
||||
// Try to refresh
|
||||
const refreshed = await this.refreshToken(this.cachedCreds.refresh_token);
|
||||
if (refreshed) {
|
||||
this.cachedCreds = refreshed;
|
||||
return this.cachedCreds;
|
||||
}
|
||||
}
|
||||
|
||||
// Load from file
|
||||
try {
|
||||
if (fs.existsSync(this.credsPath)) {
|
||||
const content = fs.readFileSync(this.credsPath, 'utf8');
|
||||
const creds = JSON.parse(content) as GeminiOAuthCreds;
|
||||
|
||||
// Check if expired
|
||||
if (creds.expiry_date && creds.expiry_date <= Date.now()) {
|
||||
// Try to refresh
|
||||
if (creds.refresh_token) {
|
||||
const refreshed = await this.refreshToken(creds.refresh_token);
|
||||
if (refreshed) {
|
||||
this.cachedCreds = refreshed;
|
||||
// Save refreshed credentials
|
||||
this.saveCreds(refreshed);
|
||||
return this.cachedCreds;
|
||||
}
|
||||
}
|
||||
logger.warn('Gemini OAuth token expired and refresh failed');
|
||||
return null;
|
||||
}
|
||||
|
||||
this.cachedCreds = creds;
|
||||
return this.cachedCreds;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('Failed to read Gemini OAuth credentials:', error);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh OAuth token
|
||||
*/
|
||||
private async refreshToken(refreshToken: string): Promise<GeminiOAuthCreds | null> {
|
||||
try {
|
||||
const response = await fetch(TOKEN_REFRESH_ENDPOINT, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
client_id: GEMINI_CLIENT_ID,
|
||||
client_secret: GEMINI_CLIENT_SECRET,
|
||||
refresh_token: refreshToken,
|
||||
grant_type: 'refresh_token',
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error(`Token refresh failed: ${response.status}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const data = (await response.json()) as {
|
||||
access_token: string;
|
||||
expires_in: number;
|
||||
id_token?: string;
|
||||
};
|
||||
|
||||
return {
|
||||
access_token: data.access_token,
|
||||
refresh_token: refreshToken,
|
||||
id_token: data.id_token,
|
||||
expiry_date: Date.now() + data.expires_in * 1000,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Failed to refresh Gemini token:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save credentials to file
|
||||
*/
|
||||
private saveCreds(creds: GeminiOAuthCreds): void {
|
||||
try {
|
||||
const dir = path.dirname(this.credsPath);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
fs.writeFileSync(this.credsPath, JSON.stringify(creds, null, 2));
|
||||
} catch (error) {
|
||||
logger.warn('Failed to save Gemini credentials:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an authenticated request to Gemini API
|
||||
*/
|
||||
private async makeRequest<T>(url: string, body?: unknown): Promise<T | null> {
|
||||
const creds = await this.getOAuthCreds();
|
||||
if (!creds) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${creds.access_token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 401 || response.status === 403) {
|
||||
// Clear cached credentials on auth failure
|
||||
this.cachedCreds = null;
|
||||
logger.warn('Gemini API authentication failed');
|
||||
return null;
|
||||
}
|
||||
logger.error(`Gemini API error: ${response.status} ${response.statusText}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return (await response.json()) as T;
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch from Gemini API:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch usage data from Gemini
|
||||
*/
|
||||
async fetchUsageData(): Promise<GeminiProviderUsage> {
|
||||
logger.info('[fetchUsageData] Starting Gemini usage fetch...');
|
||||
|
||||
const baseUsage: GeminiProviderUsage = {
|
||||
providerId: 'gemini',
|
||||
providerName: 'Gemini',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Check if credentials are available
|
||||
const creds = await this.getOAuthCreds();
|
||||
if (!creds) {
|
||||
baseUsage.error = 'Gemini OAuth credentials not available';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
// Fetch quota information
|
||||
const quotaResponse = await this.makeRequest<GeminiQuotaResponse>(QUOTA_ENDPOINT, {
|
||||
projectId: '-', // Use default project
|
||||
});
|
||||
|
||||
if (quotaResponse?.quotas && quotaResponse.quotas.length > 0) {
|
||||
baseUsage.available = true;
|
||||
|
||||
const primaryQuota = quotaResponse.quotas[0];
|
||||
|
||||
// Convert remaining fraction to used percent
|
||||
const usedPercent = Math.round((1 - (primaryQuota.remainingFraction || 0)) * 100);
|
||||
|
||||
const quotaWindow: UsageWindow = {
|
||||
name: 'Quota',
|
||||
usedPercent,
|
||||
resetsAt: primaryQuota.resetTime || '',
|
||||
resetText: primaryQuota.resetTime ? this.formatResetTime(primaryQuota.resetTime) : '',
|
||||
};
|
||||
|
||||
baseUsage.primary = quotaWindow;
|
||||
baseUsage.remainingFraction = primaryQuota.remainingFraction;
|
||||
baseUsage.modelId = primaryQuota.modelId;
|
||||
}
|
||||
|
||||
// Fetch tier information
|
||||
const codeAssistResponse = await this.makeRequest<GeminiCodeAssistResponse>(
|
||||
CODE_ASSIST_ENDPOINT,
|
||||
{
|
||||
metadata: {
|
||||
ide: 'automaker',
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (codeAssistResponse?.tier) {
|
||||
baseUsage.tierType = codeAssistResponse.tier;
|
||||
|
||||
// Determine plan info from tier
|
||||
const tierMap: Record<string, { type: string; displayName: string; isPaid: boolean }> = {
|
||||
'standard-tier': { type: 'paid', displayName: 'Paid', isPaid: true },
|
||||
'free-tier': {
|
||||
type: codeAssistResponse.claims?.hd ? 'workspace' : 'free',
|
||||
displayName: codeAssistResponse.claims?.hd ? 'Workspace' : 'Free',
|
||||
isPaid: false,
|
||||
},
|
||||
'legacy-tier': { type: 'legacy', displayName: 'Legacy', isPaid: false },
|
||||
};
|
||||
|
||||
const tierInfo = tierMap[codeAssistResponse.tier] || {
|
||||
type: codeAssistResponse.tier,
|
||||
displayName: codeAssistResponse.tier,
|
||||
isPaid: false,
|
||||
};
|
||||
|
||||
baseUsage.plan = tierInfo;
|
||||
}
|
||||
|
||||
if (baseUsage.available) {
|
||||
logger.info(
|
||||
`[fetchUsageData] ✓ Gemini usage: ${baseUsage.primary?.usedPercent || 0}% used, ` +
|
||||
`tier=${baseUsage.tierType || 'unknown'}`
|
||||
);
|
||||
} else {
|
||||
baseUsage.error = 'Failed to fetch Gemini quota data';
|
||||
}
|
||||
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format reset time as human-readable string
|
||||
*/
|
||||
private formatResetTime(resetAt: string): string {
|
||||
try {
|
||||
const date = new Date(resetAt);
|
||||
const now = new Date();
|
||||
const diff = date.getTime() - now.getTime();
|
||||
|
||||
if (diff < 0) return 'Expired';
|
||||
|
||||
const minutes = Math.floor(diff / 60000);
|
||||
const hours = Math.floor(minutes / 60);
|
||||
const days = Math.floor(hours / 24);
|
||||
|
||||
if (days > 0) {
|
||||
return `Resets in ${days}d ${hours % 24}h`;
|
||||
}
|
||||
if (hours > 0) {
|
||||
return `Resets in ${hours}h ${minutes % 60}m`;
|
||||
}
|
||||
if (minutes > 0) {
|
||||
return `Resets in ${minutes}m`;
|
||||
}
|
||||
return 'Resets soon';
|
||||
} catch {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear cached credentials
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cachedCreds = null;
|
||||
}
|
||||
}
|
||||
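// Illustrative sketch (not part of the diff): the expiry handling used by getOAuthCreds()
// and refreshToken() above. Gemini OAuth credentials carry an absolute expiry_date in
// epoch milliseconds; after a refresh it is recomputed from expires_in seconds. The helper
// names and values are hypothetical.
interface OAuthCredsLike {
  access_token: string;
  refresh_token: string;
  expiry_date: number; // epoch milliseconds
}

function isExpired(creds: OAuthCredsLike, now: number = Date.now()): boolean {
  return creds.expiry_date <= now;
}

// Matches the refreshToken() implementation above:
// expiry_date = Date.now() + expires_in * 1000
function withRefreshedExpiry(creds: OAuthCredsLike, expiresInSec: number): OAuthCredsLike {
  return { ...creds, expiry_date: Date.now() + expiresInSec * 1000 };
}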
@@ -1,140 +0,0 @@
|
||||
/**
|
||||
* GLM (z.AI) Usage Service
|
||||
*
|
||||
* Fetches usage data from z.AI's API.
|
||||
* GLM is a Claude-compatible provider offered by z.AI.
|
||||
*
|
||||
* Authentication:
|
||||
* - API Token from provider config or GLM_API_KEY environment variable
|
||||
*
|
||||
* Note: z.AI's API may not expose a dedicated usage endpoint.
|
||||
* This service checks for API availability and reports basic status.
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { GLMProviderUsage, ClaudeCompatibleProvider } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('GLMUsage');
|
||||
|
||||
// GLM API base (z.AI)
|
||||
const GLM_API_BASE = 'https://api.z.ai';
|
||||
|
||||
export class GLMUsageService {
|
||||
private providerConfig: ClaudeCompatibleProvider | null = null;
|
||||
private cachedApiKey: string | null = null;
|
||||
|
||||
/**
|
||||
* Set the provider config (called from settings)
|
||||
*/
|
||||
setProviderConfig(config: ClaudeCompatibleProvider | null): void {
|
||||
this.providerConfig = config;
|
||||
this.cachedApiKey = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if GLM is available
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
const apiKey = this.getApiKey();
|
||||
return !!apiKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get API key from various sources
|
||||
*/
|
||||
private getApiKey(): string | null {
|
||||
if (this.cachedApiKey) {
|
||||
return this.cachedApiKey;
|
||||
}
|
||||
|
||||
// 1. Check environment variable
|
||||
if (process.env.GLM_API_KEY) {
|
||||
this.cachedApiKey = process.env.GLM_API_KEY;
|
||||
return this.cachedApiKey;
|
||||
}
|
||||
|
||||
// 2. Check provider config
|
||||
if (this.providerConfig?.apiKey) {
|
||||
this.cachedApiKey = this.providerConfig.apiKey;
|
||||
return this.cachedApiKey;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch usage data from GLM
|
||||
*
|
||||
* Note: z.AI may not have a public usage API.
|
||||
* This returns basic availability status.
|
||||
*/
|
||||
async fetchUsageData(): Promise<GLMProviderUsage> {
|
||||
logger.info('[fetchUsageData] Starting GLM usage fetch...');
|
||||
|
||||
const baseUsage: GLMProviderUsage = {
|
||||
providerId: 'glm',
|
||||
providerName: 'z.AI GLM',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const apiKey = this.getApiKey();
|
||||
if (!apiKey) {
|
||||
baseUsage.error = 'GLM API key not available';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
// GLM/z.AI is available if we have an API key
|
||||
// z.AI doesn't appear to have a public usage endpoint
|
||||
baseUsage.available = true;
|
||||
|
||||
// Check if API key is valid by making a simple request
|
||||
try {
|
||||
const baseUrl = this.providerConfig?.baseUrl || GLM_API_BASE;
|
||||
const response = await fetch(`${baseUrl}/api/anthropic/v1/messages`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
'anthropic-version': '2023-06-01',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: 'GLM-4.7',
|
||||
max_tokens: 1,
|
||||
messages: [{ role: 'user', content: 'hi' }],
|
||||
}),
|
||||
});
|
||||
|
||||
// We just want to check if auth works, not actually make a request
|
||||
// A 400 with invalid request is fine - it means auth worked
|
||||
if (response.status === 401 || response.status === 403) {
|
||||
baseUsage.available = false;
|
||||
baseUsage.error = 'GLM API authentication failed';
|
||||
}
|
||||
} catch (error) {
|
||||
// Network error or other issue - still mark as available since we have the key
|
||||
logger.debug('GLM API check failed (may be fine):', error);
|
||||
}
|
||||
|
||||
// Note: z.AI doesn't appear to expose usage metrics via API
|
||||
// Users should check their z.AI dashboard for detailed usage
|
||||
if (baseUsage.available) {
|
||||
baseUsage.plan = {
|
||||
type: 'api',
|
||||
displayName: 'API Access',
|
||||
isPaid: true,
|
||||
};
|
||||
}
|
||||
|
||||
logger.info(`[fetchUsageData] GLM available: ${baseUsage.available}`);
|
||||
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear cached credentials
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cachedApiKey = null;
|
||||
}
|
||||
}
|
||||
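// Illustrative usage sketch (not part of the diff): wiring the GLM usage service to a
// provider config. The import path and the provider object are hypothetical; only the
// baseUrl and apiKey fields are read by the service above, so the cast acknowledges that
// the real ClaudeCompatibleProvider type has additional fields not filled in here.
import type { ClaudeCompatibleProvider } from '@automaker/types';
import { GLMUsageService } from './glm-usage-service.js';

const glmUsage = new GLMUsageService();
glmUsage.setProviderConfig({
  baseUrl: 'https://api.z.ai',
  apiKey: process.env.GLM_API_KEY ?? '',
} as unknown as ClaudeCompatibleProvider);

glmUsage.fetchUsageData().then((usage) => {
  console.log(usage.available ? 'GLM reachable' : `GLM unavailable: ${usage.error}`);
});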
@@ -39,13 +39,9 @@ import { ProviderFactory } from '../providers/provider-factory.js';
|
||||
import type { SettingsService } from './settings-service.js';
|
||||
import type { FeatureLoader } from './feature-loader.js';
|
||||
import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js';
|
||||
import { resolveModelString, resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { resolveModelString } from '@automaker/model-resolver';
|
||||
import { stripProviderPrefix } from '@automaker/types';
|
||||
import {
|
||||
getPromptCustomization,
|
||||
getProviderByModelId,
|
||||
getPhaseModelWithOverrides,
|
||||
} from '../lib/settings-helpers.js';
|
||||
import { getPromptCustomization, getActiveClaudeApiProfile } from '../lib/settings-helpers.js';
|
||||
|
||||
const logger = createLogger('IdeationService');
|
||||
|
||||
@@ -212,27 +208,7 @@ export class IdeationService {
|
||||
);
|
||||
|
||||
// Resolve model alias to canonical identifier (with prefix)
|
||||
let modelId = resolveModelString(options?.model ?? 'sonnet');
|
||||
|
||||
// Try to find a provider for this model (e.g., GLM, MiniMax models)
|
||||
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||
let credentials = await this.settingsService?.getCredentials();
|
||||
|
||||
if (this.settingsService && options?.model) {
|
||||
const providerResult = await getProviderByModelId(
|
||||
options.model,
|
||||
this.settingsService,
|
||||
'[IdeationService]'
|
||||
);
|
||||
if (providerResult.provider) {
|
||||
claudeCompatibleProvider = providerResult.provider;
|
||||
// Use resolved model from provider if available (maps to Claude model)
|
||||
if (providerResult.resolvedModel) {
|
||||
modelId = providerResult.resolvedModel;
|
||||
}
|
||||
credentials = providerResult.credentials ?? credentials;
|
||||
}
|
||||
}
|
||||
const modelId = resolveModelString(options?.model ?? 'sonnet');
|
||||
|
||||
// Create SDK options
|
||||
const sdkOptions = createChatOptions({
|
||||
@@ -247,6 +223,12 @@ export class IdeationService {
|
||||
// Strip provider prefix - providers need bare model IDs
|
||||
const bareModel = stripProviderPrefix(modelId);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
this.settingsService,
|
||||
'[IdeationService]'
|
||||
);
|
||||
|
||||
const executeOptions: ExecuteOptions = {
|
||||
prompt: message,
|
||||
model: bareModel,
|
||||
@@ -256,7 +238,7 @@ export class IdeationService {
|
||||
maxTurns: 1, // Single turn for ideation
|
||||
abortController: activeSession.abortController!,
|
||||
conversationHistory: conversationHistory.length > 0 ? conversationHistory : undefined,
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
};
|
||||
|
||||
@@ -688,24 +670,8 @@ export class IdeationService {
|
||||
existingWorkContext
|
||||
);
|
||||
|
||||
// Get model from phase settings with provider info (ideationModel)
|
||||
const phaseResult = await getPhaseModelWithOverrides(
|
||||
'ideationModel',
|
||||
this.settingsService,
|
||||
projectPath,
|
||||
'[IdeationService]'
|
||||
);
|
||||
const resolved = resolvePhaseModel(phaseResult.phaseModel);
|
||||
// resolvePhaseModel already resolves model aliases internally - no need to call resolveModelString again
|
||||
const modelId = resolved.model;
|
||||
const claudeCompatibleProvider = phaseResult.provider;
|
||||
const credentials = phaseResult.credentials;
|
||||
|
||||
logger.info(
|
||||
'generateSuggestions using model:',
|
||||
modelId,
|
||||
claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
|
||||
);
|
||||
// Resolve model alias to canonical identifier (with prefix)
|
||||
const modelId = resolveModelString('sonnet');
|
||||
|
||||
// Create SDK options
|
||||
const sdkOptions = createChatOptions({
|
||||
@@ -720,6 +686,12 @@ export class IdeationService {
|
||||
// Strip provider prefix - providers need bare model IDs
|
||||
const bareModel = stripProviderPrefix(modelId);
|
||||
|
||||
// Get active Claude API profile for alternative endpoint configuration
|
||||
const { profile: claudeApiProfile, credentials } = await getActiveClaudeApiProfile(
|
||||
this.settingsService,
|
||||
'[IdeationService]'
|
||||
);
|
||||
|
||||
const executeOptions: ExecuteOptions = {
|
||||
prompt: prompt.prompt,
|
||||
model: bareModel,
|
||||
@@ -730,8 +702,7 @@ export class IdeationService {
|
||||
// Disable all tools - we just want text generation, not codebase analysis
|
||||
allowedTools: [],
|
||||
abortController: new AbortController(),
|
||||
readOnly: true, // Suggestions only need to return JSON, never write files
|
||||
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||
claudeApiProfile, // Pass active Claude API profile for alternative endpoint configuration
|
||||
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||
};
|
||||
|
||||
|
||||
@@ -1,260 +0,0 @@
|
||||
/**
|
||||
* MiniMax Usage Service
|
||||
*
|
||||
* Fetches usage data from MiniMax's coding plan API.
|
||||
* Based on CodexBar reference implementation.
|
||||
*
|
||||
* Authentication methods:
|
||||
* 1. API Token (MINIMAX_API_KEY environment variable or provider config)
|
||||
* 2. Cookie-based authentication (from platform login)
|
||||
*
|
||||
* API Endpoints:
|
||||
* - GET https://api.minimax.io/v1/coding_plan/remains - Token-based usage
|
||||
* - GET https://platform.minimax.io/v1/api/openplatform/coding_plan/remains - Fallback
|
||||
*
|
||||
* For China mainland: platform.minimaxi.com
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { MiniMaxProviderUsage, UsageWindow, ClaudeCompatibleProvider } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('MiniMaxUsage');
|
||||
|
||||
// MiniMax API endpoints
|
||||
const MINIMAX_API_BASE = 'https://api.minimax.io';
|
||||
const MINIMAX_PLATFORM_BASE = 'https://platform.minimax.io';
|
||||
const MINIMAX_CHINA_BASE = 'https://platform.minimaxi.com';
|
||||
|
||||
const CODING_PLAN_ENDPOINT = '/v1/coding_plan/remains';
|
||||
const PLATFORM_CODING_PLAN_ENDPOINT = '/v1/api/openplatform/coding_plan/remains';
|
||||
|
||||
interface MiniMaxCodingPlanResponse {
|
||||
base_resp?: {
|
||||
status_code?: number;
|
||||
status_msg?: string;
|
||||
};
|
||||
model_remains?: Array<{
|
||||
model: string;
|
||||
used: number;
|
||||
total: number;
|
||||
}>;
|
||||
remains_time?: number; // Seconds until reset
|
||||
start_time?: string;
|
||||
end_time?: string;
|
||||
}
|
||||
|
||||
export class MiniMaxUsageService {
|
||||
private providerConfig: ClaudeCompatibleProvider | null = null;
|
||||
private cachedApiKey: string | null = null;
|
||||
|
||||
/**
|
||||
* Set the provider config (called from settings)
|
||||
*/
|
||||
setProviderConfig(config: ClaudeCompatibleProvider | null): void {
|
||||
this.providerConfig = config;
|
||||
this.cachedApiKey = null; // Clear cache when config changes
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if MiniMax is available
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
const apiKey = this.getApiKey();
|
||||
return !!apiKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get API key from various sources
|
||||
*/
|
||||
private getApiKey(): string | null {
|
||||
if (this.cachedApiKey) {
|
||||
return this.cachedApiKey;
|
||||
}
|
||||
|
||||
// 1. Check environment variable
|
||||
if (process.env.MINIMAX_API_KEY) {
|
||||
this.cachedApiKey = process.env.MINIMAX_API_KEY;
|
||||
return this.cachedApiKey;
|
||||
}
|
||||
|
||||
// 2. Check provider config
|
||||
if (this.providerConfig?.apiKey) {
|
||||
this.cachedApiKey = this.providerConfig.apiKey;
|
||||
return this.cachedApiKey;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if we should use China endpoint
|
||||
*/
|
||||
private isChina(): boolean {
|
||||
if (this.providerConfig?.baseUrl) {
|
||||
return this.providerConfig.baseUrl.includes('minimaxi.com');
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an authenticated request to MiniMax API
|
||||
*/
|
||||
private async makeRequest<T>(url: string): Promise<T | null> {
|
||||
const apiKey = this.getApiKey();
|
||||
if (!apiKey) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 401 || response.status === 403) {
|
||||
this.cachedApiKey = null;
|
||||
logger.warn('MiniMax API authentication failed');
|
||||
return null;
|
||||
}
|
||||
logger.error(`MiniMax API error: ${response.status} ${response.statusText}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return (await response.json()) as T;
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch from MiniMax API:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch usage data from MiniMax
|
||||
*/
|
||||
async fetchUsageData(): Promise<MiniMaxProviderUsage> {
|
||||
logger.info('[fetchUsageData] Starting MiniMax usage fetch...');
|
||||
|
||||
const baseUsage: MiniMaxProviderUsage = {
|
||||
providerId: 'minimax',
|
||||
providerName: 'MiniMax',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const apiKey = this.getApiKey();
|
||||
if (!apiKey) {
|
||||
baseUsage.error = 'MiniMax API key not available';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
// Determine the correct endpoint
|
||||
const isChina = this.isChina();
|
||||
const baseUrl = isChina ? MINIMAX_CHINA_BASE : MINIMAX_API_BASE;
|
||||
const endpoint = `${baseUrl}${CODING_PLAN_ENDPOINT}`;
|
||||
|
||||
// Fetch coding plan data
|
||||
let codingPlan = await this.makeRequest<MiniMaxCodingPlanResponse>(endpoint);
|
||||
|
||||
// Try fallback endpoint if primary fails
|
||||
if (!codingPlan) {
|
||||
const platformBase = isChina ? MINIMAX_CHINA_BASE : MINIMAX_PLATFORM_BASE;
|
||||
const fallbackEndpoint = `${platformBase}${PLATFORM_CODING_PLAN_ENDPOINT}`;
|
||||
codingPlan = await this.makeRequest<MiniMaxCodingPlanResponse>(fallbackEndpoint);
|
||||
}
|
||||
|
||||
if (!codingPlan) {
|
||||
baseUsage.error = 'Failed to fetch MiniMax usage data';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
// Check for error response
|
||||
if (codingPlan.base_resp?.status_code && codingPlan.base_resp.status_code !== 0) {
|
||||
baseUsage.error = codingPlan.base_resp.status_msg || 'MiniMax API error';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
baseUsage.available = true;
|
||||
|
||||
// Parse model remains
|
||||
if (codingPlan.model_remains && codingPlan.model_remains.length > 0) {
|
||||
let totalUsed = 0;
|
||||
let totalLimit = 0;
|
||||
|
||||
for (const model of codingPlan.model_remains) {
|
||||
totalUsed += model.used;
|
||||
totalLimit += model.total;
|
||||
}
|
||||
|
||||
const usedPercent = totalLimit > 0 ? Math.round((totalUsed / totalLimit) * 100) : 0;
|
||||
|
||||
// Calculate reset time
|
||||
const resetsAt = codingPlan.remains_time
|
||||
? new Date(Date.now() + codingPlan.remains_time * 1000).toISOString()
|
||||
: codingPlan.end_time || '';
|
||||
|
||||
const usageWindow: UsageWindow = {
|
||||
name: 'Coding Plan',
|
||||
usedPercent,
|
||||
resetsAt,
|
||||
resetText: resetsAt ? this.formatResetTime(resetsAt) : '',
|
||||
used: totalUsed,
|
||||
limit: totalLimit,
|
||||
};
|
||||
|
||||
baseUsage.primary = usageWindow;
|
||||
baseUsage.tokenRemains = totalLimit - totalUsed;
|
||||
baseUsage.totalTokens = totalLimit;
|
||||
}
|
||||
|
||||
// Parse plan times
|
||||
if (codingPlan.start_time) {
|
||||
baseUsage.planStartTime = codingPlan.start_time;
|
||||
}
|
||||
if (codingPlan.end_time) {
|
||||
baseUsage.planEndTime = codingPlan.end_time;
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[fetchUsageData] ✓ MiniMax usage: ${baseUsage.primary?.usedPercent || 0}% used, ` +
|
||||
`${baseUsage.tokenRemains || 0} tokens remaining`
|
||||
);
|
||||
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format reset time as human-readable string
|
||||
*/
|
||||
private formatResetTime(resetAt: string): string {
|
||||
try {
|
||||
const date = new Date(resetAt);
|
||||
const now = new Date();
|
||||
const diff = date.getTime() - now.getTime();
|
||||
|
||||
if (diff < 0) return 'Expired';
|
||||
|
||||
const hours = Math.floor(diff / 3600000);
|
||||
const days = Math.floor(hours / 24);
|
||||
|
||||
if (days > 0) {
|
||||
return `Resets in ${days}d`;
|
||||
}
|
||||
if (hours > 0) {
|
||||
return `Resets in ${hours}h`;
|
||||
}
|
||||
return 'Resets soon';
|
||||
} catch {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear cached credentials
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cachedApiKey = null;
|
||||
}
|
||||
}
|
||||
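// Illustrative sketch (not part of the diff): the aggregation that fetchUsageData() above
// performs over model_remains. The model name and numbers are hypothetical.
interface ModelRemains {
  model: string;
  used: number;
  total: number;
}

function summarizeCodingPlan(models: ModelRemains[]): {
  usedPercent: number;
  tokenRemains: number;
  totalTokens: number;
} {
  const totalUsed = models.reduce((sum, m) => sum + m.used, 0);
  const totalLimit = models.reduce((sum, m) => sum + m.total, 0);
  const usedPercent = totalLimit > 0 ? Math.round((totalUsed / totalLimit) * 100) : 0;
  return { usedPercent, tokenRemains: totalLimit - totalUsed, totalTokens: totalLimit };
}

// e.g. summarizeCodingPlan([{ model: 'example-model', used: 250, total: 1000 }])
// -> { usedPercent: 25, tokenRemains: 750, totalTokens: 1000 }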
@@ -1,144 +0,0 @@
|
||||
/**
|
||||
* OpenCode Usage Service
|
||||
*
|
||||
* Fetches usage data from OpenCode's server API.
|
||||
* Based on CodexBar reference implementation.
|
||||
*
|
||||
* Note: OpenCode usage tracking is limited as they use a proprietary
|
||||
* server function API that requires browser cookies for authentication.
|
||||
* This service provides basic status checking based on local config.
|
||||
*
|
||||
* API Endpoints (require browser cookies):
|
||||
* - POST https://opencode.ai/_server - Server functions
|
||||
* - workspaces: Get workspace info
|
||||
* - subscription.get: Get usage data
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { OpenCodeProviderUsage, UsageWindow } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('OpenCodeUsage');
|
||||
|
||||
// OpenCode config locations
|
||||
const OPENCODE_CONFIG_PATHS = [
|
||||
path.join(os.homedir(), '.opencode', 'config.json'),
|
||||
path.join(os.homedir(), '.config', 'opencode', 'config.json'),
|
||||
];
|
||||
|
||||
interface OpenCodeConfig {
|
||||
workspaceId?: string;
|
||||
email?: string;
|
||||
authenticated?: boolean;
|
||||
}
|
||||
|
||||
interface OpenCodeUsageData {
|
||||
rollingUsage?: {
|
||||
usagePercent: number;
|
||||
resetInSec: number;
|
||||
};
|
||||
weeklyUsage?: {
|
||||
usagePercent: number;
|
||||
resetInSec: number;
|
||||
};
|
||||
}
|
||||
|
||||
export class OpenCodeUsageService {
|
||||
private cachedConfig: OpenCodeConfig | null = null;
|
||||
|
||||
/**
|
||||
* Check if OpenCode is available
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
const config = this.getConfig();
|
||||
return !!config?.authenticated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get OpenCode config from disk
|
||||
*/
|
||||
private getConfig(): OpenCodeConfig | null {
|
||||
if (this.cachedConfig) {
|
||||
return this.cachedConfig;
|
||||
}
|
||||
|
||||
// Check environment variable for workspace ID
|
||||
if (process.env.OPENCODE_WORKSPACE_ID) {
|
||||
this.cachedConfig = {
|
||||
workspaceId: process.env.OPENCODE_WORKSPACE_ID,
|
||||
authenticated: true,
|
||||
};
|
||||
return this.cachedConfig;
|
||||
}
|
||||
|
||||
// Check config files
|
||||
for (const configPath of OPENCODE_CONFIG_PATHS) {
|
||||
try {
|
||||
if (fs.existsSync(configPath)) {
|
||||
const content = fs.readFileSync(configPath, 'utf8');
|
||||
const config = JSON.parse(content) as OpenCodeConfig;
|
||||
this.cachedConfig = config;
|
||||
return this.cachedConfig;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug(`Failed to read OpenCode config from ${configPath}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch usage data from OpenCode
|
||||
*
|
||||
* Note: OpenCode's usage API requires browser cookies which we don't have access to.
|
||||
* This implementation returns basic availability status.
|
||||
* For full usage tracking, users should check the OpenCode dashboard.
|
||||
*/
|
||||
async fetchUsageData(): Promise<OpenCodeProviderUsage> {
|
||||
logger.info('[fetchUsageData] Starting OpenCode usage fetch...');
|
||||
|
||||
const baseUsage: OpenCodeProviderUsage = {
|
||||
providerId: 'opencode',
|
||||
providerName: 'OpenCode',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const config = this.getConfig();
|
||||
if (!config) {
|
||||
baseUsage.error = 'OpenCode not configured';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
if (!config.authenticated) {
|
||||
baseUsage.error = 'OpenCode not authenticated';
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
// OpenCode is available but we can't get detailed usage without browser cookies
|
||||
baseUsage.available = true;
|
||||
baseUsage.workspaceId = config.workspaceId;
|
||||
|
||||
// Note: Full usage tracking requires browser cookie authentication
|
||||
// which is not available in a server-side context.
|
||||
// Users should check the OpenCode dashboard for detailed usage.
|
||||
baseUsage.error =
|
||||
'Usage details require browser authentication. Check opencode.ai for details.';
|
||||
|
||||
logger.info(
|
||||
`[fetchUsageData] OpenCode available, workspace: ${config.workspaceId || 'unknown'}`
|
||||
);
|
||||
|
||||
return baseUsage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear cached config
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cachedConfig = null;
|
||||
}
|
||||
}
|
||||
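A short caller sketch for the service above; the wrapper function and console logging are illustrative only and not part of the module.

```typescript
// Illustrative caller for OpenCodeUsageService (wrapper and logging are assumptions).
async function logOpenCodeStatus(): Promise<void> {
  const opencode = new OpenCodeUsageService();

  if (!(await opencode.isAvailable())) {
    console.log('OpenCode not configured or not authenticated');
    return;
  }

  const usage = await opencode.fetchUsageData();
  // Detailed numbers require browser-cookie auth, so expect `available: true`
  // plus an explanatory `error` string pointing at the OpenCode dashboard.
  console.log(usage.workspaceId ?? 'unknown workspace', usage.error ?? '');
}
```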
@@ -234,75 +234,51 @@ export class PipelineService {
   *
   * Determines what status a feature should transition to based on current status.
   * Flow: in_progress -> pipeline_step_0 -> pipeline_step_1 -> ... -> final status
   * Steps in the excludedStepIds array will be skipped.
   *
   * @param currentStatus - Current feature status
   * @param config - Pipeline configuration (or null if no pipeline)
   * @param skipTests - Whether to skip tests (affects final status)
   * @param excludedStepIds - Optional array of step IDs to skip
   * @returns The next status in the pipeline flow
   */
  getNextStatus(
    currentStatus: FeatureStatusWithPipeline,
    config: PipelineConfig | null,
    skipTests: boolean,
    excludedStepIds?: string[]
    skipTests: boolean
  ): FeatureStatusWithPipeline {
    const steps = config?.steps || [];
    const exclusions = new Set(excludedStepIds || []);

    // Sort steps by order and filter out excluded steps
    const sortedSteps = [...steps]
      .sort((a, b) => a.order - b.order)
      .filter((step) => !exclusions.has(step.id));
    // Sort steps by order
    const sortedSteps = [...steps].sort((a, b) => a.order - b.order);

    // If no pipeline steps (or all excluded), use original logic
    // If no pipeline steps, use original logic
    if (sortedSteps.length === 0) {
      // If coming from in_progress or already in a pipeline step, go to final status
      if (currentStatus === 'in_progress' || currentStatus.startsWith('pipeline_')) {
      if (currentStatus === 'in_progress') {
        return skipTests ? 'waiting_approval' : 'verified';
      }
      return currentStatus;
    }

    // Coming from in_progress -> go to first non-excluded pipeline step
    // Coming from in_progress -> go to first pipeline step
    if (currentStatus === 'in_progress') {
      return `pipeline_${sortedSteps[0].id}`;
    }

    // Coming from a pipeline step -> go to next non-excluded step or final status
    // Coming from a pipeline step -> go to next step or final status
    if (currentStatus.startsWith('pipeline_')) {
      const currentStepId = currentStatus.replace('pipeline_', '');
      const currentIndex = sortedSteps.findIndex((s) => s.id === currentStepId);

      if (currentIndex === -1) {
        // Current step not found in filtered list (might be excluded or invalid)
        // Find next valid step after this one from the original sorted list
        const allSortedSteps = [...steps].sort((a, b) => a.order - b.order);
        const originalIndex = allSortedSteps.findIndex((s) => s.id === currentStepId);

        if (originalIndex === -1) {
          // Step truly doesn't exist, go to final status
          return skipTests ? 'waiting_approval' : 'verified';
        }

        // Find the next non-excluded step after the current one
        for (let i = originalIndex + 1; i < allSortedSteps.length; i++) {
          if (!exclusions.has(allSortedSteps[i].id)) {
            return `pipeline_${allSortedSteps[i].id}`;
          }
        }

        // No more non-excluded steps, go to final status
        // Step not found, go to final status
        return skipTests ? 'waiting_approval' : 'verified';
      }

      if (currentIndex < sortedSteps.length - 1) {
        // Go to next non-excluded step
        // Go to next step
        return `pipeline_${sortedSteps[currentIndex + 1].id}`;
      }

      // Last non-excluded step completed, go to final status
      // Last step completed, go to final status
      return skipTests ? 'waiting_approval' : 'verified';
    }

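To illustrate the flow described in the doc comment, a sketch with a made-up two-step pipeline. The step IDs, the config literal, and the bare `PipelineService` construction are assumptions; the config is cast loosely because the full `PipelineStep` shape is not shown in this hunk.

```typescript
// Sketch: feature status progression through a two-step pipeline (IDs invented).
const config = {
  steps: [
    { id: 'lint', order: 0 },
    { id: 'review', order: 1 },
  ],
} as unknown as PipelineConfig;

const pipeline = new PipelineService();
pipeline.getNextStatus('in_progress', config, false); // 'pipeline_lint'
pipeline.getNextStatus('pipeline_lint', config, false); // 'pipeline_review'
pipeline.getNextStatus('pipeline_review', config, false); // 'verified'
pipeline.getNextStatus('pipeline_review', config, true); // 'waiting_approval' (skipTests)
```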
@@ -1,447 +0,0 @@
|
||||
/**
|
||||
* Provider Usage Tracker
|
||||
*
|
||||
* Unified service that aggregates usage data from all supported AI providers.
|
||||
* Manages caching, polling, and coordination of individual usage services.
|
||||
*
|
||||
* Supported providers:
|
||||
* - Claude (via ClaudeUsageService)
|
||||
* - Codex (via CodexUsageService)
|
||||
* - Cursor (via CursorUsageService)
|
||||
* - Gemini (via GeminiUsageService)
|
||||
* - GitHub Copilot (via CopilotUsageService)
|
||||
* - OpenCode (via OpenCodeUsageService)
|
||||
* - MiniMax (via MiniMaxUsageService)
|
||||
* - GLM (via GLMUsageService)
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type {
|
||||
UsageProviderId,
|
||||
ProviderUsage,
|
||||
AllProvidersUsage,
|
||||
ClaudeProviderUsage,
|
||||
CodexProviderUsage,
|
||||
ClaudeCompatibleProvider,
|
||||
} from '@automaker/types';
|
||||
import { ClaudeUsageService } from './claude-usage-service.js';
|
||||
import { CodexUsageService, type CodexUsageData } from './codex-usage-service.js';
|
||||
import { CursorUsageService } from './cursor-usage-service.js';
|
||||
import { GeminiUsageService } from './gemini-usage-service.js';
|
||||
import { CopilotUsageService } from './copilot-usage-service.js';
|
||||
import { OpenCodeUsageService } from './opencode-usage-service.js';
|
||||
import { MiniMaxUsageService } from './minimax-usage-service.js';
|
||||
import { GLMUsageService } from './glm-usage-service.js';
|
||||
import type { ClaudeUsage } from '../routes/claude/types.js';
|
||||
|
||||
const logger = createLogger('ProviderUsageTracker');
|
||||
|
||||
// Cache TTL in milliseconds (1 minute)
|
||||
const CACHE_TTL_MS = 60 * 1000;
|
||||
|
||||
interface CachedUsage {
|
||||
data: ProviderUsage;
|
||||
fetchedAt: number;
|
||||
}
|
||||
|
||||
export class ProviderUsageTracker {
|
||||
private claudeService: ClaudeUsageService;
|
||||
private codexService: CodexUsageService;
|
||||
private cursorService: CursorUsageService;
|
||||
private geminiService: GeminiUsageService;
|
||||
private copilotService: CopilotUsageService;
|
||||
private opencodeService: OpenCodeUsageService;
|
||||
private minimaxService: MiniMaxUsageService;
|
||||
private glmService: GLMUsageService;
|
||||
|
||||
private cache: Map<UsageProviderId, CachedUsage> = new Map();
|
||||
private enabledProviders: Set<UsageProviderId> = new Set([
|
||||
'claude',
|
||||
'codex',
|
||||
'cursor',
|
||||
'gemini',
|
||||
'copilot',
|
||||
'opencode',
|
||||
'minimax',
|
||||
'glm',
|
||||
]);
|
||||
|
||||
constructor(codexService?: CodexUsageService) {
|
||||
this.claudeService = new ClaudeUsageService();
|
||||
this.codexService = codexService || new CodexUsageService();
|
||||
this.cursorService = new CursorUsageService();
|
||||
this.geminiService = new GeminiUsageService();
|
||||
this.copilotService = new CopilotUsageService();
|
||||
this.opencodeService = new OpenCodeUsageService();
|
||||
this.minimaxService = new MiniMaxUsageService();
|
||||
this.glmService = new GLMUsageService();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set enabled providers (called when settings change)
|
||||
*/
|
||||
setEnabledProviders(providers: UsageProviderId[]): void {
|
||||
this.enabledProviders = new Set(providers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update custom provider configs (MiniMax, GLM)
|
||||
*/
|
||||
updateCustomProviderConfigs(providers: ClaudeCompatibleProvider[]): void {
|
||||
const minimaxConfig = providers.find(
|
||||
(p) => p.providerType === 'minimax' && p.enabled !== false
|
||||
);
|
||||
const glmConfig = providers.find((p) => p.providerType === 'glm' && p.enabled !== false);
|
||||
|
||||
this.minimaxService.setProviderConfig(minimaxConfig || null);
|
||||
this.glmService.setProviderConfig(glmConfig || null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a provider is enabled
|
||||
*/
|
||||
isProviderEnabled(providerId: UsageProviderId): boolean {
|
||||
return this.enabledProviders.has(providerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if cached data is still fresh
|
||||
*/
|
||||
private isCacheFresh(providerId: UsageProviderId): boolean {
|
||||
const cached = this.cache.get(providerId);
|
||||
if (!cached) return false;
|
||||
return Date.now() - cached.fetchedAt < CACHE_TTL_MS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cached data for a provider
|
||||
*/
|
||||
private getCached(providerId: UsageProviderId): ProviderUsage | null {
|
||||
const cached = this.cache.get(providerId);
|
||||
return cached?.data || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set cached data for a provider
|
||||
*/
|
||||
private setCached(providerId: UsageProviderId, data: ProviderUsage): void {
|
||||
this.cache.set(providerId, {
|
||||
data,
|
||||
fetchedAt: Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Claude usage to unified format
|
||||
*/
|
||||
private convertClaudeUsage(usage: ClaudeUsage): ClaudeProviderUsage {
|
||||
return {
|
||||
providerId: 'claude',
|
||||
providerName: 'Claude',
|
||||
available: true,
|
||||
lastUpdated: usage.lastUpdated,
|
||||
userTimezone: usage.userTimezone,
|
||||
primary: {
|
||||
name: 'Session (5-hour)',
|
||||
usedPercent: usage.sessionPercentage,
|
||||
resetsAt: usage.sessionResetTime,
|
||||
resetText: usage.sessionResetText,
|
||||
},
|
||||
secondary: {
|
||||
name: 'Weekly (All Models)',
|
||||
usedPercent: usage.weeklyPercentage,
|
||||
resetsAt: usage.weeklyResetTime,
|
||||
resetText: usage.weeklyResetText,
|
||||
},
|
||||
sessionWindow: {
|
||||
name: 'Session (5-hour)',
|
||||
usedPercent: usage.sessionPercentage,
|
||||
resetsAt: usage.sessionResetTime,
|
||||
resetText: usage.sessionResetText,
|
||||
},
|
||||
weeklyWindow: {
|
||||
name: 'Weekly (All Models)',
|
||||
usedPercent: usage.weeklyPercentage,
|
||||
resetsAt: usage.weeklyResetTime,
|
||||
resetText: usage.weeklyResetText,
|
||||
},
|
||||
sonnetWindow: {
|
||||
name: 'Weekly (Sonnet)',
|
||||
usedPercent: usage.sonnetWeeklyPercentage,
|
||||
resetsAt: usage.weeklyResetTime,
|
||||
resetText: usage.sonnetResetText,
|
||||
},
|
||||
cost:
|
||||
usage.costUsed !== null
|
||||
? {
|
||||
used: usage.costUsed,
|
||||
limit: usage.costLimit,
|
||||
currency: usage.costCurrency || 'USD',
|
||||
}
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Codex usage to unified format
|
||||
*/
|
||||
private convertCodexUsage(usage: CodexUsageData): CodexProviderUsage {
|
||||
const result: CodexProviderUsage = {
|
||||
providerId: 'codex',
|
||||
providerName: 'Codex',
|
||||
available: true,
|
||||
lastUpdated: usage.lastUpdated,
|
||||
planType: usage.rateLimits?.planType,
|
||||
};
|
||||
|
||||
if (usage.rateLimits?.primary) {
|
||||
result.primary = {
|
||||
name: `${usage.rateLimits.primary.windowDurationMins}min Window`,
|
||||
usedPercent: usage.rateLimits.primary.usedPercent,
|
||||
resetsAt: new Date(usage.rateLimits.primary.resetsAt * 1000).toISOString(),
|
||||
resetText: this.formatResetTime(usage.rateLimits.primary.resetsAt * 1000),
|
||||
windowDurationMins: usage.rateLimits.primary.windowDurationMins,
|
||||
};
|
||||
}
|
||||
|
||||
if (usage.rateLimits?.secondary) {
|
||||
result.secondary = {
|
||||
name: `${usage.rateLimits.secondary.windowDurationMins}min Window`,
|
||||
usedPercent: usage.rateLimits.secondary.usedPercent,
|
||||
resetsAt: new Date(usage.rateLimits.secondary.resetsAt * 1000).toISOString(),
|
||||
resetText: this.formatResetTime(usage.rateLimits.secondary.resetsAt * 1000),
|
||||
windowDurationMins: usage.rateLimits.secondary.windowDurationMins,
|
||||
};
|
||||
}
|
||||
|
||||
if (usage.rateLimits?.planType) {
|
||||
result.plan = {
|
||||
type: usage.rateLimits.planType,
|
||||
displayName:
|
||||
usage.rateLimits.planType.charAt(0).toUpperCase() + usage.rateLimits.planType.slice(1),
|
||||
isPaid: usage.rateLimits.planType !== 'free',
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format reset time as human-readable string
|
||||
*/
|
||||
private formatResetTime(resetAtMs: number): string {
|
||||
const diff = resetAtMs - Date.now();
|
||||
if (diff < 0) return 'Expired';
|
||||
|
||||
const minutes = Math.floor(diff / 60000);
|
||||
const hours = Math.floor(minutes / 60);
|
||||
const days = Math.floor(hours / 24);
|
||||
|
||||
if (days > 0) return `Resets in ${days}d ${hours % 24}h`;
|
||||
if (hours > 0) return `Resets in ${hours}h ${minutes % 60}m`;
|
||||
if (minutes > 0) return `Resets in ${minutes}m`;
|
||||
return 'Resets soon';
|
||||
}
|
||||
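Since Codex reports reset times in epoch seconds, the callers above multiply by 1000 before handing them to this millisecond-based helper. Example outputs (illustrative; the method is private and shown here as plain calls):

```typescript
// Illustrative results of the reset-time formatting above.
formatResetTime(Date.now() + 26 * 60 * 60 * 1000); // 'Resets in 1d 2h'
formatResetTime(Date.now() + 90 * 60 * 1000); // 'Resets in 1h 30m'
formatResetTime(Date.now() + 30 * 1000); // 'Resets soon' (under a minute)
```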
|
||||
/**
|
||||
* Fetch usage for a specific provider
|
||||
*/
|
||||
async fetchProviderUsage(
|
||||
providerId: UsageProviderId,
|
||||
forceRefresh = false
|
||||
): Promise<ProviderUsage | null> {
|
||||
// Check cache first
|
||||
if (!forceRefresh && this.isCacheFresh(providerId)) {
|
||||
return this.getCached(providerId);
|
||||
}
|
||||
|
||||
try {
|
||||
let usage: ProviderUsage | null = null;
|
||||
|
||||
switch (providerId) {
|
||||
case 'claude': {
|
||||
if (await this.claudeService.isAvailable()) {
|
||||
const claudeUsage = await this.claudeService.fetchUsageData();
|
||||
usage = this.convertClaudeUsage(claudeUsage);
|
||||
} else {
|
||||
usage = {
|
||||
providerId: 'claude',
|
||||
providerName: 'Claude',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
error: 'Claude CLI not available',
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'codex': {
|
||||
if (await this.codexService.isAvailable()) {
|
||||
const codexUsage = await this.codexService.fetchUsageData();
|
||||
usage = this.convertCodexUsage(codexUsage);
|
||||
} else {
|
||||
usage = {
|
||||
providerId: 'codex',
|
||||
providerName: 'Codex',
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
error: 'Codex CLI not available',
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'cursor': {
|
||||
usage = await this.cursorService.fetchUsageData();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'gemini': {
|
||||
usage = await this.geminiService.fetchUsageData();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'copilot': {
|
||||
usage = await this.copilotService.fetchUsageData();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'opencode': {
|
||||
usage = await this.opencodeService.fetchUsageData();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'minimax': {
|
||||
usage = await this.minimaxService.fetchUsageData();
|
||||
break;
|
||||
}
|
||||
|
||||
case 'glm': {
|
||||
usage = await this.glmService.fetchUsageData();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (usage) {
|
||||
this.setCached(providerId, usage);
|
||||
}
|
||||
|
||||
return usage;
|
||||
} catch (error) {
|
||||
logger.error(`Failed to fetch usage for ${providerId}:`, error);
|
||||
return {
|
||||
providerId,
|
||||
providerName: this.getProviderName(providerId),
|
||||
available: false,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
} as ProviderUsage;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get provider display name
|
||||
*/
|
||||
private getProviderName(providerId: UsageProviderId): string {
|
||||
const names: Record<UsageProviderId, string> = {
|
||||
claude: 'Claude',
|
||||
codex: 'Codex',
|
||||
cursor: 'Cursor',
|
||||
gemini: 'Gemini',
|
||||
copilot: 'GitHub Copilot',
|
||||
opencode: 'OpenCode',
|
||||
minimax: 'MiniMax',
|
||||
glm: 'z.AI GLM',
|
||||
};
|
||||
return names[providerId] || providerId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch usage for all enabled providers
|
||||
*/
|
||||
async fetchAllUsage(forceRefresh = false): Promise<AllProvidersUsage> {
|
||||
const providers: Partial<Record<UsageProviderId, ProviderUsage>> = {};
|
||||
const errors: Array<{ providerId: UsageProviderId; message: string }> = [];
|
||||
|
||||
// Fetch all enabled providers in parallel
|
||||
const enabledList = Array.from(this.enabledProviders);
|
||||
const results = await Promise.allSettled(
|
||||
enabledList.map((providerId) => this.fetchProviderUsage(providerId, forceRefresh))
|
||||
);
|
||||
|
||||
results.forEach((result, index) => {
|
||||
const providerId = enabledList[index];
|
||||
|
||||
if (result.status === 'fulfilled' && result.value) {
|
||||
providers[providerId] = result.value;
|
||||
if (result.value.error) {
|
||||
errors.push({
|
||||
providerId,
|
||||
message: result.value.error,
|
||||
});
|
||||
}
|
||||
} else if (result.status === 'rejected') {
|
||||
errors.push({
|
||||
providerId,
|
||||
message: result.reason?.message || 'Unknown error',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
providers,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
errors,
|
||||
};
|
||||
}
|
||||
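A brief caller sketch for the aggregate fetch above; the wrapper function, provider selection, and logging are illustrative, and the per-provider fields are read defensively since the exact union type is defined elsewhere.

```typescript
// Illustrative polling of all enabled providers (wrapper is an assumption).
async function pollUsageOnce(tracker: ProviderUsageTracker): Promise<void> {
  tracker.setEnabledProviders(['claude', 'codex']);

  const all = await tracker.fetchAllUsage();
  for (const [providerId, usage] of Object.entries(all.providers)) {
    console.log(providerId, usage?.available ?? false);
  }
  for (const err of all.errors) {
    console.warn(`${err.providerId}: ${err.message}`);
  }
}
```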
|
||||
/**
|
||||
* Check availability for all providers
|
||||
*/
|
||||
async checkAvailability(): Promise<Record<UsageProviderId, boolean>> {
|
||||
const availability: Record<string, boolean> = {};
|
||||
|
||||
const checks = await Promise.allSettled([
|
||||
this.claudeService.isAvailable(),
|
||||
this.codexService.isAvailable(),
|
||||
this.cursorService.isAvailable(),
|
||||
this.geminiService.isAvailable(),
|
||||
this.copilotService.isAvailable(),
|
||||
this.opencodeService.isAvailable(),
|
||||
this.minimaxService.isAvailable(),
|
||||
this.glmService.isAvailable(),
|
||||
]);
|
||||
|
||||
const providerIds: UsageProviderId[] = [
|
||||
'claude',
|
||||
'codex',
|
||||
'cursor',
|
||||
'gemini',
|
||||
'copilot',
|
||||
'opencode',
|
||||
'minimax',
|
||||
'glm',
|
||||
];
|
||||
|
||||
checks.forEach((result, index) => {
|
||||
availability[providerIds[index]] =
|
||||
result.status === 'fulfilled' ? result.value : false;
|
||||
});
|
||||
|
||||
return availability as Record<UsageProviderId, boolean>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all caches
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cache.clear();
|
||||
this.claudeService = new ClaudeUsageService(); // Reset Claude service
|
||||
this.cursorService.clearCache();
|
||||
this.geminiService.clearCache();
|
||||
this.copilotService.clearCache();
|
||||
this.opencodeService.clearCache();
|
||||
this.minimaxService.clearCache();
|
||||
this.glmService.clearCache();
|
||||
}
|
||||
}
|
||||
@@ -31,9 +31,6 @@ import type {
|
||||
WorktreeInfo,
|
||||
PhaseModelConfig,
|
||||
PhaseModelEntry,
|
||||
ClaudeApiProfile,
|
||||
ClaudeCompatibleProvider,
|
||||
ProviderModel,
|
||||
} from '../types/settings.js';
|
||||
import {
|
||||
DEFAULT_GLOBAL_SETTINGS,
|
||||
@@ -44,12 +41,7 @@ import {
|
||||
CREDENTIALS_VERSION,
|
||||
PROJECT_SETTINGS_VERSION,
|
||||
} from '../types/settings.js';
|
||||
import {
|
||||
DEFAULT_MAX_CONCURRENCY,
|
||||
migrateModelId,
|
||||
migrateCursorModelIds,
|
||||
migrateOpencodeModelIds,
|
||||
} from '@automaker/types';
|
||||
import { migrateModelId, migrateCursorModelIds, migrateOpencodeModelIds } from '@automaker/types';
|
||||
|
||||
const logger = createLogger('SettingsService');
|
||||
|
||||
@@ -209,28 +201,6 @@ export class SettingsService {
|
||||
needsSave = true;
|
||||
}
|
||||
|
||||
// Migration v5 -> v6: Convert claudeApiProfiles to claudeCompatibleProviders
|
||||
// The new system uses a models[] array instead of modelMappings, and removes
|
||||
// the "active profile" concept - models are selected directly in phase model configs.
|
||||
if (storedVersion < 6) {
|
||||
const legacyProfiles = settings.claudeApiProfiles || [];
|
||||
if (
|
||||
legacyProfiles.length > 0 &&
|
||||
(!result.claudeCompatibleProviders || result.claudeCompatibleProviders.length === 0)
|
||||
) {
|
||||
logger.info(
|
||||
`Migration v5->v6: Converting ${legacyProfiles.length} Claude API profile(s) to compatible providers`
|
||||
);
|
||||
result.claudeCompatibleProviders = this.migrateProfilesToProviders(legacyProfiles);
|
||||
}
|
||||
// Remove the deprecated activeClaudeApiProfileId field
|
||||
if (result.activeClaudeApiProfileId) {
|
||||
logger.info('Migration v5->v6: Removing deprecated activeClaudeApiProfileId');
|
||||
delete result.activeClaudeApiProfileId;
|
||||
}
|
||||
needsSave = true;
|
||||
}
|
||||
|
||||
// Update version if any migration occurred
|
||||
if (needsSave) {
|
||||
result.version = SETTINGS_VERSION;
|
||||
@@ -315,139 +285,6 @@ export class SettingsService {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate ClaudeApiProfiles to ClaudeCompatibleProviders
|
||||
*
|
||||
* Converts the legacy profile format (with modelMappings) to the new
|
||||
* provider format (with models[] array). Each model mapping entry becomes
|
||||
* a ProviderModel with appropriate tier assignment.
|
||||
*
|
||||
* @param profiles - Legacy ClaudeApiProfile array
|
||||
* @returns Array of ClaudeCompatibleProvider
|
||||
*/
|
||||
private migrateProfilesToProviders(profiles: ClaudeApiProfile[]): ClaudeCompatibleProvider[] {
|
||||
return profiles.map((profile): ClaudeCompatibleProvider => {
|
||||
// Convert modelMappings to models array
|
||||
const models: ProviderModel[] = [];
|
||||
|
||||
if (profile.modelMappings) {
|
||||
// Haiku mapping
|
||||
if (profile.modelMappings.haiku) {
|
||||
models.push({
|
||||
id: profile.modelMappings.haiku,
|
||||
displayName: this.inferModelDisplayName(profile.modelMappings.haiku, 'haiku'),
|
||||
mapsToClaudeModel: 'haiku',
|
||||
});
|
||||
}
|
||||
// Sonnet mapping
|
||||
if (profile.modelMappings.sonnet) {
|
||||
models.push({
|
||||
id: profile.modelMappings.sonnet,
|
||||
displayName: this.inferModelDisplayName(profile.modelMappings.sonnet, 'sonnet'),
|
||||
mapsToClaudeModel: 'sonnet',
|
||||
});
|
||||
}
|
||||
// Opus mapping
|
||||
if (profile.modelMappings.opus) {
|
||||
models.push({
|
||||
id: profile.modelMappings.opus,
|
||||
displayName: this.inferModelDisplayName(profile.modelMappings.opus, 'opus'),
|
||||
mapsToClaudeModel: 'opus',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Infer provider type from base URL or name
|
||||
const providerType = this.inferProviderType(profile);
|
||||
|
||||
return {
|
||||
id: profile.id,
|
||||
name: profile.name,
|
||||
providerType,
|
||||
enabled: true,
|
||||
baseUrl: profile.baseUrl,
|
||||
apiKeySource: profile.apiKeySource ?? 'inline',
|
||||
apiKey: profile.apiKey,
|
||||
useAuthToken: profile.useAuthToken,
|
||||
timeoutMs: profile.timeoutMs,
|
||||
disableNonessentialTraffic: profile.disableNonessentialTraffic,
|
||||
models,
|
||||
};
|
||||
});
|
||||
}
|
||||
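Roughly, the v5 -> v6 conversion above turns a legacy profile like the one below into a provider entry. All field values here are invented for illustration; only the shapes and the inference rules (z.ai base URL -> 'glm', haiku/sonnet/opus mappings -> `models[]`) come from the code.

```typescript
// Hypothetical input for migrateProfilesToProviders (values invented).
const legacyProfile = {
  id: 'profile-1',
  name: 'My GLM proxy',
  baseUrl: 'https://api.z.ai/api/anthropic',
  apiKeySource: 'inline',
  apiKey: 'sk-...',
  modelMappings: { haiku: 'glm-4.7-air', sonnet: 'glm-4.7' },
};

// After migrateProfilesToProviders([legacyProfile]), roughly:
// {
//   id: 'profile-1',
//   name: 'My GLM proxy',
//   providerType: 'glm', // inferred from the z.ai base URL
//   enabled: true,
//   baseUrl: 'https://api.z.ai/api/anthropic',
//   apiKeySource: 'inline',
//   apiKey: 'sk-...',
//   models: [
//     { id: 'glm-4.7-air', displayName: 'GLM 4.7 air', mapsToClaudeModel: 'haiku' },
//     { id: 'glm-4.7', displayName: 'GLM 4.7', mapsToClaudeModel: 'sonnet' },
//   ],
// }
```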
|
||||
/**
|
||||
* Infer a display name for a model based on its ID and tier
|
||||
*
|
||||
* @param modelId - The raw model ID
|
||||
* @param tier - The tier hint (haiku/sonnet/opus)
|
||||
* @returns A user-friendly display name
|
||||
*/
|
||||
private inferModelDisplayName(modelId: string, tier: 'haiku' | 'sonnet' | 'opus'): string {
|
||||
// Common patterns in model IDs
|
||||
const lowerModelId = modelId.toLowerCase();
|
||||
|
||||
// GLM models
|
||||
if (lowerModelId.includes('glm')) {
|
||||
return modelId.replace(/-/g, ' ').replace(/glm/i, 'GLM');
|
||||
}
|
||||
|
||||
// MiniMax models
|
||||
if (lowerModelId.includes('minimax')) {
|
||||
return modelId.replace(/-/g, ' ').replace(/minimax/i, 'MiniMax');
|
||||
}
|
||||
|
||||
// Claude models via OpenRouter or similar
|
||||
if (lowerModelId.includes('claude')) {
|
||||
return modelId;
|
||||
}
|
||||
|
||||
// Default: use model ID as display name with tier in parentheses
|
||||
return `${modelId} (${tier})`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Infer provider type from profile configuration
|
||||
*
|
||||
* @param profile - The legacy profile
|
||||
* @returns The inferred provider type
|
||||
*/
|
||||
private inferProviderType(profile: ClaudeApiProfile): ClaudeCompatibleProvider['providerType'] {
|
||||
const baseUrl = profile.baseUrl.toLowerCase();
|
||||
const name = profile.name.toLowerCase();
|
||||
|
||||
// Check URL patterns
|
||||
if (baseUrl.includes('z.ai') || baseUrl.includes('zhipuai')) {
|
||||
return 'glm';
|
||||
}
|
||||
if (baseUrl.includes('minimax')) {
|
||||
return 'minimax';
|
||||
}
|
||||
if (baseUrl.includes('openrouter')) {
|
||||
return 'openrouter';
|
||||
}
|
||||
if (baseUrl.includes('anthropic.com')) {
|
||||
return 'anthropic';
|
||||
}
|
||||
|
||||
// Check name patterns
|
||||
if (name.includes('glm') || name.includes('zhipu')) {
|
||||
return 'glm';
|
||||
}
|
||||
if (name.includes('minimax')) {
|
||||
return 'minimax';
|
||||
}
|
||||
if (name.includes('openrouter')) {
|
||||
return 'openrouter';
|
||||
}
|
||||
if (name.includes('anthropic') || name.includes('direct')) {
|
||||
return 'anthropic';
|
||||
}
|
||||
|
||||
// Default to custom
|
||||
return 'custom';
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate model-related settings to canonical format
|
||||
*
|
||||
@@ -570,29 +407,18 @@ export class SettingsService {
|
||||
ignoreEmptyArrayOverwrite('recentFolders');
|
||||
ignoreEmptyArrayOverwrite('mcpServers');
|
||||
ignoreEmptyArrayOverwrite('enabledCursorModels');
|
||||
ignoreEmptyArrayOverwrite('claudeApiProfiles');
|
||||
// Note: claudeCompatibleProviders intentionally NOT guarded - users should be able to delete all providers
|
||||
|
||||
// Empty object overwrite guard
|
||||
const ignoreEmptyObjectOverwrite = <K extends keyof GlobalSettings>(key: K): void => {
|
||||
const nextVal = sanitizedUpdates[key] as unknown;
|
||||
const curVal = current[key] as unknown;
|
||||
if (
|
||||
nextVal &&
|
||||
typeof nextVal === 'object' &&
|
||||
!Array.isArray(nextVal) &&
|
||||
Object.keys(nextVal).length === 0 &&
|
||||
curVal &&
|
||||
typeof curVal === 'object' &&
|
||||
!Array.isArray(curVal) &&
|
||||
Object.keys(curVal).length > 0
|
||||
) {
|
||||
delete sanitizedUpdates[key];
|
||||
}
|
||||
};
|
||||
|
||||
ignoreEmptyObjectOverwrite('lastSelectedSessionByProject');
|
||||
ignoreEmptyObjectOverwrite('autoModeByWorktree');
|
||||
if (
|
||||
sanitizedUpdates.lastSelectedSessionByProject &&
|
||||
typeof sanitizedUpdates.lastSelectedSessionByProject === 'object' &&
|
||||
!Array.isArray(sanitizedUpdates.lastSelectedSessionByProject) &&
|
||||
Object.keys(sanitizedUpdates.lastSelectedSessionByProject).length === 0 &&
|
||||
current.lastSelectedSessionByProject &&
|
||||
Object.keys(current.lastSelectedSessionByProject).length > 0
|
||||
) {
|
||||
delete sanitizedUpdates.lastSelectedSessionByProject;
|
||||
}
|
||||
|
||||
// If a request attempted to wipe projects, also ignore theme changes in that same request.
|
||||
if (attemptedProjectWipe) {
|
||||
@@ -621,21 +447,6 @@ export class SettingsService {
|
||||
};
|
||||
}
|
||||
|
||||
// Deep merge autoModeByWorktree if provided (preserves other worktree entries)
|
||||
if (sanitizedUpdates.autoModeByWorktree) {
|
||||
type WorktreeEntry = { maxConcurrency: number; branchName: string | null };
|
||||
const mergedAutoModeByWorktree: Record<string, WorktreeEntry> = {
|
||||
...current.autoModeByWorktree,
|
||||
};
|
||||
for (const [key, value] of Object.entries(sanitizedUpdates.autoModeByWorktree)) {
|
||||
mergedAutoModeByWorktree[key] = {
|
||||
...mergedAutoModeByWorktree[key],
|
||||
...value,
|
||||
};
|
||||
}
|
||||
updated.autoModeByWorktree = mergedAutoModeByWorktree;
|
||||
}
|
||||
|
||||
await writeSettingsJson(settingsPath, updated);
|
||||
logger.info('Global settings updated');
|
||||
|
||||
@@ -821,51 +632,6 @@ export class SettingsService {
|
||||
};
|
||||
}
|
||||
|
||||
// Handle activeClaudeApiProfileId special cases:
|
||||
// - "__USE_GLOBAL__" marker means delete the key (use global setting)
|
||||
// - null means explicit "Direct Anthropic API"
|
||||
// - string means specific profile ID
|
||||
if (
|
||||
'activeClaudeApiProfileId' in updates &&
|
||||
updates.activeClaudeApiProfileId === '__USE_GLOBAL__'
|
||||
) {
|
||||
delete updated.activeClaudeApiProfileId;
|
||||
}
|
||||
|
||||
// Handle phaseModelOverrides special cases:
|
||||
// - "__CLEAR__" marker means delete the key (use global settings for all phases)
|
||||
// - object means partial overrides for specific phases
|
||||
if (
|
||||
'phaseModelOverrides' in updates &&
|
||||
(updates as Record<string, unknown>).phaseModelOverrides === '__CLEAR__'
|
||||
) {
|
||||
delete updated.phaseModelOverrides;
|
||||
}
|
||||
|
||||
// Handle defaultFeatureModel special cases:
|
||||
// - "__CLEAR__" marker means delete the key (use global setting)
|
||||
// - object means project-specific override
|
||||
if (
|
||||
'defaultFeatureModel' in updates &&
|
||||
(updates as Record<string, unknown>).defaultFeatureModel === '__CLEAR__'
|
||||
) {
|
||||
delete updated.defaultFeatureModel;
|
||||
}
|
||||
|
||||
// Handle devCommand special cases:
|
||||
// - null means delete the key (use auto-detection)
|
||||
// - string means custom command
|
||||
if ('devCommand' in updates && updates.devCommand === null) {
|
||||
delete updated.devCommand;
|
||||
}
|
||||
|
||||
// Handle testCommand special cases:
|
||||
// - null means delete the key (use auto-detection)
|
||||
// - string means custom command
|
||||
if ('testCommand' in updates && updates.testCommand === null) {
|
||||
delete updated.testCommand;
|
||||
}
|
||||
|
||||
await writeSettingsJson(settingsPath, updated);
|
||||
logger.info(`Project settings updated for ${projectPath}`);
|
||||
|
||||
@@ -951,7 +717,7 @@ export class SettingsService {
      theme: (appState.theme as GlobalSettings['theme']) || 'dark',
      sidebarOpen: appState.sidebarOpen !== undefined ? (appState.sidebarOpen as boolean) : true,
      chatHistoryOpen: (appState.chatHistoryOpen as boolean) || false,
      maxConcurrency: (appState.maxConcurrency as number) || DEFAULT_MAX_CONCURRENCY,
      maxConcurrency: (appState.maxConcurrency as number) || 3,
      defaultSkipTests:
        appState.defaultSkipTests !== undefined ? (appState.defaultSkipTests as boolean) : true,
      enableDependencyBlocking:
|
||||
@@ -1,682 +0,0 @@
|
||||
/**
|
||||
* Test Runner Service
|
||||
*
|
||||
* Manages test execution processes for git worktrees.
|
||||
* Runs user-configured test commands with output streaming.
|
||||
*
|
||||
* Features:
|
||||
* - Process management with graceful shutdown
|
||||
* - Output buffering and throttling for WebSocket streaming
|
||||
* - Support for running all tests or specific files
|
||||
* - Cross-platform process cleanup (Windows/Unix)
|
||||
*/
|
||||
|
||||
import { spawn, execSync, type ChildProcess } from 'child_process';
|
||||
import * as secureFs from '../lib/secure-fs.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import type { EventEmitter } from '../lib/events.js';
|
||||
|
||||
const logger = createLogger('TestRunnerService');
|
||||
|
||||
// Maximum scrollback buffer size (characters)
|
||||
const MAX_SCROLLBACK_SIZE = 50000; // ~50KB per test run
|
||||
|
||||
// Throttle output to prevent overwhelming WebSocket under heavy load
|
||||
// Note: Flushing too frequently (throttle interval < 50ms) can cause memory issues and UI crashes
|
||||
// due to rapid React state updates and string concatenation overhead
|
||||
const OUTPUT_THROTTLE_MS = 100; // ~10fps - balances responsiveness with stability
|
||||
const OUTPUT_BATCH_SIZE = 8192; // Larger batch size to reduce event frequency
|
||||
|
||||
/**
|
||||
* Status of a test run
|
||||
*/
|
||||
export type TestRunStatus = 'pending' | 'running' | 'passed' | 'failed' | 'cancelled' | 'error';
|
||||
|
||||
/**
|
||||
* Information about an active test run session
|
||||
*/
|
||||
export interface TestRunSession {
|
||||
/** Unique identifier for this test run */
|
||||
id: string;
|
||||
/** Path to the worktree where tests are running */
|
||||
worktreePath: string;
|
||||
/** The command being run */
|
||||
command: string;
|
||||
/** The spawned child process */
|
||||
process: ChildProcess | null;
|
||||
/** When the test run started */
|
||||
startedAt: Date;
|
||||
/** When the test run finished (if completed) */
|
||||
finishedAt: Date | null;
|
||||
/** Current status of the test run */
|
||||
status: TestRunStatus;
|
||||
/** Exit code from the process (if completed) */
|
||||
exitCode: number | null;
|
||||
/** Specific test file being run (optional) */
|
||||
testFile?: string;
|
||||
/** Scrollback buffer for log history (replay on reconnect) */
|
||||
scrollbackBuffer: string;
|
||||
/** Pending output to be flushed to subscribers */
|
||||
outputBuffer: string;
|
||||
/** Throttle timer for batching output */
|
||||
flushTimeout: NodeJS.Timeout | null;
|
||||
/** Flag to indicate session is stopping (prevents output after stop) */
|
||||
stopping: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of a test run operation
|
||||
*/
|
||||
export interface TestRunResult {
|
||||
success: boolean;
|
||||
result?: {
|
||||
sessionId: string;
|
||||
worktreePath: string;
|
||||
command: string;
|
||||
status: TestRunStatus;
|
||||
testFile?: string;
|
||||
message: string;
|
||||
};
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Test Runner Service class
|
||||
* Manages test execution processes across worktrees
|
||||
*/
|
||||
class TestRunnerService {
|
||||
private sessions: Map<string, TestRunSession> = new Map();
|
||||
private emitter: EventEmitter | null = null;
|
||||
|
||||
/**
|
||||
* Set the event emitter for streaming log events
|
||||
* Called during service initialization with the global event emitter
|
||||
*/
|
||||
setEventEmitter(emitter: EventEmitter): void {
|
||||
this.emitter = emitter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to check if a file exists using secureFs
|
||||
*/
|
||||
private async fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await secureFs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Append data to scrollback buffer with size limit enforcement
|
||||
* Evicts oldest data when buffer exceeds MAX_SCROLLBACK_SIZE
|
||||
*/
|
||||
private appendToScrollback(session: TestRunSession, data: string): void {
|
||||
session.scrollbackBuffer += data;
|
||||
if (session.scrollbackBuffer.length > MAX_SCROLLBACK_SIZE) {
|
||||
session.scrollbackBuffer = session.scrollbackBuffer.slice(-MAX_SCROLLBACK_SIZE);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush buffered output to WebSocket subscribers
|
||||
* Sends batched output to prevent overwhelming clients under heavy load
|
||||
*/
|
||||
private flushOutput(session: TestRunSession): void {
|
||||
// Skip flush if session is stopping or buffer is empty
|
||||
if (session.stopping || session.outputBuffer.length === 0) {
|
||||
session.flushTimeout = null;
|
||||
return;
|
||||
}
|
||||
|
||||
let dataToSend = session.outputBuffer;
|
||||
if (dataToSend.length > OUTPUT_BATCH_SIZE) {
|
||||
// Send in batches if buffer is large
|
||||
dataToSend = session.outputBuffer.slice(0, OUTPUT_BATCH_SIZE);
|
||||
session.outputBuffer = session.outputBuffer.slice(OUTPUT_BATCH_SIZE);
|
||||
// Schedule another flush for remaining data
|
||||
session.flushTimeout = setTimeout(() => this.flushOutput(session), OUTPUT_THROTTLE_MS);
|
||||
} else {
|
||||
session.outputBuffer = '';
|
||||
session.flushTimeout = null;
|
||||
}
|
||||
|
||||
// Emit output event for WebSocket streaming
|
||||
if (this.emitter) {
|
||||
this.emitter.emit('test-runner:output', {
|
||||
sessionId: session.id,
|
||||
worktreePath: session.worktreePath,
|
||||
content: dataToSend,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
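In short, output flows through two buffers: a scrollback buffer for replay and a throttled live buffer that is flushed in batches. A condensed, standalone sketch of that throttle/batch pattern follows; the constants mirror the ones above, and the emit function is a stand-in for the WebSocket event.

```typescript
// Standalone sketch of the throttle/batch pattern used above (not the service itself).
const THROTTLE_MS = 100;
const BATCH_SIZE = 8192;

let buffer = '';
let timer: NodeJS.Timeout | null = null;

function emitChunk(chunk: string): void {
  console.log(`emit ${chunk.length} chars`); // stand-in for the 'test-runner:output' event
}

function flush(): void {
  const chunk = buffer.slice(0, BATCH_SIZE);
  buffer = buffer.slice(BATCH_SIZE);
  timer = buffer.length > 0 ? setTimeout(flush, THROTTLE_MS) : null;
  if (chunk.length > 0) emitChunk(chunk);
}

function onData(data: Buffer): void {
  buffer += data.toString();
  if (!timer) timer = setTimeout(flush, THROTTLE_MS);
}
```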
|
||||
/**
|
||||
* Handle incoming stdout/stderr data from test process
|
||||
* Buffers data for scrollback replay and schedules throttled emission
|
||||
*/
|
||||
private handleProcessOutput(session: TestRunSession, data: Buffer): void {
|
||||
// Skip output if session is stopping
|
||||
if (session.stopping) {
|
||||
return;
|
||||
}
|
||||
|
||||
const content = data.toString();
|
||||
|
||||
// Append to scrollback buffer for replay on reconnect
|
||||
this.appendToScrollback(session, content);
|
||||
|
||||
// Buffer output for throttled live delivery
|
||||
session.outputBuffer += content;
|
||||
|
||||
// Schedule flush if not already scheduled
|
||||
if (!session.flushTimeout) {
|
||||
session.flushTimeout = setTimeout(() => this.flushOutput(session), OUTPUT_THROTTLE_MS);
|
||||
}
|
||||
|
||||
// Also log for debugging (existing behavior)
|
||||
logger.debug(`[${session.id}] ${content.trim()}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Kill a running process and its children (platform-specific cleanup)
|
||||
*/
|
||||
private killProcessTree(pid: number): void {
|
||||
try {
|
||||
if (process.platform === 'win32') {
|
||||
// Windows: use taskkill to kill process tree
|
||||
execSync(`taskkill /F /T /PID ${pid}`, { stdio: 'ignore' });
|
||||
} else {
|
||||
// Unix: kill the process group
|
||||
try {
|
||||
process.kill(-pid, 'SIGTERM');
|
||||
} catch {
|
||||
// Fallback to killing just the process
|
||||
process.kill(pid, 'SIGTERM');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug(`Error killing process ${pid}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique session ID
|
||||
*/
|
||||
private generateSessionId(): string {
|
||||
return `test-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a test file path to prevent command injection
|
||||
* Allows only safe characters for file paths
|
||||
*/
|
||||
private sanitizeTestFile(testFile: string): string {
|
||||
// Remove any shell metacharacters and normalize path
|
||||
// Allow only alphanumeric, dots, slashes, hyphens, underscores, colons (for Windows paths)
|
||||
return testFile.replace(/[^a-zA-Z0-9.\\/_\-:]/g, '');
|
||||
}
|
||||
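For instance, the allow-list above strips shell metacharacters before the path is appended to the test command. The calls below are illustrative (the method is private):

```typescript
// Illustrative results of the allow-list regex above.
sanitizeTestFile('src/routes/merge.test.ts'); // 'src/routes/merge.test.ts' (unchanged)
sanitizeTestFile('src/a.test.ts; rm -rf ~'); // 'src/a.test.tsrm-rf' (';', spaces, '~' removed)
sanitizeTestFile('C:\\repo\\tests\\unit.test.ts'); // unchanged: letters, digits, backslashes, colon allowed
```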
|
||||
/**
|
||||
* Start tests in a worktree using the provided command
|
||||
*
|
||||
* @param worktreePath - Path to the worktree where tests should run
|
||||
* @param options - Configuration for the test run
|
||||
* @returns TestRunResult with session info or error
|
||||
*/
|
||||
async startTests(
|
||||
worktreePath: string,
|
||||
options: {
|
||||
command: string;
|
||||
testFile?: string;
|
||||
}
|
||||
): Promise<TestRunResult> {
|
||||
const { command, testFile } = options;
|
||||
|
||||
// Check if already running
|
||||
const existingSession = this.getActiveSession(worktreePath);
|
||||
if (existingSession) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Tests are already running for this worktree (session: ${existingSession.id})`,
|
||||
};
|
||||
}
|
||||
|
||||
// Verify the worktree exists
|
||||
if (!(await this.fileExists(worktreePath))) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Worktree path does not exist: ${worktreePath}`,
|
||||
};
|
||||
}
|
||||
|
||||
if (!command) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'No test command provided',
|
||||
};
|
||||
}
|
||||
|
||||
// Build the final command (append test file if specified)
|
||||
let finalCommand = command;
|
||||
if (testFile) {
|
||||
// Sanitize test file path to prevent command injection
|
||||
const sanitizedFile = this.sanitizeTestFile(testFile);
|
||||
// Append the test file to the command
|
||||
// Most test runners support: command -- file or command file
|
||||
finalCommand = `${command} -- ${sanitizedFile}`;
|
||||
}
|
||||
|
||||
// Parse command into cmd and args (shell execution)
|
||||
// We use shell: true to support complex commands like "npm run test:server"
|
||||
logger.info(`Starting tests in ${worktreePath}`);
|
||||
logger.info(`Command: ${finalCommand}`);
|
||||
|
||||
// Create session
|
||||
const sessionId = this.generateSessionId();
|
||||
const session: TestRunSession = {
|
||||
id: sessionId,
|
||||
worktreePath,
|
||||
command: finalCommand,
|
||||
process: null,
|
||||
startedAt: new Date(),
|
||||
finishedAt: null,
|
||||
status: 'pending',
|
||||
exitCode: null,
|
||||
testFile,
|
||||
scrollbackBuffer: '',
|
||||
outputBuffer: '',
|
||||
flushTimeout: null,
|
||||
stopping: false,
|
||||
};
|
||||
|
||||
// Spawn the test process using shell
|
||||
const env = {
|
||||
...process.env,
|
||||
FORCE_COLOR: '1',
|
||||
COLORTERM: 'truecolor',
|
||||
TERM: 'xterm-256color',
|
||||
CI: 'true', // Helps some test runners format output better
|
||||
};
|
||||
|
||||
const testProcess = spawn(finalCommand, [], {
|
||||
cwd: worktreePath,
|
||||
env,
|
||||
shell: true,
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
detached: process.platform !== 'win32', // Use process groups on Unix for cleanup
|
||||
});
|
||||
|
||||
session.process = testProcess;
|
||||
session.status = 'running';
|
||||
|
||||
// Track if process failed early
|
||||
const status = { error: null as string | null, exited: false };
|
||||
|
||||
// Helper to clean up resources and emit events
|
||||
const cleanupAndFinish = (
|
||||
exitCode: number | null,
|
||||
finalStatus: TestRunStatus,
|
||||
errorMessage?: string
|
||||
) => {
|
||||
session.finishedAt = new Date();
|
||||
session.exitCode = exitCode;
|
||||
session.status = finalStatus;
|
||||
|
||||
if (session.flushTimeout) {
|
||||
clearTimeout(session.flushTimeout);
|
||||
session.flushTimeout = null;
|
||||
}
|
||||
|
||||
// Flush any remaining output
|
||||
if (session.outputBuffer.length > 0 && this.emitter && !session.stopping) {
|
||||
this.emitter.emit('test-runner:output', {
|
||||
sessionId: session.id,
|
||||
worktreePath: session.worktreePath,
|
||||
content: session.outputBuffer,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
session.outputBuffer = '';
|
||||
}
|
||||
|
||||
// Emit completed event
|
||||
if (this.emitter && !session.stopping) {
|
||||
this.emitter.emit('test-runner:completed', {
|
||||
sessionId: session.id,
|
||||
worktreePath: session.worktreePath,
|
||||
command: session.command,
|
||||
status: finalStatus,
|
||||
exitCode,
|
||||
error: errorMessage,
|
||||
duration: session.finishedAt.getTime() - session.startedAt.getTime(),
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Capture stdout
|
||||
if (testProcess.stdout) {
|
||||
testProcess.stdout.on('data', (data: Buffer) => {
|
||||
this.handleProcessOutput(session, data);
|
||||
});
|
||||
}
|
||||
|
||||
// Capture stderr
|
||||
if (testProcess.stderr) {
|
||||
testProcess.stderr.on('data', (data: Buffer) => {
|
||||
this.handleProcessOutput(session, data);
|
||||
});
|
||||
}
|
||||
|
||||
testProcess.on('error', (error) => {
|
||||
logger.error(`Process error for ${sessionId}:`, error);
|
||||
status.error = error.message;
|
||||
cleanupAndFinish(null, 'error', error.message);
|
||||
});
|
||||
|
||||
testProcess.on('exit', (code) => {
|
||||
logger.info(`Test process for ${worktreePath} exited with code ${code}`);
|
||||
status.exited = true;
|
||||
|
||||
// Determine final status based on exit code
|
||||
let finalStatus: TestRunStatus;
|
||||
if (session.stopping) {
|
||||
finalStatus = 'cancelled';
|
||||
} else if (code === 0) {
|
||||
finalStatus = 'passed';
|
||||
} else {
|
||||
finalStatus = 'failed';
|
||||
}
|
||||
|
||||
cleanupAndFinish(code, finalStatus);
|
||||
});
|
||||
|
||||
// Store session
|
||||
this.sessions.set(sessionId, session);
|
||||
|
||||
// Wait a moment to see if the process fails immediately
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
if (status.error) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Failed to start tests: ${status.error}`,
|
||||
};
|
||||
}
|
||||
|
||||
if (status.exited) {
|
||||
// Process already exited - check if it was immediate failure
|
||||
const exitedSession = this.sessions.get(sessionId);
|
||||
if (exitedSession && exitedSession.status === 'error') {
|
||||
return {
|
||||
success: false,
|
||||
error: `Test process exited immediately. Check output for details.`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Emit started event
|
||||
if (this.emitter) {
|
||||
this.emitter.emit('test-runner:started', {
|
||||
sessionId,
|
||||
worktreePath,
|
||||
command: finalCommand,
|
||||
testFile,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
result: {
|
||||
sessionId,
|
||||
worktreePath,
|
||||
command: finalCommand,
|
||||
status: 'running',
|
||||
testFile,
|
||||
message: `Tests started: ${finalCommand}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a running test session
|
||||
*
|
||||
* @param sessionId - The ID of the test session to stop
|
||||
* @returns Result with success status and message
|
||||
*/
|
||||
async stopTests(sessionId: string): Promise<{
|
||||
success: boolean;
|
||||
result?: { sessionId: string; message: string };
|
||||
error?: string;
|
||||
}> {
|
||||
const session = this.sessions.get(sessionId);
|
||||
|
||||
if (!session) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Test session not found: ${sessionId}`,
|
||||
};
|
||||
}
|
||||
|
||||
if (session.status !== 'running') {
|
||||
return {
|
||||
success: true,
|
||||
result: {
|
||||
sessionId,
|
||||
message: `Tests already finished (status: ${session.status})`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
logger.info(`Cancelling test session ${sessionId}`);
|
||||
|
||||
// Mark as stopping to prevent further output events
|
||||
session.stopping = true;
|
||||
|
||||
// Clean up flush timeout
|
||||
if (session.flushTimeout) {
|
||||
clearTimeout(session.flushTimeout);
|
||||
session.flushTimeout = null;
|
||||
}
|
||||
|
||||
// Kill the process
|
||||
if (session.process && !session.process.killed && session.process.pid) {
|
||||
this.killProcessTree(session.process.pid);
|
||||
}
|
||||
|
||||
session.status = 'cancelled';
|
||||
session.finishedAt = new Date();
|
||||
|
||||
// Emit cancelled event
|
||||
if (this.emitter) {
|
||||
this.emitter.emit('test-runner:completed', {
|
||||
sessionId,
|
||||
worktreePath: session.worktreePath,
|
||||
command: session.command,
|
||||
status: 'cancelled',
|
||||
exitCode: null,
|
||||
duration: session.finishedAt.getTime() - session.startedAt.getTime(),
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
result: {
|
||||
sessionId,
|
||||
message: 'Test run cancelled',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the active test session for a worktree
|
||||
*/
|
||||
getActiveSession(worktreePath: string): TestRunSession | undefined {
|
||||
for (const session of this.sessions.values()) {
|
||||
if (session.worktreePath === worktreePath && session.status === 'running') {
|
||||
return session;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a test session by ID
|
||||
*/
|
||||
getSession(sessionId: string): TestRunSession | undefined {
|
||||
return this.sessions.get(sessionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get buffered output for a test session
|
||||
*/
|
||||
getSessionOutput(sessionId: string): {
|
||||
success: boolean;
|
||||
result?: {
|
||||
sessionId: string;
|
||||
output: string;
|
||||
status: TestRunStatus;
|
||||
startedAt: string;
|
||||
finishedAt: string | null;
|
||||
};
|
||||
error?: string;
|
||||
} {
|
||||
const session = this.sessions.get(sessionId);
|
||||
|
||||
if (!session) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Test session not found: ${sessionId}`,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
result: {
|
||||
sessionId,
|
||||
output: session.scrollbackBuffer,
|
||||
status: session.status,
|
||||
startedAt: session.startedAt.toISOString(),
|
||||
finishedAt: session.finishedAt?.toISOString() || null,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* List all test sessions (optionally filter by worktree)
|
||||
*/
|
||||
listSessions(worktreePath?: string): {
|
||||
success: boolean;
|
||||
result: {
|
||||
sessions: Array<{
|
||||
sessionId: string;
|
||||
worktreePath: string;
|
||||
command: string;
|
||||
status: TestRunStatus;
|
||||
testFile?: string;
|
||||
startedAt: string;
|
||||
finishedAt: string | null;
|
||||
exitCode: number | null;
|
||||
}>;
|
||||
};
|
||||
} {
|
||||
let sessions = Array.from(this.sessions.values());
|
||||
|
||||
if (worktreePath) {
|
||||
sessions = sessions.filter((s) => s.worktreePath === worktreePath);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
result: {
|
||||
sessions: sessions.map((s) => ({
|
||||
sessionId: s.id,
|
||||
worktreePath: s.worktreePath,
|
||||
command: s.command,
|
||||
status: s.status,
|
||||
testFile: s.testFile,
|
||||
startedAt: s.startedAt.toISOString(),
|
||||
finishedAt: s.finishedAt?.toISOString() || null,
|
||||
exitCode: s.exitCode,
|
||||
})),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a worktree has an active test run
|
||||
*/
|
||||
isRunning(worktreePath: string): boolean {
|
||||
return this.getActiveSession(worktreePath) !== undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up old completed sessions (keep only recent ones)
|
||||
*/
|
||||
cleanupOldSessions(maxAgeMs: number = 30 * 60 * 1000): void {
|
||||
const now = Date.now();
|
||||
for (const [sessionId, session] of this.sessions.entries()) {
|
||||
if (session.status !== 'running' && session.finishedAt) {
|
||||
if (now - session.finishedAt.getTime() > maxAgeMs) {
|
||||
this.sessions.delete(sessionId);
|
||||
logger.debug(`Cleaned up old test session: ${sessionId}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel all running test sessions (for cleanup)
|
||||
*/
|
||||
async cancelAll(): Promise<void> {
|
||||
logger.info(`Cancelling all ${this.sessions.size} test sessions`);
|
||||
|
||||
for (const session of this.sessions.values()) {
|
||||
if (session.status === 'running') {
|
||||
await this.stopTests(session.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup service resources
|
||||
*/
|
||||
async cleanup(): Promise<void> {
|
||||
await this.cancelAll();
|
||||
this.sessions.clear();
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
let testRunnerServiceInstance: TestRunnerService | null = null;
|
||||
|
||||
export function getTestRunnerService(): TestRunnerService {
|
||||
if (!testRunnerServiceInstance) {
|
||||
testRunnerServiceInstance = new TestRunnerService();
|
||||
}
|
||||
return testRunnerServiceInstance;
|
||||
}
|
||||
|
||||
// Cleanup on process exit
|
||||
process.on('SIGTERM', () => {
|
||||
if (testRunnerServiceInstance) {
|
||||
testRunnerServiceInstance.cleanup().catch((err) => {
|
||||
logger.error('Cleanup failed on SIGTERM:', err);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
process.on('SIGINT', () => {
|
||||
if (testRunnerServiceInstance) {
|
||||
testRunnerServiceInstance.cleanup().catch((err) => {
|
||||
logger.error('Cleanup failed on SIGINT:', err);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Export the class for testing purposes
|
||||
export { TestRunnerService };
|
||||
@@ -23,16 +23,6 @@ export type {
|
||||
PhaseModelConfig,
|
||||
PhaseModelKey,
|
||||
PhaseModelEntry,
|
||||
// Claude-compatible provider types
|
||||
ApiKeySource,
|
||||
ClaudeCompatibleProviderType,
|
||||
ClaudeModelAlias,
|
||||
ProviderModel,
|
||||
ClaudeCompatibleProvider,
|
||||
ClaudeCompatibleProviderTemplate,
|
||||
// Legacy profile types (deprecated)
|
||||
ClaudeApiProfile,
|
||||
ClaudeApiProfileTemplate,
|
||||
} from '@automaker/types';
|
||||
|
||||
export {
|
||||
|
||||
@@ -41,14 +41,13 @@ describe('model-resolver.ts', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should pass through unknown models unchanged (may be provider models)', () => {
|
||||
// Unknown models now pass through unchanged to support ClaudeCompatibleProvider models
|
||||
// like GLM-4.7, MiniMax-M2.1, o1, etc.
|
||||
const models = ['o1', 'o1-mini', 'o3', 'unknown-model', 'fake-model-123', 'GLM-4.7'];
|
||||
it('should treat unknown models as falling back to default', () => {
|
||||
// Note: Don't include valid Cursor model IDs here (e.g., 'gpt-5.2' is in CURSOR_MODEL_MAP)
|
||||
const models = ['o1', 'o1-mini', 'o3', 'unknown-model', 'fake-model-123'];
|
||||
models.forEach((model) => {
|
||||
const result = resolveModelString(model);
|
||||
// Should pass through unchanged (could be provider models)
|
||||
expect(result).toBe(model);
|
||||
// Should fall back to default since these aren't supported
|
||||
expect(result).toBe(DEFAULT_MODELS.claude);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -74,12 +73,12 @@ describe('model-resolver.ts', () => {
|
||||
expect(result).toBe(customDefault);
|
||||
});
|
||||
|
||||
it('should pass through unknown model key unchanged (no warning)', () => {
|
||||
it('should return default for unknown model key', () => {
|
||||
const result = resolveModelString('unknown-model');
|
||||
// Unknown models pass through unchanged (could be provider models)
|
||||
expect(result).toBe('unknown-model');
|
||||
// No warning - unknown models are valid for providers
|
||||
expect(consoleSpy.warn).not.toHaveBeenCalled();
|
||||
expect(result).toBe(DEFAULT_MODELS.claude);
|
||||
expect(consoleSpy.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Unknown model key "unknown-model"')
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle empty string', () => {
|
||||
|
||||
@@ -1311,317 +1311,4 @@ describe('opencode-provider.ts', () => {
|
||||
expect(args[modelIndex + 1]).toBe('provider/model-v1.2.3-beta');
|
||||
});
|
||||
});
|
||||
|
||||
// ==========================================================================
// parseProvidersOutput Tests
// ==========================================================================

describe('parseProvidersOutput', () => {
// Helper function to access private method
function parseProviders(output: string) {
return (
provider as unknown as {
parseProvidersOutput: (output: string) => Array<{
id: string;
name: string;
authenticated: boolean;
authMethod?: 'oauth' | 'api_key';
}>;
}
).parseProvidersOutput(output);
}

// =======================================================================
// Critical Fix Validation
// =======================================================================

describe('Critical Fix Validation', () => {
it('should map "z.ai coding plan" to "zai-coding-plan" (NOT "z-ai")', () => {
const output = '● z.ai coding plan oauth';
const result = parseProviders(output);

expect(result).toHaveLength(1);
expect(result[0].id).toBe('zai-coding-plan');
expect(result[0].name).toBe('z.ai coding plan');
expect(result[0].authMethod).toBe('oauth');
});

it('should map "z.ai" to "z-ai" (different from coding plan)', () => {
const output = '● z.ai api';
const result = parseProviders(output);

expect(result).toHaveLength(1);
expect(result[0].id).toBe('z-ai');
expect(result[0].name).toBe('z.ai');
expect(result[0].authMethod).toBe('api_key');
});

it('should distinguish between "z.ai coding plan" and "z.ai"', () => {
const output = '● z.ai coding plan oauth\n● z.ai api';
const result = parseProviders(output);

expect(result).toHaveLength(2);
expect(result[0].id).toBe('zai-coding-plan');
expect(result[0].name).toBe('z.ai coding plan');
expect(result[1].id).toBe('z-ai');
expect(result[1].name).toBe('z.ai');
});
});

// =======================================================================
// Provider Name Mapping
// =======================================================================

describe('Provider Name Mapping', () => {
it('should map all 12 providers correctly', () => {
const output = `● anthropic oauth
● github copilot oauth
● google api
● openai api
● openrouter api
● azure api
● amazon bedrock oauth
● ollama api
● lm studio api
● opencode oauth
● z.ai coding plan oauth
● z.ai api`;

const result = parseProviders(output);

expect(result).toHaveLength(12);
expect(result.map((p) => p.id)).toEqual([
'anthropic',
'github-copilot',
'google',
'openai',
'openrouter',
'azure',
'amazon-bedrock',
'ollama',
'lmstudio',
'opencode',
'zai-coding-plan',
'z-ai',
]);
});

it('should handle case-insensitive provider names and preserve original casing', () => {
const output = '● Anthropic api\n● OPENAI oauth\n● GitHub Copilot oauth';
const result = parseProviders(output);

expect(result).toHaveLength(3);
expect(result[0].id).toBe('anthropic');
expect(result[0].name).toBe('Anthropic'); // Preserves casing
expect(result[1].id).toBe('openai');
expect(result[1].name).toBe('OPENAI'); // Preserves casing
expect(result[2].id).toBe('github-copilot');
expect(result[2].name).toBe('GitHub Copilot'); // Preserves casing
});

it('should handle multi-word provider names with spaces', () => {
const output = '● Amazon Bedrock oauth\n● LM Studio api\n● GitHub Copilot oauth';
const result = parseProviders(output);

expect(result[0].id).toBe('amazon-bedrock');
expect(result[0].name).toBe('Amazon Bedrock');
expect(result[1].id).toBe('lmstudio');
expect(result[1].name).toBe('LM Studio');
expect(result[2].id).toBe('github-copilot');
expect(result[2].name).toBe('GitHub Copilot');
});
});
// =======================================================================
// Duplicate Aliases
// =======================================================================

describe('Duplicate Aliases', () => {
it('should map provider aliases to the same ID', () => {
// Test copilot variants
const copilot1 = parseProviders('● copilot oauth');
const copilot2 = parseProviders('● github copilot oauth');
expect(copilot1[0].id).toBe('github-copilot');
expect(copilot2[0].id).toBe('github-copilot');

// Test bedrock variants
const bedrock1 = parseProviders('● bedrock oauth');
const bedrock2 = parseProviders('● amazon bedrock oauth');
expect(bedrock1[0].id).toBe('amazon-bedrock');
expect(bedrock2[0].id).toBe('amazon-bedrock');

// Test lmstudio variants
const lm1 = parseProviders('● lmstudio api');
const lm2 = parseProviders('● lm studio api');
expect(lm1[0].id).toBe('lmstudio');
expect(lm2[0].id).toBe('lmstudio');
});
});

// =======================================================================
// Authentication Methods
// =======================================================================

describe('Authentication Methods', () => {
it('should detect oauth and api_key auth methods', () => {
const output = '● anthropic oauth\n● openai api\n● google api_key';
const result = parseProviders(output);

expect(result[0].authMethod).toBe('oauth');
expect(result[1].authMethod).toBe('api_key');
expect(result[2].authMethod).toBe('api_key');
});

it('should set authenticated to true and handle case-insensitive auth methods', () => {
const output = '● anthropic OAuth\n● openai API';
const result = parseProviders(output);

expect(result[0].authenticated).toBe(true);
expect(result[0].authMethod).toBe('oauth');
expect(result[1].authenticated).toBe(true);
expect(result[1].authMethod).toBe('api_key');
});

it('should return undefined authMethod for unknown auth types', () => {
const output = '● anthropic unknown-auth';
const result = parseProviders(output);

expect(result[0].authenticated).toBe(true);
expect(result[0].authMethod).toBeUndefined();
});
});

// =======================================================================
// ANSI Escape Sequences
// =======================================================================

describe('ANSI Escape Sequences', () => {
it('should strip ANSI color codes from output', () => {
const output = '\x1b[32m● anthropic oauth\x1b[0m';
const result = parseProviders(output);

expect(result).toHaveLength(1);
expect(result[0].id).toBe('anthropic');
expect(result[0].name).toBe('anthropic');
});

it('should handle complex ANSI sequences and codes in provider names', () => {
const output =
'\x1b[1;32m●\x1b[0m \x1b[33mgit\x1b[32mhub\x1b[0m copilot\x1b[0m \x1b[36moauth\x1b[0m';
const result = parseProviders(output);

expect(result).toHaveLength(1);
expect(result[0].id).toBe('github-copilot');
});
});

// =======================================================================
// Edge Cases
// =======================================================================

describe('Edge Cases', () => {
it('should return empty array for empty output or no ● symbols', () => {
expect(parseProviders('')).toEqual([]);
expect(parseProviders('anthropic oauth\nopenai api')).toEqual([]);
expect(parseProviders('No authenticated providers')).toEqual([]);
});

it('should skip malformed lines with ● but insufficient content', () => {
const output = '●\n● \n● anthropic\n● openai api';
const result = parseProviders(output);

// Only the last line has both provider name and auth method
expect(result).toHaveLength(1);
expect(result[0].id).toBe('openai');
});

it('should use fallback for unknown providers (spaces to hyphens)', () => {
const output = '● unknown provider name oauth';
const result = parseProviders(output);

expect(result[0].id).toBe('unknown-provider-name');
expect(result[0].name).toBe('unknown provider name');
});

it('should handle extra whitespace and mixed case', () => {
const output = '● AnThRoPiC oauth';
const result = parseProviders(output);

expect(result[0].id).toBe('anthropic');
expect(result[0].name).toBe('AnThRoPiC');
});

it('should handle multiple ● symbols on same line', () => {
const output = '● ● anthropic oauth';
const result = parseProviders(output);

expect(result).toHaveLength(1);
expect(result[0].id).toBe('anthropic');
});

it('should handle different newline formats and trailing newlines', () => {
const outputUnix = '● anthropic oauth\n● openai api';
const outputWindows = '● anthropic oauth\r\n● openai api\r\n\r\n';

const resultUnix = parseProviders(outputUnix);
const resultWindows = parseProviders(outputWindows);

expect(resultUnix).toHaveLength(2);
expect(resultWindows).toHaveLength(2);
});

it('should handle provider names with numbers and special characters', () => {
const output = '● gpt-4o api';
const result = parseProviders(output);

expect(result[0].id).toBe('gpt-4o');
expect(result[0].name).toBe('gpt-4o');
});
});
// =======================================================================
// Real-world CLI Output
// =======================================================================

describe('Real-world CLI Output', () => {
it('should parse CLI output with box drawing characters and decorations', () => {
const output = `┌─────────────────────────────────────────────────┐
│ Authenticated Providers │
├─────────────────────────────────────────────────┤
● anthropic oauth
● openai api
└─────────────────────────────────────────────────┘`;

const result = parseProviders(output);

expect(result).toHaveLength(2);
expect(result[0].id).toBe('anthropic');
expect(result[1].id).toBe('openai');
});

it('should parse output with ANSI colors and box characters', () => {
const output = `\x1b[1m┌─────────────────────────────────────────────────┐\x1b[0m
\x1b[1m│ Authenticated Providers │\x1b[0m
\x1b[1m├─────────────────────────────────────────────────┤\x1b[0m
\x1b[32m●\x1b[0m \x1b[33manthropic\x1b[0m \x1b[36moauth\x1b[0m
\x1b[32m●\x1b[0m \x1b[33mgoogle\x1b[0m \x1b[36mapi\x1b[0m
\x1b[1m└─────────────────────────────────────────────────┘\x1b[0m`;

const result = parseProviders(output);

expect(result).toHaveLength(2);
expect(result[0].id).toBe('anthropic');
expect(result[1].id).toBe('google');
});

it('should handle "no authenticated providers" message', () => {
const output = `┌─────────────────────────────────────────────────┐
│ No authenticated providers found │
└─────────────────────────────────────────────────┘`;

const result = parseProviders(output);
expect(result).toEqual([]);
});
});
});
});

@@ -4,7 +4,6 @@ import { ClaudeProvider } from '@/providers/claude-provider.js';
import { CursorProvider } from '@/providers/cursor-provider.js';
import { CodexProvider } from '@/providers/codex-provider.js';
import { OpencodeProvider } from '@/providers/opencode-provider.js';
import { GeminiProvider } from '@/providers/gemini-provider.js';

describe('provider-factory.ts', () => {
let consoleSpy: any;
@@ -12,7 +11,6 @@ describe('provider-factory.ts', () => {
let detectCursorSpy: any;
let detectCodexSpy: any;
let detectOpencodeSpy: any;
let detectGeminiSpy: any;

beforeEach(() => {
consoleSpy = {
@@ -32,9 +30,6 @@ describe('provider-factory.ts', () => {
detectOpencodeSpy = vi
.spyOn(OpencodeProvider.prototype, 'detectInstallation')
.mockResolvedValue({ installed: true });
detectGeminiSpy = vi
.spyOn(GeminiProvider.prototype, 'detectInstallation')
.mockResolvedValue({ installed: true });
});

afterEach(() => {
@@ -43,7 +38,6 @@ describe('provider-factory.ts', () => {
detectCursorSpy.mockRestore();
detectCodexSpy.mockRestore();
detectOpencodeSpy.mockRestore();
detectGeminiSpy.mockRestore();
});

describe('getProviderForModel', () => {
@@ -172,15 +166,9 @@ describe('provider-factory.ts', () => {
expect(hasClaudeProvider).toBe(true);
});

it('should return exactly 5 providers', () => {
it('should return exactly 4 providers', () => {
const providers = ProviderFactory.getAllProviders();
expect(providers).toHaveLength(5);
});

it('should include GeminiProvider', () => {
const providers = ProviderFactory.getAllProviders();
const hasGeminiProvider = providers.some((p) => p instanceof GeminiProvider);
expect(hasGeminiProvider).toBe(true);
expect(providers).toHaveLength(4);
});

it('should include CursorProvider', () => {
@@ -218,8 +206,7 @@ describe('provider-factory.ts', () => {
expect(keys).toContain('cursor');
expect(keys).toContain('codex');
expect(keys).toContain('opencode');
expect(keys).toContain('gemini');
expect(keys).toHaveLength(5);
expect(keys).toHaveLength(4);
});

it('should include cursor status', async () => {
@@ -1,565 +0,0 @@
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
import type { Request, Response } from 'express';
import { createMockExpressContext } from '../../../utils/mocks.js';

// Mock child_process with importOriginal to keep other exports
vi.mock('child_process', async (importOriginal) => {
const actual = await importOriginal<typeof import('child_process')>();
return {
...actual,
execFile: vi.fn(),
};
});

// Mock util.promisify to return the function as-is so we can mock execFile
vi.mock('util', async (importOriginal) => {
const actual = await importOriginal<typeof import('util')>();
return {
...actual,
promisify: (fn: unknown) => fn,
};
});

// Import handler after mocks are set up
import { createAddRemoteHandler } from '@/routes/worktree/routes/add-remote.js';
import { execFile } from 'child_process';

// Get the mocked execFile
const mockExecFile = execFile as Mock;

/**
 * Helper to create a standard mock implementation for git commands
 */
function createGitMock(options: {
existingRemotes?: string[];
addRemoteFails?: boolean;
addRemoteError?: string;
fetchFails?: boolean;
}): (command: string, args: string[]) => Promise<{ stdout: string; stderr: string }> {
const {
existingRemotes = [],
addRemoteFails = false,
addRemoteError = 'git remote add failed',
fetchFails = false,
} = options;

return (command: string, args: string[]) => {
if (command === 'git' && args[0] === 'remote' && args.length === 1) {
return Promise.resolve({ stdout: existingRemotes.join('\n'), stderr: '' });
}
if (command === 'git' && args[0] === 'remote' && args[1] === 'add') {
if (addRemoteFails) {
return Promise.reject(new Error(addRemoteError));
}
return Promise.resolve({ stdout: '', stderr: '' });
}
if (command === 'git' && args[0] === 'fetch') {
if (fetchFails) {
return Promise.reject(new Error('fetch failed'));
}
return Promise.resolve({ stdout: '', stderr: '' });
}
return Promise.resolve({ stdout: '', stderr: '' });
};
}
describe('add-remote route', () => {
let req: Request;
let res: Response;

beforeEach(() => {
vi.clearAllMocks();

const context = createMockExpressContext();
req = context.req;
res = context.res;
});

describe('input validation', () => {
it('should return 400 if worktreePath is missing', async () => {
req.body = { remoteName: 'origin', remoteUrl: 'https://github.com/user/repo.git' };

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'worktreePath required',
});
});

it('should return 400 if remoteName is missing', async () => {
req.body = { worktreePath: '/test/path', remoteUrl: 'https://github.com/user/repo.git' };

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'remoteName required',
});
});

it('should return 400 if remoteUrl is missing', async () => {
req.body = { worktreePath: '/test/path', remoteName: 'origin' };

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'remoteUrl required',
});
});
});

describe('remote name validation', () => {
it('should return 400 for empty remote name', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: '',
remoteUrl: 'https://github.com/user/repo.git',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'remoteName required',
});
});

it('should return 400 for remote name starting with dash', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: '-invalid',
remoteUrl: 'https://github.com/user/repo.git',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error:
'Invalid remote name. Must start with alphanumeric character and contain only letters, numbers, dashes, underscores, or periods.',
});
});

it('should return 400 for remote name starting with period', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: '.invalid',
remoteUrl: 'https://github.com/user/repo.git',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error:
'Invalid remote name. Must start with alphanumeric character and contain only letters, numbers, dashes, underscores, or periods.',
});
});

it('should return 400 for remote name with invalid characters', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'invalid name',
remoteUrl: 'https://github.com/user/repo.git',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error:
'Invalid remote name. Must start with alphanumeric character and contain only letters, numbers, dashes, underscores, or periods.',
});
});

it('should return 400 for remote name exceeding 250 characters', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'a'.repeat(251),
remoteUrl: 'https://github.com/user/repo.git',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error:
'Invalid remote name. Must start with alphanumeric character and contain only letters, numbers, dashes, underscores, or periods.',
});
});

it('should accept valid remote names with alphanumeric, dashes, underscores, and periods', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'my-remote_name.1',
remoteUrl: 'https://github.com/user/repo.git',
};

// Mock git remote to return empty list (no existing remotes)
mockExecFile.mockImplementation(createGitMock({ existingRemotes: [] }));

const handler = createAddRemoteHandler();
await handler(req, res);

// Should not return 400 for invalid name
expect(res.status).not.toHaveBeenCalledWith(400);
});
});
describe('remote URL validation', () => {
it('should return 400 for empty remote URL', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: '',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'remoteUrl required',
});
});

it('should return 400 for invalid remote URL', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'not-a-valid-url',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'Invalid remote URL. Must be a valid git URL (HTTPS, SSH, or git:// protocol).',
});
});

it('should return 400 for URL exceeding 2048 characters', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/' + 'a'.repeat(2049) + '.git',
};

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'Invalid remote URL. Must be a valid git URL (HTTPS, SSH, or git:// protocol).',
});
});

it('should accept HTTPS URLs', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: [] }));

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).not.toHaveBeenCalledWith(400);
});

it('should accept HTTP URLs', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'http://github.com/user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: [] }));

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).not.toHaveBeenCalledWith(400);
});

it('should accept SSH URLs', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'git@github.com:user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: [] }));

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).not.toHaveBeenCalledWith(400);
});

it('should accept git:// protocol URLs', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'git://github.com/user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: [] }));

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).not.toHaveBeenCalledWith(400);
});

it('should accept ssh:// protocol URLs', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'ssh://git@github.com/user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: [] }));

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).not.toHaveBeenCalledWith(400);
});
});
describe('remote already exists check', () => {
it('should return 400 with REMOTE_EXISTS code when remote already exists', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: ['origin', 'upstream'] }));

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(400);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: "Remote 'origin' already exists",
code: 'REMOTE_EXISTS',
});
});

it('should proceed if remote does not exist', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'new-remote',
remoteUrl: 'https://github.com/user/repo.git',
};

mockExecFile.mockImplementation(createGitMock({ existingRemotes: ['origin'] }));

const handler = createAddRemoteHandler();
await handler(req, res);

// Should call git remote add with array arguments
expect(mockExecFile).toHaveBeenCalledWith(
'git',
['remote', 'add', 'new-remote', 'https://github.com/user/repo.git'],
expect.any(Object)
);
});
});

describe('successful remote addition', () => {
it('should add remote successfully with successful fetch', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'upstream',
remoteUrl: 'https://github.com/other/repo.git',
};

mockExecFile.mockImplementation(
createGitMock({ existingRemotes: ['origin'], fetchFails: false })
);

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.json).toHaveBeenCalledWith({
success: true,
result: {
remoteName: 'upstream',
remoteUrl: 'https://github.com/other/repo.git',
fetched: true,
message: "Successfully added remote 'upstream' and fetched its branches",
},
});
});

it('should add remote successfully even if fetch fails', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'upstream',
remoteUrl: 'https://github.com/other/repo.git',
};

mockExecFile.mockImplementation(
createGitMock({ existingRemotes: ['origin'], fetchFails: true })
);

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.json).toHaveBeenCalledWith({
success: true,
result: {
remoteName: 'upstream',
remoteUrl: 'https://github.com/other/repo.git',
fetched: false,
message:
"Successfully added remote 'upstream' (fetch failed - you may need to fetch manually)",
},
});
});

it('should pass correct cwd option to git commands', async () => {
req.body = {
worktreePath: '/custom/worktree/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/user/repo.git',
};

const execCalls: { command: string; args: string[]; options: unknown }[] = [];
mockExecFile.mockImplementation((command: string, args: string[], options: unknown) => {
execCalls.push({ command, args, options });
if (command === 'git' && args[0] === 'remote' && args.length === 1) {
return Promise.resolve({ stdout: '', stderr: '' });
}
return Promise.resolve({ stdout: '', stderr: '' });
});

const handler = createAddRemoteHandler();
await handler(req, res);

// Check that git remote was called with correct cwd
expect((execCalls[0].options as { cwd: string }).cwd).toBe('/custom/worktree/path');
// Check that git remote add was called with correct cwd
expect((execCalls[1].options as { cwd: string }).cwd).toBe('/custom/worktree/path');
});
});
describe('error handling', () => {
it('should return 500 when git remote add fails', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/user/repo.git',
};

mockExecFile.mockImplementation(
createGitMock({
existingRemotes: [],
addRemoteFails: true,
addRemoteError: 'git remote add failed',
})
);

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(500);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: 'git remote add failed',
});
});

it('should continue adding remote if git remote check fails', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/user/repo.git',
};

mockExecFile.mockImplementation((command: string, args: string[]) => {
if (command === 'git' && args[0] === 'remote' && args.length === 1) {
return Promise.reject(new Error('not a git repo'));
}
if (command === 'git' && args[0] === 'remote' && args[1] === 'add') {
return Promise.resolve({ stdout: '', stderr: '' });
}
if (command === 'git' && args[0] === 'fetch') {
return Promise.resolve({ stdout: '', stderr: '' });
}
return Promise.resolve({ stdout: '', stderr: '' });
});

const handler = createAddRemoteHandler();
await handler(req, res);

// Should still try to add remote with array arguments
expect(mockExecFile).toHaveBeenCalledWith(
'git',
['remote', 'add', 'origin', 'https://github.com/user/repo.git'],
expect.any(Object)
);
expect(res.json).toHaveBeenCalledWith({
success: true,
result: expect.objectContaining({
remoteName: 'origin',
}),
});
});

it('should handle non-Error exceptions', async () => {
req.body = {
worktreePath: '/test/path',
remoteName: 'origin',
remoteUrl: 'https://github.com/user/repo.git',
};

mockExecFile.mockImplementation((command: string, args: string[]) => {
if (command === 'git' && args[0] === 'remote' && args.length === 1) {
return Promise.resolve({ stdout: '', stderr: '' });
}
if (command === 'git' && args[0] === 'remote' && args[1] === 'add') {
return Promise.reject('String error');
}
return Promise.resolve({ stdout: '', stderr: '' });
});

const handler = createAddRemoteHandler();
await handler(req, res);

expect(res.status).toHaveBeenCalledWith(500);
expect(res.json).toHaveBeenCalledWith({
success: false,
error: expect.any(String),
});
});
});
});
@@ -124,59 +124,6 @@ describe('claude-usage-service.ts', () => {

expect(result).toBe('Plain text');
});

it('should strip OSC sequences (window title, etc.)', () => {
const service = new ClaudeUsageService();
// OSC sequence to set window title: ESC ] 0 ; title BEL
const input = '\x1B]0;Claude Code\x07Regular text';
// @ts-expect-error - accessing private method for testing
const result = service.stripAnsiCodes(input);

expect(result).toBe('Regular text');
});

it('should strip DEC private mode sequences', () => {
const service = new ClaudeUsageService();
// DEC private mode sequences like ESC[?2026h and ESC[?2026l
const input = '\x1B[?2026lClaude Code\x1B[?2026h more text';
// @ts-expect-error - accessing private method for testing
const result = service.stripAnsiCodes(input);

expect(result).toBe('Claude Code more text');
});

it('should handle complex terminal output with mixed escape sequences', () => {
const service = new ClaudeUsageService();
// Simulate the garbled output seen in the bug: "[?2026l ]0;❇ Claude Code [?2026h"
// This contains OSC (set title) and DEC private mode sequences
const input =
'\x1B[?2026l\x1B]0;❇ Claude Code\x07\x1B[?2026hCurrent session 0%used Resets3am';
// @ts-expect-error - accessing private method for testing
const result = service.stripAnsiCodes(input);

expect(result).toBe('Current session 0%used Resets3am');
});

it('should strip single character escape sequences', () => {
const service = new ClaudeUsageService();
// ESC c is the reset terminal command
const input = '\x1BcReset text';
// @ts-expect-error - accessing private method for testing
const result = service.stripAnsiCodes(input);

expect(result).toBe('Reset text');
});

it('should remove control characters but preserve newlines and tabs', () => {
const service = new ClaudeUsageService();
// BEL character (\x07) should be stripped, but the word "Bell" is regular text
const input = 'Line 1\nLine 2\tTabbed\x07 with bell';
// @ts-expect-error - accessing private method for testing
const result = service.stripAnsiCodes(input);

// BEL is stripped, newlines and tabs preserved
expect(result).toBe('Line 1\nLine 2\tTabbed with bell');
});
});

describe('parseResetTime', () => {
@@ -1,623 +0,0 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { FeatureExportService, FEATURE_EXPORT_VERSION } from '@/services/feature-export-service.js';
import type { Feature, FeatureExport } from '@automaker/types';
import type { FeatureLoader } from '@/services/feature-loader.js';

describe('feature-export-service.ts', () => {
let exportService: FeatureExportService;
let mockFeatureLoader: {
get: ReturnType<typeof vi.fn>;
getAll: ReturnType<typeof vi.fn>;
create: ReturnType<typeof vi.fn>;
update: ReturnType<typeof vi.fn>;
generateFeatureId: ReturnType<typeof vi.fn>;
};
const testProjectPath = '/test/project';

const sampleFeature: Feature = {
id: 'feature-123-abc',
title: 'Test Feature',
category: 'UI',
description: 'A test feature description',
status: 'pending',
priority: 1,
dependencies: ['feature-456'],
descriptionHistory: [
{
description: 'Initial description',
timestamp: '2024-01-01T00:00:00.000Z',
source: 'initial',
},
],
planSpec: {
status: 'generated',
content: 'Plan content',
version: 1,
reviewedByUser: false,
},
imagePaths: ['/tmp/image1.png', '/tmp/image2.jpg'],
textFilePaths: [
{
id: 'file-1',
path: '/tmp/doc.txt',
filename: 'doc.txt',
mimeType: 'text/plain',
content: 'Some content',
},
],
};

beforeEach(() => {
vi.clearAllMocks();

// Create mock FeatureLoader instance
mockFeatureLoader = {
get: vi.fn(),
getAll: vi.fn(),
create: vi.fn(),
update: vi.fn(),
generateFeatureId: vi.fn().mockReturnValue('feature-mock-id'),
};

// Inject mock via constructor
exportService = new FeatureExportService(mockFeatureLoader as unknown as FeatureLoader);
});

describe('exportFeatureData', () => {
it('should export feature to JSON format', () => {
const result = exportService.exportFeatureData(sampleFeature, { format: 'json' });

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.version).toBe(FEATURE_EXPORT_VERSION);
expect(parsed.feature.id).toBe(sampleFeature.id);
expect(parsed.feature.title).toBe(sampleFeature.title);
expect(parsed.exportedAt).toBeDefined();
});

it('should export feature to YAML format', () => {
const result = exportService.exportFeatureData(sampleFeature, { format: 'yaml' });

expect(result).toContain('version:');
expect(result).toContain('feature:');
expect(result).toContain('Test Feature');
expect(result).toContain('exportedAt:');
});

it('should exclude description history when option is false', () => {
const result = exportService.exportFeatureData(sampleFeature, {
format: 'json',
includeHistory: false,
});

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.feature.descriptionHistory).toBeUndefined();
});

it('should include description history by default', () => {
const result = exportService.exportFeatureData(sampleFeature, { format: 'json' });

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.feature.descriptionHistory).toBeDefined();
expect(parsed.feature.descriptionHistory).toHaveLength(1);
});

it('should exclude plan spec when option is false', () => {
const result = exportService.exportFeatureData(sampleFeature, {
format: 'json',
includePlanSpec: false,
});

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.feature.planSpec).toBeUndefined();
});

it('should include plan spec by default', () => {
const result = exportService.exportFeatureData(sampleFeature, { format: 'json' });

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.feature.planSpec).toBeDefined();
});

it('should include metadata when provided', () => {
const result = exportService.exportFeatureData(sampleFeature, {
format: 'json',
metadata: { projectName: 'TestProject', branch: 'main' },
});

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.metadata).toEqual({ projectName: 'TestProject', branch: 'main' });
});

it('should include exportedBy when provided', () => {
const result = exportService.exportFeatureData(sampleFeature, {
format: 'json',
exportedBy: 'test-user',
});

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.exportedBy).toBe('test-user');
});

it('should remove transient fields (titleGenerating, error)', () => {
const featureWithTransient: Feature = {
...sampleFeature,
titleGenerating: true,
error: 'Some error',
};

const result = exportService.exportFeatureData(featureWithTransient, { format: 'json' });

const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.feature.titleGenerating).toBeUndefined();
expect(parsed.feature.error).toBeUndefined();
});

it('should support compact JSON (prettyPrint: false)', () => {
const prettyResult = exportService.exportFeatureData(sampleFeature, {
format: 'json',
prettyPrint: true,
});
const compactResult = exportService.exportFeatureData(sampleFeature, {
format: 'json',
prettyPrint: false,
});

// Compact should have no newlines/indentation
expect(compactResult).not.toContain('\n');
// Pretty should have newlines
expect(prettyResult).toContain('\n');
});
});
describe('exportFeature', () => {
it('should fetch and export feature by ID', async () => {
mockFeatureLoader.get.mockResolvedValue(sampleFeature);

const result = await exportService.exportFeature(testProjectPath, 'feature-123-abc');

expect(mockFeatureLoader.get).toHaveBeenCalledWith(testProjectPath, 'feature-123-abc');
const parsed = JSON.parse(result) as FeatureExport;
expect(parsed.feature.id).toBe(sampleFeature.id);
});

it('should throw when feature not found', async () => {
mockFeatureLoader.get.mockResolvedValue(null);

await expect(exportService.exportFeature(testProjectPath, 'nonexistent')).rejects.toThrow(
'Feature nonexistent not found'
);
});
});

describe('exportFeatures', () => {
const features: Feature[] = [
{ ...sampleFeature, id: 'feature-1', category: 'UI' },
{ ...sampleFeature, id: 'feature-2', category: 'Backend', status: 'completed' },
{ ...sampleFeature, id: 'feature-3', category: 'UI', status: 'pending' },
];

it('should export all features', async () => {
mockFeatureLoader.getAll.mockResolvedValue(features);

const result = await exportService.exportFeatures(testProjectPath);

const parsed = JSON.parse(result);
expect(parsed.count).toBe(3);
expect(parsed.features).toHaveLength(3);
});

it('should filter by category', async () => {
mockFeatureLoader.getAll.mockResolvedValue(features);

const result = await exportService.exportFeatures(testProjectPath, { category: 'UI' });

const parsed = JSON.parse(result);
expect(parsed.count).toBe(2);
expect(parsed.features.every((f: FeatureExport) => f.feature.category === 'UI')).toBe(true);
});

it('should filter by status', async () => {
mockFeatureLoader.getAll.mockResolvedValue(features);

const result = await exportService.exportFeatures(testProjectPath, { status: 'completed' });

const parsed = JSON.parse(result);
expect(parsed.count).toBe(1);
expect(parsed.features[0].feature.status).toBe('completed');
});

it('should filter by feature IDs', async () => {
mockFeatureLoader.getAll.mockResolvedValue(features);

const result = await exportService.exportFeatures(testProjectPath, {
featureIds: ['feature-1', 'feature-3'],
});

const parsed = JSON.parse(result);
expect(parsed.count).toBe(2);
const ids = parsed.features.map((f: FeatureExport) => f.feature.id);
expect(ids).toContain('feature-1');
expect(ids).toContain('feature-3');
expect(ids).not.toContain('feature-2');
});

it('should export to YAML format', async () => {
mockFeatureLoader.getAll.mockResolvedValue(features);

const result = await exportService.exportFeatures(testProjectPath, { format: 'yaml' });

expect(result).toContain('version:');
expect(result).toContain('count:');
expect(result).toContain('features:');
});

it('should include metadata when provided', async () => {
mockFeatureLoader.getAll.mockResolvedValue(features);

const result = await exportService.exportFeatures(testProjectPath, {
metadata: { projectName: 'TestProject' },
});

const parsed = JSON.parse(result);
expect(parsed.metadata).toEqual({ projectName: 'TestProject' });
});
});

describe('parseImportData', () => {
it('should parse valid JSON', () => {
const json = JSON.stringify(sampleFeature);
const result = exportService.parseImportData(json);

expect(result).toBeDefined();
expect((result as Feature).id).toBe(sampleFeature.id);
});

it('should parse valid YAML', () => {
const yaml = `
id: feature-yaml-123
title: YAML Feature
category: Testing
description: A YAML feature
`;
const result = exportService.parseImportData(yaml);

expect(result).toBeDefined();
expect((result as Feature).id).toBe('feature-yaml-123');
expect((result as Feature).title).toBe('YAML Feature');
});

it('should return null for invalid data', () => {
const result = exportService.parseImportData('not valid {json} or yaml: [');

expect(result).toBeNull();
});

it('should parse FeatureExport wrapper', () => {
const exportData: FeatureExport = {
version: '1.0.0',
feature: sampleFeature,
exportedAt: new Date().toISOString(),
};
const json = JSON.stringify(exportData);

const result = exportService.parseImportData(json) as FeatureExport;

expect(result.version).toBe('1.0.0');
expect(result.feature.id).toBe(sampleFeature.id);
});
});

describe('detectFormat', () => {
it('should detect JSON format', () => {
const json = JSON.stringify({ id: 'test' });
expect(exportService.detectFormat(json)).toBe('json');
});

it('should detect YAML format', () => {
const yaml = `
id: test
title: Test
`;
expect(exportService.detectFormat(yaml)).toBe('yaml');
});

it('should detect YAML for plain text (YAML is very permissive)', () => {
// YAML parses any plain text as a string, so this is detected as valid YAML
// The actual validation happens in parseImportData which checks for required fields
expect(exportService.detectFormat('not valid {[')).toBe('yaml');
});

it('should handle whitespace', () => {
const json = ' { "id": "test" } ';
expect(exportService.detectFormat(json)).toBe('json');
});
});
describe('importFeature', () => {
it('should import feature from raw Feature data', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockResolvedValue(sampleFeature);

const result = await exportService.importFeature(testProjectPath, {
data: sampleFeature,
});

expect(result.success).toBe(true);
expect(result.featureId).toBe(sampleFeature.id);
expect(mockFeatureLoader.create).toHaveBeenCalled();
});

it('should import feature from FeatureExport wrapper', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockResolvedValue(sampleFeature);

const exportData: FeatureExport = {
version: '1.0.0',
feature: sampleFeature,
exportedAt: new Date().toISOString(),
};

const result = await exportService.importFeature(testProjectPath, {
data: exportData,
});

expect(result.success).toBe(true);
expect(result.featureId).toBe(sampleFeature.id);
});

it('should use custom ID when provided', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...sampleFeature,
id: data.id!,
}));

const result = await exportService.importFeature(testProjectPath, {
data: sampleFeature,
newId: 'custom-id-123',
});

expect(result.success).toBe(true);
expect(result.featureId).toBe('custom-id-123');
});

it('should fail when feature exists and overwrite is false', async () => {
mockFeatureLoader.get.mockResolvedValue(sampleFeature);

const result = await exportService.importFeature(testProjectPath, {
data: sampleFeature,
overwrite: false,
});

expect(result.success).toBe(false);
expect(result.errors).toContain(
`Feature with ID ${sampleFeature.id} already exists. Set overwrite: true to replace.`
);
});

it('should overwrite when overwrite is true', async () => {
mockFeatureLoader.get.mockResolvedValue(sampleFeature);
mockFeatureLoader.update.mockResolvedValue(sampleFeature);

const result = await exportService.importFeature(testProjectPath, {
data: sampleFeature,
overwrite: true,
});

expect(result.success).toBe(true);
expect(result.wasOverwritten).toBe(true);
expect(mockFeatureLoader.update).toHaveBeenCalled();
});

it('should apply target category override', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...sampleFeature,
...data,
}));

await exportService.importFeature(testProjectPath, {
data: sampleFeature,
targetCategory: 'NewCategory',
});

const createCall = mockFeatureLoader.create.mock.calls[0];
expect(createCall[1].category).toBe('NewCategory');
});

it('should clear branch info when preserveBranchInfo is false', async () => {
const featureWithBranch: Feature = {
...sampleFeature,
branchName: 'feature/test-branch',
};
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...featureWithBranch,
...data,
}));

await exportService.importFeature(testProjectPath, {
data: featureWithBranch,
preserveBranchInfo: false,
});

const createCall = mockFeatureLoader.create.mock.calls[0];
expect(createCall[1].branchName).toBeUndefined();
});

it('should preserve branch info when preserveBranchInfo is true', async () => {
const featureWithBranch: Feature = {
...sampleFeature,
branchName: 'feature/test-branch',
};
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...featureWithBranch,
...data,
}));

await exportService.importFeature(testProjectPath, {
data: featureWithBranch,
preserveBranchInfo: true,
});

const createCall = mockFeatureLoader.create.mock.calls[0];
expect(createCall[1].branchName).toBe('feature/test-branch');
});

it('should warn and clear image paths', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockResolvedValue(sampleFeature);

const result = await exportService.importFeature(testProjectPath, {
data: sampleFeature,
});

expect(result.warnings).toBeDefined();
expect(result.warnings).toContainEqual(expect.stringContaining('image path'));
const createCall = mockFeatureLoader.create.mock.calls[0];
expect(createCall[1].imagePaths).toEqual([]);
});

it('should warn and clear text file paths', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockResolvedValue(sampleFeature);

const result = await exportService.importFeature(testProjectPath, {
data: sampleFeature,
});

expect(result.warnings).toBeDefined();
expect(result.warnings).toContainEqual(expect.stringContaining('text file path'));
const createCall = mockFeatureLoader.create.mock.calls[0];
expect(createCall[1].textFilePaths).toEqual([]);
});

it('should fail with validation error for missing required fields', async () => {
const invalidFeature = {
id: 'feature-invalid',
// Missing description, title, and category
} as Feature;

const result = await exportService.importFeature(testProjectPath, {
data: invalidFeature,
});

expect(result.success).toBe(false);
expect(result.errors).toBeDefined();
expect(result.errors!.some((e) => e.includes('title or description'))).toBe(true);
});

it('should generate ID when none provided', async () => {
const featureWithoutId = {
title: 'No ID Feature',
category: 'Testing',
description: 'Feature without ID',
} as Feature;

mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...featureWithoutId,
id: data.id!,
}));

const result = await exportService.importFeature(testProjectPath, {
data: featureWithoutId,
});

expect(result.success).toBe(true);
expect(result.featureId).toBe('feature-mock-id');
});
});
describe('importFeatures', () => {
const bulkExport = {
version: '1.0.0',
exportedAt: new Date().toISOString(),
count: 2,
features: [
{
version: '1.0.0',
feature: { ...sampleFeature, id: 'feature-1' },
exportedAt: new Date().toISOString(),
},
{
version: '1.0.0',
feature: { ...sampleFeature, id: 'feature-2' },
exportedAt: new Date().toISOString(),
},
],
};

it('should import multiple features from JSON string', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...sampleFeature,
id: data.id!,
}));

const results = await exportService.importFeatures(
testProjectPath,
JSON.stringify(bulkExport)
);

expect(results).toHaveLength(2);
expect(results[0].success).toBe(true);
expect(results[1].success).toBe(true);
});

it('should import multiple features from parsed data', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...sampleFeature,
id: data.id!,
}));

const results = await exportService.importFeatures(testProjectPath, bulkExport);

expect(results).toHaveLength(2);
expect(results.every((r) => r.success)).toBe(true);
});

it('should apply options to all features', async () => {
mockFeatureLoader.get.mockResolvedValue(null);
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...sampleFeature,
...data,
}));

await exportService.importFeatures(testProjectPath, bulkExport, {
targetCategory: 'ImportedCategory',
});

const createCalls = mockFeatureLoader.create.mock.calls;
expect(createCalls[0][1].category).toBe('ImportedCategory');
expect(createCalls[1][1].category).toBe('ImportedCategory');
});

it('should return error for invalid bulk format', async () => {
const results = await exportService.importFeatures(testProjectPath, '{ "invalid": "data" }');

expect(results).toHaveLength(1);
expect(results[0].success).toBe(false);
expect(results[0].errors).toContainEqual(expect.stringContaining('Invalid bulk import data'));
});

it('should handle partial failures', async () => {
mockFeatureLoader.get.mockResolvedValueOnce(null).mockResolvedValueOnce(sampleFeature); // Second feature exists

mockFeatureLoader.create.mockImplementation(async (_, data) => ({
...sampleFeature,
id: data.id!,
}));

const results = await exportService.importFeatures(testProjectPath, bulkExport, {
overwrite: false,
});

expect(results).toHaveLength(2);
expect(results[0].success).toBe(true);
expect(results[1].success).toBe(false); // Exists without overwrite
});
});
});
@@ -63,10 +63,7 @@ describe('IdeationService', () => {
} as unknown as EventEmitter;

// Create mock settings service
mockSettingsService = {
getCredentials: vi.fn().mockResolvedValue({}),
getGlobalSettings: vi.fn().mockResolvedValue({}),
} as unknown as SettingsService;
mockSettingsService = {} as SettingsService;

// Create mock feature loader
mockFeatureLoader = {

@@ -788,367 +788,6 @@ describe('pipeline-service.ts', () => {
const nextStatus = pipelineService.getNextStatus('in_progress', config, false);
expect(nextStatus).toBe('pipeline_step1'); // Should use step1 (order 0), not step2
});

describe('with exclusions', () => {
it('should skip excluded step when coming from in_progress', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('in_progress', config, false, ['step1']);
expect(nextStatus).toBe('pipeline_step2'); // Should skip step1 and go to step2
});

it('should skip excluded step when moving between steps', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step3',
name: 'Step 3',
order: 2,
instructions: 'Instructions',
colorClass: 'red',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('pipeline_step1', config, false, [
'step2',
]);
expect(nextStatus).toBe('pipeline_step3'); // Should skip step2 and go to step3
});

it('should go to final status when all remaining steps are excluded', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('pipeline_step1', config, false, [
'step2',
]);
expect(nextStatus).toBe('verified'); // No more steps after exclusion
});

it('should go to waiting_approval when all remaining steps excluded and skipTests is true', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('pipeline_step1', config, true, ['step2']);
expect(nextStatus).toBe('waiting_approval');
});

it('should go to final status when all steps are excluded from in_progress', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('in_progress', config, false, [
'step1',
'step2',
]);
expect(nextStatus).toBe('verified');
});

it('should handle empty exclusions array like no exclusions', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('in_progress', config, false, []);
expect(nextStatus).toBe('pipeline_step1');
});

it('should handle undefined exclusions like no exclusions', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

const nextStatus = pipelineService.getNextStatus('in_progress', config, false, undefined);
expect(nextStatus).toBe('pipeline_step1');
});

it('should skip multiple excluded steps in sequence', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step3',
name: 'Step 3',
order: 2,
instructions: 'Instructions',
colorClass: 'red',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step4',
name: 'Step 4',
order: 3,
instructions: 'Instructions',
colorClass: 'yellow',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

// Exclude step2 and step3
const nextStatus = pipelineService.getNextStatus('pipeline_step1', config, false, [
'step2',
'step3',
]);
expect(nextStatus).toBe('pipeline_step4'); // Should skip step2 and step3
});

it('should handle exclusion of non-existent step IDs gracefully', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

// Exclude a non-existent step - should have no effect
const nextStatus = pipelineService.getNextStatus('in_progress', config, false, [
'nonexistent',
]);
expect(nextStatus).toBe('pipeline_step1');
});

it('should find next valid step when current step becomes excluded mid-flow', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step3',
name: 'Step 3',
order: 2,
instructions: 'Instructions',
colorClass: 'red',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

// Feature is at step1 but step1 is now excluded - should find next valid step
const nextStatus = pipelineService.getNextStatus('pipeline_step1', config, false, [
'step1',
'step2',
]);
expect(nextStatus).toBe('pipeline_step3');
});

it('should go to final status when current step is excluded and no steps remain', () => {
const config: PipelineConfig = {
version: 1,
steps: [
{
id: 'step1',
name: 'Step 1',
order: 0,
instructions: 'Instructions',
colorClass: 'blue',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
{
id: 'step2',
name: 'Step 2',
order: 1,
instructions: 'Instructions',
colorClass: 'green',
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
},
],
};

// Feature is at step1 but both steps are excluded
const nextStatus = pipelineService.getNextStatus('pipeline_step1', config, false, [
'step1',
'step2',
]);
expect(nextStatus).toBe('verified');
});
});
});

describe('getStep', () => {

@@ -1,6 +1,6 @@
{
"name": "@automaker/ui",
"version": "0.13.0",
"version": "0.12.0",
"description": "An autonomous AI development studio that helps you build software faster using AI-powered agents",
"homepage": "https://github.com/AutoMaker-Org/automaker",
"repository": {
@@ -80,8 +80,7 @@
"@radix-ui/react-switch": "1.2.6",
"@radix-ui/react-tabs": "1.1.13",
"@radix-ui/react-tooltip": "1.2.8",
"@tanstack/react-query": "^5.90.17",
"@tanstack/react-query-devtools": "^5.91.2",
"@tanstack/react-query": "5.90.12",
"@tanstack/react-router": "1.141.6",
"@uiw/react-codemirror": "4.25.4",
"@xterm/addon-fit": "0.10.0",
@@ -102,8 +101,6 @@
"react-markdown": "10.1.0",
"react-resizable-panels": "3.0.6",
"rehype-raw": "7.0.0",
"rehype-sanitize": "^6.0.0",
"remark-gfm": "^4.0.1",
"sonner": "2.0.7",
"tailwind-merge": "3.4.0",
"usehooks-ts": "3.1.1",
@@ -149,7 +146,6 @@
"productName": "Automaker",
"artifactName": "${productName}-${version}-${arch}.${ext}",
"npmRebuild": false,
"publish": null,
"afterPack": "./scripts/rebuild-server-natives.cjs",
"directories": {
"output": "release"

@@ -58,7 +58,7 @@ const E2E_SETTINGS = {
featureGenerationModel: { model: 'sonnet' },
backlogPlanningModel: { model: 'sonnet' },
projectAnalysisModel: { model: 'sonnet' },
ideationModel: { model: 'sonnet' },
suggestionsModel: { model: 'sonnet' },
},
enhancementModel: 'sonnet',
validationModel: 'opus',

@@ -1,40 +1,115 @@
/**
* Claude Usage Popover
*
* Displays Claude API usage statistics using React Query for data fetching.
*/

import { useState, useMemo } from 'react';
import { useState, useEffect, useMemo, useCallback } from 'react';
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';
import { Button } from '@/components/ui/button';
import { RefreshCw, AlertTriangle, CheckCircle, XCircle, Clock, ExternalLink } from 'lucide-react';
import { Spinner } from '@/components/ui/spinner';
import { cn } from '@/lib/utils';
import { getElectronAPI } from '@/lib/electron';
import { useAppStore } from '@/store/app-store';
import { useSetupStore } from '@/store/setup-store';
import { useClaudeUsage } from '@/hooks/queries';

// Error codes for distinguishing failure modes
const ERROR_CODES = {
API_BRIDGE_UNAVAILABLE: 'API_BRIDGE_UNAVAILABLE',
AUTH_ERROR: 'AUTH_ERROR',
TRUST_PROMPT: 'TRUST_PROMPT',
UNKNOWN: 'UNKNOWN',
} as const;

type ErrorCode = (typeof ERROR_CODES)[keyof typeof ERROR_CODES];

type UsageError = {
code: ErrorCode;
message: string;
};

// Fixed refresh interval (45 seconds)
const REFRESH_INTERVAL_SECONDS = 45;

export function ClaudeUsagePopover() {
const { claudeUsage, claudeUsageLastUpdated, setClaudeUsage } = useAppStore();
const claudeAuthStatus = useSetupStore((state) => state.claudeAuthStatus);
const [open, setOpen] = useState(false);
const [loading, setLoading] = useState(false);
const [error, setError] = useState<UsageError | null>(null);

// Check if CLI is verified/authenticated
const isCliVerified =
claudeAuthStatus?.authenticated && claudeAuthStatus?.method === 'cli_authenticated';

// Use React Query for usage data
const {
data: claudeUsage,
isLoading,
isFetching,
error,
dataUpdatedAt,
refetch,
} = useClaudeUsage(isCliVerified);

// Check if data is stale (older than 2 minutes)
// Check if data is stale (older than 2 minutes) - recalculates when claudeUsageLastUpdated changes
const isStale = useMemo(() => {
return !dataUpdatedAt || Date.now() - dataUpdatedAt > 2 * 60 * 1000;
}, [dataUpdatedAt]);
return !claudeUsageLastUpdated || Date.now() - claudeUsageLastUpdated > 2 * 60 * 1000;
}, [claudeUsageLastUpdated]);

const fetchUsage = useCallback(
async (isAutoRefresh = false) => {
if (!isAutoRefresh) setLoading(true);
setError(null);
try {
const api = getElectronAPI();
if (!api.claude) {
setError({
code: ERROR_CODES.API_BRIDGE_UNAVAILABLE,
message: 'Claude API bridge not available',
});
return;
}
const data = await api.claude.getUsage();
if ('error' in data) {
// Detect trust prompt error
const isTrustPrompt =
data.error === 'Trust prompt pending' ||
(data.message && data.message.includes('folder permission'));
setError({
code: isTrustPrompt ? ERROR_CODES.TRUST_PROMPT : ERROR_CODES.AUTH_ERROR,
message: data.message || data.error,
});
return;
}
setClaudeUsage(data);
} catch (err) {
setError({
code: ERROR_CODES.UNKNOWN,
message: err instanceof Error ? err.message : 'Failed to fetch usage',
});
} finally {
if (!isAutoRefresh) setLoading(false);
}
},
[setClaudeUsage]
);

// Auto-fetch on mount if data is stale (only if CLI is verified)
useEffect(() => {
if (isStale && isCliVerified) {
fetchUsage(true);
}
}, [isStale, isCliVerified, fetchUsage]);

useEffect(() => {
// Skip if CLI is not verified
if (!isCliVerified) return;

// Initial fetch when opened
if (open) {
if (!claudeUsage || isStale) {
fetchUsage();
}
}

// Auto-refresh interval (only when open)
let intervalId: NodeJS.Timeout | null = null;
if (open) {
intervalId = setInterval(() => {
fetchUsage(true);
}, REFRESH_INTERVAL_SECONDS * 1000);
}

return () => {
if (intervalId) clearInterval(intervalId);
};
}, [open, claudeUsage, isStale, isCliVerified, fetchUsage]);

// Derived status color/icon helper
const getStatusInfo = (percentage: number) => {
@@ -69,6 +144,7 @@ export function ClaudeUsagePopover() {
isPrimary?: boolean;
stale?: boolean;
}) => {
// Check if percentage is valid (not NaN, not undefined, is a finite number)
const isValidPercentage =
typeof percentage === 'number' && !isNaN(percentage) && isFinite(percentage);
const safePercentage = isValidPercentage ? percentage : 0;
@@ -169,10 +245,10 @@ export function ClaudeUsagePopover() {
<Button
variant="ghost"
size="icon"
className={cn('h-6 w-6', isFetching && 'opacity-80')}
onClick={() => !isFetching && refetch()}
className={cn('h-6 w-6', loading && 'opacity-80')}
onClick={() => !loading && fetchUsage(false)}
>
<RefreshCw className={cn('w-3.5 h-3.5', isFetching && 'animate-spin')} />
<RefreshCw className="w-3.5 h-3.5" />
</Button>
)}
</div>
@@ -183,16 +259,26 @@ export function ClaudeUsagePopover() {
<div className="flex flex-col items-center justify-center py-6 text-center space-y-3">
<AlertTriangle className="w-8 h-8 text-yellow-500/80" />
<div className="space-y-1 flex flex-col items-center">
<p className="text-sm font-medium">
{error instanceof Error ? error.message : 'Failed to fetch usage'}
</p>
<p className="text-sm font-medium">{error.message}</p>
<p className="text-xs text-muted-foreground">
Make sure Claude CLI is installed and authenticated via{' '}
<code className="font-mono bg-muted px-1 rounded">claude login</code>
{error.code === ERROR_CODES.API_BRIDGE_UNAVAILABLE ? (
'Ensure the Electron bridge is running or restart the app'
) : error.code === ERROR_CODES.TRUST_PROMPT ? (
<>
Run <code className="font-mono bg-muted px-1 rounded">claude</code> in your
terminal and approve access to continue
</>
) : (
<>
Make sure Claude CLI is installed and authenticated via{' '}
<code className="font-mono bg-muted px-1 rounded">claude login</code>
</>
)}
</p>
</div>
</div>
) : isLoading || !claudeUsage ? (
) : !claudeUsage ? (
// Loading state
<div className="flex flex-col items-center justify-center py-8 space-y-2">
<Spinner size="lg" />
<p className="text-xs text-muted-foreground">Loading usage data...</p>

@@ -1,11 +1,12 @@
import { useState, useMemo } from 'react';
import { useState, useEffect, useMemo, useCallback } from 'react';
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';
import { Button } from '@/components/ui/button';
import { RefreshCw, AlertTriangle, CheckCircle, XCircle, Clock, ExternalLink } from 'lucide-react';
import { Spinner } from '@/components/ui/spinner';
import { cn } from '@/lib/utils';
import { getElectronAPI } from '@/lib/electron';
import { useAppStore } from '@/store/app-store';
import { useSetupStore } from '@/store/setup-store';
import { useCodexUsage } from '@/hooks/queries';

// Error codes for distinguishing failure modes
const ERROR_CODES = {
@@ -22,6 +23,9 @@ type UsageError = {
message: string;
};

// Fixed refresh interval (45 seconds)
const REFRESH_INTERVAL_SECONDS = 45;

// Helper to format reset time
function formatResetTime(unixTimestamp: number): string {
const date = new Date(unixTimestamp * 1000);
@@ -59,39 +63,95 @@ function getWindowLabel(durationMins: number): { title: string; subtitle: string
}

export function CodexUsagePopover() {
const { codexUsage, codexUsageLastUpdated, setCodexUsage } = useAppStore();
const codexAuthStatus = useSetupStore((state) => state.codexAuthStatus);
const [open, setOpen] = useState(false);
const [loading, setLoading] = useState(false);
const [error, setError] = useState<UsageError | null>(null);

// Check if Codex is authenticated
const isCodexAuthenticated = codexAuthStatus?.authenticated;

// Use React Query for data fetching with automatic polling
const {
data: codexUsage,
isLoading,
isFetching,
error: queryError,
dataUpdatedAt,
refetch,
} = useCodexUsage(isCodexAuthenticated);

// Check if data is stale (older than 2 minutes)
const isStale = useMemo(() => {
return !dataUpdatedAt || Date.now() - dataUpdatedAt > 2 * 60 * 1000;
}, [dataUpdatedAt]);
return !codexUsageLastUpdated || Date.now() - codexUsageLastUpdated > 2 * 60 * 1000;
}, [codexUsageLastUpdated]);

// Convert query error to UsageError format for backward compatibility
const error = useMemo((): UsageError | null => {
if (!queryError) return null;
const message = queryError instanceof Error ? queryError.message : String(queryError);
if (message.includes('not available') || message.includes('does not provide')) {
return { code: ERROR_CODES.NOT_AVAILABLE, message };
const fetchUsage = useCallback(
async (isAutoRefresh = false) => {
if (!isAutoRefresh) setLoading(true);
setError(null);
try {
const api = getElectronAPI();
if (!api.codex) {
setError({
code: ERROR_CODES.API_BRIDGE_UNAVAILABLE,
message: 'Codex API bridge not available',
});
return;
}
const data = await api.codex.getUsage();
if ('error' in data) {
// Check if it's the "not available" error
if (
data.message?.includes('not available') ||
data.message?.includes('does not provide')
) {
setError({
code: ERROR_CODES.NOT_AVAILABLE,
message: data.message || data.error,
});
} else {
setError({
code: ERROR_CODES.AUTH_ERROR,
message: data.message || data.error,
});
}
return;
}
setCodexUsage(data);
} catch (err) {
setError({
code: ERROR_CODES.UNKNOWN,
message: err instanceof Error ? err.message : 'Failed to fetch usage',
});
} finally {
if (!isAutoRefresh) setLoading(false);
}
},
[setCodexUsage]
);

// Auto-fetch on mount if data is stale (only if authenticated)
useEffect(() => {
if (isStale && isCodexAuthenticated) {
fetchUsage(true);
}
if (message.includes('bridge') || message.includes('API')) {
return { code: ERROR_CODES.API_BRIDGE_UNAVAILABLE, message };
}, [isStale, isCodexAuthenticated, fetchUsage]);

useEffect(() => {
// Skip if not authenticated
if (!isCodexAuthenticated) return;

// Initial fetch when opened
if (open) {
if (!codexUsage || isStale) {
fetchUsage();
}
}
return { code: ERROR_CODES.AUTH_ERROR, message };
}, [queryError]);

// Auto-refresh interval (only when open)
let intervalId: NodeJS.Timeout | null = null;
if (open) {
intervalId = setInterval(() => {
fetchUsage(true);
}, REFRESH_INTERVAL_SECONDS * 1000);
}

return () => {
if (intervalId) clearInterval(intervalId);
};
}, [open, codexUsage, isStale, isCodexAuthenticated, fetchUsage]);

// Derived status color/icon helper
const getStatusInfo = (percentage: number) => {
@@ -229,10 +289,10 @@ export function CodexUsagePopover() {
<Button
variant="ghost"
size="icon"
className={cn('h-6 w-6', isFetching && 'opacity-80')}
onClick={() => !isFetching && refetch()}
className={cn('h-6 w-6', loading && 'opacity-80')}
onClick={() => !loading && fetchUsage(false)}
>
<RefreshCw className={cn('w-3.5 h-3.5', isFetching && 'animate-spin')} />
<RefreshCw className="w-3.5 h-3.5" />
</Button>
)}
</div>

@@ -45,8 +45,6 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
setCardBorderOpacity,
setHideScrollbar,
clearBoardBackground,
persistSettings,
getCurrentSettings,
} = useBoardBackgroundSettings();
const [isDragOver, setIsDragOver] = useState(false);
const [isProcessing, setIsProcessing] = useState(false);
@@ -57,31 +55,12 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
const backgroundSettings =
(currentProject && boardBackgroundByProject[currentProject.path]) || defaultBackgroundSettings;

// Local state for sliders during dragging (avoids store updates during drag)
const [localCardOpacity, setLocalCardOpacity] = useState(backgroundSettings.cardOpacity);
const [localColumnOpacity, setLocalColumnOpacity] = useState(backgroundSettings.columnOpacity);
const [localCardBorderOpacity, setLocalCardBorderOpacity] = useState(
backgroundSettings.cardBorderOpacity
);
const [isDragging, setIsDragging] = useState(false);

// Sync local state with store when not dragging (e.g., on modal open or external changes)
useEffect(() => {
if (!isDragging) {
setLocalCardOpacity(backgroundSettings.cardOpacity);
setLocalColumnOpacity(backgroundSettings.columnOpacity);
setLocalCardBorderOpacity(backgroundSettings.cardBorderOpacity);
}
}, [
isDragging,
backgroundSettings.cardOpacity,
backgroundSettings.columnOpacity,
backgroundSettings.cardBorderOpacity,
]);

const cardOpacity = backgroundSettings.cardOpacity;
const columnOpacity = backgroundSettings.columnOpacity;
const columnBorderEnabled = backgroundSettings.columnBorderEnabled;
const cardGlassmorphism = backgroundSettings.cardGlassmorphism;
const cardBorderEnabled = backgroundSettings.cardBorderEnabled;
const cardBorderOpacity = backgroundSettings.cardBorderOpacity;
const hideScrollbar = backgroundSettings.hideScrollbar;
const imageVersion = backgroundSettings.imageVersion;

@@ -219,40 +198,21 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
}
}, [currentProject, clearBoardBackground]);

// Live update local state during drag (modal-only, no store update)
const handleCardOpacityChange = useCallback((value: number[]) => {
setIsDragging(true);
setLocalCardOpacity(value[0]);
}, []);

// Update store and persist when slider is released
const handleCardOpacityCommit = useCallback(
(value: number[]) => {
// Live update opacity when sliders change (with persistence)
const handleCardOpacityChange = useCallback(
async (value: number[]) => {
if (!currentProject) return;
setIsDragging(false);
setCardOpacity(currentProject.path, value[0]);
const current = getCurrentSettings(currentProject.path);
persistSettings(currentProject.path, { ...current, cardOpacity: value[0] });
await setCardOpacity(currentProject.path, value[0]);
},
[currentProject, setCardOpacity, getCurrentSettings, persistSettings]
[currentProject, setCardOpacity]
);

// Live update local state during drag (modal-only, no store update)
const handleColumnOpacityChange = useCallback((value: number[]) => {
setIsDragging(true);
setLocalColumnOpacity(value[0]);
}, []);

// Update store and persist when slider is released
const handleColumnOpacityCommit = useCallback(
(value: number[]) => {
const handleColumnOpacityChange = useCallback(
async (value: number[]) => {
if (!currentProject) return;
setIsDragging(false);
setColumnOpacity(currentProject.path, value[0]);
const current = getCurrentSettings(currentProject.path);
persistSettings(currentProject.path, { ...current, columnOpacity: value[0] });
await setColumnOpacity(currentProject.path, value[0]);
},
[currentProject, setColumnOpacity, getCurrentSettings, persistSettings]
[currentProject, setColumnOpacity]
);

const handleColumnBorderToggle = useCallback(
@@ -279,22 +239,12 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
[currentProject, setCardBorderEnabled]
);

// Live update local state during drag (modal-only, no store update)
const handleCardBorderOpacityChange = useCallback((value: number[]) => {
setIsDragging(true);
setLocalCardBorderOpacity(value[0]);
}, []);

// Update store and persist when slider is released
const handleCardBorderOpacityCommit = useCallback(
(value: number[]) => {
const handleCardBorderOpacityChange = useCallback(
async (value: number[]) => {
if (!currentProject) return;
setIsDragging(false);
setCardBorderOpacity(currentProject.path, value[0]);
const current = getCurrentSettings(currentProject.path);
persistSettings(currentProject.path, { ...current, cardBorderOpacity: value[0] });
await setCardBorderOpacity(currentProject.path, value[0]);
},
[currentProject, setCardBorderOpacity, getCurrentSettings, persistSettings]
[currentProject, setCardBorderOpacity]
);

const handleHideScrollbarToggle = useCallback(
@@ -428,12 +378,11 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
<div className="space-y-2">
<div className="flex items-center justify-between">
<Label>Card Opacity</Label>
<span className="text-sm text-muted-foreground">{localCardOpacity}%</span>
<span className="text-sm text-muted-foreground">{cardOpacity}%</span>
</div>
<Slider
value={[localCardOpacity]}
value={[cardOpacity]}
onValueChange={handleCardOpacityChange}
onValueCommit={handleCardOpacityCommit}
min={0}
max={100}
step={1}
@@ -444,12 +393,11 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
<div className="space-y-2">
<div className="flex items-center justify-between">
<Label>Column Opacity</Label>
<span className="text-sm text-muted-foreground">{localColumnOpacity}%</span>
<span className="text-sm text-muted-foreground">{columnOpacity}%</span>
</div>
<Slider
value={[localColumnOpacity]}
value={[columnOpacity]}
onValueChange={handleColumnOpacityChange}
onValueCommit={handleColumnOpacityCommit}
min={0}
max={100}
step={1}
@@ -498,12 +446,11 @@ export function BoardBackgroundModal({ open, onOpenChange }: BoardBackgroundModa
<div className="space-y-2">
<div className="flex items-center justify-between">
<Label>Card Border Opacity</Label>
<span className="text-sm text-muted-foreground">{localCardBorderOpacity}%</span>
<span className="text-sm text-muted-foreground">{cardBorderOpacity}%</span>
</div>
<Slider
value={[localCardBorderOpacity]}
value={[cardBorderOpacity]}
onValueChange={handleCardBorderOpacityChange}
onValueCommit={handleCardBorderOpacityCommit}
min={0}
max={100}
step={1}

@@ -1,3 +1,4 @@
import { useState, useEffect, useCallback } from 'react';
import {
Dialog,
DialogContent,
@@ -9,7 +10,7 @@ import {
import { Button } from '@/components/ui/button';
import { Folder, FolderOpen, AlertCircle } from 'lucide-react';
import { Spinner } from '@/components/ui/spinner';
import { useWorkspaceDirectories } from '@/hooks/queries';
import { getHttpApiClient } from '@/lib/http-api-client';

interface WorkspaceDirectory {
name: string;
@@ -23,15 +24,41 @@ interface WorkspacePickerModalProps {
}

export function WorkspacePickerModal({ open, onOpenChange, onSelect }: WorkspacePickerModalProps) {
// React Query hook - only fetch when modal is open
const { data: directories = [], isLoading, error, refetch } = useWorkspaceDirectories(open);
const [isLoading, setIsLoading] = useState(false);
const [directories, setDirectories] = useState<WorkspaceDirectory[]>([]);
const [error, setError] = useState<string | null>(null);

const loadDirectories = useCallback(async () => {
setIsLoading(true);
setError(null);

try {
const client = getHttpApiClient();
const result = await client.workspace.getDirectories();

if (result.success && result.directories) {
setDirectories(result.directories);
} else {
setError(result.error || 'Failed to load directories');
}
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load directories');
} finally {
setIsLoading(false);
}
}, []);

// Load directories when modal opens
useEffect(() => {
if (open) {
loadDirectories();
}
}, [open, loadDirectories]);

const handleSelect = (dir: WorkspaceDirectory) => {
onSelect(dir.path, dir.name);
};

const errorMessage = error instanceof Error ? error.message : null;

return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="bg-card border-border max-w-lg max-h-[80vh] flex flex-col">
@@ -53,19 +80,19 @@ export function WorkspacePickerModal({ open, onOpenChange, onSelect }: Workspace
</div>
)}

{errorMessage && !isLoading && (
{error && !isLoading && (
<div className="flex flex-col items-center justify-center h-full gap-3 text-center px-4">
<div className="w-12 h-12 rounded-full bg-destructive/10 flex items-center justify-center">
<AlertCircle className="w-6 h-6 text-destructive" />
</div>
<p className="text-sm text-destructive">{errorMessage}</p>
<Button variant="secondary" size="sm" onClick={() => refetch()} className="mt-2">
<p className="text-sm text-destructive">{error}</p>
<Button variant="secondary" size="sm" onClick={loadDirectories} className="mt-2">
Try Again
</Button>
</div>
)}

{!isLoading && !errorMessage && directories.length === 0 && (
{!isLoading && !error && directories.length === 0 && (
<div className="flex flex-col items-center justify-center h-full gap-3 text-center px-4">
<div className="w-12 h-12 rounded-full bg-muted flex items-center justify-center">
<Folder className="w-6 h-6 text-muted-foreground" />
@@ -76,7 +103,7 @@ export function WorkspacePickerModal({ open, onOpenChange, onSelect }: Workspace
</div>
)}

{!isLoading && !errorMessage && directories.length > 0 && (
{!isLoading && !error && directories.length > 0 && (
<div className="space-y-2">
{directories.map((dir) => (
<button

@@ -1,4 +1,4 @@
import { useEffect, useRef, useState, memo, useCallback, useMemo } from 'react';
import { useEffect, useRef, useState, memo, useCallback } from 'react';
import type { LucideIcon } from 'lucide-react';
import { Edit2, Trash2, Palette, ChevronRight, Moon, Sun, Monitor } from 'lucide-react';
import { toast } from 'sonner';
@@ -6,67 +6,35 @@ import { cn } from '@/lib/utils';
import { type ThemeMode, useAppStore } from '@/store/app-store';
import { ConfirmDialog } from '@/components/ui/confirm-dialog';
import type { Project } from '@/lib/electron';
import {
PROJECT_DARK_THEMES,
PROJECT_LIGHT_THEMES,
THEME_SUBMENU_CONSTANTS,
} from '@/components/layout/sidebar/constants';
import { PROJECT_DARK_THEMES, PROJECT_LIGHT_THEMES } from '@/components/layout/sidebar/constants';
import { useThemePreview } from '@/components/layout/sidebar/hooks';

/**
* Constant representing the "use global theme" option.
* An empty string is used to indicate that no project-specific theme is set.
*/
// Constant for "use global theme" option
const USE_GLOBAL_THEME = '' as const;

/**
* Z-index values for context menu layering.
* Ensures proper stacking order when menus overlap.
*/
// Constants for z-index values
const Z_INDEX = {
/** Base z-index for the main context menu */
CONTEXT_MENU: 100,
/** Higher z-index for theme submenu to appear above parent menu */
THEME_SUBMENU: 101,
} as const;

/**
* Represents a selectable theme option in the theme submenu.
* Uses ThemeMode from app-store for type safety.
*/
// Theme option type - using ThemeMode for type safety
interface ThemeOption {
/** The theme mode value (e.g., 'dark', 'light', 'dracula') */
value: ThemeMode;
/** Display label for the theme option */
label: string;
/** Lucide icon component to display alongside the label */
icon: LucideIcon;
/** CSS color value for the icon */
color: string;
}

/**
* Props for the ThemeButton component.
* Defines the interface for rendering individual theme selection buttons.
*/
// Reusable theme button component to avoid duplication (DRY principle)
interface ThemeButtonProps {
/** The theme option data to display */
option: ThemeOption;
/** Whether this theme is currently selected */
isSelected: boolean;
/** Handler for pointer enter events (used for preview) */
onPointerEnter: () => void;
/** Handler for pointer leave events (used to clear preview) */
onPointerLeave: (e: React.PointerEvent) => void;
/** Handler for click events (used to select theme) */
onClick: () => void;
}

/**
* A reusable button component for individual theme options.
* Implements hover preview and selection functionality.
* Memoized to prevent unnecessary re-renders when parent state changes.
*/
const ThemeButton = memo(function ThemeButton({
option,
isSelected,
@@ -95,33 +63,17 @@ const ThemeButton = memo(function ThemeButton({
);
});

/**
* Props for the ThemeColumn component.
* Defines the interface for rendering a column of related theme options (e.g., dark or light themes).
*/
// Reusable theme column component
interface ThemeColumnProps {
/** Column header title (e.g., "Dark", "Light") */
title: string;
/** Icon to display in the column header */
icon: LucideIcon;
/** Array of theme options to display in this column */
themes: ThemeOption[];
/** Currently selected theme value, or null if using global theme */
selectedTheme: ThemeMode | null;
/** Handler called when user hovers over a theme option for preview */
onPreviewEnter: (value: ThemeMode) => void;
/** Handler called when user stops hovering over a theme option */
onPreviewLeave: (e: React.PointerEvent) => void;
/** Handler called when user clicks to select a theme */
onSelect: (value: ThemeMode) => void;
}

/**
* A reusable column component for displaying themed options.
* Renders a group of related themes (e.g., all dark themes or all light themes)
* with a header and scrollable list of ThemeButton components.
* Memoized to prevent unnecessary re-renders.
*/
const ThemeColumn = memo(function ThemeColumn({
title,
icon: Icon,
@@ -153,36 +105,13 @@ const ThemeColumn = memo(function ThemeColumn({
);
});

/**
* Props for the ProjectContextMenu component.
* Defines the interface for the project right-click context menu.
*/
interface ProjectContextMenuProps {
/** The project this context menu is for */
project: Project;
/** Screen coordinates where the context menu should appear */
position: { x: number; y: number };
/** Callback to close the context menu */
onClose: () => void;
/** Callback when user selects "Edit Name & Icon" option */
onEdit: (project: Project) => void;
}

/**
* A context menu component for project-specific actions.
*
* Provides options for:
* - Editing project name and icon
* - Setting project-specific theme (with live preview on hover)
* - Removing project from the workspace
*
* Features viewport-aware positioning for the theme submenu to prevent
* overflow, and implements delayed hover handling to improve UX when
* navigating between the trigger button and submenu.
*
* @param props - Component props
* @returns The rendered context menu or null if not visible
*/
export function ProjectContextMenu({
project,
position,
@@ -201,82 +130,9 @@ export function ProjectContextMenu({
const [showThemeSubmenu, setShowThemeSubmenu] = useState(false);
const [removeConfirmed, setRemoveConfirmed] = useState(false);
const themeSubmenuRef = useRef<HTMLDivElement>(null);
const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

const { handlePreviewEnter, handlePreviewLeave } = useThemePreview({ setPreviewTheme });

// Handler to open theme submenu and cancel any pending close
const handleThemeMenuEnter = useCallback(() => {
// Cancel any pending close timeout
if (closeTimeoutRef.current) {
clearTimeout(closeTimeoutRef.current);
closeTimeoutRef.current = null;
}
setShowThemeSubmenu(true);
}, []);

// Handler to close theme submenu with a small delay
// This prevents the submenu from closing when mouse crosses the gap between trigger and submenu
const handleThemeMenuLeave = useCallback(() => {
// Add a small delay before closing to allow mouse to reach submenu
closeTimeoutRef.current = setTimeout(() => {
setShowThemeSubmenu(false);
setPreviewTheme(null);
}, 100); // 100ms delay is enough to cross the gap
}, [setPreviewTheme]);

/**
* Calculates theme submenu position to prevent viewport overflow.
*
* This memoized calculation determines the optimal vertical position and maximum
* height for the theme submenu based on the current viewport dimensions and
* the trigger button's position.
*
* @returns Object containing:
* - top: Vertical offset from default position (negative values shift submenu up)
* - maxHeight: Maximum height constraint to prevent overflow with scrolling
*/
const submenuPosition = useMemo(() => {
const { ESTIMATED_SUBMENU_HEIGHT, COLLISION_PADDING, THEME_BUTTON_OFFSET } =
THEME_SUBMENU_CONSTANTS;

const viewportHeight = typeof window !== 'undefined' ? window.innerHeight : 800;

// Calculate where the submenu's bottom edge would be if positioned normally
const submenuBottomY = position.y + THEME_BUTTON_OFFSET + ESTIMATED_SUBMENU_HEIGHT;

// Check if submenu would overflow bottom of viewport
const wouldOverflowBottom = submenuBottomY > viewportHeight - COLLISION_PADDING;

// If it would overflow, calculate how much to shift it up
if (wouldOverflowBottom) {
// Calculate the offset needed to align submenu bottom with viewport bottom minus padding
const overflowAmount = submenuBottomY - (viewportHeight - COLLISION_PADDING);
return {
top: -overflowAmount,
maxHeight: Math.min(ESTIMATED_SUBMENU_HEIGHT, viewportHeight - COLLISION_PADDING * 2),
};
}

// Default: submenu opens at top of parent (aligned with the theme button)
return {
top: 0,
maxHeight: Math.min(
ESTIMATED_SUBMENU_HEIGHT,
viewportHeight - position.y - THEME_BUTTON_OFFSET - COLLISION_PADDING
),
};
}, [position.y]);

// Cleanup timeout on unmount
useEffect(() => {
return () => {
if (closeTimeoutRef.current) {
clearTimeout(closeTimeoutRef.current);
}
};
}, []);

useEffect(() => {
const handleClickOutside = (event: globalThis.MouseEvent) => {
// Don't close if a confirmation dialog is open (dialog is in a portal)
@@ -386,8 +242,11 @@ export function ProjectContextMenu({
{/* Theme Submenu Trigger */}
<div
className="relative"
onMouseEnter={handleThemeMenuEnter}
onMouseLeave={handleThemeMenuLeave}
onMouseEnter={() => setShowThemeSubmenu(true)}
onMouseLeave={() => {
setShowThemeSubmenu(false);
setPreviewTheme(null);
}}
>
<button
onClick={() => setShowThemeSubmenu(!showThemeSubmenu)}
@@ -414,18 +273,13 @@ export function ProjectContextMenu({
<div
ref={themeSubmenuRef}
className={cn(
'absolute left-full ml-1 min-w-[420px] rounded-lg',
'absolute left-full top-0 ml-1 min-w-[420px] rounded-lg',
'bg-popover text-popover-foreground',
'border border-border shadow-lg',
'animate-in fade-in zoom-in-95 duration-100'
)}
style={{
zIndex: Z_INDEX.THEME_SUBMENU,
top: `${submenuPosition.top}px`,
}}
style={{ zIndex: Z_INDEX.THEME_SUBMENU }}
data-testid="project-theme-submenu"
onMouseEnter={handleThemeMenuEnter}
onMouseLeave={handleThemeMenuLeave}
>
<div className="p-2">
{/* Use Global Option */}
@@ -452,13 +306,7 @@ export function ProjectContextMenu({
<div className="h-px bg-border my-2" />

{/* Two Column Layout - Using reusable ThemeColumn component */}
{/* Dynamic max height with scroll for viewport overflow handling */}
<div
className="flex gap-2 overflow-y-auto scrollbar-styled"
style={{
maxHeight: `${Math.max(0, submenuPosition.maxHeight - THEME_SUBMENU_CONSTANTS.SUBMENU_HEADER_HEIGHT)}px`,
}}
>
<div className="flex gap-2">
<ThemeColumn
title="Dark"
icon={Moon}

@@ -1,7 +1,8 @@
|
||||
import { useState, useCallback, useEffect } from 'react';
|
||||
import { useState, useCallback } from 'react';
|
||||
import { createLogger } from '@automaker/utils/logger';
|
||||
import { useNavigate, useLocation } from '@tanstack/react-router';
|
||||
import { PanelLeftClose, ChevronDown } from 'lucide-react';
|
||||
|
||||
const logger = createLogger('Sidebar');
|
||||
import { cn } from '@/lib/utils';
|
||||
import { useAppStore } from '@/store/app-store';
|
||||
import { useNotificationsStore } from '@/store/notifications-store';
|
||||
@@ -9,18 +10,22 @@ import { useKeyboardShortcuts, useKeyboardShortcutsConfig } from '@/hooks/use-ke
|
||||
import { getElectronAPI } from '@/lib/electron';
|
||||
import { initializeProject, hasAppSpec, hasAutomakerDir } from '@/lib/project-init';
|
||||
import { toast } from 'sonner';
|
||||
import { useIsCompact } from '@/hooks/use-media-query';
|
||||
import type { Project } from '@/lib/electron';
|
||||
import { DeleteProjectDialog } from '@/components/views/settings-view/components/delete-project-dialog';
|
||||
import { NewProjectModal } from '@/components/dialogs/new-project-modal';
|
||||
import { CreateSpecDialog } from '@/components/views/spec-view/dialogs';
|
||||
|
||||
// Sidebar components
|
||||
// Local imports from subfolder
|
||||
import {
|
||||
SidebarNavigation,
|
||||
CollapseToggleButton,
|
||||
MobileSidebarToggle,
|
||||
SidebarHeader,
|
||||
SidebarNavigation,
|
||||
SidebarFooter,
|
||||
} from './components';
|
||||
import { SIDEBAR_FEATURE_FLAGS } from './constants';
|
||||
MobileSidebarToggle,
|
||||
} from './sidebar/components';
|
||||
import { useIsCompact } from '@/hooks/use-media-query';
|
||||
import { PanelLeftClose } from 'lucide-react';
|
||||
import { TrashDialog, OnboardingDialog } from './sidebar/dialogs';
|
||||
import { SIDEBAR_FEATURE_FLAGS } from './sidebar/constants';
|
||||
import {
|
||||
useSidebarAutoCollapse,
|
||||
useRunningAgents,
|
||||
@@ -30,19 +35,7 @@ import {
|
||||
useSetupDialog,
|
||||
useTrashOperations,
|
||||
useUnviewedValidations,
|
||||
} from './hooks';
|
||||
import { TrashDialog, OnboardingDialog } from './dialogs';
|
||||
|
||||
// Reuse dialogs from project-switcher
|
||||
import { ProjectContextMenu } from '../project-switcher/components/project-context-menu';
|
||||
import { EditProjectDialog } from '../project-switcher/components/edit-project-dialog';
|
||||
|
||||
// Import shared dialogs
|
||||
import { DeleteProjectDialog } from '@/components/views/settings-view/components/delete-project-dialog';
|
||||
import { NewProjectModal } from '@/components/dialogs/new-project-modal';
|
||||
import { CreateSpecDialog } from '@/components/views/spec-view/dialogs';
|
||||
|
||||
const logger = createLogger('Sidebar');
|
||||
} from './sidebar/hooks';
|
||||
|
||||
export function Sidebar() {
|
||||
const navigate = useNavigate();
|
||||
@@ -66,14 +59,12 @@ export function Sidebar() {
|
||||
moveProjectToTrash,
|
||||
specCreatingForProject,
|
||||
setSpecCreatingForProject,
|
||||
setCurrentProject,
|
||||
} = useAppStore();
|
||||
|
||||
const isCompact = useIsCompact();
|
||||
|
||||
// Environment variable flags for hiding sidebar items
|
||||
const { hideTerminal, hideRunningAgents, hideContext, hideSpecEditor, hideWiki } =
|
||||
SIDEBAR_FEATURE_FLAGS;
|
||||
const { hideTerminal, hideRunningAgents, hideContext, hideSpecEditor } = SIDEBAR_FEATURE_FLAGS;
|
||||
|
||||
// Get customizable keyboard shortcuts
|
||||
const shortcuts = useKeyboardShortcutsConfig();
|
||||
@@ -81,13 +72,6 @@ export function Sidebar() {
|
||||
// Get unread notifications count
|
||||
const unreadNotificationsCount = useNotificationsStore((s) => s.unreadCount);
|
||||
|
||||
// State for context menu
|
||||
const [contextMenuProject, setContextMenuProject] = useState<Project | null>(null);
|
||||
const [contextMenuPosition, setContextMenuPosition] = useState<{ x: number; y: number } | null>(
|
||||
null
|
||||
);
|
||||
const [editDialogProject, setEditDialogProject] = useState<Project | null>(null);
|
||||
|
||||
// State for delete project confirmation dialog
|
||||
const [showDeleteProjectDialog, setShowDeleteProjectDialog] = useState(false);
|
||||
|
||||
@@ -145,7 +129,7 @@ export function Sidebar() {
|
||||
const isCurrentProjectGeneratingSpec =
|
||||
specCreatingForProject !== null && specCreatingForProject === currentProject?.path;
|
||||
|
||||
// Auto-collapse sidebar on small screens
|
||||
// Auto-collapse sidebar on small screens and update Electron window minWidth
|
||||
useSidebarAutoCollapse({ sidebarOpen, toggleSidebar });
|
||||
|
||||
// Running agents count
|
||||
@@ -179,28 +163,9 @@ export function Sidebar() {
|
||||
setNewProjectPath,
|
||||
});
|
||||
|
||||
// Context menu handlers
|
||||
const handleContextMenu = useCallback((project: Project, event: React.MouseEvent) => {
|
||||
event.preventDefault();
|
||||
setContextMenuProject(project);
|
||||
setContextMenuPosition({ x: event.clientX, y: event.clientY });
|
||||
}, []);
|
||||
|
||||
const handleCloseContextMenu = useCallback(() => {
|
||||
setContextMenuProject(null);
|
||||
setContextMenuPosition(null);
|
||||
}, []);
|
||||
|
||||
const handleEditProject = useCallback(
|
||||
(project: Project) => {
|
||||
setEditDialogProject(project);
|
||||
handleCloseContextMenu();
|
||||
},
|
||||
[handleCloseContextMenu]
|
||||
);
|
||||
|
||||
/**
|
||||
* Opens the system folder selection dialog and initializes the selected project.
|
||||
* Used by both the 'O' keyboard shortcut and the folder icon button.
|
||||
*/
|
||||
const handleOpenFolder = useCallback(async () => {
|
||||
const api = getElectronAPI();
|
||||
@@ -208,10 +173,14 @@ export function Sidebar() {
|
||||
|
||||
if (!result.canceled && result.filePaths[0]) {
|
||||
const path = result.filePaths[0];
|
||||
// Extract folder name from path (works on both Windows and Mac/Linux)
|
||||
const name = path.split(/[/\\]/).filter(Boolean).pop() || 'Untitled Project';
|
||||
|
||||
try {
|
||||
// Check if this is a brand new project (no .automaker directory)
|
||||
const hadAutomakerDir = await hasAutomakerDir(path);
|
||||
|
||||
// Initialize the .automaker directory structure
|
||||
const initResult = await initializeProject(path);
|
||||
|
||||
if (!initResult.success) {
|
||||
@@ -221,10 +190,15 @@ export function Sidebar() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Upsert project and set as current (handles both create and update cases)
|
||||
// Theme handling (trashed project recovery or undefined for global) is done by the store
|
||||
upsertAndSetCurrentProject(path, name);
|
||||
|
||||
// Check if app_spec.txt exists
|
||||
const specExists = await hasAppSpec(path);
|
||||
|
||||
if (!hadAutomakerDir && !specExists) {
|
||||
// This is a brand new project - show setup dialog
|
||||
setSetupProjectPath(path);
|
||||
setShowSetupDialog(true);
|
||||
toast.success('Project opened', {
|
||||
@@ -239,8 +213,6 @@ export function Sidebar() {
|
||||
description: `Opened ${name}`,
|
||||
});
|
||||
}
|
||||
|
||||
navigate({ to: '/board' });
|
||||
} catch (error) {
|
||||
logger.error('Failed to open project:', error);
|
||||
toast.error('Failed to open project', {
|
||||
@@ -248,13 +220,9 @@ export function Sidebar() {
|
||||
});
|
||||
}
|
||||
}
|
||||
}, [upsertAndSetCurrentProject, navigate, setSetupProjectPath, setShowSetupDialog]);
|
||||
}, [upsertAndSetCurrentProject]);
|
||||
|
||||
const handleNewProject = useCallback(() => {
|
||||
setShowNewProjectModal(true);
|
||||
}, [setShowNewProjectModal]);
|
||||
|
||||
// Navigation sections and keyboard shortcuts
|
||||
// Navigation sections and keyboard shortcuts (defined after handlers)
|
||||
const { navSections, navigationShortcuts } = useNavigation({
|
||||
shortcuts,
|
||||
hideSpecEditor,
|
||||
@@ -276,48 +244,12 @@ export function Sidebar() {
|
||||
// Register keyboard shortcuts
|
||||
useKeyboardShortcuts(navigationShortcuts);
|
||||
|
||||
// Keyboard shortcuts for project switching (1-9, 0)
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (event: KeyboardEvent) => {
|
||||
const target = event.target as HTMLElement;
|
||||
if (target.tagName === 'INPUT' || target.tagName === 'TEXTAREA' || target.isContentEditable) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.ctrlKey || event.metaKey || event.altKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = event.key;
|
||||
let projectIndex: number | null = null;
|
||||
|
||||
if (key >= '1' && key <= '9') {
|
||||
projectIndex = parseInt(key, 10) - 1;
|
||||
} else if (key === '0') {
|
||||
projectIndex = 9;
|
||||
}
|
||||
|
||||
if (projectIndex !== null && projectIndex < projects.length) {
|
||||
const targetProject = projects[projectIndex];
|
||||
if (targetProject && targetProject.id !== currentProject?.id) {
|
||||
setCurrentProject(targetProject);
|
||||
navigate({ to: '/board' });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('keydown', handleKeyDown);
|
||||
return () => window.removeEventListener('keydown', handleKeyDown);
}, [projects, currentProject, setCurrentProject, navigate]);
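// e.g. pressing '3' (outside an input field) jumps to projects[2]; '0' maps to the
// tenth project (projects[9]); keys beyond the project count are ignored.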
const isActiveRoute = (id: string) => {
|
||||
// Map view IDs to route paths
|
||||
const routePath = id === 'welcome' ? '/' : `/${id}`;
|
||||
return location.pathname === routePath;
};
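// e.g. isActiveRoute('board') matches '/board', while the special 'welcome' id maps to '/'.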
// Track if nav can scroll down
|
||||
const [canScrollDown, setCanScrollDown] = useState(false);
|
||||
|
||||
// Check if sidebar should be completely hidden on mobile
|
||||
const shouldHideSidebar = isCompact && mobileSidebarHidden;
|
||||
|
||||
@@ -334,7 +266,6 @@ export function Sidebar() {
|
||||
data-testid="sidebar-backdrop"
|
||||
/>
|
||||
)}
|
||||
|
||||
<aside
|
||||
className={cn(
|
||||
'flex-shrink-0 flex flex-col z-30',
|
||||
@@ -346,11 +277,9 @@ export function Sidebar() {
|
||||
'transition-all duration-300 ease-[cubic-bezier(0.4,0,0.2,1)]',
|
||||
// Mobile: completely hidden when mobileSidebarHidden is true
|
||||
shouldHideSidebar && 'hidden',
|
||||
// Width based on state
|
||||
// Mobile: overlay when open, collapsed when closed
|
||||
!shouldHideSidebar &&
|
||||
(sidebarOpen
|
||||
? 'fixed inset-y-0 left-0 w-[17rem] lg:relative lg:w-[17rem]'
|
||||
: 'relative w-14')
|
||||
(sidebarOpen ? 'fixed inset-y-0 left-0 w-72 lg:relative lg:w-72' : 'relative w-16')
|
||||
)}
|
||||
data-testid="sidebar"
|
||||
>
|
||||
@@ -384,9 +313,8 @@ export function Sidebar() {
|
||||
<SidebarHeader
|
||||
sidebarOpen={sidebarOpen}
|
||||
currentProject={currentProject}
|
||||
onNewProject={handleNewProject}
|
||||
onOpenFolder={handleOpenFolder}
|
||||
onProjectContextMenu={handleContextMenu}
|
||||
onClose={toggleSidebar}
|
||||
onExpand={toggleSidebar}
|
||||
/>
|
||||
|
||||
<SidebarNavigation
|
||||
@@ -395,27 +323,17 @@ export function Sidebar() {
|
||||
navSections={navSections}
|
||||
isActiveRoute={isActiveRoute}
|
||||
navigate={navigate}
|
||||
onScrollStateChange={setCanScrollDown}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Scroll indicator - shows there's more content below */}
|
||||
{canScrollDown && sidebarOpen && (
|
||||
<div className="flex justify-center py-1 border-t border-border/30">
|
||||
<ChevronDown className="w-4 h-4 text-muted-foreground/50 animate-bounce" />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<SidebarFooter
|
||||
sidebarOpen={sidebarOpen}
|
||||
isActiveRoute={isActiveRoute}
|
||||
navigate={navigate}
|
||||
hideRunningAgents={hideRunningAgents}
|
||||
hideWiki={hideWiki}
|
||||
runningAgentsCount={runningAgentsCount}
|
||||
shortcuts={{ settings: shortcuts.settings }}
|
||||
/>
|
||||
|
||||
<TrashDialog
|
||||
open={showTrashDialog}
|
||||
onOpenChange={setShowTrashDialog}
|
||||
@@ -474,25 +392,6 @@ export function Sidebar() {
|
||||
isCreating={isCreatingProject}
|
||||
/>
|
||||
</aside>
|
||||
|
||||
{/* Context Menu */}
|
||||
{contextMenuProject && contextMenuPosition && (
|
||||
<ProjectContextMenu
|
||||
project={contextMenuProject}
|
||||
position={contextMenuPosition}
|
||||
onClose={handleCloseContextMenu}
|
||||
onEdit={handleEditProject}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Edit Project Dialog */}
|
||||
{editDialogProject && (
|
||||
<EditProjectDialog
|
||||
project={editDialogProject}
|
||||
open={!!editDialogProject}
|
||||
onOpenChange={(open) => !open && setEditDialogProject(null)}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -25,7 +25,7 @@ export function CollapseToggleButton({
|
||||
<button
|
||||
onClick={toggleSidebar}
|
||||
className={cn(
|
||||
'flex absolute top-[40px] -right-3.5 z-9999',
|
||||
'flex absolute top-[68px] -right-3 z-9999',
|
||||
'group/toggle items-center justify-center w-7 h-7 rounded-full',
|
||||
// Glass morphism button
|
||||
'bg-card/95 backdrop-blur-sm border border-border/80',
|
||||
|
||||
@@ -30,41 +30,17 @@ import {
|
||||
import { DndContext, closestCenter } from '@dnd-kit/core';
|
||||
import { SortableContext, verticalListSortingStrategy } from '@dnd-kit/sortable';
|
||||
import { SortableProjectItem, ThemeMenuItem } from './';
|
||||
import { PROJECT_DARK_THEMES, PROJECT_LIGHT_THEMES, THEME_SUBMENU_CONSTANTS } from '../constants';
|
||||
import { PROJECT_DARK_THEMES, PROJECT_LIGHT_THEMES } from '../constants';
|
||||
import { useProjectPicker, useDragAndDrop, useProjectTheme } from '../hooks';
|
||||
import { useKeyboardShortcutsConfig } from '@/hooks/use-keyboard-shortcuts';
|
||||
|
||||
/**
|
||||
* Props for the ProjectSelectorWithOptions component.
|
||||
* Defines the interface for the project selector dropdown with additional options menu.
|
||||
*/
|
||||
interface ProjectSelectorWithOptionsProps {
|
||||
/** Whether the sidebar is currently expanded */
|
||||
sidebarOpen: boolean;
|
||||
/** Whether the project picker dropdown is currently open */
|
||||
isProjectPickerOpen: boolean;
|
||||
/** Callback to control the project picker dropdown open state */
|
||||
setIsProjectPickerOpen: (value: boolean | ((prev: boolean) => boolean)) => void;
|
||||
/** Callback to show the delete project confirmation dialog */
|
||||
setShowDeleteProjectDialog: (show: boolean) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* A project selector component with search, drag-and-drop reordering, and options menu.
|
||||
*
|
||||
* Features:
|
||||
* - Searchable dropdown for quick project switching
|
||||
* - Drag-and-drop reordering of projects
|
||||
* - Project-specific theme selection with live preview
|
||||
* - Project history navigation (previous/next)
|
||||
* - Option to move project to trash
|
||||
*
|
||||
* The component uses viewport-aware positioning via THEME_SUBMENU_CONSTANTS
|
||||
* for consistent submenu behavior across the application.
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns The rendered project selector or null if sidebar is closed or no projects exist
|
||||
*/
|
||||
export function ProjectSelectorWithOptions({
|
||||
sidebarOpen,
|
||||
isProjectPickerOpen,
|
||||
@@ -270,7 +246,6 @@ export function ProjectSelectorWithOptions({
|
||||
<DropdownMenuSubContent
|
||||
className="w-[420px] bg-popover/95 backdrop-blur-xl"
|
||||
data-testid="project-theme-menu"
|
||||
collisionPadding={THEME_SUBMENU_CONSTANTS.COLLISION_PADDING}
|
||||
onPointerLeave={() => {
|
||||
// Clear preview theme when leaving the dropdown
|
||||
setPreviewTheme(null);
|
||||
@@ -311,8 +286,7 @@ export function ProjectSelectorWithOptions({
|
||||
</div>
|
||||
<DropdownMenuSeparator />
|
||||
{/* Two Column Layout */}
|
||||
{/* Max height with scroll to ensure all themes are visible when menu is near screen edge */}
|
||||
<div className="flex gap-2 p-2 max-h-[60vh] overflow-y-auto scrollbar-styled">
|
||||
<div className="flex gap-2 p-2">
|
||||
{/* Dark Themes Column */}
|
||||
<div className="flex-1">
|
||||
<div className="flex items-center gap-1.5 px-2 py-1.5 text-xs font-medium text-muted-foreground">
|
||||
|
||||
@@ -1,31 +1,13 @@
|
||||
import { useCallback } from 'react';
|
||||
import type { NavigateOptions } from '@tanstack/react-router';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { formatShortcut } from '@/store/app-store';
|
||||
import { Activity, Settings, BookOpen, MessageSquare, ExternalLink } from 'lucide-react';
|
||||
import { useOSDetection } from '@/hooks/use-os-detection';
|
||||
import { getElectronAPI } from '@/lib/electron';
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';
|
||||
|
||||
function getOSAbbreviation(os: string): string {
|
||||
switch (os) {
|
||||
case 'mac':
|
||||
return 'M';
|
||||
case 'windows':
|
||||
return 'W';
|
||||
case 'linux':
|
||||
return 'L';
|
||||
default:
|
||||
return '?';
|
||||
}
|
||||
}
|
||||
import { Activity, Settings } from 'lucide-react';
|
||||
|
||||
interface SidebarFooterProps {
|
||||
sidebarOpen: boolean;
|
||||
isActiveRoute: (id: string) => boolean;
|
||||
navigate: (opts: NavigateOptions) => void;
|
||||
hideRunningAgents: boolean;
|
||||
hideWiki: boolean;
|
||||
runningAgentsCount: number;
|
||||
shortcuts: {
|
||||
settings: string;
|
||||
@@ -37,225 +19,86 @@ export function SidebarFooter({
|
||||
isActiveRoute,
|
||||
navigate,
|
||||
hideRunningAgents,
|
||||
hideWiki,
|
||||
runningAgentsCount,
|
||||
shortcuts,
|
||||
}: SidebarFooterProps) {
|
||||
const appVersion = typeof __APP_VERSION__ !== 'undefined' ? __APP_VERSION__ : '0.0.0';
const { os } = useOSDetection();
const appMode = import.meta.env.VITE_APP_MODE || '?';
const versionSuffix = `${getOSAbbreviation(os)}${appMode}`;
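// e.g. on macOS with VITE_APP_MODE set to 'E' (a hypothetical value), versionSuffix becomes 'ME',
// so the footer shows the app version followed by 'ME'; an unset mode falls back to '?'.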
const handleWikiClick = useCallback(() => {
|
||||
navigate({ to: '/wiki' });
|
||||
}, [navigate]);
|
||||
|
||||
const handleFeedbackClick = useCallback(() => {
|
||||
try {
|
||||
const api = getElectronAPI();
|
||||
api.openExternalLink('https://github.com/AutoMaker-Org/automaker/issues');
|
||||
} catch {
|
||||
// Fallback for non-Electron environments (SSR, web browser)
|
||||
window.open('https://github.com/AutoMaker-Org/automaker/issues', '_blank');
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Collapsed state
|
||||
if (!sidebarOpen) {
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'shrink-0 border-t border-border/40',
|
||||
'bg-gradient-to-t from-background/10 via-sidebar/50 to-transparent'
|
||||
)}
|
||||
>
|
||||
<div className="flex flex-col items-center py-2 px-2 gap-1">
|
||||
{/* Running Agents */}
|
||||
{!hideRunningAgents && (
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => navigate({ to: '/running-agents' })}
|
||||
className={cn(
|
||||
'relative flex items-center justify-center w-10 h-10 rounded-xl',
|
||||
'transition-all duration-200 ease-out titlebar-no-drag',
|
||||
isActiveRoute('running-agents')
|
||||
? [
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground border border-brand-500/30',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 border border-transparent hover:border-border/40',
|
||||
]
|
||||
)}
|
||||
data-testid="running-agents-link"
|
||||
>
|
||||
<Activity
|
||||
className={cn(
|
||||
'w-[18px] h-[18px]',
|
||||
isActiveRoute('running-agents') && 'text-brand-500'
|
||||
)}
|
||||
/>
|
||||
{runningAgentsCount > 0 && (
|
||||
<span
|
||||
className={cn(
|
||||
'absolute -top-1 -right-1 flex items-center justify-center',
|
||||
'min-w-4 h-4 px-1 text-[9px] font-bold rounded-full',
|
||||
'bg-brand-500 text-white shadow-sm'
|
||||
)}
|
||||
>
|
||||
{runningAgentsCount > 99 ? '99' : runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
Running Agents
|
||||
{runningAgentsCount > 0 && (
|
||||
<span className="ml-2 px-1.5 py-0.5 bg-brand-500 text-white rounded-full text-[10px]">
|
||||
{runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
)}
|
||||
|
||||
{/* Settings */}
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={() => navigate({ to: '/settings' })}
|
||||
className={cn(
|
||||
'flex items-center justify-center w-10 h-10 rounded-xl',
|
||||
'transition-all duration-200 ease-out titlebar-no-drag',
|
||||
isActiveRoute('settings')
|
||||
? [
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground border border-brand-500/30',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 border border-transparent hover:border-border/40',
|
||||
]
|
||||
)}
|
||||
data-testid="settings-button"
|
||||
>
|
||||
<Settings
|
||||
className={cn(
|
||||
'w-[18px] h-[18px]',
|
||||
isActiveRoute('settings') && 'text-brand-500'
|
||||
)}
|
||||
/>
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
Global Settings
|
||||
<span className="ml-2 px-1.5 py-0.5 bg-muted rounded text-[10px] font-mono text-muted-foreground">
|
||||
{formatShortcut(shortcuts.settings, true)}
|
||||
</span>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
|
||||
{/* Documentation */}
|
||||
{!hideWiki && (
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={handleWikiClick}
|
||||
className={cn(
|
||||
'flex items-center justify-center w-10 h-10 rounded-xl',
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 border border-transparent hover:border-border/40',
|
||||
'transition-all duration-200 ease-out titlebar-no-drag'
|
||||
)}
|
||||
data-testid="documentation-button"
|
||||
>
|
||||
<BookOpen className="w-[18px] h-[18px]" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
Documentation
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
)}
|
||||
|
||||
{/* Feedback */}
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={handleFeedbackClick}
|
||||
className={cn(
|
||||
'flex items-center justify-center w-10 h-10 rounded-xl',
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 border border-transparent hover:border-border/40',
|
||||
'transition-all duration-200 ease-out titlebar-no-drag'
|
||||
)}
|
||||
data-testid="feedback-button"
|
||||
>
|
||||
<MessageSquare className="w-[18px] h-[18px]" />
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
Feedback
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Expanded state
|
||||
return (
|
||||
<div className="shrink-0">
|
||||
<div
|
||||
className={cn(
|
||||
'shrink-0',
|
||||
// Top border with gradient fade
|
||||
'border-t border-border/40',
|
||||
// Elevated background for visual separation
|
||||
'bg-gradient-to-t from-background/10 via-sidebar/50 to-transparent'
|
||||
)}
|
||||
>
|
||||
{/* Running Agents Link */}
|
||||
{!hideRunningAgents && (
|
||||
<div className="px-3 py-0.5">
|
||||
<div className="p-2 pb-0">
|
||||
<button
|
||||
onClick={() => navigate({ to: '/running-agents' })}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-2 rounded-lg relative overflow-hidden titlebar-no-drag',
|
||||
'group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag',
|
||||
'transition-all duration-200 ease-out',
|
||||
isActiveRoute('running-agents')
|
||||
? [
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground font-medium',
|
||||
'border border-brand-500/30',
|
||||
'shadow-sm shadow-brand-500/10',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50',
|
||||
'border border-transparent hover:border-border/40',
|
||||
]
|
||||
'hover:shadow-sm',
|
||||
],
|
||||
sidebarOpen ? 'justify-start' : 'justify-center',
|
||||
'hover:scale-[1.02] active:scale-[0.97]'
|
||||
)}
|
||||
title={!sidebarOpen ? 'Running Agents' : undefined}
|
||||
data-testid="running-agents-link"
|
||||
>
|
||||
<Activity
|
||||
className={cn(
|
||||
'w-[18px] h-[18px] shrink-0 transition-all duration-200',
|
||||
isActiveRoute('running-agents')
|
||||
? 'text-brand-500 drop-shadow-sm'
|
||||
: 'group-hover:text-brand-400'
|
||||
<div className="relative">
|
||||
<Activity
|
||||
className={cn(
|
||||
'w-[18px] h-[18px] shrink-0 transition-all duration-200',
|
||||
isActiveRoute('running-agents')
|
||||
? 'text-brand-500 drop-shadow-sm'
|
||||
: 'group-hover:text-brand-400 group-hover:scale-110'
|
||||
)}
|
||||
/>
|
||||
{/* Running agents count badge - shown in collapsed state */}
|
||||
{!sidebarOpen && runningAgentsCount > 0 && (
|
||||
<span
|
||||
className={cn(
|
||||
'absolute -top-1.5 -right-1.5 flex items-center justify-center',
|
||||
'min-w-4 h-4 px-1 text-[9px] font-bold rounded-full',
|
||||
'bg-brand-500 text-white shadow-sm',
|
||||
'animate-in fade-in zoom-in duration-200'
|
||||
)}
|
||||
data-testid="running-agents-count-collapsed"
|
||||
>
|
||||
{runningAgentsCount > 99 ? '99' : runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
/>
|
||||
<span className="ml-3 text-sm flex-1 text-left">Running Agents</span>
|
||||
{runningAgentsCount > 0 && (
|
||||
</div>
|
||||
<span
|
||||
className={cn(
|
||||
'ml-3 font-medium text-sm flex-1 text-left',
|
||||
sidebarOpen ? 'block' : 'hidden'
|
||||
)}
|
||||
>
|
||||
Running Agents
|
||||
</span>
|
||||
{/* Running agents count badge - shown in expanded state */}
|
||||
{sidebarOpen && runningAgentsCount > 0 && (
|
||||
<span
|
||||
className={cn(
|
||||
'flex items-center justify-center',
|
||||
'min-w-5 h-5 px-1.5 text-[10px] font-bold rounded-full',
|
||||
'min-w-6 h-6 px-1.5 text-xs font-semibold rounded-full',
|
||||
'bg-brand-500 text-white shadow-sm',
|
||||
'animate-in fade-in zoom-in duration-200',
|
||||
isActiveRoute('running-agents') && 'bg-brand-600'
|
||||
)}
|
||||
data-testid="running-agents-count"
|
||||
@@ -263,30 +106,52 @@ export function SidebarFooter({
|
||||
{runningAgentsCount > 99 ? '99' : runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
{!sidebarOpen && (
|
||||
<span
|
||||
className={cn(
|
||||
'absolute left-full ml-3 px-2.5 py-1.5 rounded-lg',
|
||||
'bg-popover text-popover-foreground text-xs font-medium',
|
||||
'border border-border shadow-lg',
|
||||
'opacity-0 group-hover:opacity-100',
|
||||
'transition-all duration-200 whitespace-nowrap z-50',
|
||||
'translate-x-1 group-hover:translate-x-0'
|
||||
)}
|
||||
>
|
||||
Running Agents
|
||||
{runningAgentsCount > 0 && (
|
||||
<span className="ml-2 px-1.5 py-0.5 bg-brand-500 text-white rounded-full text-[10px] font-semibold">
|
||||
{runningAgentsCount}
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Settings Link */}
|
||||
<div className="px-3 py-0.5">
|
||||
<div className="p-2">
|
||||
<button
|
||||
onClick={() => navigate({ to: '/settings' })}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-2 rounded-lg relative overflow-hidden titlebar-no-drag',
|
||||
'group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag',
|
||||
'transition-all duration-200 ease-out',
|
||||
isActiveRoute('settings')
|
||||
? [
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground font-medium',
|
||||
'border border-brand-500/30',
|
||||
'shadow-sm shadow-brand-500/10',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50',
|
||||
'border border-transparent hover:border-border/40',
|
||||
]
|
||||
'hover:shadow-sm',
|
||||
],
|
||||
sidebarOpen ? 'justify-start' : 'justify-center',
|
||||
'hover:scale-[1.02] active:scale-[0.97]'
|
||||
)}
|
||||
title={!sidebarOpen ? 'Global Settings' : undefined}
|
||||
data-testid="settings-button"
|
||||
>
|
||||
<Settings
|
||||
@@ -294,70 +159,49 @@ export function SidebarFooter({
|
||||
'w-[18px] h-[18px] shrink-0 transition-all duration-200',
|
||||
isActiveRoute('settings')
|
||||
? 'text-brand-500 drop-shadow-sm'
|
||||
: 'group-hover:text-brand-400'
|
||||
: 'group-hover:text-brand-400 group-hover:rotate-90 group-hover:scale-110'
|
||||
)}
|
||||
/>
|
||||
<span className="ml-3 text-sm flex-1 text-left">Settings</span>
|
||||
<span
|
||||
className={cn(
|
||||
'flex items-center justify-center min-w-5 h-5 px-1.5 text-[10px] font-mono rounded transition-all duration-200',
|
||||
isActiveRoute('settings')
|
||||
? 'bg-brand-500/20 text-brand-400'
|
||||
: 'bg-muted text-muted-foreground group-hover:bg-accent'
|
||||
'ml-3 font-medium text-sm flex-1 text-left',
|
||||
sidebarOpen ? 'block' : 'hidden'
|
||||
)}
|
||||
data-testid="shortcut-settings"
|
||||
>
|
||||
{formatShortcut(shortcuts.settings, true)}
|
||||
Global Settings
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Separator */}
|
||||
<div className="h-px bg-border/40 mx-3 my-2" />
|
||||
|
||||
{/* Documentation Link */}
|
||||
{!hideWiki && (
|
||||
<div className="px-3 py-0.5">
|
||||
<button
|
||||
onClick={handleWikiClick}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-1.5 rounded-md titlebar-no-drag',
|
||||
'text-muted-foreground/70 hover:text-foreground',
|
||||
'hover:bg-accent/30',
|
||||
'transition-all duration-200 ease-out'
|
||||
)}
|
||||
data-testid="documentation-button"
|
||||
>
|
||||
<BookOpen className="w-4 h-4 shrink-0" />
|
||||
<span className="ml-2.5 text-xs">Documentation</span>
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Feedback Link */}
|
||||
<div className="px-3 pt-0.5">
|
||||
<button
|
||||
onClick={handleFeedbackClick}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-1.5 rounded-md titlebar-no-drag',
|
||||
'text-muted-foreground/70 hover:text-foreground',
|
||||
'hover:bg-accent/30',
|
||||
'transition-all duration-200 ease-out'
|
||||
{sidebarOpen && (
|
||||
<span
|
||||
className={cn(
|
||||
'flex items-center justify-center min-w-5 h-5 px-1.5 text-[10px] font-mono rounded-md transition-all duration-200',
|
||||
isActiveRoute('settings')
|
||||
? 'bg-brand-500/20 text-brand-400'
|
||||
: 'bg-muted text-muted-foreground group-hover:bg-accent'
|
||||
)}
|
||||
data-testid="shortcut-settings"
|
||||
>
|
||||
{formatShortcut(shortcuts.settings, true)}
|
||||
</span>
|
||||
)}
|
||||
{!sidebarOpen && (
|
||||
<span
|
||||
className={cn(
|
||||
'absolute left-full ml-3 px-2.5 py-1.5 rounded-lg',
|
||||
'bg-popover text-popover-foreground text-xs font-medium',
|
||||
'border border-border shadow-lg',
|
||||
'opacity-0 group-hover:opacity-100',
|
||||
'transition-all duration-200 whitespace-nowrap z-50',
|
||||
'translate-x-1 group-hover:translate-x-0'
|
||||
)}
|
||||
>
|
||||
Global Settings
|
||||
<span className="ml-2 px-1.5 py-0.5 bg-muted rounded text-[10px] font-mono text-muted-foreground">
|
||||
{formatShortcut(shortcuts.settings, true)}
|
||||
</span>
|
||||
</span>
|
||||
)}
|
||||
data-testid="feedback-button"
|
||||
>
|
||||
<MessageSquare className="w-4 h-4 shrink-0" />
|
||||
<span className="ml-2.5 text-xs">Feedback</span>
|
||||
<ExternalLink className="w-3 h-3 ml-auto text-muted-foreground/50" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Version */}
|
||||
<div className="px-6 py-1.5 text-center">
|
||||
<span className="text-[9px] text-muted-foreground/40">
|
||||
v{appVersion} {versionSuffix}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,411 +1,179 @@
|
||||
import { useState, useCallback } from 'react';
|
||||
import { useNavigate } from '@tanstack/react-router';
|
||||
import { ChevronsUpDown, Folder, Plus, FolderOpen } from 'lucide-react';
|
||||
import { useState } from 'react';
|
||||
import { Folder, LucideIcon, X, Menu, Check } from 'lucide-react';
|
||||
import * as LucideIcons from 'lucide-react';
|
||||
import type { LucideIcon } from 'lucide-react';
|
||||
import { cn, isMac } from '@/lib/utils';
|
||||
import { formatShortcut } from '@/store/app-store';
|
||||
import { isElectron, type Project } from '@/lib/electron';
|
||||
import { getAuthenticatedImageUrl } from '@/lib/api-fetch';
|
||||
import { isElectron, type Project } from '@/lib/electron';
|
||||
import { useIsCompact } from '@/hooks/use-media-query';
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';
|
||||
import { useAppStore } from '@/store/app-store';
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuSeparator,
|
||||
DropdownMenuTrigger,
|
||||
} from '@/components/ui/dropdown-menu';
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';
|
||||
|
||||
interface SidebarHeaderProps {
|
||||
sidebarOpen: boolean;
|
||||
currentProject: Project | null;
|
||||
onNewProject: () => void;
|
||||
onOpenFolder: () => void;
|
||||
onProjectContextMenu: (project: Project, event: React.MouseEvent) => void;
|
||||
onClose?: () => void;
|
||||
onExpand?: () => void;
|
||||
}
|
||||
|
||||
export function SidebarHeader({
|
||||
sidebarOpen,
|
||||
currentProject,
|
||||
onNewProject,
|
||||
onOpenFolder,
|
||||
onProjectContextMenu,
|
||||
onClose,
|
||||
onExpand,
|
||||
}: SidebarHeaderProps) {
|
||||
const navigate = useNavigate();
|
||||
const isCompact = useIsCompact();
|
||||
const [projectListOpen, setProjectListOpen] = useState(false);
|
||||
const { projects, setCurrentProject } = useAppStore();
|
||||
const [dropdownOpen, setDropdownOpen] = useState(false);
|
||||
|
||||
const handleLogoClick = useCallback(() => {
|
||||
navigate({ to: '/dashboard' });
|
||||
}, [navigate]);
|
||||
|
||||
const handleProjectSelect = useCallback(
|
||||
(project: Project) => {
|
||||
setCurrentProject(project);
|
||||
setDropdownOpen(false);
|
||||
navigate({ to: '/board' });
|
||||
},
|
||||
[setCurrentProject, navigate]
|
||||
);
|
||||
|
||||
const getIconComponent = (project: Project): LucideIcon => {
|
||||
if (project.icon && project.icon in LucideIcons) {
|
||||
return (LucideIcons as unknown as Record<string, LucideIcon>)[project.icon];
|
||||
// Get the icon component from lucide-react
|
||||
const getIconComponent = (): LucideIcon => {
|
||||
if (currentProject?.icon && currentProject.icon in LucideIcons) {
|
||||
return (LucideIcons as unknown as Record<string, LucideIcon>)[currentProject.icon];
|
||||
}
|
||||
return Folder;
};
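// e.g. a project whose icon is 'Rocket' resolves to LucideIcons.Rocket; a missing or
// unknown icon name falls back to the generic Folder icon.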
const renderProjectIcon = (project: Project, size: 'sm' | 'md' = 'md') => {
|
||||
const IconComponent = getIconComponent(project);
|
||||
const sizeClasses = size === 'sm' ? 'w-6 h-6' : 'w-8 h-8';
|
||||
const iconSizeClasses = size === 'sm' ? 'w-4 h-4' : 'w-5 h-5';
|
||||
const IconComponent = getIconComponent();
|
||||
const hasCustomIcon = !!currentProject?.customIconPath;
|
||||
|
||||
if (project.customIconPath) {
|
||||
return (
|
||||
<img
|
||||
src={getAuthenticatedImageUrl(project.customIconPath, project.path)}
|
||||
alt={project.name}
|
||||
className={cn(sizeClasses, 'rounded-lg object-cover ring-1 ring-border/50')}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
sizeClasses,
|
||||
'rounded-lg bg-brand-500/10 border border-brand-500/20 flex items-center justify-center'
|
||||
)}
|
||||
>
|
||||
<IconComponent className={cn(iconSizeClasses, 'text-brand-500')} />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
// Collapsed state - show logo only
|
||||
if (!sidebarOpen) {
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'shrink-0 flex flex-col items-center relative px-2 pt-3 pb-2',
|
||||
isMac && isElectron() && 'pt-[10px]'
|
||||
)}
|
||||
>
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
onClick={handleLogoClick}
|
||||
className="group flex flex-col items-center"
|
||||
data-testid="logo-button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 256 256"
|
||||
role="img"
|
||||
aria-label="Automaker Logo"
|
||||
className="size-8 group-hover:rotate-12 transition-transform duration-300 ease-out"
|
||||
>
|
||||
<defs>
|
||||
<linearGradient
|
||||
id="bg-collapsed"
|
||||
x1="0"
|
||||
y1="0"
|
||||
x2="256"
|
||||
y2="256"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop offset="0%" style={{ stopColor: 'var(--brand-400)' }} />
|
||||
<stop offset="100%" style={{ stopColor: 'var(--brand-600)' }} />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect x="16" y="16" width="224" height="224" rx="56" fill="url(#bg-collapsed)" />
|
||||
<g
|
||||
fill="none"
|
||||
stroke="#FFFFFF"
|
||||
strokeWidth="20"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
>
|
||||
<path d="M92 92 L52 128 L92 164" />
|
||||
<path d="M144 72 L116 184" />
|
||||
<path d="M164 92 L204 128 L164 164" />
|
||||
</g>
|
||||
</svg>
|
||||
</button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
Go to Dashboard
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
|
||||
{/* Collapsed project icon with dropdown */}
|
||||
{currentProject && (
|
||||
<>
|
||||
<div className="w-full h-px bg-border/40 my-2" />
|
||||
<DropdownMenu open={dropdownOpen} onOpenChange={setDropdownOpen}>
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
onContextMenu={(e) => onProjectContextMenu(currentProject, e)}
|
||||
className="p-1 rounded-lg hover:bg-accent/50 transition-colors"
|
||||
data-testid="collapsed-project-button"
|
||||
>
|
||||
{renderProjectIcon(currentProject)}
|
||||
</button>
|
||||
</DropdownMenuTrigger>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
{currentProject.name}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
<DropdownMenuContent
|
||||
align="start"
|
||||
side="right"
|
||||
sideOffset={8}
|
||||
className="w-64"
|
||||
data-testid="collapsed-project-dropdown-content"
|
||||
>
|
||||
<div className="px-2 py-1.5">
|
||||
<span className="text-xs font-medium text-muted-foreground">Projects</span>
|
||||
</div>
|
||||
{projects.map((project, index) => {
|
||||
const isActive = currentProject?.id === project.id;
|
||||
const hotkeyLabel = index < 9 ? `${index + 1}` : index === 9 ? '0' : undefined;
|
||||
|
||||
return (
|
||||
<DropdownMenuItem
|
||||
key={project.id}
|
||||
onClick={() => handleProjectSelect(project)}
|
||||
onContextMenu={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setDropdownOpen(false);
|
||||
onProjectContextMenu(project, e);
|
||||
}}
|
||||
className="flex items-center gap-3 cursor-pointer"
|
||||
data-testid={`collapsed-project-item-${project.id}`}
|
||||
>
|
||||
{renderProjectIcon(project, 'sm')}
|
||||
<span
|
||||
className={cn(
|
||||
'flex-1 truncate',
|
||||
isActive && 'font-semibold text-foreground'
|
||||
)}
|
||||
>
|
||||
{project.name}
|
||||
</span>
|
||||
{hotkeyLabel && (
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{formatShortcut(`Cmd+${hotkeyLabel}`, true)}
|
||||
</span>
|
||||
)}
|
||||
</DropdownMenuItem>
|
||||
);
|
||||
})}
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuItem
|
||||
onClick={() => {
|
||||
setDropdownOpen(false);
|
||||
onNewProject();
|
||||
}}
|
||||
className="cursor-pointer"
|
||||
data-testid="collapsed-new-project-dropdown-item"
|
||||
>
|
||||
<Plus className="w-4 h-4 mr-2" />
|
||||
<span>New Project</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => {
|
||||
setDropdownOpen(false);
|
||||
onOpenFolder();
|
||||
}}
|
||||
className="cursor-pointer"
|
||||
data-testid="collapsed-open-project-dropdown-item"
|
||||
>
|
||||
<FolderOpen className="w-4 h-4 mr-2" />
|
||||
<span>Open Project</span>
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Expanded state - show logo + project dropdown
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'shrink-0 flex flex-col relative px-3 pt-3 pb-2',
|
||||
'shrink-0 flex flex-col relative',
|
||||
// Add padding on macOS Electron for traffic light buttons
|
||||
isMac && isElectron() && 'pt-[10px]'
|
||||
)}
|
||||
>
|
||||
{/* Header with logo and project dropdown */}
|
||||
<div className="flex items-center gap-3">
|
||||
{/* Logo */}
|
||||
{/* Mobile close button - only visible on mobile when sidebar is open */}
|
||||
{sidebarOpen && onClose && (
|
||||
<button
|
||||
onClick={handleLogoClick}
|
||||
className="group flex items-center shrink-0 titlebar-no-drag"
|
||||
title="Go to Dashboard"
|
||||
data-testid="logo-button"
|
||||
onClick={onClose}
|
||||
className={cn(
|
||||
'lg:hidden absolute top-3 right-3 z-10',
|
||||
'flex items-center justify-center w-8 h-8 rounded-lg',
|
||||
'bg-muted/50 hover:bg-muted',
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'transition-colors duration-200'
|
||||
)}
|
||||
aria-label="Close navigation"
|
||||
data-testid="sidebar-mobile-close"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 256 256"
|
||||
role="img"
|
||||
aria-label="Automaker Logo"
|
||||
className="h-8 w-8 group-hover:rotate-12 transition-transform duration-300 ease-out"
|
||||
>
|
||||
<defs>
|
||||
<linearGradient
|
||||
id="bg-header"
|
||||
x1="0"
|
||||
y1="0"
|
||||
x2="256"
|
||||
y2="256"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop offset="0%" style={{ stopColor: 'var(--brand-400)' }} />
|
||||
<stop offset="100%" style={{ stopColor: 'var(--brand-600)' }} />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect x="16" y="16" width="224" height="224" rx="56" fill="url(#bg-header)" />
|
||||
<g
|
||||
fill="none"
|
||||
stroke="#FFFFFF"
|
||||
strokeWidth="20"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
>
|
||||
<path d="M92 92 L52 128 L92 164" />
|
||||
<path d="M144 72 L116 184" />
|
||||
<path d="M164 92 L204 128 L164 164" />
|
||||
</g>
|
||||
</svg>
|
||||
<X className="w-5 h-5" />
|
||||
</button>
|
||||
|
||||
{/* Project Dropdown */}
|
||||
{currentProject ? (
|
||||
<DropdownMenu open={dropdownOpen} onOpenChange={setDropdownOpen}>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
className={cn(
|
||||
'flex-1 flex items-center gap-2 px-2 py-1.5 rounded-lg min-w-0',
|
||||
'hover:bg-accent/50 transition-colors titlebar-no-drag',
|
||||
'focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-1'
|
||||
)}
|
||||
onContextMenu={(e) => onProjectContextMenu(currentProject, e)}
|
||||
data-testid="project-dropdown-trigger"
|
||||
>
|
||||
{renderProjectIcon(currentProject, 'sm')}
|
||||
<span className="flex-1 text-sm font-semibold text-foreground truncate text-left">
|
||||
{currentProject.name}
|
||||
</span>
|
||||
<ChevronsUpDown className="w-4 h-4 text-muted-foreground shrink-0" />
|
||||
</button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align="start"
|
||||
side="bottom"
|
||||
sideOffset={8}
|
||||
className="w-64"
|
||||
data-testid="project-dropdown-content"
|
||||
)}
|
||||
{/* Mobile expand button - hamburger menu to expand sidebar when collapsed on mobile */}
|
||||
{!sidebarOpen && isCompact && onExpand && (
|
||||
<button
|
||||
onClick={onExpand}
|
||||
className={cn(
|
||||
'flex items-center justify-center w-10 h-10 mx-auto mt-2 rounded-lg',
|
||||
'bg-muted/50 hover:bg-muted',
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'transition-colors duration-200'
|
||||
)}
|
||||
aria-label="Expand navigation"
|
||||
data-testid="sidebar-mobile-expand"
|
||||
>
|
||||
<Menu className="w-5 h-5" />
|
||||
</button>
|
||||
)}
|
||||
{/* Project name and icon display - entire element clickable on mobile */}
|
||||
{currentProject && (
|
||||
<Popover open={projectListOpen} onOpenChange={setProjectListOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<button
|
||||
className={cn(
|
||||
'flex items-center gap-3 px-4 pt-3 pb-1 w-full text-left',
|
||||
'rounded-lg transition-colors duration-150',
|
||||
!sidebarOpen && 'justify-center px-2',
|
||||
// Only enable click behavior on compact screens
|
||||
isCompact && 'hover:bg-accent/50 cursor-pointer',
|
||||
!isCompact && 'pointer-events-none'
|
||||
)}
|
||||
title={isCompact ? 'Switch project' : undefined}
|
||||
>
|
||||
<div className="px-2 py-1.5">
|
||||
<span className="text-xs font-medium text-muted-foreground">Projects</span>
|
||||
{/* Project Icon */}
|
||||
<div className="shrink-0">
|
||||
{hasCustomIcon ? (
|
||||
<img
|
||||
src={getAuthenticatedImageUrl(
|
||||
currentProject.customIconPath!,
|
||||
currentProject.path
|
||||
)}
|
||||
alt={currentProject.name}
|
||||
className="w-8 h-8 rounded-lg object-cover ring-1 ring-border/50"
|
||||
/>
|
||||
) : (
|
||||
<div className="w-8 h-8 rounded-lg bg-brand-500/10 border border-brand-500/20 flex items-center justify-center">
|
||||
<IconComponent className="w-5 h-5 text-brand-500" />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{projects.map((project, index) => {
|
||||
|
||||
{/* Project Name - only show when sidebar is open */}
|
||||
{sidebarOpen && (
|
||||
<div className="flex-1 min-w-0">
|
||||
<h2 className="text-sm font-semibold text-foreground truncate">
|
||||
{currentProject.name}
|
||||
</h2>
|
||||
</div>
|
||||
)}
|
||||
</button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-64 p-2" align="start" side="bottom" sideOffset={8}>
|
||||
<div className="space-y-1">
|
||||
<p className="text-xs font-medium text-muted-foreground px-2 py-1">Switch Project</p>
|
||||
{projects.map((project) => {
|
||||
const ProjectIcon =
|
||||
project.icon && project.icon in LucideIcons
|
||||
? (LucideIcons as unknown as Record<string, LucideIcon>)[project.icon]
|
||||
: Folder;
|
||||
const isActive = currentProject?.id === project.id;
|
||||
const hotkeyLabel = index < 9 ? `${index + 1}` : index === 9 ? '0' : undefined;
|
||||
|
||||
return (
|
||||
<DropdownMenuItem
|
||||
<button
|
||||
key={project.id}
|
||||
onClick={() => handleProjectSelect(project)}
|
||||
onContextMenu={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
setDropdownOpen(false);
|
||||
onProjectContextMenu(project, e);
|
||||
onClick={() => {
|
||||
setCurrentProject(project);
|
||||
setProjectListOpen(false);
|
||||
}}
|
||||
className="flex items-center gap-3 cursor-pointer"
|
||||
data-testid={`project-item-${project.id}`}
|
||||
>
|
||||
{renderProjectIcon(project, 'sm')}
|
||||
<span
|
||||
className={cn('flex-1 truncate', isActive && 'font-semibold text-foreground')}
|
||||
>
|
||||
{project.name}
|
||||
</span>
|
||||
{hotkeyLabel && (
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{formatShortcut(`Cmd+${hotkeyLabel}`, true)}
|
||||
</span>
|
||||
className={cn(
|
||||
'w-full flex items-center gap-3 px-2 py-2 rounded-lg text-left',
|
||||
'transition-colors duration-150',
|
||||
isActive
|
||||
? 'bg-brand-500/10 text-brand-500'
|
||||
: 'hover:bg-accent text-foreground'
|
||||
)}
|
||||
</DropdownMenuItem>
|
||||
>
|
||||
{project.customIconPath ? (
|
||||
<img
|
||||
src={getAuthenticatedImageUrl(project.customIconPath, project.path)}
|
||||
alt={project.name}
|
||||
className="w-6 h-6 rounded object-cover ring-1 ring-border/50"
|
||||
/>
|
||||
) : (
|
||||
<div
|
||||
className={cn(
|
||||
'w-6 h-6 rounded flex items-center justify-center',
|
||||
isActive ? 'bg-brand-500/20' : 'bg-muted'
|
||||
)}
|
||||
>
|
||||
<ProjectIcon
|
||||
className={cn(
|
||||
'w-4 h-4',
|
||||
isActive ? 'text-brand-500' : 'text-muted-foreground'
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<span className="flex-1 text-sm truncate">{project.name}</span>
|
||||
{isActive && <Check className="w-4 h-4 text-brand-500" />}
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuItem
|
||||
onClick={() => {
|
||||
setDropdownOpen(false);
|
||||
onNewProject();
|
||||
}}
|
||||
className="cursor-pointer"
|
||||
data-testid="new-project-dropdown-item"
|
||||
>
|
||||
<Plus className="w-4 h-4 mr-2" />
|
||||
<span>New Project</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => {
|
||||
setDropdownOpen(false);
|
||||
onOpenFolder();
|
||||
}}
|
||||
className="cursor-pointer"
|
||||
data-testid="open-project-dropdown-item"
|
||||
>
|
||||
<FolderOpen className="w-4 h-4 mr-2" />
|
||||
<span>Open Project</span>
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
) : (
|
||||
<div className="flex-1 flex items-center gap-2">
|
||||
<button
|
||||
onClick={onNewProject}
|
||||
className={cn(
|
||||
'flex items-center gap-2 px-3 py-1.5 rounded-lg',
|
||||
'text-sm text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 transition-colors titlebar-no-drag'
|
||||
)}
|
||||
data-testid="new-project-button"
|
||||
>
|
||||
<Plus className="w-4 h-4" />
|
||||
<span>New Project</span>
|
||||
</button>
|
||||
<button
|
||||
onClick={onOpenFolder}
|
||||
className={cn(
|
||||
'flex items-center gap-2 px-3 py-1.5 rounded-lg',
|
||||
'text-sm text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 transition-colors titlebar-no-drag'
|
||||
)}
|
||||
data-testid="open-project-button"
|
||||
>
|
||||
<FolderOpen className="w-4 h-4" />
|
||||
<span>Open</span>
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,24 +1,9 @@
|
||||
import { useState, useCallback, useEffect, useRef } from 'react';
|
||||
import type { NavigateOptions } from '@tanstack/react-router';
|
||||
import { ChevronDown, Wrench, Github } from 'lucide-react';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { formatShortcut } from '@/store/app-store';
|
||||
import type { NavSection } from '../types';
|
||||
import type { Project } from '@/lib/electron';
|
||||
import { Spinner } from '@/components/ui/spinner';
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from '@/components/ui/dropdown-menu';
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';
|
||||
|
||||
// Map section labels to icons
|
||||
const sectionIcons: Record<string, React.ComponentType<{ className?: string }>> = {
|
||||
Tools: Wrench,
|
||||
GitHub: Github,
|
||||
};
|
||||
|
||||
interface SidebarNavigationProps {
|
||||
currentProject: Project | null;
|
||||
@@ -26,7 +11,6 @@ interface SidebarNavigationProps {
|
||||
navSections: NavSection[];
|
||||
isActiveRoute: (id: string) => boolean;
|
||||
navigate: (opts: NavigateOptions) => void;
|
||||
onScrollStateChange?: (canScrollDown: boolean) => void;
|
||||
}
|
||||
|
||||
export function SidebarNavigation({
|
||||
@@ -35,299 +19,174 @@ export function SidebarNavigation({
|
||||
navSections,
|
||||
isActiveRoute,
|
||||
navigate,
|
||||
onScrollStateChange,
|
||||
}: SidebarNavigationProps) {
|
||||
const navRef = useRef<HTMLElement>(null);
|
||||
|
||||
// Track collapsed state for each collapsible section
|
||||
const [collapsedSections, setCollapsedSections] = useState<Record<string, boolean>>({});
|
||||
|
||||
// Initialize collapsed state when sections change (e.g., GitHub section appears)
|
||||
useEffect(() => {
|
||||
setCollapsedSections((prev) => {
|
||||
const updated = { ...prev };
|
||||
navSections.forEach((section) => {
|
||||
if (section.collapsible && section.label && !(section.label in updated)) {
|
||||
updated[section.label] = section.defaultCollapsed ?? false;
|
||||
}
|
||||
});
|
||||
return updated;
|
||||
});
}, [navSections]);
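// e.g. when the GitHub section first appears, it is seeded with its defaultCollapsed value
// while the user's existing open/closed toggles for other sections are preserved.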
// Check scroll state
|
||||
const checkScrollState = useCallback(() => {
|
||||
if (!navRef.current || !onScrollStateChange) return;
|
||||
const { scrollTop, scrollHeight, clientHeight } = navRef.current;
|
||||
const canScrollDown = scrollTop + clientHeight < scrollHeight - 10;
onScrollStateChange(canScrollDown);
}, [onScrollStateChange]);
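// e.g. with scrollHeight 800 and clientHeight 600, canScrollDown stays true until scrollTop
// reaches 190; the 10px slack avoids flagging "more content" when the list is already at the bottom.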
// Monitor scroll state
|
||||
useEffect(() => {
|
||||
checkScrollState();
|
||||
const nav = navRef.current;
|
||||
if (!nav) return;
|
||||
|
||||
nav.addEventListener('scroll', checkScrollState);
|
||||
const resizeObserver = new ResizeObserver(checkScrollState);
|
||||
resizeObserver.observe(nav);
|
||||
|
||||
return () => {
|
||||
nav.removeEventListener('scroll', checkScrollState);
|
||||
resizeObserver.disconnect();
|
||||
};
|
||||
}, [checkScrollState, collapsedSections]);
|
||||
|
||||
const toggleSection = useCallback((label: string) => {
|
||||
setCollapsedSections((prev) => ({
|
||||
...prev,
|
||||
[label]: !prev[label],
|
||||
}));
|
||||
}, []);
|
||||
|
||||
// Filter sections: always show non-project sections, only show project sections when project exists
|
||||
const visibleSections = navSections.filter((section) => {
|
||||
// Always show Dashboard (first section with no label)
|
||||
if (!section.label && section.items.some((item) => item.id === 'dashboard')) {
|
||||
return true;
|
||||
}
|
||||
// Show other sections only when project is selected
|
||||
return !!currentProject;
|
||||
});
|
||||
|
||||
return (
|
||||
<nav ref={navRef} className={cn('flex-1 overflow-y-auto scrollbar-hide px-3 pb-2 mt-1')}>
|
||||
{/* Navigation sections */}
|
||||
{visibleSections.map((section, sectionIdx) => {
|
||||
const isCollapsed = section.label ? collapsedSections[section.label] : false;
|
||||
const isCollapsible = section.collapsible && section.label && sidebarOpen;
|
||||
|
||||
const SectionIcon = section.label ? sectionIcons[section.label] : null;
|
||||
|
||||
return (
|
||||
<div key={sectionIdx} className={sectionIdx > 0 && sidebarOpen ? 'mt-4' : ''}>
|
||||
{/* Section Label - clickable if collapsible (expanded sidebar) */}
|
||||
<nav
|
||||
className={cn(
|
||||
'flex-1 overflow-y-auto scrollbar-hide px-3 pb-2',
|
||||
sidebarOpen ? 'mt-1' : 'mt-1'
|
||||
)}
|
||||
>
|
||||
{!currentProject && sidebarOpen ? (
|
||||
// Placeholder when no project is selected (only in expanded state)
|
||||
<div className="flex items-center justify-center h-full px-4">
|
||||
<p className="text-muted-foreground text-sm text-center">
|
||||
<span className="block">Select or create a project above</span>
|
||||
</p>
|
||||
</div>
|
||||
) : currentProject ? (
|
||||
// Navigation sections when project is selected
|
||||
navSections.map((section, sectionIdx) => (
|
||||
<div key={sectionIdx} className={sectionIdx > 0 && sidebarOpen ? 'mt-6' : ''}>
|
||||
{/* Section Label */}
|
||||
{section.label && sidebarOpen && (
|
||||
<button
|
||||
onClick={() => isCollapsible && toggleSection(section.label!)}
|
||||
className={cn(
|
||||
'flex items-center w-full px-3 mb-1.5',
|
||||
isCollapsible && 'cursor-pointer hover:text-foreground'
|
||||
)}
|
||||
disabled={!isCollapsible}
|
||||
>
|
||||
<div className="px-3 mb-2">
|
||||
<span className="text-[10px] font-semibold text-muted-foreground/70 uppercase tracking-widest">
|
||||
{section.label}
|
||||
</span>
|
||||
{isCollapsible && (
|
||||
<ChevronDown
|
||||
className={cn(
|
||||
'w-3 h-3 ml-auto text-muted-foreground/50 transition-transform duration-200',
|
||||
isCollapsed && '-rotate-90'
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Section icon with dropdown (collapsed sidebar) */}
|
||||
{section.label && !sidebarOpen && SectionIcon && section.collapsible && isCollapsed && (
|
||||
<DropdownMenu>
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
className={cn(
|
||||
'group flex items-center justify-center w-full py-2 rounded-lg',
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50 border border-transparent hover:border-border/40',
|
||||
'transition-all duration-200 ease-out'
|
||||
)}
|
||||
>
|
||||
<SectionIcon className="w-[18px] h-[18px]" />
|
||||
</button>
|
||||
</DropdownMenuTrigger>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" sideOffset={8}>
|
||||
{section.label}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
<DropdownMenuContent side="right" align="start" sideOffset={8} className="w-48">
|
||||
{section.items.map((item) => {
|
||||
const ItemIcon = item.icon;
|
||||
return (
|
||||
<DropdownMenuItem
|
||||
key={item.id}
|
||||
onClick={() => navigate({ to: `/${item.id}` as unknown as '/' })}
|
||||
className="flex items-center gap-2 cursor-pointer"
|
||||
>
|
||||
<ItemIcon className="w-4 h-4" />
|
||||
<span>{item.label}</span>
|
||||
{item.shortcut && (
|
||||
<span className="ml-auto text-[10px] font-mono text-muted-foreground">
|
||||
{formatShortcut(item.shortcut, true)}
|
||||
</span>
|
||||
)}
|
||||
</DropdownMenuItem>
|
||||
);
|
||||
})}
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
|
||||
{/* Separator for sections without label (visual separation) */}
|
||||
{!section.label && sectionIdx > 0 && sidebarOpen && (
|
||||
<div className="h-px bg-border/40 mx-3 mb-3"></div>
|
||||
<div className="h-px bg-border/40 mx-3 mb-4"></div>
|
||||
)}
|
||||
{(section.label || sectionIdx > 0) && !sidebarOpen && (
|
||||
<div className="h-px bg-border/30 mx-2 my-1.5"></div>
|
||||
)}
|
||||
|
||||
{/* Nav Items - show when section is expanded, or when sidebar is collapsed and section doesn't use dropdown */}
|
||||
{!isCollapsed && (
|
||||
<div className="space-y-1">
|
||||
{section.items.map((item) => {
|
||||
const isActive = isActiveRoute(item.id);
|
||||
const Icon = item.icon;
|
||||
{/* Nav Items */}
|
||||
<div className="space-y-1.5">
|
||||
{section.items.map((item) => {
|
||||
const isActive = isActiveRoute(item.id);
|
||||
const Icon = item.icon;
|
||||
|
||||
return (
|
||||
<button
|
||||
key={item.id}
|
||||
onClick={() => {
|
||||
// Cast to the router's path type; item.id is constrained to known routes
|
||||
navigate({ to: `/${item.id}` as unknown as '/' });
|
||||
}}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-2 rounded-lg relative overflow-hidden titlebar-no-drag',
|
||||
'transition-all duration-200 ease-out',
|
||||
isActive
|
||||
? [
|
||||
// Active: Premium gradient with glow
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground font-medium',
|
||||
'border border-brand-500/30',
|
||||
'shadow-sm shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
// Inactive: Subtle hover state
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50',
|
||||
'border border-transparent hover:border-border/40',
|
||||
],
|
||||
sidebarOpen ? 'justify-start' : 'justify-center'
|
||||
return (
|
||||
<button
|
||||
key={item.id}
|
||||
onClick={() => {
|
||||
// Cast to the router's path type; item.id is constrained to known routes
|
||||
navigate({ to: `/${item.id}` as unknown as '/' });
|
||||
}}
|
||||
className={cn(
|
||||
'group flex items-center w-full px-3 py-2.5 rounded-xl relative overflow-hidden titlebar-no-drag',
|
||||
'transition-all duration-200 ease-out',
|
||||
isActive
|
||||
? [
|
||||
// Active: Premium gradient with glow
|
||||
'bg-gradient-to-r from-brand-500/20 via-brand-500/15 to-brand-600/10',
|
||||
'text-foreground font-medium',
|
||||
'border border-brand-500/30',
|
||||
'shadow-md shadow-brand-500/10',
|
||||
]
|
||||
: [
|
||||
// Inactive: Subtle hover state
|
||||
'text-muted-foreground hover:text-foreground',
|
||||
'hover:bg-accent/50',
|
||||
'border border-transparent hover:border-border/40',
|
||||
'hover:shadow-sm',
|
||||
],
|
||||
sidebarOpen ? 'justify-start' : 'justify-center',
|
||||
'hover:scale-[1.02] active:scale-[0.97]'
|
||||
)}
|
||||
title={!sidebarOpen ? item.label : undefined}
|
||||
data-testid={`nav-${item.id}`}
|
||||
>
|
||||
<div className="relative">
|
||||
{item.isLoading ? (
|
||||
<Spinner
|
||||
size="md"
|
||||
className={cn(
|
||||
'shrink-0',
|
||||
isActive ? 'text-brand-500' : 'text-muted-foreground'
|
||||
)}
|
||||
/>
|
||||
) : (
|
||||
<Icon
|
||||
className={cn(
|
||||
'w-[18px] h-[18px] shrink-0 transition-all duration-200',
|
||||
isActive
|
||||
? 'text-brand-500 drop-shadow-sm'
|
||||
: 'group-hover:text-brand-400 group-hover:scale-110'
)}
/>
)}
title={!sidebarOpen ? item.label : undefined}
data-testid={`nav-${item.id}`}
>
<div className="relative">
{item.isLoading ? (
<Spinner
size="sm"
className={cn(
'shrink-0',
isActive ? 'text-brand-500' : 'text-muted-foreground'
)}
/>
) : (
<Icon
className={cn(
'w-[18px] h-[18px] shrink-0 transition-all duration-200',
isActive
? 'text-brand-500 drop-shadow-sm'
: 'group-hover:text-brand-400'
)}
/>
)}
{/* Count badge for collapsed state */}
{!sidebarOpen && item.count !== undefined && item.count > 0 && (
<span
className={cn(
'absolute -top-1.5 -right-1.5 flex items-center justify-center',
'min-w-4 h-4 px-0.5 text-[9px] font-bold rounded-full',
'bg-primary text-primary-foreground shadow-sm',
'animate-in fade-in zoom-in duration-200'
)}
>
{item.count > 99 ? '99' : item.count}
</span>
)}
</div>
<span
className={cn(
'ml-3 text-sm flex-1 text-left',
sidebarOpen ? 'block' : 'hidden'
)}
>
{item.label}
</span>
{/* Count badge */}
{item.count !== undefined && item.count > 0 && sidebarOpen && (
{/* Count badge for collapsed state */}
{!sidebarOpen && item.count !== undefined && item.count > 0 && (
<span
className={cn(
'flex items-center justify-center',
'min-w-5 h-5 px-1.5 text-[10px] font-bold rounded-full',
'absolute -top-1.5 -right-1.5 flex items-center justify-center',
'min-w-4 h-4 px-1 text-[9px] font-bold rounded-full',
'bg-primary text-primary-foreground shadow-sm',
'animate-in fade-in zoom-in duration-200'
)}
data-testid={`count-${item.id}`}
>
{item.count > 99 ? '99+' : item.count}
{item.count > 99 ? '99' : item.count}
</span>
)}
{item.shortcut && sidebarOpen && !item.count && (
<span
className={cn(
'flex items-center justify-center min-w-5 h-5 px-1.5 text-[10px] font-mono rounded transition-all duration-200',
isActive
? 'bg-brand-500/20 text-brand-400'
: 'bg-muted text-muted-foreground group-hover:bg-accent'
)}
data-testid={`shortcut-${item.id}`}
>
{formatShortcut(item.shortcut, true)}
</span>
</div>
<span
className={cn(
'ml-3 font-medium text-sm flex-1 text-left',
sidebarOpen ? 'block' : 'hidden'
)}
{/* Tooltip for collapsed state */}
{!sidebarOpen && (
<span
className={cn(
'absolute left-full ml-3 px-2.5 py-1.5 rounded-md',
'bg-popover text-popover-foreground text-sm',
'border border-border shadow-lg',
'opacity-0 group-hover:opacity-100',
'transition-all duration-200 whitespace-nowrap z-50',
'translate-x-1 group-hover:translate-x-0'
)}
data-testid={`sidebar-tooltip-${item.label.toLowerCase()}`}
>
{item.label}
{item.shortcut && (
<span className="ml-2 px-1.5 py-0.5 bg-muted rounded text-[10px] font-mono text-muted-foreground">
{formatShortcut(item.shortcut, true)}
</span>
)}
</span>
)}
</button>
);
})}
</div>
)}
>
{item.label}
</span>
{/* Count badge */}
{item.count !== undefined && item.count > 0 && sidebarOpen && (
<span
className={cn(
'flex items-center justify-center',
'min-w-5 h-5 px-1.5 text-[10px] font-bold rounded-full',
'bg-primary text-primary-foreground shadow-sm',
'animate-in fade-in zoom-in duration-200'
)}
data-testid={`count-${item.id}`}
>
{item.count > 99 ? '99+' : item.count}
</span>
)}
{item.shortcut && sidebarOpen && !item.count && (
<span
className={cn(
'flex items-center justify-center min-w-5 h-5 px-1.5 text-[10px] font-mono rounded-md transition-all duration-200',
isActive
? 'bg-brand-500/20 text-brand-400'
: 'bg-muted text-muted-foreground group-hover:bg-accent'
)}
data-testid={`shortcut-${item.id}`}
>
{formatShortcut(item.shortcut, true)}
</span>
)}
{/* Tooltip for collapsed state */}
{!sidebarOpen && (
<span
className={cn(
'absolute left-full ml-3 px-2.5 py-1.5 rounded-lg',
'bg-popover text-popover-foreground text-xs font-medium',
'border border-border shadow-lg',
'opacity-0 group-hover:opacity-100',
'transition-all duration-200 whitespace-nowrap z-50',
'translate-x-1 group-hover:translate-x-0'
)}
data-testid={`sidebar-tooltip-${item.label.toLowerCase()}`}
>
{item.label}
{item.shortcut && (
<span className="ml-2 px-1.5 py-0.5 bg-muted rounded text-[10px] font-mono text-muted-foreground">
{formatShortcut(item.shortcut, true)}
</span>
)}
</span>
)}
</button>
);
})}
</div>
</div>
);
})}

{/* Placeholder when no project is selected */}
{!currentProject && sidebarOpen && (
<div className="flex items-center justify-center px-4 py-8">
<p className="text-muted-foreground text-xs text-center">
Select or create a project to continue
</p>
</div>
)}
))
) : null}
</nav>
);
}

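The shortcut badge and the collapsed-state tooltip above both call `formatShortcut(item.shortcut, true)`, a helper whose implementation is not included in this diff. The following is only an illustrative sketch of what such a helper might look like, assuming shortcuts are stored as strings like `"mod+1"` and that the boolean flag selects the compact rendering used in the badges; the function body and platform detection are assumptions, not the repository's actual code.

```typescript
// Hypothetical sketch — not the real formatShortcut used by the sidebar.
// Assumes shortcut strings such as "mod+1" or "mod+shift+k".
const isMac =
  typeof navigator !== 'undefined' && /Mac|iPhone|iPad/i.test(navigator.platform);

export function formatShortcut(shortcut: string, compact = false): string {
  const symbols = shortcut
    .toLowerCase()
    .split('+')
    .map((part) => {
      switch (part) {
        case 'mod':
          return isMac ? '⌘' : 'Ctrl';
        case 'shift':
          return isMac ? '⇧' : 'Shift';
        case 'alt':
          return isMac ? '⌥' : 'Alt';
        default:
          return part.toUpperCase(); // e.g. "1", "K"
      }
    });
  // Compact form (badge/tooltip): "⌘1" on macOS, "Ctrl+1" elsewhere.
  return compact ? symbols.join(isMac ? '' : '+') : symbols.join(' + ');
}
```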
@@ -1,36 +1,5 @@
import { darkThemes, lightThemes } from '@/config/theme-options';

/**
 * Shared constants for theme submenu positioning and layout.
 * Used across project-context-menu and project-selector-with-options components
 * to ensure consistent viewport-aware positioning and styling.
 */
export const THEME_SUBMENU_CONSTANTS = {
  /**
   * Estimated total height of the theme submenu content in pixels.
   * Includes all theme options, headers, padding, and "Use Global" button.
   */
  ESTIMATED_SUBMENU_HEIGHT: 620,

  /**
   * Padding from viewport edges to prevent submenu overflow.
   * Applied to both top and bottom edges when calculating available space.
   */
  COLLISION_PADDING: 32,

  /**
   * Vertical offset from context menu top to the "Project Theme" button.
   * Used for calculating submenu position relative to trigger button.
   */
  THEME_BUTTON_OFFSET: 50,

  /**
   * Height reserved for submenu header area (includes "Use Global" button and separator).
   * Subtracted from maxHeight to get scrollable content area height.
   */
  SUBMENU_HEADER_HEIGHT: 80,
} as const;

export const PROJECT_DARK_THEMES = darkThemes.map((opt) => ({
  value: opt.value,
  label: opt.label,
Some files were not shown because too many files have changed in this diff.
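The doc comments on `THEME_SUBMENU_CONSTANTS` describe a viewport-aware positioning calculation: an estimated submenu height, collision padding at the viewport edges, an offset from the context menu to the "Project Theme" button, and a header height subtracted from the usable height to get the scrollable area. The sketch below is not taken from the repository; it only illustrates one way those four values could combine, with the import path and the `getSubmenuLayout` name invented for the example.

```typescript
import { THEME_SUBMENU_CONSTANTS } from './theme-submenu-constants'; // hypothetical path

// Illustrative sketch (assumed, not the project's actual positioning code):
// given the context menu's top edge and the viewport height, compute where the
// theme submenu could sit and how tall its scrollable content area would be.
export function getSubmenuLayout(menuTop: number, viewportHeight: number) {
  const {
    ESTIMATED_SUBMENU_HEIGHT,
    COLLISION_PADDING,
    THEME_BUTTON_OFFSET,
    SUBMENU_HEADER_HEIGHT,
  } = THEME_SUBMENU_CONSTANTS;

  // The submenu is anchored next to the "Project Theme" button inside the menu.
  const anchorTop = menuTop + THEME_BUTTON_OFFSET;

  // Space available below the anchor before hitting the padded viewport edge.
  const spaceBelow = viewportHeight - anchorTop - COLLISION_PADDING;

  // Clamp to whatever fits; the remainder scrolls inside the submenu.
  const maxHeight = Math.min(ESTIMATED_SUBMENU_HEIGHT, Math.max(spaceBelow, 0));
  const scrollAreaHeight = Math.max(maxHeight - SUBMENU_HEADER_HEIGHT, 0);

  // If the submenu would overflow the bottom edge, shift it up, but never
  // closer than COLLISION_PADDING to the top edge.
  const top = Math.max(
    Math.min(anchorTop, viewportHeight - maxHeight - COLLISION_PADDING),
    COLLISION_PADDING
  );

  return { top, maxHeight, scrollAreaHeight };
}
```

For example, with a 900px viewport and a context menu opened at `menuTop = 600`, the 620px estimate does not fit below the anchor, so `maxHeight` shrinks to the remaining padded space and the submenu is shifted upward accordingly.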