mirror of
https://github.com/AutoMaker-Org/automaker.git
synced 2026-01-31 06:42:03 +00:00
Compare commits
151 Commits
feat/coder
...
claude/add
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d3a4e13c4e | ||
|
|
7941deffd7 | ||
|
|
01859f3a9a | ||
|
|
afb6e14811 | ||
|
|
c65f931326 | ||
|
|
f480386905 | ||
|
|
7773db559d | ||
|
|
655f254538 | ||
|
|
b4be3c11e2 | ||
|
|
57ce198ae9 | ||
|
|
733ca15e15 | ||
|
|
e110c058a2 | ||
|
|
0fdda11b09 | ||
|
|
0155da0be5 | ||
|
|
41b127ebf3 | ||
|
|
e7e83a30d9 | ||
|
|
40950b5fce | ||
|
|
3f05735be1 | ||
|
|
05f0ceceb6 | ||
|
|
28d50aa017 | ||
|
|
103c6bc8a0 | ||
|
|
6c47068f71 | ||
|
|
a9616ff309 | ||
|
|
4fa0923ff8 | ||
|
|
c3cecc18f2 | ||
|
|
3fcda8abfc | ||
|
|
a45ee59b7d | ||
|
|
662f854203 | ||
|
|
f2860d9366 | ||
|
|
6eb7acb6d4 | ||
|
|
4ab927a5fb | ||
|
|
02de3df3df | ||
|
|
b73885e04a | ||
|
|
afa93dde0d | ||
|
|
aac59c2b3a | ||
|
|
c3e7e57968 | ||
|
|
7bb97953a7 | ||
|
|
2214c2700b | ||
|
|
7bee54717c | ||
|
|
5ab53afd7f | ||
|
|
3ebd67f35f | ||
|
|
641bbde877 | ||
|
|
7c80249bbf | ||
|
|
a73a57b9a4 | ||
|
|
db71dc9aa5 | ||
|
|
a8ddd07442 | ||
|
|
2165223b49 | ||
|
|
3bde3d2732 | ||
|
|
900a312c92 | ||
|
|
69ff8df7c1 | ||
|
|
4f584f9a89 | ||
|
|
47a6033b43 | ||
|
|
a1f234c7e2 | ||
|
|
8facdc66a9 | ||
|
|
2ab78dd590 | ||
|
|
c14a40f7f8 | ||
|
|
8dd5858299 | ||
|
|
76eb3a2ac2 | ||
|
|
179c5ae9c2 | ||
|
|
8c356d7c36 | ||
|
|
a863dcc11d | ||
|
|
cf60f84f89 | ||
|
|
47e6ed6a17 | ||
|
|
d266c98e48 | ||
|
|
628e464b74 | ||
|
|
17d42e7931 | ||
|
|
5119ee4222 | ||
|
|
b039b745be | ||
|
|
02a7a54736 | ||
|
|
43481c2bab | ||
|
|
d7f6e72a9e | ||
|
|
82e22b4362 | ||
|
|
0d9259473e | ||
|
|
ea3930cf3d | ||
|
|
d97c4b7b57 | ||
|
|
2fac2ca4bb | ||
|
|
9bb52f1ded | ||
|
|
f987fc1f10 | ||
|
|
63b8eb0991 | ||
|
|
a52c0461e5 | ||
|
|
e73c92b031 | ||
|
|
09151aa3c8 | ||
|
|
d6300f33ca | ||
|
|
4b0d1399b1 | ||
|
|
55a34a9f1f | ||
|
|
c4652190eb | ||
|
|
af95dae73a | ||
|
|
1c1d9d30a7 | ||
|
|
3faebfa3fe | ||
|
|
d0eaf0e51d | ||
|
|
cf3ee6aec6 | ||
|
|
da80729f56 | ||
|
|
9ad58e1a74 | ||
|
|
55b17a7a11 | ||
|
|
2854e24e84 | ||
|
|
b91d84ee84 | ||
|
|
30a2c3d740 | ||
|
|
e3213b1426 | ||
|
|
bfc23cdfa1 | ||
|
|
8b5da3195b | ||
|
|
0c452a3ebc | ||
|
|
cfc5530d1c | ||
|
|
749fb3a5c1 | ||
|
|
dd26de9f55 | ||
|
|
b6cb926cbe | ||
|
|
eb30ef71f9 | ||
|
|
75fe579e93 | ||
|
|
8ab9dc5a11 | ||
|
|
96202d4bc2 | ||
|
|
f68aee6a19 | ||
|
|
7795d81183 | ||
|
|
0c053dab48 | ||
|
|
1ede7e7e6a | ||
|
|
980006d40e | ||
|
|
ef2dcbacd4 | ||
|
|
505a2b1e0b | ||
|
|
2e57553639 | ||
|
|
f37812247d | ||
|
|
484d4c65d5 | ||
|
|
d96f369b73 | ||
|
|
f0e655f49a | ||
|
|
d22deabe79 | ||
|
|
518c81815e | ||
|
|
01652d0d11 | ||
|
|
7b7ac72c14 | ||
|
|
9137f0e75f | ||
|
|
b66efae5b7 | ||
|
|
2a8706e714 | ||
|
|
174c02cb79 | ||
|
|
a7f7898ee4 | ||
|
|
fdad82bf88 | ||
|
|
b0b49764b9 | ||
|
|
e10cb83adc | ||
|
|
b8875f71a5 | ||
|
|
4186b80a82 | ||
|
|
7eae0215f2 | ||
|
|
4cd84a4734 | ||
|
|
361cb06bf0 | ||
|
|
3170e22383 | ||
|
|
9dbec7281a | ||
|
|
c2fed78733 | ||
|
|
5fe7bcd378 | ||
|
|
20caa424fc | ||
|
|
c4e0a7cc96 | ||
|
|
d1219a225c | ||
|
|
3411256366 | ||
|
|
d08ef472a3 | ||
|
|
d81997d24b | ||
|
|
845674128e | ||
|
|
2bc931a8b0 | ||
|
|
e57549c06e |
21
.github/workflows/release.yml
vendored
21
.github/workflows/release.yml
vendored
@@ -62,7 +62,9 @@ jobs:
|
|||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: macos-builds
|
name: macos-builds
|
||||||
path: apps/ui/release/*.{dmg,zip}
|
path: |
|
||||||
|
apps/ui/release/*.dmg
|
||||||
|
apps/ui/release/*.zip
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
|
|
||||||
- name: Upload Windows artifacts
|
- name: Upload Windows artifacts
|
||||||
@@ -78,7 +80,10 @@ jobs:
|
|||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: linux-builds
|
name: linux-builds
|
||||||
path: apps/ui/release/*.{AppImage,deb,rpm}
|
path: |
|
||||||
|
apps/ui/release/*.AppImage
|
||||||
|
apps/ui/release/*.deb
|
||||||
|
apps/ui/release/*.rpm
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
|
|
||||||
upload:
|
upload:
|
||||||
@@ -109,8 +114,14 @@ jobs:
|
|||||||
uses: softprops/action-gh-release@v2
|
uses: softprops/action-gh-release@v2
|
||||||
with:
|
with:
|
||||||
files: |
|
files: |
|
||||||
artifacts/macos-builds/*.{dmg,zip,blockmap}
|
artifacts/macos-builds/*.dmg
|
||||||
artifacts/windows-builds/*.{exe,blockmap}
|
artifacts/macos-builds/*.zip
|
||||||
artifacts/linux-builds/*.{AppImage,deb,rpm,blockmap}
|
artifacts/macos-builds/*.blockmap
|
||||||
|
artifacts/windows-builds/*.exe
|
||||||
|
artifacts/windows-builds/*.blockmap
|
||||||
|
artifacts/linux-builds/*.AppImage
|
||||||
|
artifacts/linux-builds/*.deb
|
||||||
|
artifacts/linux-builds/*.rpm
|
||||||
|
artifacts/linux-builds/*.blockmap
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
@@ -172,4 +172,5 @@ Use `resolveModelString()` from `@automaker/model-resolver` to convert model ali
|
|||||||
- `DATA_DIR` - Data storage directory (default: ./data)
|
- `DATA_DIR` - Data storage directory (default: ./data)
|
||||||
- `ALLOWED_ROOT_DIRECTORY` - Restrict file operations to specific directory
|
- `ALLOWED_ROOT_DIRECTORY` - Restrict file operations to specific directory
|
||||||
- `AUTOMAKER_MOCK_AGENT=true` - Enable mock agent mode for CI testing
|
- `AUTOMAKER_MOCK_AGENT=true` - Enable mock agent mode for CI testing
|
||||||
|
- `AUTOMAKER_AUTO_LOGIN=true` - Skip login prompt in development (disabled when NODE_ENV=production)
|
||||||
- `VITE_HOSTNAME` - Hostname for frontend API URLs (default: localhost)
|
- `VITE_HOSTNAME` - Hostname for frontend API URLs (default: localhost)
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ COPY libs/platform/package*.json ./libs/platform/
|
|||||||
COPY libs/model-resolver/package*.json ./libs/model-resolver/
|
COPY libs/model-resolver/package*.json ./libs/model-resolver/
|
||||||
COPY libs/dependency-resolver/package*.json ./libs/dependency-resolver/
|
COPY libs/dependency-resolver/package*.json ./libs/dependency-resolver/
|
||||||
COPY libs/git-utils/package*.json ./libs/git-utils/
|
COPY libs/git-utils/package*.json ./libs/git-utils/
|
||||||
|
COPY libs/spec-parser/package*.json ./libs/spec-parser/
|
||||||
|
|
||||||
# Copy scripts (needed by npm workspace)
|
# Copy scripts (needed by npm workspace)
|
||||||
COPY scripts ./scripts
|
COPY scripts ./scripts
|
||||||
|
|||||||
@@ -389,6 +389,7 @@ npm run lint
|
|||||||
- `VITE_SKIP_ELECTRON` - Skip Electron in dev mode
|
- `VITE_SKIP_ELECTRON` - Skip Electron in dev mode
|
||||||
- `OPEN_DEVTOOLS` - Auto-open DevTools in Electron
|
- `OPEN_DEVTOOLS` - Auto-open DevTools in Electron
|
||||||
- `AUTOMAKER_SKIP_SANDBOX_WARNING` - Skip sandbox warning dialog (useful for dev/CI)
|
- `AUTOMAKER_SKIP_SANDBOX_WARNING` - Skip sandbox warning dialog (useful for dev/CI)
|
||||||
|
- `AUTOMAKER_AUTO_LOGIN=true` - Skip login prompt in development (ignored when NODE_ENV=production)
|
||||||
|
|
||||||
### Authentication Setup
|
### Authentication Setup
|
||||||
|
|
||||||
|
|||||||
300
SECURITY_TODO.md
Normal file
300
SECURITY_TODO.md
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
# Security Audit Findings - v0.13.0rc Branch
|
||||||
|
|
||||||
|
**Date:** $(date)
|
||||||
|
**Audit Type:** Git diff security review against v0.13.0rc branch
|
||||||
|
**Status:** ⚠️ Security vulnerabilities found - requires fixes before release
|
||||||
|
|
||||||
|
## Executive Summary
|
||||||
|
|
||||||
|
No intentionally malicious code was detected in the changes. However, several **critical security vulnerabilities** were identified that could allow command injection attacks. These must be fixed before release.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🔴 Critical Security Issues
|
||||||
|
|
||||||
|
### 1. Command Injection in Merge Handler
|
||||||
|
|
||||||
|
**File:** `apps/server/src/routes/worktree/routes/merge.ts`
|
||||||
|
**Lines:** 43, 54, 65-66, 93
|
||||||
|
**Severity:** CRITICAL
|
||||||
|
|
||||||
|
**Issue:**
|
||||||
|
User-controlled inputs (`branchName`, `mergeTo`, `options?.message`) are directly interpolated into shell commands without validation, allowing command injection attacks.
|
||||||
|
|
||||||
|
**Vulnerable Code:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Line 43 - branchName not validated
|
||||||
|
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
|
||||||
|
|
||||||
|
// Line 54 - mergeTo not validated
|
||||||
|
await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath });
|
||||||
|
|
||||||
|
// Lines 65-66 - branchName and message not validated
|
||||||
|
const mergeCmd = options?.squash
|
||||||
|
? `git merge --squash ${branchName}`
|
||||||
|
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`;
|
||||||
|
|
||||||
|
// Line 93 - message not sanitized
|
||||||
|
await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, {
|
||||||
|
cwd: projectPath,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
**Attack Vector:**
|
||||||
|
An attacker could inject shell commands via branch names or commit messages:
|
||||||
|
|
||||||
|
- Branch name: `main; rm -rf /`
|
||||||
|
- Commit message: `"; malicious_command; "`
|
||||||
|
|
||||||
|
**Fix Required:**
|
||||||
|
|
||||||
|
1. Validate `branchName` and `mergeTo` using `isValidBranchName()` before use
|
||||||
|
2. Sanitize commit messages or use `execGitCommand` with proper escaping
|
||||||
|
3. Replace `execAsync` template literals with `execGitCommand` array-based calls
|
||||||
|
|
||||||
|
**Note:** `isValidBranchName` is imported but only used AFTER deletion (line 119), not before execAsync calls.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2. Command Injection in Push Handler
|
||||||
|
|
||||||
|
**File:** `apps/server/src/routes/worktree/routes/push.ts`
|
||||||
|
**Lines:** 44, 49
|
||||||
|
**Severity:** CRITICAL
|
||||||
|
|
||||||
|
**Issue:**
|
||||||
|
User-controlled `remote` parameter and `branchName` are directly interpolated into shell commands without validation.
|
||||||
|
|
||||||
|
**Vulnerable Code:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Line 38 - remote defaults to 'origin' but not validated
|
||||||
|
const targetRemote = remote || 'origin';
|
||||||
|
|
||||||
|
// Lines 44, 49 - targetRemote and branchName not validated
|
||||||
|
await execAsync(`git push -u ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
await execAsync(`git push --set-upstream ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
**Attack Vector:**
|
||||||
|
An attacker could inject commands via the remote name:
|
||||||
|
|
||||||
|
- Remote: `origin; malicious_command; #`
|
||||||
|
|
||||||
|
**Fix Required:**
|
||||||
|
|
||||||
|
1. Validate `targetRemote` parameter (alphanumeric + `-`, `_` only)
|
||||||
|
2. Validate `branchName` before use (even though it comes from git output)
|
||||||
|
3. Use `execGitCommand` with array arguments instead of template literals
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3. Unsafe Environment Variable Export in Shell Script
|
||||||
|
|
||||||
|
**File:** `start-automaker.sh`
|
||||||
|
**Lines:** 5068, 5085
|
||||||
|
**Severity:** CRITICAL
|
||||||
|
|
||||||
|
**Issue:**
|
||||||
|
Unsafe parsing and export of `.env` file contents using `xargs` without proper handling of special characters.
|
||||||
|
|
||||||
|
**Vulnerable Code:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export $(grep -v '^#' .env | xargs)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Attack Vector:**
|
||||||
|
If `.env` file contains malicious content with spaces, special characters, or code, it could be executed:
|
||||||
|
|
||||||
|
- `.env` entry: `VAR="value; malicious_command"`
|
||||||
|
- Could lead to code execution during startup
|
||||||
|
|
||||||
|
**Fix Required:**
|
||||||
|
Replace with safer parsing method:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Safer approach
|
||||||
|
set -a
|
||||||
|
source <(grep -v '^#' .env | sed 's/^/export /')
|
||||||
|
set +a
|
||||||
|
|
||||||
|
# Or even safer - validate each line
|
||||||
|
while IFS= read -r line; do
|
||||||
|
[[ "$line" =~ ^[[:space:]]*# ]] && continue
|
||||||
|
[[ -z "$line" ]] && continue
|
||||||
|
if [[ "$line" =~ ^([A-Za-z_][A-Za-z0-9_]*)=(.*)$ ]]; then
|
||||||
|
export "${BASH_REMATCH[1]}"="${BASH_REMATCH[2]}"
|
||||||
|
fi
|
||||||
|
done < .env
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🟡 Moderate Security Concerns
|
||||||
|
|
||||||
|
### 4. Inconsistent Use of Secure Command Execution
|
||||||
|
|
||||||
|
**Issue:**
|
||||||
|
The codebase has `execGitCommand()` function available (which uses array arguments and is safer), but it's not consistently used. Some places still use `execAsync` with template literals.
|
||||||
|
|
||||||
|
**Files Affected:**
|
||||||
|
|
||||||
|
- `apps/server/src/routes/worktree/routes/merge.ts`
|
||||||
|
- `apps/server/src/routes/worktree/routes/push.ts`
|
||||||
|
|
||||||
|
**Recommendation:**
|
||||||
|
|
||||||
|
- Audit all `execAsync` calls with template literals
|
||||||
|
- Replace with `execGitCommand` where possible
|
||||||
|
- Document when `execAsync` is acceptable (only with fully validated inputs)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5. Missing Input Validation
|
||||||
|
|
||||||
|
**Issues:**
|
||||||
|
|
||||||
|
1. `targetRemote` in `push.ts` defaults to 'origin' but isn't validated
|
||||||
|
2. Commit messages in `merge.ts` aren't sanitized before use in shell commands
|
||||||
|
3. `worktreePath` validation relies on middleware but should be double-checked
|
||||||
|
|
||||||
|
**Recommendation:**
|
||||||
|
|
||||||
|
- Add validation functions for remote names
|
||||||
|
- Sanitize commit messages (remove shell metacharacters)
|
||||||
|
- Add defensive validation even when middleware exists
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ✅ Positive Security Findings
|
||||||
|
|
||||||
|
1. **No Hardcoded Credentials:** No API keys, passwords, or tokens found in the diff
|
||||||
|
2. **No Data Exfiltration:** No suspicious network requests or data transmission patterns
|
||||||
|
3. **No Backdoors:** No hidden functionality or unauthorized access patterns detected
|
||||||
|
4. **Safe Command Execution:** `execGitCommand` function properly uses array arguments in some places
|
||||||
|
5. **Environment Variable Handling:** `init-script-service.ts` properly sanitizes environment variables (lines 194-220)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 📋 Action Items
|
||||||
|
|
||||||
|
### Immediate (Before Release)
|
||||||
|
|
||||||
|
- [ ] **Fix command injection in `merge.ts`**
|
||||||
|
- [ ] Validate `branchName` with `isValidBranchName()` before line 43
|
||||||
|
- [ ] Validate `mergeTo` with `isValidBranchName()` before line 54
|
||||||
|
- [ ] Sanitize commit messages or use `execGitCommand` for merge commands
|
||||||
|
- [ ] Replace `execAsync` template literals with `execGitCommand` array calls
|
||||||
|
|
||||||
|
- [ ] **Fix command injection in `push.ts`**
|
||||||
|
- [ ] Add validation function for remote names
|
||||||
|
- [ ] Validate `targetRemote` before use
|
||||||
|
- [ ] Validate `branchName` before use (defensive programming)
|
||||||
|
- [ ] Replace `execAsync` template literals with `execGitCommand`
|
||||||
|
|
||||||
|
- [ ] **Fix shell script security issue**
|
||||||
|
- [ ] Replace unsafe `export $(grep ... | xargs)` with safer parsing
|
||||||
|
- [ ] Add validation for `.env` file contents
|
||||||
|
- [ ] Test with edge cases (spaces, special chars, quotes)
|
||||||
|
|
||||||
|
### Short-term (Next Sprint)
|
||||||
|
|
||||||
|
- [ ] **Audit all `execAsync` calls**
|
||||||
|
- [ ] Create inventory of all `execAsync` calls with template literals
|
||||||
|
- [ ] Replace with `execGitCommand` where possible
|
||||||
|
- [ ] Document exceptions and why they're safe
|
||||||
|
|
||||||
|
- [ ] **Add input validation utilities**
|
||||||
|
- [ ] Create `isValidRemoteName()` function
|
||||||
|
- [ ] Create `sanitizeCommitMessage()` function
|
||||||
|
- [ ] Add validation for all user-controlled inputs
|
||||||
|
|
||||||
|
- [ ] **Security testing**
|
||||||
|
- [ ] Add unit tests for command injection prevention
|
||||||
|
- [ ] Add integration tests with malicious inputs
|
||||||
|
- [ ] Test shell script with malicious `.env` files
|
||||||
|
|
||||||
|
### Long-term (Security Hardening)
|
||||||
|
|
||||||
|
- [ ] **Code review process**
|
||||||
|
- [ ] Add security checklist for PR reviews
|
||||||
|
- [ ] Require security review for shell command execution changes
|
||||||
|
- [ ] Add automated security scanning
|
||||||
|
|
||||||
|
- [ ] **Documentation**
|
||||||
|
- [ ] Document secure coding practices for shell commands
|
||||||
|
- [ ] Create security guidelines for contributors
|
||||||
|
- [ ] Add security section to CONTRIBUTING.md
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🔍 Testing Recommendations
|
||||||
|
|
||||||
|
### Command Injection Tests
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Test cases for merge.ts
|
||||||
|
describe('merge handler security', () => {
|
||||||
|
it('should reject branch names with shell metacharacters', () => {
|
||||||
|
// Test: branchName = "main; rm -rf /"
|
||||||
|
// Expected: Validation error, command not executed
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should sanitize commit messages', () => {
|
||||||
|
// Test: message = '"; malicious_command; "'
|
||||||
|
// Expected: Sanitized or rejected
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Test cases for push.ts
|
||||||
|
describe('push handler security', () => {
|
||||||
|
it('should reject remote names with shell metacharacters', () => {
|
||||||
|
// Test: remote = "origin; malicious_command; #"
|
||||||
|
// Expected: Validation error, command not executed
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Shell Script Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Test with malicious .env content
|
||||||
|
echo 'VAR="value; echo PWNED"' > test.env
|
||||||
|
# Expected: Should not execute the command
|
||||||
|
|
||||||
|
# Test with spaces in values
|
||||||
|
echo 'VAR="value with spaces"' > test.env
|
||||||
|
# Expected: Should handle correctly
|
||||||
|
|
||||||
|
# Test with special characters
|
||||||
|
echo 'VAR="value\$with\$dollars"' > test.env
|
||||||
|
# Expected: Should handle correctly
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 📚 References
|
||||||
|
|
||||||
|
- [OWASP Command Injection](https://owasp.org/www-community/attacks/Command_Injection)
|
||||||
|
- [Node.js Child Process Security](https://nodejs.org/api/child_process.html#child_process_security_concerns)
|
||||||
|
- [Shell Script Security Best Practices](https://mywiki.wooledge.org/BashGuide/Practices)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- All findings are based on code diff analysis
|
||||||
|
- No runtime testing was performed
|
||||||
|
- Assumes attacker has access to API endpoints (authenticated or unauthenticated)
|
||||||
|
- Fixes should be tested thoroughly before deployment
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Last Updated:** $(date)
|
||||||
|
**Next Review:** After fixes are implemented
|
||||||
8
TODO.md
8
TODO.md
@@ -2,6 +2,14 @@
|
|||||||
|
|
||||||
- Setting the default model does not seem like it works.
|
- Setting the default model does not seem like it works.
|
||||||
|
|
||||||
|
# Performance (completed)
|
||||||
|
|
||||||
|
- [x] Graph performance mode for large graphs (compact nodes/edges + visible-only rendering)
|
||||||
|
- [x] Render containment on heavy scroll regions (kanban columns, chat history)
|
||||||
|
- [x] Reduce blur/shadow effects when lists get large
|
||||||
|
- [x] React Query tuning for heavy datasets (less refetch on focus/reconnect)
|
||||||
|
- [x] DnD/list rendering optimizations (virtualized kanban + memoized card sections)
|
||||||
|
|
||||||
# UX
|
# UX
|
||||||
|
|
||||||
- Consolidate all models to a single place in the settings instead of having AI profiles and all this other stuff
|
- Consolidate all models to a single place in the settings instead of having AI profiles and all this other stuff
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@automaker/server",
|
"name": "@automaker/server",
|
||||||
"version": "0.12.0",
|
"version": "0.13.0",
|
||||||
"description": "Backend server for Automaker - provides API for both web and Electron modes",
|
"description": "Backend server for Automaker - provides API for both web and Electron modes",
|
||||||
"author": "AutoMaker Team",
|
"author": "AutoMaker Team",
|
||||||
"license": "SEE LICENSE IN LICENSE",
|
"license": "SEE LICENSE IN LICENSE",
|
||||||
@@ -40,7 +40,8 @@
|
|||||||
"express": "5.2.1",
|
"express": "5.2.1",
|
||||||
"morgan": "1.10.1",
|
"morgan": "1.10.1",
|
||||||
"node-pty": "1.1.0-beta41",
|
"node-pty": "1.1.0-beta41",
|
||||||
"ws": "8.18.3"
|
"ws": "8.18.3",
|
||||||
|
"yaml": "2.7.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/cookie": "0.6.0",
|
"@types/cookie": "0.6.0",
|
||||||
|
|||||||
@@ -43,7 +43,6 @@ import { createEnhancePromptRoutes } from './routes/enhance-prompt/index.js';
|
|||||||
import { createWorktreeRoutes } from './routes/worktree/index.js';
|
import { createWorktreeRoutes } from './routes/worktree/index.js';
|
||||||
import { createGitRoutes } from './routes/git/index.js';
|
import { createGitRoutes } from './routes/git/index.js';
|
||||||
import { createSetupRoutes } from './routes/setup/index.js';
|
import { createSetupRoutes } from './routes/setup/index.js';
|
||||||
import { createSuggestionsRoutes } from './routes/suggestions/index.js';
|
|
||||||
import { createModelsRoutes } from './routes/models/index.js';
|
import { createModelsRoutes } from './routes/models/index.js';
|
||||||
import { createRunningAgentsRoutes } from './routes/running-agents/index.js';
|
import { createRunningAgentsRoutes } from './routes/running-agents/index.js';
|
||||||
import { createWorkspaceRoutes } from './routes/workspace/index.js';
|
import { createWorkspaceRoutes } from './routes/workspace/index.js';
|
||||||
@@ -83,8 +82,9 @@ import { createNotificationsRoutes } from './routes/notifications/index.js';
|
|||||||
import { getNotificationService } from './services/notification-service.js';
|
import { getNotificationService } from './services/notification-service.js';
|
||||||
import { createEventHistoryRoutes } from './routes/event-history/index.js';
|
import { createEventHistoryRoutes } from './routes/event-history/index.js';
|
||||||
import { getEventHistoryService } from './services/event-history-service.js';
|
import { getEventHistoryService } from './services/event-history-service.js';
|
||||||
import { createCodeReviewRoutes } from './routes/code-review/index.js';
|
import { getTestRunnerService } from './services/test-runner-service.js';
|
||||||
import { CodeReviewService } from './services/code-review-service.js';
|
import { createProviderUsageRoutes } from './routes/provider-usage/index.js';
|
||||||
|
import { ProviderUsageTracker } from './services/provider-usage-tracker.js';
|
||||||
|
|
||||||
// Load environment variables
|
// Load environment variables
|
||||||
dotenv.config();
|
dotenv.config();
|
||||||
@@ -93,6 +93,9 @@ const PORT = parseInt(process.env.PORT || '3008', 10);
|
|||||||
const HOST = process.env.HOST || '0.0.0.0';
|
const HOST = process.env.HOST || '0.0.0.0';
|
||||||
const HOSTNAME = process.env.HOSTNAME || 'localhost';
|
const HOSTNAME = process.env.HOSTNAME || 'localhost';
|
||||||
const DATA_DIR = process.env.DATA_DIR || './data';
|
const DATA_DIR = process.env.DATA_DIR || './data';
|
||||||
|
logger.info('[SERVER_STARTUP] process.env.DATA_DIR:', process.env.DATA_DIR);
|
||||||
|
logger.info('[SERVER_STARTUP] Resolved DATA_DIR:', DATA_DIR);
|
||||||
|
logger.info('[SERVER_STARTUP] process.cwd():', process.cwd());
|
||||||
const ENABLE_REQUEST_LOGGING_DEFAULT = process.env.ENABLE_REQUEST_LOGGING !== 'false'; // Default to true
|
const ENABLE_REQUEST_LOGGING_DEFAULT = process.env.ENABLE_REQUEST_LOGGING !== 'false'; // Default to true
|
||||||
|
|
||||||
// Runtime-configurable request logging flag (can be changed via settings)
|
// Runtime-configurable request logging flag (can be changed via settings)
|
||||||
@@ -112,24 +115,37 @@ export function isRequestLoggingEnabled(): boolean {
|
|||||||
return requestLoggingEnabled;
|
return requestLoggingEnabled;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Width for log box content (excluding borders)
|
||||||
|
const BOX_CONTENT_WIDTH = 67;
|
||||||
|
|
||||||
// Check for required environment variables
|
// Check for required environment variables
|
||||||
const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;
|
const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;
|
||||||
|
|
||||||
if (!hasAnthropicKey) {
|
if (!hasAnthropicKey) {
|
||||||
|
const wHeader = '⚠️ WARNING: No Claude authentication configured'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const w1 = 'The Claude Agent SDK requires authentication to function.'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const w2 = 'Set your Anthropic API key:'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const w3 = ' export ANTHROPIC_API_KEY="sk-ant-..."'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const w4 = 'Or use the setup wizard in Settings to configure authentication.'.padEnd(
|
||||||
|
BOX_CONTENT_WIDTH
|
||||||
|
);
|
||||||
|
|
||||||
logger.warn(`
|
logger.warn(`
|
||||||
╔═══════════════════════════════════════════════════════════════════════╗
|
╔═════════════════════════════════════════════════════════════════════╗
|
||||||
║ ⚠️ WARNING: No Claude authentication configured ║
|
║ ${wHeader}║
|
||||||
|
╠═════════════════════════════════════════════════════════════════════╣
|
||||||
║ ║
|
║ ║
|
||||||
║ The Claude Agent SDK requires authentication to function. ║
|
║ ${w1}║
|
||||||
║ ║
|
║ ║
|
||||||
║ Set your Anthropic API key: ║
|
║ ${w2}║
|
||||||
║ export ANTHROPIC_API_KEY="sk-ant-..." ║
|
║ ${w3}║
|
||||||
║ ║
|
║ ║
|
||||||
║ Or use the setup wizard in Settings to configure authentication. ║
|
║ ${w4}║
|
||||||
╚═══════════════════════════════════════════════════════════════════════╝
|
║ ║
|
||||||
|
╚═════════════════════════════════════════════════════════════════════╝
|
||||||
`);
|
`);
|
||||||
} else {
|
} else {
|
||||||
logger.info('✓ ANTHROPIC_API_KEY detected (API key auth)');
|
logger.info('✓ ANTHROPIC_API_KEY detected');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize security
|
// Initialize security
|
||||||
@@ -177,15 +193,26 @@ app.use(
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// For local development, allow localhost origins
|
// For local development, allow all localhost/loopback origins (any port)
|
||||||
|
try {
|
||||||
|
const url = new URL(origin);
|
||||||
|
const hostname = url.hostname;
|
||||||
|
|
||||||
if (
|
if (
|
||||||
origin.startsWith('http://localhost:') ||
|
hostname === 'localhost' ||
|
||||||
origin.startsWith('http://127.0.0.1:') ||
|
hostname === '127.0.0.1' ||
|
||||||
origin.startsWith('http://[::1]:')
|
hostname === '::1' ||
|
||||||
|
hostname === '0.0.0.0' ||
|
||||||
|
hostname.startsWith('192.168.') ||
|
||||||
|
hostname.startsWith('10.') ||
|
||||||
|
hostname.startsWith('172.')
|
||||||
) {
|
) {
|
||||||
callback(null, origin);
|
callback(null, origin);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// Ignore URL parsing errors
|
||||||
|
}
|
||||||
|
|
||||||
// Reject other origins by default for security
|
// Reject other origins by default for security
|
||||||
callback(new Error('Not allowed by CORS'));
|
callback(new Error('Not allowed by CORS'));
|
||||||
@@ -211,7 +238,7 @@ const codexModelCacheService = new CodexModelCacheService(DATA_DIR, codexAppServ
|
|||||||
const codexUsageService = new CodexUsageService(codexAppServerService);
|
const codexUsageService = new CodexUsageService(codexAppServerService);
|
||||||
const mcpTestService = new MCPTestService(settingsService);
|
const mcpTestService = new MCPTestService(settingsService);
|
||||||
const ideationService = new IdeationService(events, settingsService, featureLoader);
|
const ideationService = new IdeationService(events, settingsService, featureLoader);
|
||||||
const codeReviewService = new CodeReviewService(events, settingsService);
|
const providerUsageTracker = new ProviderUsageTracker(codexUsageService);
|
||||||
|
|
||||||
// Initialize DevServerService with event emitter for real-time log streaming
|
// Initialize DevServerService with event emitter for real-time log streaming
|
||||||
const devServerService = getDevServerService();
|
const devServerService = getDevServerService();
|
||||||
@@ -224,11 +251,32 @@ notificationService.setEventEmitter(events);
|
|||||||
// Initialize Event History Service
|
// Initialize Event History Service
|
||||||
const eventHistoryService = getEventHistoryService();
|
const eventHistoryService = getEventHistoryService();
|
||||||
|
|
||||||
|
// Initialize Test Runner Service with event emitter for real-time test output streaming
|
||||||
|
const testRunnerService = getTestRunnerService();
|
||||||
|
testRunnerService.setEventEmitter(events);
|
||||||
|
|
||||||
// Initialize Event Hook Service for custom event triggers (with history storage)
|
// Initialize Event Hook Service for custom event triggers (with history storage)
|
||||||
eventHookService.initialize(events, settingsService, eventHistoryService);
|
eventHookService.initialize(events, settingsService, eventHistoryService, featureLoader);
|
||||||
|
|
||||||
// Initialize services
|
// Initialize services
|
||||||
(async () => {
|
(async () => {
|
||||||
|
// Migrate settings from legacy Electron userData location if needed
|
||||||
|
// This handles users upgrading from versions that stored settings in ~/.config/Automaker (Linux),
|
||||||
|
// ~/Library/Application Support/Automaker (macOS), or %APPDATA%\Automaker (Windows)
|
||||||
|
// to the new shared ./data directory
|
||||||
|
try {
|
||||||
|
const migrationResult = await settingsService.migrateFromLegacyElectronPath();
|
||||||
|
if (migrationResult.migrated) {
|
||||||
|
logger.info(`Settings migrated from legacy location: ${migrationResult.legacyPath}`);
|
||||||
|
logger.info(`Migrated files: ${migrationResult.migratedFiles.join(', ')}`);
|
||||||
|
}
|
||||||
|
if (migrationResult.errors.length > 0) {
|
||||||
|
logger.warn('Migration errors:', migrationResult.errors);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('Failed to check for legacy settings migration:', err);
|
||||||
|
}
|
||||||
|
|
||||||
// Apply logging settings from saved settings
|
// Apply logging settings from saved settings
|
||||||
try {
|
try {
|
||||||
const settings = await settingsService.getGlobalSettings();
|
const settings = await settingsService.getGlobalSettings();
|
||||||
@@ -285,7 +333,6 @@ app.use('/api/auto-mode', createAutoModeRoutes(autoModeService));
|
|||||||
app.use('/api/enhance-prompt', createEnhancePromptRoutes(settingsService));
|
app.use('/api/enhance-prompt', createEnhancePromptRoutes(settingsService));
|
||||||
app.use('/api/worktree', createWorktreeRoutes(events, settingsService));
|
app.use('/api/worktree', createWorktreeRoutes(events, settingsService));
|
||||||
app.use('/api/git', createGitRoutes());
|
app.use('/api/git', createGitRoutes());
|
||||||
app.use('/api/suggestions', createSuggestionsRoutes(events, settingsService));
|
|
||||||
app.use('/api/models', createModelsRoutes());
|
app.use('/api/models', createModelsRoutes());
|
||||||
app.use('/api/spec-regeneration', createSpecRegenerationRoutes(events, settingsService));
|
app.use('/api/spec-regeneration', createSpecRegenerationRoutes(events, settingsService));
|
||||||
app.use('/api/running-agents', createRunningAgentsRoutes(autoModeService));
|
app.use('/api/running-agents', createRunningAgentsRoutes(autoModeService));
|
||||||
@@ -303,7 +350,7 @@ app.use('/api/pipeline', createPipelineRoutes(pipelineService));
|
|||||||
app.use('/api/ideation', createIdeationRoutes(events, ideationService, featureLoader));
|
app.use('/api/ideation', createIdeationRoutes(events, ideationService, featureLoader));
|
||||||
app.use('/api/notifications', createNotificationsRoutes(notificationService));
|
app.use('/api/notifications', createNotificationsRoutes(notificationService));
|
||||||
app.use('/api/event-history', createEventHistoryRoutes(eventHistoryService, settingsService));
|
app.use('/api/event-history', createEventHistoryRoutes(eventHistoryService, settingsService));
|
||||||
app.use('/api/code-review', createCodeReviewRoutes(codeReviewService));
|
app.use('/api/provider-usage', createProviderUsageRoutes(providerUsageTracker));
|
||||||
|
|
||||||
// Create HTTP server
|
// Create HTTP server
|
||||||
const server = createServer(app);
|
const server = createServer(app);
|
||||||
@@ -622,40 +669,74 @@ const startServer = (port: number, host: string) => {
|
|||||||
? 'enabled (password protected)'
|
? 'enabled (password protected)'
|
||||||
: 'enabled'
|
: 'enabled'
|
||||||
: 'disabled';
|
: 'disabled';
|
||||||
const portStr = port.toString().padEnd(4);
|
|
||||||
|
// Build URLs for display
|
||||||
|
const listenAddr = `${host}:${port}`;
|
||||||
|
const httpUrl = `http://${HOSTNAME}:${port}`;
|
||||||
|
const wsEventsUrl = `ws://${HOSTNAME}:${port}/api/events`;
|
||||||
|
const wsTerminalUrl = `ws://${HOSTNAME}:${port}/api/terminal/ws`;
|
||||||
|
const healthUrl = `http://${HOSTNAME}:${port}/api/health`;
|
||||||
|
|
||||||
|
const sHeader = '🚀 Automaker Backend Server'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const s1 = `Listening: ${listenAddr}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const s2 = `HTTP API: ${httpUrl}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const s3 = `WebSocket: ${wsEventsUrl}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const s4 = `Terminal WS: ${wsTerminalUrl}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const s5 = `Health: ${healthUrl}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const s6 = `Terminal: ${terminalStatus}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
|
||||||
logger.info(`
|
logger.info(`
|
||||||
╔═══════════════════════════════════════════════════════╗
|
╔═════════════════════════════════════════════════════════════════════╗
|
||||||
║ Automaker Backend Server ║
|
║ ${sHeader}║
|
||||||
╠═══════════════════════════════════════════════════════╣
|
╠═════════════════════════════════════════════════════════════════════╣
|
||||||
║ Listening: ${host}:${port}${' '.repeat(Math.max(0, 34 - host.length - port.toString().length))}║
|
║ ║
|
||||||
║ HTTP API: http://${HOSTNAME}:${portStr} ║
|
║ ${s1}║
|
||||||
║ WebSocket: ws://${HOSTNAME}:${portStr}/api/events ║
|
║ ${s2}║
|
||||||
║ Terminal: ws://${HOSTNAME}:${portStr}/api/terminal/ws ║
|
║ ${s3}║
|
||||||
║ Health: http://${HOSTNAME}:${portStr}/api/health ║
|
║ ${s4}║
|
||||||
║ Terminal: ${terminalStatus.padEnd(37)}║
|
║ ${s5}║
|
||||||
╚═══════════════════════════════════════════════════════╝
|
║ ${s6}║
|
||||||
|
║ ║
|
||||||
|
╚═════════════════════════════════════════════════════════════════════╝
|
||||||
`);
|
`);
|
||||||
});
|
});
|
||||||
|
|
||||||
server.on('error', (error: NodeJS.ErrnoException) => {
|
server.on('error', (error: NodeJS.ErrnoException) => {
|
||||||
if (error.code === 'EADDRINUSE') {
|
if (error.code === 'EADDRINUSE') {
|
||||||
|
const portStr = port.toString();
|
||||||
|
const nextPortStr = (port + 1).toString();
|
||||||
|
const killCmd = `lsof -ti:${portStr} | xargs kill -9`;
|
||||||
|
const altCmd = `PORT=${nextPortStr} npm run dev:server`;
|
||||||
|
|
||||||
|
const eHeader = `❌ ERROR: Port ${portStr} is already in use`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e1 = 'Another process is using this port.'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e2 = 'To fix this, try one of:'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e3 = '1. Kill the process using the port:'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e4 = ` ${killCmd}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e5 = '2. Use a different port:'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e6 = ` ${altCmd}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e7 = '3. Use the init.sh script which handles this:'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const e8 = ' ./init.sh'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
|
||||||
logger.error(`
|
logger.error(`
|
||||||
╔═══════════════════════════════════════════════════════╗
|
╔═════════════════════════════════════════════════════════════════════╗
|
||||||
║ ❌ ERROR: Port ${port} is already in use ║
|
║ ${eHeader}║
|
||||||
╠═══════════════════════════════════════════════════════╣
|
╠═════════════════════════════════════════════════════════════════════╣
|
||||||
║ Another process is using this port. ║
|
|
||||||
║ ║
|
║ ║
|
||||||
║ To fix this, try one of: ║
|
║ ${e1}║
|
||||||
║ ║
|
║ ║
|
||||||
║ 1. Kill the process using the port: ║
|
║ ${e2}║
|
||||||
║ lsof -ti:${port} | xargs kill -9 ║
|
|
||||||
║ ║
|
║ ║
|
||||||
║ 2. Use a different port: ║
|
║ ${e3}║
|
||||||
║ PORT=${port + 1} npm run dev:server ║
|
║ ${e4}║
|
||||||
║ ║
|
║ ║
|
||||||
║ 3. Use the init.sh script which handles this: ║
|
║ ${e5}║
|
||||||
║ ./init.sh ║
|
║ ${e6}║
|
||||||
╚═══════════════════════════════════════════════════════╝
|
║ ║
|
||||||
|
║ ${e7}║
|
||||||
|
║ ${e8}║
|
||||||
|
║ ║
|
||||||
|
╚═════════════════════════════════════════════════════════════════════╝
|
||||||
`);
|
`);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -23,6 +23,13 @@ const SESSION_COOKIE_NAME = 'automaker_session';
|
|||||||
const SESSION_MAX_AGE_MS = 30 * 24 * 60 * 60 * 1000; // 30 days
|
const SESSION_MAX_AGE_MS = 30 * 24 * 60 * 60 * 1000; // 30 days
|
||||||
const WS_TOKEN_MAX_AGE_MS = 5 * 60 * 1000; // 5 minutes for WebSocket connection tokens
|
const WS_TOKEN_MAX_AGE_MS = 5 * 60 * 1000; // 5 minutes for WebSocket connection tokens
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if an environment variable is set to 'true'
|
||||||
|
*/
|
||||||
|
function isEnvTrue(envVar: string | undefined): boolean {
|
||||||
|
return envVar === 'true';
|
||||||
|
}
|
||||||
|
|
||||||
// Session store - persisted to file for survival across server restarts
|
// Session store - persisted to file for survival across server restarts
|
||||||
const validSessions = new Map<string, { createdAt: number; expiresAt: number }>();
|
const validSessions = new Map<string, { createdAt: number; expiresAt: number }>();
|
||||||
|
|
||||||
@@ -130,21 +137,47 @@ function ensureApiKey(): string {
|
|||||||
// API key - always generated/loaded on startup for CSRF protection
|
// API key - always generated/loaded on startup for CSRF protection
|
||||||
const API_KEY = ensureApiKey();
|
const API_KEY = ensureApiKey();
|
||||||
|
|
||||||
|
// Width for log box content (excluding borders)
|
||||||
|
const BOX_CONTENT_WIDTH = 67;
|
||||||
|
|
||||||
// Print API key to console for web mode users (unless suppressed for production logging)
|
// Print API key to console for web mode users (unless suppressed for production logging)
|
||||||
if (process.env.AUTOMAKER_HIDE_API_KEY !== 'true') {
|
if (!isEnvTrue(process.env.AUTOMAKER_HIDE_API_KEY)) {
|
||||||
|
const autoLoginEnabled = isEnvTrue(process.env.AUTOMAKER_AUTO_LOGIN);
|
||||||
|
const autoLoginStatus = autoLoginEnabled ? 'enabled (auto-login active)' : 'disabled';
|
||||||
|
|
||||||
|
// Build box lines with exact padding
|
||||||
|
const header = '🔐 API Key for Web Mode Authentication'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const line1 = "When accessing via browser, you'll be prompted to enter this key:".padEnd(
|
||||||
|
BOX_CONTENT_WIDTH
|
||||||
|
);
|
||||||
|
const line2 = API_KEY.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const line3 = 'In Electron mode, authentication is handled automatically.'.padEnd(
|
||||||
|
BOX_CONTENT_WIDTH
|
||||||
|
);
|
||||||
|
const line4 = `Auto-login (AUTOMAKER_AUTO_LOGIN): ${autoLoginStatus}`.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const tipHeader = '💡 Tips'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const line5 = 'Set AUTOMAKER_API_KEY env var to use a fixed key'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
const line6 = 'Set AUTOMAKER_AUTO_LOGIN=true to skip the login prompt'.padEnd(BOX_CONTENT_WIDTH);
|
||||||
|
|
||||||
logger.info(`
|
logger.info(`
|
||||||
╔═══════════════════════════════════════════════════════════════════════╗
|
╔═════════════════════════════════════════════════════════════════════╗
|
||||||
║ 🔐 API Key for Web Mode Authentication ║
|
║ ${header}║
|
||||||
╠═══════════════════════════════════════════════════════════════════════╣
|
╠═════════════════════════════════════════════════════════════════════╣
|
||||||
║ ║
|
║ ║
|
||||||
║ When accessing via browser, you'll be prompted to enter this key: ║
|
║ ${line1}║
|
||||||
║ ║
|
║ ║
|
||||||
║ ${API_KEY}
|
║ ${line2}║
|
||||||
║ ║
|
║ ║
|
||||||
║ In Electron mode, authentication is handled automatically. ║
|
║ ${line3}║
|
||||||
║ ║
|
║ ║
|
||||||
║ 💡 Tip: Set AUTOMAKER_API_KEY env var to use a fixed key for dev ║
|
║ ${line4}║
|
||||||
╚═══════════════════════════════════════════════════════════════════════╝
|
║ ║
|
||||||
|
╠═════════════════════════════════════════════════════════════════════╣
|
||||||
|
║ ${tipHeader}║
|
||||||
|
╠═════════════════════════════════════════════════════════════════════╣
|
||||||
|
║ ${line5}║
|
||||||
|
║ ${line6}║
|
||||||
|
╚═════════════════════════════════════════════════════════════════════╝
|
||||||
`);
|
`);
|
||||||
} else {
|
} else {
|
||||||
logger.info('API key banner hidden (AUTOMAKER_HIDE_API_KEY=true)');
|
logger.info('API key banner hidden (AUTOMAKER_HIDE_API_KEY=true)');
|
||||||
@@ -320,6 +353,15 @@ function checkAuthentication(
|
|||||||
return { authenticated: false, errorType: 'invalid_api_key' };
|
return { authenticated: false, errorType: 'invalid_api_key' };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check for session token in query parameter (web mode - needed for image loads)
|
||||||
|
const queryToken = query.token;
|
||||||
|
if (queryToken) {
|
||||||
|
if (validateSession(queryToken)) {
|
||||||
|
return { authenticated: true };
|
||||||
|
}
|
||||||
|
return { authenticated: false, errorType: 'invalid_session' };
|
||||||
|
}
|
||||||
|
|
||||||
// Check for session cookie (web mode)
|
// Check for session cookie (web mode)
|
||||||
const sessionToken = cookies[SESSION_COOKIE_NAME];
|
const sessionToken = cookies[SESSION_COOKIE_NAME];
|
||||||
if (sessionToken && validateSession(sessionToken)) {
|
if (sessionToken && validateSession(sessionToken)) {
|
||||||
@@ -335,10 +377,17 @@ function checkAuthentication(
|
|||||||
* Accepts either:
|
* Accepts either:
|
||||||
* 1. X-API-Key header (for Electron mode)
|
* 1. X-API-Key header (for Electron mode)
|
||||||
* 2. X-Session-Token header (for web mode with explicit token)
|
* 2. X-Session-Token header (for web mode with explicit token)
|
||||||
* 3. apiKey query parameter (fallback for cases where headers can't be set)
|
* 3. apiKey query parameter (fallback for Electron, cases where headers can't be set)
|
||||||
* 4. Session cookie (for web mode)
|
* 4. token query parameter (fallback for web mode, needed for image loads via CSS/img tags)
|
||||||
|
* 5. Session cookie (for web mode)
|
||||||
*/
|
*/
|
||||||
export function authMiddleware(req: Request, res: Response, next: NextFunction): void {
|
export function authMiddleware(req: Request, res: Response, next: NextFunction): void {
|
||||||
|
// Allow disabling auth for local/trusted networks
|
||||||
|
if (isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH)) {
|
||||||
|
next();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const result = checkAuthentication(
|
const result = checkAuthentication(
|
||||||
req.headers as Record<string, string | string[] | undefined>,
|
req.headers as Record<string, string | string[] | undefined>,
|
||||||
req.query as Record<string, string | undefined>,
|
req.query as Record<string, string | undefined>,
|
||||||
@@ -384,9 +433,10 @@ export function isAuthEnabled(): boolean {
|
|||||||
* Get authentication status for health endpoint
|
* Get authentication status for health endpoint
|
||||||
*/
|
*/
|
||||||
export function getAuthStatus(): { enabled: boolean; method: string } {
|
export function getAuthStatus(): { enabled: boolean; method: string } {
|
||||||
|
const disabled = isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH);
|
||||||
return {
|
return {
|
||||||
enabled: true,
|
enabled: !disabled,
|
||||||
method: 'api_key_or_session',
|
method: disabled ? 'disabled' : 'api_key_or_session',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -394,6 +444,7 @@ export function getAuthStatus(): { enabled: boolean; method: string } {
|
|||||||
* Check if a request is authenticated (for status endpoint)
|
* Check if a request is authenticated (for status endpoint)
|
||||||
*/
|
*/
|
||||||
export function isRequestAuthenticated(req: Request): boolean {
|
export function isRequestAuthenticated(req: Request): boolean {
|
||||||
|
if (isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH)) return true;
|
||||||
const result = checkAuthentication(
|
const result = checkAuthentication(
|
||||||
req.headers as Record<string, string | string[] | undefined>,
|
req.headers as Record<string, string | string[] | undefined>,
|
||||||
req.query as Record<string, string | undefined>,
|
req.query as Record<string, string | undefined>,
|
||||||
@@ -411,5 +462,6 @@ export function checkRawAuthentication(
|
|||||||
query: Record<string, string | undefined>,
|
query: Record<string, string | undefined>,
|
||||||
cookies: Record<string, string | undefined>
|
cookies: Record<string, string | undefined>
|
||||||
): boolean {
|
): boolean {
|
||||||
|
if (isEnvTrue(process.env.AUTOMAKER_DISABLE_AUTH)) return true;
|
||||||
return checkAuthentication(headers, query, cookies).authenticated;
|
return checkAuthentication(headers, query, cookies).authenticated;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -40,7 +40,6 @@ export interface UnifiedCliDetection {
|
|||||||
claude?: CliDetectionResult;
|
claude?: CliDetectionResult;
|
||||||
codex?: CliDetectionResult;
|
codex?: CliDetectionResult;
|
||||||
cursor?: CliDetectionResult;
|
cursor?: CliDetectionResult;
|
||||||
coderabbit?: CliDetectionResult;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -77,16 +76,6 @@ const CLI_CONFIGS = {
|
|||||||
win32: 'iwr https://cursor.sh/install.ps1 -UseBasicParsing | iex',
|
win32: 'iwr https://cursor.sh/install.ps1 -UseBasicParsing | iex',
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
coderabbit: {
|
|
||||||
name: 'CodeRabbit CLI',
|
|
||||||
commands: ['coderabbit', 'cr'],
|
|
||||||
versionArgs: ['--version'],
|
|
||||||
installCommands: {
|
|
||||||
darwin: 'npm install -g coderabbit',
|
|
||||||
linux: 'npm install -g coderabbit',
|
|
||||||
win32: 'npm install -g coderabbit',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} as const;
|
} as const;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -241,8 +230,6 @@ export async function checkCliAuth(
|
|||||||
return await checkCodexAuth(command);
|
return await checkCodexAuth(command);
|
||||||
case 'cursor':
|
case 'cursor':
|
||||||
return await checkCursorAuth(command);
|
return await checkCursorAuth(command);
|
||||||
case 'coderabbit':
|
|
||||||
return await checkCodeRabbitAuth(command);
|
|
||||||
default:
|
default:
|
||||||
return 'none';
|
return 'none';
|
||||||
}
|
}
|
||||||
@@ -368,64 +355,6 @@ async function checkCursorAuth(command: string): Promise<'cli' | 'api_key' | 'no
|
|||||||
return 'none';
|
return 'none';
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Check CodeRabbit CLI authentication
|
|
||||||
*
|
|
||||||
* Expected output when authenticated:
|
|
||||||
* ```
|
|
||||||
* CodeRabbit CLI Status
|
|
||||||
* ✅ Authentication: Logged in
|
|
||||||
* User Information:
|
|
||||||
* 👤 Name: ...
|
|
||||||
* ```
|
|
||||||
*/
|
|
||||||
async function checkCodeRabbitAuth(command: string): Promise<'cli' | 'api_key' | 'none'> {
|
|
||||||
// Check for environment variable
|
|
||||||
if (process.env.CODERABBIT_API_KEY) {
|
|
||||||
return 'api_key';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try running auth status command
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
const child = spawn(command, ['auth', 'status'], {
|
|
||||||
stdio: 'pipe',
|
|
||||||
timeout: 10000, // Increased timeout for slower systems
|
|
||||||
});
|
|
||||||
|
|
||||||
let stdout = '';
|
|
||||||
let stderr = '';
|
|
||||||
|
|
||||||
child.stdout?.on('data', (data) => {
|
|
||||||
stdout += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.stderr?.on('data', (data) => {
|
|
||||||
stderr += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('close', (code) => {
|
|
||||||
const output = stdout + stderr;
|
|
||||||
|
|
||||||
// Check for positive authentication indicators in output
|
|
||||||
const isAuthenticated =
|
|
||||||
code === 0 &&
|
|
||||||
(output.includes('Logged in') || output.includes('logged in')) &&
|
|
||||||
!output.toLowerCase().includes('not logged in') &&
|
|
||||||
!output.toLowerCase().includes('not authenticated');
|
|
||||||
|
|
||||||
if (isAuthenticated) {
|
|
||||||
resolve('cli');
|
|
||||||
} else {
|
|
||||||
resolve('none');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('error', () => {
|
|
||||||
resolve('none');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get installation instructions for a provider
|
* Get installation instructions for a provider
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -5,7 +5,17 @@
|
|||||||
import type { SettingsService } from '../services/settings-service.js';
|
import type { SettingsService } from '../services/settings-service.js';
|
||||||
import type { ContextFilesResult, ContextFileInfo } from '@automaker/utils';
|
import type { ContextFilesResult, ContextFileInfo } from '@automaker/utils';
|
||||||
import { createLogger } from '@automaker/utils';
|
import { createLogger } from '@automaker/utils';
|
||||||
import type { MCPServerConfig, McpServerConfig, PromptCustomization } from '@automaker/types';
|
import type {
|
||||||
|
MCPServerConfig,
|
||||||
|
McpServerConfig,
|
||||||
|
PromptCustomization,
|
||||||
|
ClaudeApiProfile,
|
||||||
|
ClaudeCompatibleProvider,
|
||||||
|
PhaseModelKey,
|
||||||
|
PhaseModelEntry,
|
||||||
|
Credentials,
|
||||||
|
} from '@automaker/types';
|
||||||
|
import { DEFAULT_PHASE_MODELS } from '@automaker/types';
|
||||||
import {
|
import {
|
||||||
mergeAutoModePrompts,
|
mergeAutoModePrompts,
|
||||||
mergeAgentPrompts,
|
mergeAgentPrompts,
|
||||||
@@ -345,3 +355,376 @@ export async function getCustomSubagents(
|
|||||||
|
|
||||||
return Object.keys(merged).length > 0 ? merged : undefined;
|
return Object.keys(merged).length > 0 ? merged : undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Result from getActiveClaudeApiProfile */
|
||||||
|
export interface ActiveClaudeApiProfileResult {
|
||||||
|
/** The active profile, or undefined if using direct Anthropic API */
|
||||||
|
profile: ClaudeApiProfile | undefined;
|
||||||
|
/** Credentials for resolving 'credentials' apiKeySource */
|
||||||
|
credentials: import('@automaker/types').Credentials | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the active Claude API profile and credentials from settings.
|
||||||
|
* Checks project settings first for per-project overrides, then falls back to global settings.
|
||||||
|
* Returns both the profile and credentials for resolving 'credentials' apiKeySource.
|
||||||
|
*
|
||||||
|
* @deprecated Use getProviderById and getPhaseModelWithOverrides instead for the new provider system.
|
||||||
|
* This function is kept for backward compatibility during migration.
|
||||||
|
*
|
||||||
|
* @param settingsService - Optional settings service instance
|
||||||
|
* @param logPrefix - Prefix for log messages (e.g., '[AgentService]')
|
||||||
|
* @param projectPath - Optional project path for per-project override
|
||||||
|
* @returns Promise resolving to object with profile and credentials
|
||||||
|
*/
|
||||||
|
export async function getActiveClaudeApiProfile(
|
||||||
|
settingsService?: SettingsService | null,
|
||||||
|
logPrefix = '[SettingsHelper]',
|
||||||
|
projectPath?: string
|
||||||
|
): Promise<ActiveClaudeApiProfileResult> {
|
||||||
|
if (!settingsService) {
|
||||||
|
return { profile: undefined, credentials: undefined };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const globalSettings = await settingsService.getGlobalSettings();
|
||||||
|
const credentials = await settingsService.getCredentials();
|
||||||
|
const profiles = globalSettings.claudeApiProfiles || [];
|
||||||
|
|
||||||
|
// Check for project-level override first
|
||||||
|
let activeProfileId: string | null | undefined;
|
||||||
|
let isProjectOverride = false;
|
||||||
|
|
||||||
|
if (projectPath) {
|
||||||
|
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
||||||
|
// undefined = use global, null = explicit no profile, string = specific profile
|
||||||
|
if (projectSettings.activeClaudeApiProfileId !== undefined) {
|
||||||
|
activeProfileId = projectSettings.activeClaudeApiProfileId;
|
||||||
|
isProjectOverride = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to global if project doesn't specify
|
||||||
|
if (activeProfileId === undefined && !isProjectOverride) {
|
||||||
|
activeProfileId = globalSettings.activeClaudeApiProfileId;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No active profile selected - use direct Anthropic API
|
||||||
|
if (!activeProfileId) {
|
||||||
|
if (isProjectOverride && activeProfileId === null) {
|
||||||
|
logger.info(`${logPrefix} Project explicitly using Direct Anthropic API`);
|
||||||
|
}
|
||||||
|
return { profile: undefined, credentials };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the active profile by ID
|
||||||
|
const activeProfile = profiles.find((p) => p.id === activeProfileId);
|
||||||
|
|
||||||
|
if (activeProfile) {
|
||||||
|
const overrideSuffix = isProjectOverride ? ' (project override)' : '';
|
||||||
|
logger.info(`${logPrefix} Using Claude API profile: ${activeProfile.name}${overrideSuffix}`);
|
||||||
|
return { profile: activeProfile, credentials };
|
||||||
|
} else {
|
||||||
|
logger.warn(
|
||||||
|
`${logPrefix} Active profile ID "${activeProfileId}" not found, falling back to direct Anthropic API`
|
||||||
|
);
|
||||||
|
return { profile: undefined, credentials };
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`${logPrefix} Failed to load Claude API profile:`, error);
|
||||||
|
return { profile: undefined, credentials: undefined };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// New Provider System Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
/** Result from getProviderById */
|
||||||
|
export interface ProviderByIdResult {
|
||||||
|
/** The provider, or undefined if not found */
|
||||||
|
provider: ClaudeCompatibleProvider | undefined;
|
||||||
|
/** Credentials for resolving 'credentials' apiKeySource */
|
||||||
|
credentials: Credentials | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a ClaudeCompatibleProvider by its ID.
|
||||||
|
* Returns the provider configuration and credentials for API key resolution.
|
||||||
|
*
|
||||||
|
* @param providerId - The provider ID to look up
|
||||||
|
* @param settingsService - Settings service instance
|
||||||
|
* @param logPrefix - Prefix for log messages
|
||||||
|
* @returns Promise resolving to object with provider and credentials
|
||||||
|
*/
|
||||||
|
export async function getProviderById(
|
||||||
|
providerId: string,
|
||||||
|
settingsService: SettingsService,
|
||||||
|
logPrefix = '[SettingsHelper]'
|
||||||
|
): Promise<ProviderByIdResult> {
|
||||||
|
try {
|
||||||
|
const globalSettings = await settingsService.getGlobalSettings();
|
||||||
|
const credentials = await settingsService.getCredentials();
|
||||||
|
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||||
|
|
||||||
|
const provider = providers.find((p) => p.id === providerId);
|
||||||
|
|
||||||
|
if (provider) {
|
||||||
|
if (provider.enabled === false) {
|
||||||
|
logger.warn(`${logPrefix} Provider "${provider.name}" (${providerId}) is disabled`);
|
||||||
|
} else {
|
||||||
|
logger.debug(`${logPrefix} Found provider: ${provider.name}`);
|
||||||
|
}
|
||||||
|
return { provider, credentials };
|
||||||
|
} else {
|
||||||
|
logger.warn(`${logPrefix} Provider not found: ${providerId}`);
|
||||||
|
return { provider: undefined, credentials };
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`${logPrefix} Failed to load provider by ID:`, error);
|
||||||
|
return { provider: undefined, credentials: undefined };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Result from getPhaseModelWithOverrides */
|
||||||
|
export interface PhaseModelWithOverridesResult {
|
||||||
|
/** The resolved phase model entry */
|
||||||
|
phaseModel: PhaseModelEntry;
|
||||||
|
/** Whether a project override was applied */
|
||||||
|
isProjectOverride: boolean;
|
||||||
|
/** The provider if providerId is set and found */
|
||||||
|
provider: ClaudeCompatibleProvider | undefined;
|
||||||
|
/** Credentials for API key resolution */
|
||||||
|
credentials: Credentials | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the phase model configuration for a specific phase, applying project overrides if available.
|
||||||
|
* Also resolves the provider if the phase model has a providerId.
|
||||||
|
*
|
||||||
|
* @param phase - The phase key (e.g., 'enhancementModel', 'specGenerationModel')
|
||||||
|
* @param settingsService - Optional settings service instance (returns defaults if undefined)
|
||||||
|
* @param projectPath - Optional project path for checking overrides
|
||||||
|
* @param logPrefix - Prefix for log messages
|
||||||
|
* @returns Promise resolving to phase model with provider info
|
||||||
|
*/
|
||||||
|
export async function getPhaseModelWithOverrides(
|
||||||
|
phase: PhaseModelKey,
|
||||||
|
settingsService?: SettingsService | null,
|
||||||
|
projectPath?: string,
|
||||||
|
logPrefix = '[SettingsHelper]'
|
||||||
|
): Promise<PhaseModelWithOverridesResult> {
|
||||||
|
// Handle undefined settingsService gracefully
|
||||||
|
if (!settingsService) {
|
||||||
|
logger.info(`${logPrefix} SettingsService not available, using default for ${phase}`);
|
||||||
|
return {
|
||||||
|
phaseModel: DEFAULT_PHASE_MODELS[phase] || { model: 'sonnet' },
|
||||||
|
isProjectOverride: false,
|
||||||
|
provider: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const globalSettings = await settingsService.getGlobalSettings();
|
||||||
|
const credentials = await settingsService.getCredentials();
|
||||||
|
const globalPhaseModels = globalSettings.phaseModels || {};
|
||||||
|
|
||||||
|
// Start with global phase model
|
||||||
|
let phaseModel = globalPhaseModels[phase];
|
||||||
|
let isProjectOverride = false;
|
||||||
|
|
||||||
|
// Check for project override
|
||||||
|
if (projectPath) {
|
||||||
|
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
||||||
|
const projectOverrides = projectSettings.phaseModelOverrides || {};
|
||||||
|
|
||||||
|
if (projectOverrides[phase]) {
|
||||||
|
phaseModel = projectOverrides[phase];
|
||||||
|
isProjectOverride = true;
|
||||||
|
logger.debug(`${logPrefix} Using project override for ${phase}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no phase model found, use per-phase default
|
||||||
|
if (!phaseModel) {
|
||||||
|
phaseModel = DEFAULT_PHASE_MODELS[phase] || { model: 'sonnet' };
|
||||||
|
logger.debug(`${logPrefix} No ${phase} configured, using default: ${phaseModel.model}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve provider if providerId is set
|
||||||
|
let provider: ClaudeCompatibleProvider | undefined;
|
||||||
|
if (phaseModel.providerId) {
|
||||||
|
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||||
|
provider = providers.find((p) => p.id === phaseModel.providerId);
|
||||||
|
|
||||||
|
if (provider) {
|
||||||
|
if (provider.enabled === false) {
|
||||||
|
logger.warn(
|
||||||
|
`${logPrefix} Provider "${provider.name}" for ${phase} is disabled, falling back to direct API`
|
||||||
|
);
|
||||||
|
provider = undefined;
|
||||||
|
} else {
|
||||||
|
logger.debug(`${logPrefix} Using provider "${provider.name}" for ${phase}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.warn(
|
||||||
|
`${logPrefix} Provider ${phaseModel.providerId} not found for ${phase}, falling back to direct API`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
phaseModel,
|
||||||
|
isProjectOverride,
|
||||||
|
provider,
|
||||||
|
credentials,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`${logPrefix} Failed to get phase model with overrides:`, error);
|
||||||
|
// Return a safe default
|
||||||
|
return {
|
||||||
|
phaseModel: { model: 'sonnet' },
|
||||||
|
isProjectOverride: false,
|
||||||
|
provider: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Result from getProviderByModelId */
|
||||||
|
export interface ProviderByModelIdResult {
|
||||||
|
/** The provider that contains this model, or undefined if not found */
|
||||||
|
provider: ClaudeCompatibleProvider | undefined;
|
||||||
|
/** The model configuration if found */
|
||||||
|
modelConfig: import('@automaker/types').ProviderModel | undefined;
|
||||||
|
/** Credentials for API key resolution */
|
||||||
|
credentials: Credentials | undefined;
|
||||||
|
/** The resolved Claude model ID to use for API calls (from mapsToClaudeModel) */
|
||||||
|
resolvedModel: string | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find a ClaudeCompatibleProvider by one of its model IDs.
|
||||||
|
* Searches through all enabled providers to find one that contains the specified model.
|
||||||
|
* This is useful when you have a model string from the UI but need the provider config.
|
||||||
|
*
|
||||||
|
* Also resolves the `mapsToClaudeModel` field to get the actual Claude model ID to use
|
||||||
|
* when calling the API (e.g., "GLM-4.5-Air" -> "claude-haiku-4-5").
|
||||||
|
*
|
||||||
|
* @param modelId - The model ID to search for (e.g., "GLM-4.7", "MiniMax-M2.1")
|
||||||
|
* @param settingsService - Settings service instance
|
||||||
|
* @param logPrefix - Prefix for log messages
|
||||||
|
* @returns Promise resolving to object with provider, model config, credentials, and resolved model
|
||||||
|
*/
|
||||||
|
export async function getProviderByModelId(
|
||||||
|
modelId: string,
|
||||||
|
settingsService: SettingsService,
|
||||||
|
logPrefix = '[SettingsHelper]'
|
||||||
|
): Promise<ProviderByModelIdResult> {
|
||||||
|
try {
|
||||||
|
const globalSettings = await settingsService.getGlobalSettings();
|
||||||
|
const credentials = await settingsService.getCredentials();
|
||||||
|
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||||
|
|
||||||
|
// Search through all enabled providers for this model
|
||||||
|
for (const provider of providers) {
|
||||||
|
// Skip disabled providers
|
||||||
|
if (provider.enabled === false) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this provider has the model
|
||||||
|
const modelConfig = provider.models?.find(
|
||||||
|
(m) => m.id === modelId || m.id.toLowerCase() === modelId.toLowerCase()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (modelConfig) {
|
||||||
|
logger.info(`${logPrefix} Found model "${modelId}" in provider "${provider.name}"`);
|
||||||
|
|
||||||
|
// Resolve the mapped Claude model if specified
|
||||||
|
let resolvedModel: string | undefined;
|
||||||
|
if (modelConfig.mapsToClaudeModel) {
|
||||||
|
// Import resolveModelString to convert alias to full model ID
|
||||||
|
const { resolveModelString } = await import('@automaker/model-resolver');
|
||||||
|
resolvedModel = resolveModelString(modelConfig.mapsToClaudeModel);
|
||||||
|
logger.info(
|
||||||
|
`${logPrefix} Model "${modelId}" maps to Claude model "${modelConfig.mapsToClaudeModel}" -> "${resolvedModel}"`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return { provider, modelConfig, credentials, resolvedModel };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Model not found in any provider
|
||||||
|
logger.debug(`${logPrefix} Model "${modelId}" not found in any provider`);
|
||||||
|
return {
|
||||||
|
provider: undefined,
|
||||||
|
modelConfig: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
resolvedModel: undefined,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`${logPrefix} Failed to find provider by model ID:`, error);
|
||||||
|
return {
|
||||||
|
provider: undefined,
|
||||||
|
modelConfig: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
resolvedModel: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all enabled provider models for use in model dropdowns.
|
||||||
|
* Returns models from all enabled ClaudeCompatibleProviders.
|
||||||
|
*
|
||||||
|
* @param settingsService - Settings service instance
|
||||||
|
* @param logPrefix - Prefix for log messages
|
||||||
|
* @returns Promise resolving to array of provider models with their provider info
|
||||||
|
*/
|
||||||
|
export async function getAllProviderModels(
|
||||||
|
settingsService: SettingsService,
|
||||||
|
logPrefix = '[SettingsHelper]'
|
||||||
|
): Promise<
|
||||||
|
Array<{
|
||||||
|
providerId: string;
|
||||||
|
providerName: string;
|
||||||
|
model: import('@automaker/types').ProviderModel;
|
||||||
|
}>
|
||||||
|
> {
|
||||||
|
try {
|
||||||
|
const globalSettings = await settingsService.getGlobalSettings();
|
||||||
|
const providers = globalSettings.claudeCompatibleProviders || [];
|
||||||
|
|
||||||
|
const allModels: Array<{
|
||||||
|
providerId: string;
|
||||||
|
providerName: string;
|
||||||
|
model: import('@automaker/types').ProviderModel;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
for (const provider of providers) {
|
||||||
|
// Skip disabled providers
|
||||||
|
if (provider.enabled === false) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const model of provider.models || []) {
|
||||||
|
allModels.push({
|
||||||
|
providerId: provider.id,
|
||||||
|
providerName: provider.name,
|
||||||
|
model,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
`${logPrefix} Found ${allModels.length} models from ${providers.length} providers`
|
||||||
|
);
|
||||||
|
return allModels;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`${logPrefix} Failed to get all provider models:`, error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -10,7 +10,21 @@ import { BaseProvider } from './base-provider.js';
|
|||||||
import { classifyError, getUserFriendlyErrorMessage, createLogger } from '@automaker/utils';
|
import { classifyError, getUserFriendlyErrorMessage, createLogger } from '@automaker/utils';
|
||||||
|
|
||||||
const logger = createLogger('ClaudeProvider');
|
const logger = createLogger('ClaudeProvider');
|
||||||
import { getThinkingTokenBudget, validateBareModelId } from '@automaker/types';
|
import {
|
||||||
|
getThinkingTokenBudget,
|
||||||
|
validateBareModelId,
|
||||||
|
type ClaudeApiProfile,
|
||||||
|
type ClaudeCompatibleProvider,
|
||||||
|
type Credentials,
|
||||||
|
} from '@automaker/types';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ProviderConfig - Union type for provider configuration
|
||||||
|
*
|
||||||
|
* Accepts either the legacy ClaudeApiProfile or new ClaudeCompatibleProvider.
|
||||||
|
* Both share the same connection settings structure.
|
||||||
|
*/
|
||||||
|
type ProviderConfig = ClaudeApiProfile | ClaudeCompatibleProvider;
|
||||||
import type {
|
import type {
|
||||||
ExecuteOptions,
|
ExecuteOptions,
|
||||||
ProviderMessage,
|
ProviderMessage,
|
||||||
@@ -21,9 +35,19 @@ import type {
|
|||||||
// Explicit allowlist of environment variables to pass to the SDK.
|
// Explicit allowlist of environment variables to pass to the SDK.
|
||||||
// Only these vars are passed - nothing else from process.env leaks through.
|
// Only these vars are passed - nothing else from process.env leaks through.
|
||||||
const ALLOWED_ENV_VARS = [
|
const ALLOWED_ENV_VARS = [
|
||||||
|
// Authentication
|
||||||
'ANTHROPIC_API_KEY',
|
'ANTHROPIC_API_KEY',
|
||||||
'ANTHROPIC_BASE_URL',
|
|
||||||
'ANTHROPIC_AUTH_TOKEN',
|
'ANTHROPIC_AUTH_TOKEN',
|
||||||
|
// Endpoint configuration
|
||||||
|
'ANTHROPIC_BASE_URL',
|
||||||
|
'API_TIMEOUT_MS',
|
||||||
|
// Model mappings
|
||||||
|
'ANTHROPIC_DEFAULT_HAIKU_MODEL',
|
||||||
|
'ANTHROPIC_DEFAULT_SONNET_MODEL',
|
||||||
|
'ANTHROPIC_DEFAULT_OPUS_MODEL',
|
||||||
|
// Traffic control
|
||||||
|
'CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC',
|
||||||
|
// System vars (always from process.env)
|
||||||
'PATH',
|
'PATH',
|
||||||
'HOME',
|
'HOME',
|
||||||
'SHELL',
|
'SHELL',
|
||||||
@@ -33,16 +57,132 @@ const ALLOWED_ENV_VARS = [
|
|||||||
'LC_ALL',
|
'LC_ALL',
|
||||||
];
|
];
|
||||||
|
|
||||||
|
// System vars are always passed from process.env regardless of profile
|
||||||
|
const SYSTEM_ENV_VARS = ['PATH', 'HOME', 'SHELL', 'TERM', 'USER', 'LANG', 'LC_ALL'];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Build environment for the SDK with only explicitly allowed variables
|
* Check if the config is a ClaudeCompatibleProvider (new system)
|
||||||
|
* by checking for the 'models' array property
|
||||||
*/
|
*/
|
||||||
function buildEnv(): Record<string, string | undefined> {
|
function isClaudeCompatibleProvider(config: ProviderConfig): config is ClaudeCompatibleProvider {
|
||||||
|
return 'models' in config && Array.isArray(config.models);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build environment for the SDK with only explicitly allowed variables.
|
||||||
|
* When a provider/profile is provided, uses its configuration (clean switch - don't inherit from process.env).
|
||||||
|
* When no provider is provided, uses direct Anthropic API settings from process.env.
|
||||||
|
*
|
||||||
|
* Supports both:
|
||||||
|
* - ClaudeCompatibleProvider (new system with models[] array)
|
||||||
|
* - ClaudeApiProfile (legacy system with modelMappings)
|
||||||
|
*
|
||||||
|
* @param providerConfig - Optional provider configuration for alternative endpoint
|
||||||
|
* @param credentials - Optional credentials object for resolving 'credentials' apiKeySource
|
||||||
|
*/
|
||||||
|
function buildEnv(
|
||||||
|
providerConfig?: ProviderConfig,
|
||||||
|
credentials?: Credentials
|
||||||
|
): Record<string, string | undefined> {
|
||||||
const env: Record<string, string | undefined> = {};
|
const env: Record<string, string | undefined> = {};
|
||||||
for (const key of ALLOWED_ENV_VARS) {
|
|
||||||
|
if (providerConfig) {
|
||||||
|
// Use provider configuration (clean switch - don't inherit non-system vars from process.env)
|
||||||
|
logger.debug('[buildEnv] Using provider configuration:', {
|
||||||
|
name: providerConfig.name,
|
||||||
|
baseUrl: providerConfig.baseUrl,
|
||||||
|
apiKeySource: providerConfig.apiKeySource ?? 'inline',
|
||||||
|
isNewProvider: isClaudeCompatibleProvider(providerConfig),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Resolve API key based on source strategy
|
||||||
|
let apiKey: string | undefined;
|
||||||
|
const source = providerConfig.apiKeySource ?? 'inline'; // Default to inline for backwards compat
|
||||||
|
|
||||||
|
switch (source) {
|
||||||
|
case 'inline':
|
||||||
|
apiKey = providerConfig.apiKey;
|
||||||
|
break;
|
||||||
|
case 'env':
|
||||||
|
apiKey = process.env.ANTHROPIC_API_KEY;
|
||||||
|
break;
|
||||||
|
case 'credentials':
|
||||||
|
apiKey = credentials?.apiKeys?.anthropic;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Warn if no API key found
|
||||||
|
if (!apiKey) {
|
||||||
|
logger.warn(`No API key found for provider "${providerConfig.name}" with source "${source}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication
|
||||||
|
if (providerConfig.useAuthToken) {
|
||||||
|
env['ANTHROPIC_AUTH_TOKEN'] = apiKey;
|
||||||
|
} else {
|
||||||
|
env['ANTHROPIC_API_KEY'] = apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Endpoint configuration
|
||||||
|
env['ANTHROPIC_BASE_URL'] = providerConfig.baseUrl;
|
||||||
|
logger.debug(`[buildEnv] Set ANTHROPIC_BASE_URL to: ${providerConfig.baseUrl}`);
|
||||||
|
|
||||||
|
if (providerConfig.timeoutMs) {
|
||||||
|
env['API_TIMEOUT_MS'] = String(providerConfig.timeoutMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Model mappings - only for legacy ClaudeApiProfile
|
||||||
|
// For ClaudeCompatibleProvider, the model is passed directly (no mapping needed)
|
||||||
|
if (!isClaudeCompatibleProvider(providerConfig) && providerConfig.modelMappings) {
|
||||||
|
if (providerConfig.modelMappings.haiku) {
|
||||||
|
env['ANTHROPIC_DEFAULT_HAIKU_MODEL'] = providerConfig.modelMappings.haiku;
|
||||||
|
}
|
||||||
|
if (providerConfig.modelMappings.sonnet) {
|
||||||
|
env['ANTHROPIC_DEFAULT_SONNET_MODEL'] = providerConfig.modelMappings.sonnet;
|
||||||
|
}
|
||||||
|
if (providerConfig.modelMappings.opus) {
|
||||||
|
env['ANTHROPIC_DEFAULT_OPUS_MODEL'] = providerConfig.modelMappings.opus;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traffic control
|
||||||
|
if (providerConfig.disableNonessentialTraffic) {
|
||||||
|
env['CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC'] = '1';
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Use direct Anthropic API - pass through credentials or environment variables
|
||||||
|
// This supports:
|
||||||
|
// 1. API Key mode: ANTHROPIC_API_KEY from credentials (UI settings) or env
|
||||||
|
// 2. Claude Max plan: Uses CLI OAuth auth (SDK handles this automatically)
|
||||||
|
// 3. Custom endpoints via ANTHROPIC_BASE_URL env var (backward compatibility)
|
||||||
|
//
|
||||||
|
// Priority: credentials file (UI settings) -> environment variable
|
||||||
|
// Note: Only auth and endpoint vars are passed. Model mappings and traffic
|
||||||
|
// control are NOT passed (those require a profile for explicit configuration).
|
||||||
|
if (credentials?.apiKeys?.anthropic) {
|
||||||
|
env['ANTHROPIC_API_KEY'] = credentials.apiKeys.anthropic;
|
||||||
|
} else if (process.env.ANTHROPIC_API_KEY) {
|
||||||
|
env['ANTHROPIC_API_KEY'] = process.env.ANTHROPIC_API_KEY;
|
||||||
|
}
|
||||||
|
// If using Claude Max plan via CLI auth, the SDK handles auth automatically
|
||||||
|
// when no API key is provided. We don't set ANTHROPIC_AUTH_TOKEN here
|
||||||
|
// unless it was explicitly set in process.env (rare edge case).
|
||||||
|
if (process.env.ANTHROPIC_AUTH_TOKEN) {
|
||||||
|
env['ANTHROPIC_AUTH_TOKEN'] = process.env.ANTHROPIC_AUTH_TOKEN;
|
||||||
|
}
|
||||||
|
// Pass through ANTHROPIC_BASE_URL if set in environment (backward compatibility)
|
||||||
|
if (process.env.ANTHROPIC_BASE_URL) {
|
||||||
|
env['ANTHROPIC_BASE_URL'] = process.env.ANTHROPIC_BASE_URL;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always add system vars from process.env
|
||||||
|
for (const key of SYSTEM_ENV_VARS) {
|
||||||
if (process.env[key]) {
|
if (process.env[key]) {
|
||||||
env[key] = process.env[key];
|
env[key] = process.env[key];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return env;
|
return env;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -70,8 +210,15 @@ export class ClaudeProvider extends BaseProvider {
|
|||||||
conversationHistory,
|
conversationHistory,
|
||||||
sdkSessionId,
|
sdkSessionId,
|
||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
|
claudeApiProfile,
|
||||||
|
claudeCompatibleProvider,
|
||||||
|
credentials,
|
||||||
} = options;
|
} = options;
|
||||||
|
|
||||||
|
// Determine which provider config to use
|
||||||
|
// claudeCompatibleProvider takes precedence over claudeApiProfile
|
||||||
|
const providerConfig = claudeCompatibleProvider || claudeApiProfile;
|
||||||
|
|
||||||
// Convert thinking level to token budget
|
// Convert thinking level to token budget
|
||||||
const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel);
|
const maxThinkingTokens = getThinkingTokenBudget(thinkingLevel);
|
||||||
|
|
||||||
@@ -82,7 +229,9 @@ export class ClaudeProvider extends BaseProvider {
|
|||||||
maxTurns,
|
maxTurns,
|
||||||
cwd,
|
cwd,
|
||||||
// Pass only explicitly allowed environment variables to SDK
|
// Pass only explicitly allowed environment variables to SDK
|
||||||
env: buildEnv(),
|
// When a provider is active, uses provider settings (clean switch)
|
||||||
|
// When no provider, uses direct Anthropic API (from process.env or CLI OAuth)
|
||||||
|
env: buildEnv(providerConfig, credentials),
|
||||||
// Pass through allowedTools if provided by caller (decided by sdk-options.ts)
|
// Pass through allowedTools if provided by caller (decided by sdk-options.ts)
|
||||||
...(allowedTools && { allowedTools }),
|
...(allowedTools && { allowedTools }),
|
||||||
// AUTONOMOUS MODE: Always bypass permissions for fully autonomous operation
|
// AUTONOMOUS MODE: Always bypass permissions for fully autonomous operation
|
||||||
@@ -127,6 +276,18 @@ export class ClaudeProvider extends BaseProvider {
|
|||||||
promptPayload = prompt;
|
promptPayload = prompt;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Log the environment being passed to the SDK for debugging
|
||||||
|
const envForSdk = sdkOptions.env as Record<string, string | undefined>;
|
||||||
|
logger.debug('[ClaudeProvider] SDK Configuration:', {
|
||||||
|
model: sdkOptions.model,
|
||||||
|
baseUrl: envForSdk?.['ANTHROPIC_BASE_URL'] || '(default Anthropic API)',
|
||||||
|
hasApiKey: !!envForSdk?.['ANTHROPIC_API_KEY'],
|
||||||
|
hasAuthToken: !!envForSdk?.['ANTHROPIC_AUTH_TOKEN'],
|
||||||
|
providerName: providerConfig?.name || '(direct Anthropic)',
|
||||||
|
maxTurns: sdkOptions.maxTurns,
|
||||||
|
maxThinkingTokens: sdkOptions.maxThinkingTokens,
|
||||||
|
});
|
||||||
|
|
||||||
// Execute via Claude Agent SDK
|
// Execute via Claude Agent SDK
|
||||||
try {
|
try {
|
||||||
const stream = query({ prompt: promptPayload, options: sdkOptions });
|
const stream = query({ prompt: promptPayload, options: sdkOptions });
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ export class CursorConfigManager {
|
|||||||
|
|
||||||
// Return default config with all available models
|
// Return default config with all available models
|
||||||
return {
|
return {
|
||||||
defaultModel: 'auto',
|
defaultModel: 'cursor-auto',
|
||||||
models: getAllCursorModelIds(),
|
models: getAllCursorModelIds(),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -77,7 +77,7 @@ export class CursorConfigManager {
|
|||||||
* Get the default model
|
* Get the default model
|
||||||
*/
|
*/
|
||||||
getDefaultModel(): CursorModelId {
|
getDefaultModel(): CursorModelId {
|
||||||
return this.config.defaultModel || 'auto';
|
return this.config.defaultModel || 'cursor-auto';
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -93,7 +93,7 @@ export class CursorConfigManager {
|
|||||||
* Get enabled models
|
* Get enabled models
|
||||||
*/
|
*/
|
||||||
getEnabledModels(): CursorModelId[] {
|
getEnabledModels(): CursorModelId[] {
|
||||||
return this.config.models || ['auto'];
|
return this.config.models || ['cursor-auto'];
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -174,7 +174,7 @@ export class CursorConfigManager {
|
|||||||
*/
|
*/
|
||||||
reset(): void {
|
reset(): void {
|
||||||
this.config = {
|
this.config = {
|
||||||
defaultModel: 'auto',
|
defaultModel: 'cursor-auto',
|
||||||
models: getAllCursorModelIds(),
|
models: getAllCursorModelIds(),
|
||||||
};
|
};
|
||||||
this.saveConfig();
|
this.saveConfig();
|
||||||
|
|||||||
@@ -337,10 +337,11 @@ export class CursorProvider extends CliProvider {
|
|||||||
'--stream-partial-output' // Real-time streaming
|
'--stream-partial-output' // Real-time streaming
|
||||||
);
|
);
|
||||||
|
|
||||||
// Only add --force if NOT in read-only mode
|
// In read-only mode, use --mode ask for Q&A style (no tools)
|
||||||
// Without --force, Cursor CLI suggests changes but doesn't apply them
|
// Otherwise, add --force to allow file edits
|
||||||
// With --force, Cursor CLI can actually edit files
|
if (options.readOnly) {
|
||||||
if (!options.readOnly) {
|
cliArgs.push('--mode', 'ask');
|
||||||
|
} else {
|
||||||
cliArgs.push('--force');
|
cliArgs.push('--force');
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -672,10 +673,13 @@ export class CursorProvider extends CliProvider {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extract prompt text to pass via stdin (avoids shell escaping issues)
|
// Embed system prompt into user prompt (Cursor CLI doesn't support separate system messages)
|
||||||
const promptText = this.extractPromptText(options);
|
const effectiveOptions = this.embedSystemPromptIntoPrompt(options);
|
||||||
|
|
||||||
const cliArgs = this.buildCliArgs(options);
|
// Extract prompt text to pass via stdin (avoids shell escaping issues)
|
||||||
|
const promptText = this.extractPromptText(effectiveOptions);
|
||||||
|
|
||||||
|
const cliArgs = this.buildCliArgs(effectiveOptions);
|
||||||
const subprocessOptions = this.buildSubprocessOptions(options, cliArgs);
|
const subprocessOptions = this.buildSubprocessOptions(options, cliArgs);
|
||||||
|
|
||||||
// Pass prompt via stdin to avoid shell interpretation of special characters
|
// Pass prompt via stdin to avoid shell interpretation of special characters
|
||||||
|
|||||||
815
apps/server/src/providers/gemini-provider.ts
Normal file
815
apps/server/src/providers/gemini-provider.ts
Normal file
@@ -0,0 +1,815 @@
|
|||||||
|
/**
|
||||||
|
* Gemini Provider - Executes queries using the Gemini CLI
|
||||||
|
*
|
||||||
|
* Extends CliProvider with Gemini-specific:
|
||||||
|
* - Event normalization for Gemini's JSONL streaming format
|
||||||
|
* - Google account and API key authentication support
|
||||||
|
* - Thinking level configuration
|
||||||
|
*
|
||||||
|
* Based on https://github.com/google-gemini/gemini-cli
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execSync } from 'child_process';
|
||||||
|
import * as fs from 'fs/promises';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as os from 'os';
|
||||||
|
import { CliProvider, type CliSpawnConfig, type CliErrorInfo } from './cli-provider.js';
|
||||||
|
import type {
|
||||||
|
ProviderConfig,
|
||||||
|
ExecuteOptions,
|
||||||
|
ProviderMessage,
|
||||||
|
InstallationStatus,
|
||||||
|
ModelDefinition,
|
||||||
|
ContentBlock,
|
||||||
|
} from './types.js';
|
||||||
|
import { validateBareModelId } from '@automaker/types';
|
||||||
|
import { GEMINI_MODEL_MAP, type GeminiAuthStatus } from '@automaker/types';
|
||||||
|
import { createLogger, isAbortError } from '@automaker/utils';
|
||||||
|
import { spawnJSONLProcess } from '@automaker/platform';
|
||||||
|
|
||||||
|
// Create logger for this module
|
||||||
|
const logger = createLogger('GeminiProvider');
|
||||||
|
|
||||||
|
// =============================================================================
|
||||||
|
// Gemini Stream Event Types
|
||||||
|
// =============================================================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base event structure from Gemini CLI --output-format stream-json
|
||||||
|
*
|
||||||
|
* Actual CLI output format:
|
||||||
|
* {"type":"init","timestamp":"...","session_id":"...","model":"..."}
|
||||||
|
* {"type":"message","timestamp":"...","role":"user","content":"..."}
|
||||||
|
* {"type":"message","timestamp":"...","role":"assistant","content":"...","delta":true}
|
||||||
|
* {"type":"tool_use","timestamp":"...","tool_name":"...","tool_id":"...","parameters":{...}}
|
||||||
|
* {"type":"tool_result","timestamp":"...","tool_id":"...","status":"success","output":"..."}
|
||||||
|
* {"type":"result","timestamp":"...","status":"success","stats":{...}}
|
||||||
|
*/
|
||||||
|
interface GeminiStreamEvent {
|
||||||
|
type: 'init' | 'message' | 'tool_use' | 'tool_result' | 'result' | 'error';
|
||||||
|
timestamp?: string;
|
||||||
|
session_id?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GeminiInitEvent extends GeminiStreamEvent {
|
||||||
|
type: 'init';
|
||||||
|
session_id: string;
|
||||||
|
model: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GeminiMessageEvent extends GeminiStreamEvent {
|
||||||
|
type: 'message';
|
||||||
|
role: 'user' | 'assistant';
|
||||||
|
content: string;
|
||||||
|
delta?: boolean;
|
||||||
|
session_id?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GeminiToolUseEvent extends GeminiStreamEvent {
|
||||||
|
type: 'tool_use';
|
||||||
|
tool_id: string;
|
||||||
|
tool_name: string;
|
||||||
|
parameters: Record<string, unknown>;
|
||||||
|
session_id?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GeminiToolResultEvent extends GeminiStreamEvent {
|
||||||
|
type: 'tool_result';
|
||||||
|
tool_id: string;
|
||||||
|
status: 'success' | 'error';
|
||||||
|
output: string;
|
||||||
|
session_id?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GeminiResultEvent extends GeminiStreamEvent {
|
||||||
|
type: 'result';
|
||||||
|
status: 'success' | 'error';
|
||||||
|
stats?: {
|
||||||
|
total_tokens?: number;
|
||||||
|
input_tokens?: number;
|
||||||
|
output_tokens?: number;
|
||||||
|
cached?: number;
|
||||||
|
input?: number;
|
||||||
|
duration_ms?: number;
|
||||||
|
tool_calls?: number;
|
||||||
|
};
|
||||||
|
error?: string;
|
||||||
|
session_id?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// =============================================================================
|
||||||
|
// Error Codes
|
||||||
|
// =============================================================================
|
||||||
|
|
||||||
|
export enum GeminiErrorCode {
|
||||||
|
NOT_INSTALLED = 'GEMINI_NOT_INSTALLED',
|
||||||
|
NOT_AUTHENTICATED = 'GEMINI_NOT_AUTHENTICATED',
|
||||||
|
RATE_LIMITED = 'GEMINI_RATE_LIMITED',
|
||||||
|
MODEL_UNAVAILABLE = 'GEMINI_MODEL_UNAVAILABLE',
|
||||||
|
NETWORK_ERROR = 'GEMINI_NETWORK_ERROR',
|
||||||
|
PROCESS_CRASHED = 'GEMINI_PROCESS_CRASHED',
|
||||||
|
TIMEOUT = 'GEMINI_TIMEOUT',
|
||||||
|
UNKNOWN = 'GEMINI_UNKNOWN_ERROR',
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GeminiError extends Error {
|
||||||
|
code: GeminiErrorCode;
|
||||||
|
recoverable: boolean;
|
||||||
|
suggestion?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// =============================================================================
|
||||||
|
// Tool Name Normalization
|
||||||
|
// =============================================================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gemini CLI tool name to standard tool name mapping
|
||||||
|
* This allows the UI to properly categorize and display Gemini tool calls
|
||||||
|
*/
|
||||||
|
const GEMINI_TOOL_NAME_MAP: Record<string, string> = {
|
||||||
|
write_todos: 'TodoWrite',
|
||||||
|
read_file: 'Read',
|
||||||
|
read_many_files: 'Read',
|
||||||
|
replace: 'Edit',
|
||||||
|
write_file: 'Write',
|
||||||
|
run_shell_command: 'Bash',
|
||||||
|
search_file_content: 'Grep',
|
||||||
|
glob: 'Glob',
|
||||||
|
list_directory: 'Ls',
|
||||||
|
web_fetch: 'WebFetch',
|
||||||
|
google_web_search: 'WebSearch',
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize Gemini tool names to standard tool names
|
||||||
|
*/
|
||||||
|
function normalizeGeminiToolName(geminiToolName: string): string {
|
||||||
|
return GEMINI_TOOL_NAME_MAP[geminiToolName] || geminiToolName;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize Gemini tool input parameters to standard format
|
||||||
|
*
|
||||||
|
* Gemini `write_todos` format:
|
||||||
|
* {"todos": [{"description": "Task text", "status": "pending|in_progress|completed|cancelled"}]}
|
||||||
|
*
|
||||||
|
* Claude `TodoWrite` format:
|
||||||
|
* {"todos": [{"content": "Task text", "status": "pending|in_progress|completed", "activeForm": "..."}]}
|
||||||
|
*/
|
||||||
|
function normalizeGeminiToolInput(
|
||||||
|
toolName: string,
|
||||||
|
input: Record<string, unknown>
|
||||||
|
): Record<string, unknown> {
|
||||||
|
// Normalize write_todos: map 'description' to 'content', handle 'cancelled' status
|
||||||
|
if (toolName === 'write_todos' && Array.isArray(input.todos)) {
|
||||||
|
return {
|
||||||
|
todos: input.todos.map((todo: { description?: string; status?: string }) => ({
|
||||||
|
content: todo.description || '',
|
||||||
|
// Map 'cancelled' to 'completed' since Claude doesn't have cancelled status
|
||||||
|
status: todo.status === 'cancelled' ? 'completed' : todo.status,
|
||||||
|
// Use description as activeForm since Gemini doesn't have it
|
||||||
|
activeForm: todo.description || '',
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GeminiProvider - Integrates Gemini CLI as an AI provider
|
||||||
|
*
|
||||||
|
* Features:
|
||||||
|
* - Google account OAuth login support
|
||||||
|
* - API key authentication (GEMINI_API_KEY)
|
||||||
|
* - Vertex AI support
|
||||||
|
* - Thinking level configuration
|
||||||
|
* - Streaming JSON output
|
||||||
|
*/
|
||||||
|
export class GeminiProvider extends CliProvider {
|
||||||
|
constructor(config: ProviderConfig = {}) {
|
||||||
|
super(config);
|
||||||
|
// Trigger CLI detection on construction
|
||||||
|
this.ensureCliDetected();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================================================
|
||||||
|
// CliProvider Abstract Method Implementations
|
||||||
|
// ==========================================================================
|
||||||
|
|
||||||
|
getName(): string {
|
||||||
|
return 'gemini';
|
||||||
|
}
|
||||||
|
|
||||||
|
getCliName(): string {
|
||||||
|
return 'gemini';
|
||||||
|
}
|
||||||
|
|
||||||
|
getSpawnConfig(): CliSpawnConfig {
|
||||||
|
return {
|
||||||
|
windowsStrategy: 'npx', // Gemini CLI can be run via npx
|
||||||
|
npxPackage: '@google/gemini-cli', // Official Google Gemini CLI package
|
||||||
|
commonPaths: {
|
||||||
|
linux: [
|
||||||
|
path.join(os.homedir(), '.local/bin/gemini'),
|
||||||
|
'/usr/local/bin/gemini',
|
||||||
|
path.join(os.homedir(), '.npm-global/bin/gemini'),
|
||||||
|
],
|
||||||
|
darwin: [
|
||||||
|
path.join(os.homedir(), '.local/bin/gemini'),
|
||||||
|
'/usr/local/bin/gemini',
|
||||||
|
'/opt/homebrew/bin/gemini',
|
||||||
|
path.join(os.homedir(), '.npm-global/bin/gemini'),
|
||||||
|
],
|
||||||
|
win32: [
|
||||||
|
path.join(os.homedir(), 'AppData', 'Roaming', 'npm', 'gemini.cmd'),
|
||||||
|
path.join(os.homedir(), '.npm-global', 'gemini.cmd'),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract prompt text from ExecuteOptions
|
||||||
|
*/
|
||||||
|
private extractPromptText(options: ExecuteOptions): string {
|
||||||
|
if (typeof options.prompt === 'string') {
|
||||||
|
return options.prompt;
|
||||||
|
} else if (Array.isArray(options.prompt)) {
|
||||||
|
return options.prompt
|
||||||
|
.filter((p) => p.type === 'text' && p.text)
|
||||||
|
.map((p) => p.text)
|
||||||
|
.join('\n');
|
||||||
|
} else {
|
||||||
|
throw new Error('Invalid prompt format');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
buildCliArgs(options: ExecuteOptions): string[] {
|
||||||
|
// Model comes in stripped of provider prefix (e.g., '2.5-flash' from 'gemini-2.5-flash')
|
||||||
|
// We need to add 'gemini-' back since it's part of the actual CLI model name
|
||||||
|
const bareModel = options.model || '2.5-flash';
|
||||||
|
const cliArgs: string[] = [];
|
||||||
|
|
||||||
|
// Streaming JSON output format for real-time updates
|
||||||
|
cliArgs.push('--output-format', 'stream-json');
|
||||||
|
|
||||||
|
// Model selection - Gemini CLI expects full model names like "gemini-2.5-flash"
|
||||||
|
// Unlike Cursor CLI where 'cursor-' is just a routing prefix, for Gemini CLI
|
||||||
|
// the 'gemini-' is part of the actual model name Google expects
|
||||||
|
if (bareModel && bareModel !== 'auto') {
|
||||||
|
// Add gemini- prefix if not already present (handles edge cases)
|
||||||
|
const cliModel = bareModel.startsWith('gemini-') ? bareModel : `gemini-${bareModel}`;
|
||||||
|
cliArgs.push('--model', cliModel);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Disable sandbox mode for faster execution (sandbox adds overhead)
|
||||||
|
cliArgs.push('--sandbox', 'false');
|
||||||
|
|
||||||
|
// YOLO mode for automatic approval (required for non-interactive use)
|
||||||
|
// Use explicit approval-mode for clearer semantics
|
||||||
|
cliArgs.push('--approval-mode', 'yolo');
|
||||||
|
|
||||||
|
// Explicitly include the working directory in allowed workspace directories
|
||||||
|
// This ensures Gemini CLI allows file operations in the project directory,
|
||||||
|
// even if it has a different workspace cached from a previous session
|
||||||
|
if (options.cwd) {
|
||||||
|
cliArgs.push('--include-directories', options.cwd);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: Gemini CLI doesn't have a --thinking-level flag.
|
||||||
|
// Thinking capabilities are determined by the model selection (e.g., gemini-2.5-pro).
|
||||||
|
// The model handles thinking internally based on the task complexity.
|
||||||
|
|
||||||
|
// The prompt will be passed as the last positional argument
|
||||||
|
// We'll append it in executeQuery after extracting the text
|
||||||
|
|
||||||
|
return cliArgs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Convert a raw Gemini stream event into the AutoMaker ProviderMessage shape.
 *
 * Returns null for events that carry nothing to surface to the caller
 * (init bookkeeping, echoed user messages, unrecognized event types).
 */
normalizeEvent(event: unknown): ProviderMessage | null {
  const evt = event as GeminiStreamEvent;

  if (evt.type === 'init') {
    // Session bookkeeping only — nothing is yielded for init events.
    const init = evt as GeminiInitEvent;
    logger.debug(
      `Gemini init event: session=${init.session_id}, model=${init.model}`
    );
    return null;
  }

  if (evt.type === 'message') {
    const msg = evt as GeminiMessageEvent;
    // User messages were already handled by the caller; only assistant
    // text is surfaced. Any other role is dropped as well.
    if (msg.role !== 'assistant') {
      return null;
    }
    return {
      type: 'assistant',
      session_id: msg.session_id,
      message: {
        role: 'assistant',
        content: [{ type: 'text', text: msg.content }],
      },
    };
  }

  if (evt.type === 'tool_use') {
    const tool = evt as GeminiToolUseEvent;
    return {
      type: 'assistant',
      session_id: tool.session_id,
      message: {
        role: 'assistant',
        content: [
          {
            type: 'tool_use',
            // Tool names/inputs are normalized to AutoMaker conventions.
            name: normalizeGeminiToolName(tool.tool_name),
            tool_use_id: tool.tool_id,
            input: normalizeGeminiToolInput(
              tool.tool_name,
              tool.parameters as Record<string, unknown>
            ),
          },
        ],
      },
    };
  }

  if (evt.type === 'tool_result') {
    const result = evt as GeminiToolResultEvent;
    // Failed tool runs are marked inline so downstream consumers can tell.
    let output = result.output;
    if (result.status === 'error') {
      output = `[ERROR] ${result.output}`;
    }
    return {
      type: 'assistant',
      session_id: result.session_id,
      message: {
        role: 'assistant',
        content: [
          {
            type: 'tool_result',
            tool_use_id: result.tool_id,
            content: output,
          },
        ],
      },
    };
  }

  if (evt.type === 'result') {
    const res = evt as GeminiResultEvent;
    if (res.status === 'error') {
      return {
        type: 'error',
        session_id: res.session_id,
        error: res.error || 'Unknown error',
      };
    }
    // Success result — stats are logged but not forwarded.
    logger.debug(
      `Gemini result: status=${res.status}, tokens=${res.stats?.total_tokens}`
    );
    return {
      type: 'result',
      subtype: 'success',
      session_id: res.session_id,
    };
  }

  if (evt.type === 'error') {
    const err = evt as GeminiResultEvent;
    return {
      type: 'error',
      session_id: err.session_id,
      error: err.error || 'Unknown error',
    };
  }

  logger.debug(`Unknown Gemini event type: ${evt.type}`);
  return null;
}
|
||||||
|
|
||||||
|
// ==========================================================================
|
||||||
|
// CliProvider Overrides
|
||||||
|
// ==========================================================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Override error mapping for Gemini-specific error codes
|
||||||
|
*/
|
||||||
|
protected mapError(stderr: string, exitCode: number | null): CliErrorInfo {
|
||||||
|
const lower = stderr.toLowerCase();
|
||||||
|
|
||||||
|
if (
|
||||||
|
lower.includes('not authenticated') ||
|
||||||
|
lower.includes('please log in') ||
|
||||||
|
lower.includes('unauthorized') ||
|
||||||
|
lower.includes('login required') ||
|
||||||
|
lower.includes('error authenticating') ||
|
||||||
|
lower.includes('loadcodeassist') ||
|
||||||
|
(lower.includes('econnrefused') && lower.includes('8888'))
|
||||||
|
) {
|
||||||
|
return {
|
||||||
|
code: GeminiErrorCode.NOT_AUTHENTICATED,
|
||||||
|
message: 'Gemini CLI is not authenticated',
|
||||||
|
recoverable: true,
|
||||||
|
suggestion:
|
||||||
|
'Run "gemini" interactively to log in, or set GEMINI_API_KEY environment variable',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
lower.includes('rate limit') ||
|
||||||
|
lower.includes('too many requests') ||
|
||||||
|
lower.includes('429') ||
|
||||||
|
lower.includes('quota exceeded')
|
||||||
|
) {
|
||||||
|
return {
|
||||||
|
code: GeminiErrorCode.RATE_LIMITED,
|
||||||
|
message: 'Gemini API rate limit exceeded',
|
||||||
|
recoverable: true,
|
||||||
|
suggestion: 'Wait a few minutes and try again. Free tier: 60 req/min, 1000 req/day',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
lower.includes('model not available') ||
|
||||||
|
lower.includes('invalid model') ||
|
||||||
|
lower.includes('unknown model') ||
|
||||||
|
lower.includes('modelnotfounderror') ||
|
||||||
|
lower.includes('model not found') ||
|
||||||
|
(lower.includes('not found') && lower.includes('404'))
|
||||||
|
) {
|
||||||
|
return {
|
||||||
|
code: GeminiErrorCode.MODEL_UNAVAILABLE,
|
||||||
|
message: 'Requested model is not available',
|
||||||
|
recoverable: true,
|
||||||
|
suggestion: 'Try using "gemini-2.5-flash" or select a different model',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
lower.includes('network') ||
|
||||||
|
lower.includes('connection') ||
|
||||||
|
lower.includes('econnrefused') ||
|
||||||
|
lower.includes('timeout')
|
||||||
|
) {
|
||||||
|
return {
|
||||||
|
code: GeminiErrorCode.NETWORK_ERROR,
|
||||||
|
message: 'Network connection error',
|
||||||
|
recoverable: true,
|
||||||
|
suggestion: 'Check your internet connection and try again',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (exitCode === 137 || lower.includes('killed') || lower.includes('sigterm')) {
|
||||||
|
return {
|
||||||
|
code: GeminiErrorCode.PROCESS_CRASHED,
|
||||||
|
message: 'Gemini CLI process was terminated',
|
||||||
|
recoverable: true,
|
||||||
|
suggestion: 'The process may have run out of memory. Try a simpler task.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
code: GeminiErrorCode.UNKNOWN,
|
||||||
|
message: stderr || `Gemini CLI exited with code ${exitCode}`,
|
||||||
|
recoverable: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Override install instructions for Gemini-specific guidance.
 *
 * @returns A one-line hint shown to the user when the `gemini` binary
 *          cannot be located (see executeQuery's NOT_INSTALLED error).
 */
protected getInstallInstructions(): string {
  return 'Install with: npm install -g @google/gemini-cli (or visit https://github.com/google-gemini/gemini-cli)';
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a prompt using Gemini CLI with streaming
|
||||||
|
*/
|
||||||
|
async *executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage> {
|
||||||
|
this.ensureCliDetected();
|
||||||
|
|
||||||
|
// Validate that model doesn't have a provider prefix
|
||||||
|
validateBareModelId(options.model, 'GeminiProvider');
|
||||||
|
|
||||||
|
if (!this.cliPath) {
|
||||||
|
throw this.createError(
|
||||||
|
GeminiErrorCode.NOT_INSTALLED,
|
||||||
|
'Gemini CLI is not installed',
|
||||||
|
true,
|
||||||
|
this.getInstallInstructions()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract prompt text to pass as positional argument
|
||||||
|
const promptText = this.extractPromptText(options);
|
||||||
|
|
||||||
|
// Build CLI args and append the prompt as the last positional argument
|
||||||
|
const cliArgs = this.buildCliArgs(options);
|
||||||
|
cliArgs.push(promptText); // Gemini CLI uses positional args for the prompt
|
||||||
|
|
||||||
|
const subprocessOptions = this.buildSubprocessOptions(options, cliArgs);
|
||||||
|
|
||||||
|
let sessionId: string | undefined;
|
||||||
|
|
||||||
|
logger.debug(`GeminiProvider.executeQuery called with model: "${options.model}"`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
for await (const rawEvent of spawnJSONLProcess(subprocessOptions)) {
|
||||||
|
const event = rawEvent as GeminiStreamEvent;
|
||||||
|
|
||||||
|
// Capture session ID from init event
|
||||||
|
if (event.type === 'init') {
|
||||||
|
const initEvent = event as GeminiInitEvent;
|
||||||
|
sessionId = initEvent.session_id;
|
||||||
|
logger.debug(`Session started: ${sessionId}, model: ${initEvent.model}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize and yield the event
|
||||||
|
const normalized = this.normalizeEvent(event);
|
||||||
|
if (normalized) {
|
||||||
|
if (!normalized.session_id && sessionId) {
|
||||||
|
normalized.session_id = sessionId;
|
||||||
|
}
|
||||||
|
yield normalized;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (isAbortError(error)) {
|
||||||
|
logger.debug('Query aborted');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map CLI errors to GeminiError
|
||||||
|
if (error instanceof Error && 'stderr' in error) {
|
||||||
|
const errorInfo = this.mapError(
|
||||||
|
(error as { stderr?: string }).stderr || error.message,
|
||||||
|
(error as { exitCode?: number | null }).exitCode ?? null
|
||||||
|
);
|
||||||
|
throw this.createError(
|
||||||
|
errorInfo.code as GeminiErrorCode,
|
||||||
|
errorInfo.message,
|
||||||
|
errorInfo.recoverable,
|
||||||
|
errorInfo.suggestion
|
||||||
|
);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================================================
|
||||||
|
// Gemini-Specific Methods
|
||||||
|
// ==========================================================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a GeminiError with details
|
||||||
|
*/
|
||||||
|
private createError(
|
||||||
|
code: GeminiErrorCode,
|
||||||
|
message: string,
|
||||||
|
recoverable: boolean = false,
|
||||||
|
suggestion?: string
|
||||||
|
): GeminiError {
|
||||||
|
const error = new Error(message) as GeminiError;
|
||||||
|
error.code = code;
|
||||||
|
error.recoverable = recoverable;
|
||||||
|
error.suggestion = suggestion;
|
||||||
|
error.name = 'GeminiError';
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Gemini CLI version
|
||||||
|
*/
|
||||||
|
async getVersion(): Promise<string | null> {
|
||||||
|
this.ensureCliDetected();
|
||||||
|
if (!this.cliPath) return null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = execSync(`"${this.cliPath}" --version`, {
|
||||||
|
encoding: 'utf8',
|
||||||
|
timeout: 5000,
|
||||||
|
stdio: 'pipe',
|
||||||
|
}).trim();
|
||||||
|
return result;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check authentication status
|
||||||
|
*
|
||||||
|
* Uses a fast credential check approach:
|
||||||
|
* 1. Check for GEMINI_API_KEY environment variable
|
||||||
|
* 2. Check for Google Cloud credentials
|
||||||
|
* 3. Check for Gemini settings file with stored credentials
|
||||||
|
* 4. Quick CLI auth test with --help (fast, doesn't make API calls)
|
||||||
|
*/
|
||||||
|
async checkAuth(): Promise<GeminiAuthStatus> {
|
||||||
|
this.ensureCliDetected();
|
||||||
|
if (!this.cliPath) {
|
||||||
|
logger.debug('checkAuth: CLI not found');
|
||||||
|
return { authenticated: false, method: 'none' };
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.debug('checkAuth: Starting credential check');
|
||||||
|
|
||||||
|
// Determine the likely auth method based on environment
|
||||||
|
const hasApiKey = !!process.env.GEMINI_API_KEY;
|
||||||
|
const hasEnvApiKey = hasApiKey;
|
||||||
|
const hasVertexAi = !!(
|
||||||
|
process.env.GOOGLE_APPLICATION_CREDENTIALS || process.env.GOOGLE_CLOUD_PROJECT
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.debug(`checkAuth: hasApiKey=${hasApiKey}, hasVertexAi=${hasVertexAi}`);
|
||||||
|
|
||||||
|
// Check for Gemini credentials file (~/.gemini/settings.json)
|
||||||
|
const geminiConfigDir = path.join(os.homedir(), '.gemini');
|
||||||
|
const settingsPath = path.join(geminiConfigDir, 'settings.json');
|
||||||
|
let hasCredentialsFile = false;
|
||||||
|
let authType: string | null = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await fs.access(settingsPath);
|
||||||
|
logger.debug(`checkAuth: Found settings file at ${settingsPath}`);
|
||||||
|
try {
|
||||||
|
const content = await fs.readFile(settingsPath, 'utf8');
|
||||||
|
const settings = JSON.parse(content);
|
||||||
|
|
||||||
|
// Auth config is at security.auth.selectedType (e.g., "oauth-personal", "oauth-adc", "api-key")
|
||||||
|
const selectedType = settings?.security?.auth?.selectedType;
|
||||||
|
if (selectedType) {
|
||||||
|
hasCredentialsFile = true;
|
||||||
|
authType = selectedType;
|
||||||
|
logger.debug(`checkAuth: Settings file has auth config, selectedType=${selectedType}`);
|
||||||
|
} else {
|
||||||
|
logger.debug(`checkAuth: Settings file found but no auth type configured`);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
logger.debug(`checkAuth: Failed to parse settings file: ${e}`);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
logger.debug('checkAuth: No settings file found');
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we have an API key, we're authenticated
|
||||||
|
if (hasApiKey) {
|
||||||
|
logger.debug('checkAuth: Using API key authentication');
|
||||||
|
return {
|
||||||
|
authenticated: true,
|
||||||
|
method: 'api_key',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we have Vertex AI credentials, we're authenticated
|
||||||
|
if (hasVertexAi) {
|
||||||
|
logger.debug('checkAuth: Using Vertex AI authentication');
|
||||||
|
return {
|
||||||
|
authenticated: true,
|
||||||
|
method: 'vertex_ai',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if settings file indicates configured authentication
|
||||||
|
if (hasCredentialsFile && authType) {
|
||||||
|
// OAuth types: "oauth-personal", "oauth-adc"
|
||||||
|
// API key type: "api-key"
|
||||||
|
// Code assist: "code-assist" (requires IDE integration)
|
||||||
|
if (authType.startsWith('oauth')) {
|
||||||
|
logger.debug(`checkAuth: OAuth authentication configured (${authType})`);
|
||||||
|
return {
|
||||||
|
authenticated: true,
|
||||||
|
method: 'google_login',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (authType === 'api-key') {
|
||||||
|
logger.debug('checkAuth: API key authentication configured in settings');
|
||||||
|
return {
|
||||||
|
authenticated: true,
|
||||||
|
method: 'api_key',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (authType === 'code-assist' || authType === 'codeassist') {
|
||||||
|
logger.debug('checkAuth: Code Assist auth configured but requires local server');
|
||||||
|
return {
|
||||||
|
authenticated: false,
|
||||||
|
method: 'google_login',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
error:
|
||||||
|
'Code Assist authentication requires IDE integration. Please use "gemini" CLI to log in with a different method, or set GEMINI_API_KEY.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unknown auth type but something is configured
|
||||||
|
logger.debug(`checkAuth: Unknown auth type configured: ${authType}`);
|
||||||
|
return {
|
||||||
|
authenticated: true,
|
||||||
|
method: 'google_login',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// No credentials found
|
||||||
|
logger.debug('checkAuth: No valid credentials found');
|
||||||
|
return {
|
||||||
|
authenticated: false,
|
||||||
|
method: 'none',
|
||||||
|
hasApiKey,
|
||||||
|
hasEnvApiKey,
|
||||||
|
hasCredentialsFile,
|
||||||
|
error:
|
||||||
|
'No authentication configured. Run "gemini" interactively to log in, or set GEMINI_API_KEY.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detect installation status (required by BaseProvider)
|
||||||
|
*/
|
||||||
|
async detectInstallation(): Promise<InstallationStatus> {
|
||||||
|
const installed = await this.isInstalled();
|
||||||
|
const version = installed ? await this.getVersion() : undefined;
|
||||||
|
const auth = await this.checkAuth();
|
||||||
|
|
||||||
|
return {
|
||||||
|
installed,
|
||||||
|
version: version || undefined,
|
||||||
|
path: this.cliPath || undefined,
|
||||||
|
method: 'cli',
|
||||||
|
hasApiKey: !!process.env.GEMINI_API_KEY,
|
||||||
|
authenticated: auth.authenticated,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Get the detected CLI path (public accessor for status endpoints).
 *
 * @returns Path to the detected `gemini` binary, or null when not found.
 */
getCliPath(): string | null {
  // Detection is lazy; ensure it has run before reporting the path.
  this.ensureCliDetected();
  return this.cliPath;
}
|
||||||
|
|
||||||
|
/**
 * Get available Gemini models.
 *
 * Flattens GEMINI_MODEL_MAP into ModelDefinition entries. The map key is
 * the full model name (e.g. 'gemini-2.5-flash') and doubles as the CLI
 * model string.
 */
getAvailableModels(): ModelDefinition[] {
  const definitions: ModelDefinition[] = [];
  for (const [modelId, meta] of Object.entries(GEMINI_MODEL_MAP)) {
    definitions.push({
      id: modelId,
      name: meta.label,
      modelString: modelId, // CLI accepts the full model name directly
      provider: 'gemini',
      description: meta.description,
      supportsTools: true, // all mapped Gemini models support tool use
      supportsVision: meta.supportsVision,
      contextWindow: meta.contextWindow,
    });
  }
  return definitions;
}
|
||||||
|
|
||||||
|
/**
 * Check if a feature is supported.
 *
 * @param feature Feature name, e.g. 'tools' or 'streaming'
 * @returns true for the fixed set of capabilities this provider offers
 */
supportsFeature(feature: string): boolean {
  switch (feature) {
    case 'tools':
    case 'text':
    case 'streaming':
    case 'vision':
    case 'thinking':
      return true;
    default:
      return false;
  }
}
|
||||||
|
}
|
||||||
@@ -16,6 +16,16 @@ export type {
|
|||||||
ProviderMessage,
|
ProviderMessage,
|
||||||
InstallationStatus,
|
InstallationStatus,
|
||||||
ModelDefinition,
|
ModelDefinition,
|
||||||
|
AgentDefinition,
|
||||||
|
ReasoningEffort,
|
||||||
|
SystemPromptPreset,
|
||||||
|
ConversationMessage,
|
||||||
|
ContentBlock,
|
||||||
|
ValidationResult,
|
||||||
|
McpServerConfig,
|
||||||
|
McpStdioServerConfig,
|
||||||
|
McpSSEServerConfig,
|
||||||
|
McpHttpServerConfig,
|
||||||
} from './types.js';
|
} from './types.js';
|
||||||
|
|
||||||
// Claude provider
|
// Claude provider
|
||||||
|
|||||||
@@ -25,7 +25,6 @@ import type {
|
|||||||
InstallationStatus,
|
InstallationStatus,
|
||||||
ContentBlock,
|
ContentBlock,
|
||||||
} from '@automaker/types';
|
} from '@automaker/types';
|
||||||
import { stripProviderPrefix } from '@automaker/types';
|
|
||||||
import { type SubprocessOptions, getOpenCodeAuthIndicators } from '@automaker/platform';
|
import { type SubprocessOptions, getOpenCodeAuthIndicators } from '@automaker/platform';
|
||||||
import { createLogger } from '@automaker/utils';
|
import { createLogger } from '@automaker/utils';
|
||||||
|
|
||||||
@@ -328,10 +327,18 @@ export class OpencodeProvider extends CliProvider {
|
|||||||
args.push('--format', 'json');
|
args.push('--format', 'json');
|
||||||
|
|
||||||
// Handle model selection
|
// Handle model selection
|
||||||
// Strip 'opencode-' prefix if present, OpenCode uses format like 'anthropic/claude-sonnet-4-5'
|
// Convert canonical prefix format (opencode-xxx) to CLI slash format (opencode/xxx)
|
||||||
|
// OpenCode CLI expects provider/model format (e.g., 'opencode/big-model')
|
||||||
if (options.model) {
|
if (options.model) {
|
||||||
const model = stripProviderPrefix(options.model);
|
// Strip opencode- prefix if present, then ensure slash format
|
||||||
args.push('--model', model);
|
const model = options.model.startsWith('opencode-')
|
||||||
|
? options.model.slice('opencode-'.length)
|
||||||
|
: options.model;
|
||||||
|
|
||||||
|
// If model has slash, it's already provider/model format; otherwise prepend opencode/
|
||||||
|
const cliModel = model.includes('/') ? model : `opencode/${model}`;
|
||||||
|
|
||||||
|
args.push('--model', cliModel);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Note: OpenCode reads from stdin automatically when input is piped
|
// Note: OpenCode reads from stdin automatically when input is piped
|
||||||
@@ -1035,7 +1042,7 @@ export class OpencodeProvider extends CliProvider {
|
|||||||
'lm studio': 'lmstudio',
|
'lm studio': 'lmstudio',
|
||||||
lmstudio: 'lmstudio',
|
lmstudio: 'lmstudio',
|
||||||
opencode: 'opencode',
|
opencode: 'opencode',
|
||||||
'z.ai coding plan': 'z-ai',
|
'z.ai coding plan': 'zai-coding-plan',
|
||||||
'z.ai': 'z-ai',
|
'z.ai': 'z-ai',
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,13 @@
|
|||||||
|
|
||||||
import { BaseProvider } from './base-provider.js';
|
import { BaseProvider } from './base-provider.js';
|
||||||
import type { InstallationStatus, ModelDefinition } from './types.js';
|
import type { InstallationStatus, ModelDefinition } from './types.js';
|
||||||
import { isCursorModel, isCodexModel, isOpencodeModel, type ModelProvider } from '@automaker/types';
|
import {
|
||||||
|
isCursorModel,
|
||||||
|
isCodexModel,
|
||||||
|
isOpencodeModel,
|
||||||
|
isGeminiModel,
|
||||||
|
type ModelProvider,
|
||||||
|
} from '@automaker/types';
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
|
|
||||||
@@ -16,6 +22,7 @@ const DISCONNECTED_MARKERS: Record<string, string> = {
|
|||||||
codex: '.codex-disconnected',
|
codex: '.codex-disconnected',
|
||||||
cursor: '.cursor-disconnected',
|
cursor: '.cursor-disconnected',
|
||||||
opencode: '.opencode-disconnected',
|
opencode: '.opencode-disconnected',
|
||||||
|
gemini: '.gemini-disconnected',
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -239,8 +246,8 @@ export class ProviderFactory {
|
|||||||
model.modelString === modelId ||
|
model.modelString === modelId ||
|
||||||
model.id.endsWith(`-${modelId}`) ||
|
model.id.endsWith(`-${modelId}`) ||
|
||||||
model.modelString.endsWith(`-${modelId}`) ||
|
model.modelString.endsWith(`-${modelId}`) ||
|
||||||
model.modelString === modelId.replace(/^(claude|cursor|codex)-/, '') ||
|
model.modelString === modelId.replace(/^(claude|cursor|codex|gemini)-/, '') ||
|
||||||
model.modelString === modelId.replace(/-(claude|cursor|codex)$/, '')
|
model.modelString === modelId.replace(/-(claude|cursor|codex|gemini)$/, '')
|
||||||
) {
|
) {
|
||||||
return model.supportsVision ?? true;
|
return model.supportsVision ?? true;
|
||||||
}
|
}
|
||||||
@@ -267,6 +274,7 @@ import { ClaudeProvider } from './claude-provider.js';
|
|||||||
import { CursorProvider } from './cursor-provider.js';
|
import { CursorProvider } from './cursor-provider.js';
|
||||||
import { CodexProvider } from './codex-provider.js';
|
import { CodexProvider } from './codex-provider.js';
|
||||||
import { OpencodeProvider } from './opencode-provider.js';
|
import { OpencodeProvider } from './opencode-provider.js';
|
||||||
|
import { GeminiProvider } from './gemini-provider.js';
|
||||||
|
|
||||||
// Register Claude provider
|
// Register Claude provider
|
||||||
registerProvider('claude', {
|
registerProvider('claude', {
|
||||||
@@ -301,3 +309,11 @@ registerProvider('opencode', {
|
|||||||
canHandleModel: (model: string) => isOpencodeModel(model),
|
canHandleModel: (model: string) => isOpencodeModel(model),
|
||||||
priority: 3, // Between codex (5) and claude (0)
|
priority: 3, // Between codex (5) and claude (0)
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Register Gemini provider
|
||||||
|
registerProvider('gemini', {
|
||||||
|
factory: () => new GeminiProvider(),
|
||||||
|
aliases: ['google'],
|
||||||
|
canHandleModel: (model: string) => isGeminiModel(model),
|
||||||
|
priority: 4, // Between opencode (3) and codex (5)
|
||||||
|
});
|
||||||
|
|||||||
@@ -20,6 +20,9 @@ import type {
|
|||||||
ContentBlock,
|
ContentBlock,
|
||||||
ThinkingLevel,
|
ThinkingLevel,
|
||||||
ReasoningEffort,
|
ReasoningEffort,
|
||||||
|
ClaudeApiProfile,
|
||||||
|
ClaudeCompatibleProvider,
|
||||||
|
Credentials,
|
||||||
} from '@automaker/types';
|
} from '@automaker/types';
|
||||||
import { stripProviderPrefix } from '@automaker/types';
|
import { stripProviderPrefix } from '@automaker/types';
|
||||||
|
|
||||||
@@ -54,6 +57,18 @@ export interface SimpleQueryOptions {
|
|||||||
readOnly?: boolean;
|
readOnly?: boolean;
|
||||||
/** Setting sources for CLAUDE.md loading */
|
/** Setting sources for CLAUDE.md loading */
|
||||||
settingSources?: Array<'user' | 'project' | 'local'>;
|
settingSources?: Array<'user' | 'project' | 'local'>;
|
||||||
|
/**
|
||||||
|
* Active Claude API profile for alternative endpoint configuration
|
||||||
|
* @deprecated Use claudeCompatibleProvider instead
|
||||||
|
*/
|
||||||
|
claudeApiProfile?: ClaudeApiProfile;
|
||||||
|
/**
|
||||||
|
* Claude-compatible provider for alternative endpoint configuration.
|
||||||
|
* Takes precedence over claudeApiProfile if both are set.
|
||||||
|
*/
|
||||||
|
claudeCompatibleProvider?: ClaudeCompatibleProvider;
|
||||||
|
/** Credentials for resolving 'credentials' apiKeySource in Claude API profiles/providers */
|
||||||
|
credentials?: Credentials;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -125,6 +140,9 @@ export async function simpleQuery(options: SimpleQueryOptions): Promise<SimpleQu
|
|||||||
reasoningEffort: options.reasoningEffort,
|
reasoningEffort: options.reasoningEffort,
|
||||||
readOnly: options.readOnly,
|
readOnly: options.readOnly,
|
||||||
settingSources: options.settingSources,
|
settingSources: options.settingSources,
|
||||||
|
claudeApiProfile: options.claudeApiProfile, // Legacy: Pass active Claude API profile for alternative endpoint configuration
|
||||||
|
claudeCompatibleProvider: options.claudeCompatibleProvider, // New: Pass Claude-compatible provider (takes precedence)
|
||||||
|
credentials: options.credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
};
|
};
|
||||||
|
|
||||||
for await (const msg of provider.executeQuery(providerOptions)) {
|
for await (const msg of provider.executeQuery(providerOptions)) {
|
||||||
@@ -207,6 +225,9 @@ export async function streamingQuery(options: StreamingQueryOptions): Promise<Si
|
|||||||
reasoningEffort: options.reasoningEffort,
|
reasoningEffort: options.reasoningEffort,
|
||||||
readOnly: options.readOnly,
|
readOnly: options.readOnly,
|
||||||
settingSources: options.settingSources,
|
settingSources: options.settingSources,
|
||||||
|
claudeApiProfile: options.claudeApiProfile, // Legacy: Pass active Claude API profile for alternative endpoint configuration
|
||||||
|
claudeCompatibleProvider: options.claudeCompatibleProvider, // New: Pass Claude-compatible provider (takes precedence)
|
||||||
|
credentials: options.credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
};
|
};
|
||||||
|
|
||||||
for await (const msg of provider.executeQuery(providerOptions)) {
|
for await (const msg of provider.executeQuery(providerOptions)) {
|
||||||
|
|||||||
@@ -19,4 +19,7 @@ export type {
|
|||||||
InstallationStatus,
|
InstallationStatus,
|
||||||
ValidationResult,
|
ValidationResult,
|
||||||
ModelDefinition,
|
ModelDefinition,
|
||||||
|
AgentDefinition,
|
||||||
|
ReasoningEffort,
|
||||||
|
SystemPromptPreset,
|
||||||
} from '@automaker/types';
|
} from '@automaker/types';
|
||||||
|
|||||||
@@ -14,7 +14,11 @@ import { streamingQuery } from '../../providers/simple-query-service.js';
|
|||||||
import { parseAndCreateFeatures } from './parse-and-create-features.js';
|
import { parseAndCreateFeatures } from './parse-and-create-features.js';
|
||||||
import { getAppSpecPath } from '@automaker/platform';
|
import { getAppSpecPath } from '@automaker/platform';
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
import {
|
||||||
|
getAutoLoadClaudeMdSetting,
|
||||||
|
getPromptCustomization,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
|
} from '../../lib/settings-helpers.js';
|
||||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||||
|
|
||||||
const logger = createLogger('SpecRegeneration');
|
const logger = createLogger('SpecRegeneration');
|
||||||
@@ -115,13 +119,26 @@ Generate ${featureCount} NEW features that build on each other logically. Rememb
|
|||||||
'[FeatureGeneration]'
|
'[FeatureGeneration]'
|
||||||
);
|
);
|
||||||
|
|
||||||
// Get model from phase settings
|
// Get model from phase settings with provider info
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
const {
|
||||||
const phaseModelEntry =
|
phaseModel: phaseModelEntry,
|
||||||
settings?.phaseModels?.featureGenerationModel || DEFAULT_PHASE_MODELS.featureGenerationModel;
|
provider,
|
||||||
|
credentials,
|
||||||
|
} = settingsService
|
||||||
|
? await getPhaseModelWithOverrides(
|
||||||
|
'featureGenerationModel',
|
||||||
|
settingsService,
|
||||||
|
projectPath,
|
||||||
|
'[FeatureGeneration]'
|
||||||
|
)
|
||||||
|
: {
|
||||||
|
phaseModel: DEFAULT_PHASE_MODELS.featureGenerationModel,
|
||||||
|
provider: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
};
|
||||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||||
|
|
||||||
logger.info('Using model:', model);
|
logger.info('Using model:', model, provider ? `via provider: ${provider.name}` : 'direct API');
|
||||||
|
|
||||||
// Use streamingQuery with event callbacks
|
// Use streamingQuery with event callbacks
|
||||||
const result = await streamingQuery({
|
const result = await streamingQuery({
|
||||||
@@ -134,6 +151,8 @@ Generate ${featureCount} NEW features that build on each other logically. Rememb
|
|||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
readOnly: true, // Feature generation only reads code, doesn't write
|
readOnly: true, // Feature generation only reads code, doesn't write
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||||
|
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
onText: (text) => {
|
onText: (text) => {
|
||||||
logger.debug(`Feature text block received (${text.length} chars)`);
|
logger.debug(`Feature text block received (${text.length} chars)`);
|
||||||
events.emit('spec-regeneration:event', {
|
events.emit('spec-regeneration:event', {
|
||||||
|
|||||||
@@ -16,7 +16,11 @@ import { streamingQuery } from '../../providers/simple-query-service.js';
|
|||||||
import { generateFeaturesFromSpec } from './generate-features-from-spec.js';
|
import { generateFeaturesFromSpec } from './generate-features-from-spec.js';
|
||||||
import { ensureAutomakerDir, getAppSpecPath } from '@automaker/platform';
|
import { ensureAutomakerDir, getAppSpecPath } from '@automaker/platform';
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
import {
|
||||||
|
getAutoLoadClaudeMdSetting,
|
||||||
|
getPromptCustomization,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
|
} from '../../lib/settings-helpers.js';
|
||||||
|
|
||||||
const logger = createLogger('SpecRegeneration');
|
const logger = createLogger('SpecRegeneration');
|
||||||
|
|
||||||
@@ -92,13 +96,26 @@ ${prompts.appSpec.structuredSpecInstructions}`;
|
|||||||
'[SpecRegeneration]'
|
'[SpecRegeneration]'
|
||||||
);
|
);
|
||||||
|
|
||||||
// Get model from phase settings
|
// Get model from phase settings with provider info
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
const {
|
||||||
const phaseModelEntry =
|
phaseModel: phaseModelEntry,
|
||||||
settings?.phaseModels?.specGenerationModel || DEFAULT_PHASE_MODELS.specGenerationModel;
|
provider,
|
||||||
|
credentials,
|
||||||
|
} = settingsService
|
||||||
|
? await getPhaseModelWithOverrides(
|
||||||
|
'specGenerationModel',
|
||||||
|
settingsService,
|
||||||
|
projectPath,
|
||||||
|
'[SpecRegeneration]'
|
||||||
|
)
|
||||||
|
: {
|
||||||
|
phaseModel: DEFAULT_PHASE_MODELS.specGenerationModel,
|
||||||
|
provider: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
};
|
||||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||||
|
|
||||||
logger.info('Using model:', model);
|
logger.info('Using model:', model, provider ? `via provider: ${provider.name}` : 'direct API');
|
||||||
|
|
||||||
let responseText = '';
|
let responseText = '';
|
||||||
let structuredOutput: SpecOutput | null = null;
|
let structuredOutput: SpecOutput | null = null;
|
||||||
@@ -132,6 +149,8 @@ Your entire response should be valid JSON starting with { and ending with }. No
|
|||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
readOnly: true, // Spec generation only reads code, we write the spec ourselves
|
readOnly: true, // Spec generation only reads code, we write the spec ourselves
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||||
|
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
outputFormat: useStructuredOutput
|
outputFormat: useStructuredOutput
|
||||||
? {
|
? {
|
||||||
type: 'json_schema',
|
type: 'json_schema',
|
||||||
|
|||||||
@@ -15,7 +15,10 @@ import { resolvePhaseModel } from '@automaker/model-resolver';
|
|||||||
import { streamingQuery } from '../../providers/simple-query-service.js';
|
import { streamingQuery } from '../../providers/simple-query-service.js';
|
||||||
import { getAppSpecPath } from '@automaker/platform';
|
import { getAppSpecPath } from '@automaker/platform';
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
import { getAutoLoadClaudeMdSetting } from '../../lib/settings-helpers.js';
|
import {
|
||||||
|
getAutoLoadClaudeMdSetting,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
|
} from '../../lib/settings-helpers.js';
|
||||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||||
import {
|
import {
|
||||||
extractImplementedFeatures,
|
extractImplementedFeatures,
|
||||||
@@ -152,11 +155,27 @@ export async function syncSpec(
|
|||||||
'[SpecSync]'
|
'[SpecSync]'
|
||||||
);
|
);
|
||||||
|
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
// Get model from phase settings with provider info
|
||||||
const phaseModelEntry =
|
const {
|
||||||
settings?.phaseModels?.specGenerationModel || DEFAULT_PHASE_MODELS.specGenerationModel;
|
phaseModel: phaseModelEntry,
|
||||||
|
provider,
|
||||||
|
credentials,
|
||||||
|
} = settingsService
|
||||||
|
? await getPhaseModelWithOverrides(
|
||||||
|
'specGenerationModel',
|
||||||
|
settingsService,
|
||||||
|
projectPath,
|
||||||
|
'[SpecSync]'
|
||||||
|
)
|
||||||
|
: {
|
||||||
|
phaseModel: DEFAULT_PHASE_MODELS.specGenerationModel,
|
||||||
|
provider: undefined,
|
||||||
|
credentials: undefined,
|
||||||
|
};
|
||||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||||
|
|
||||||
|
logger.info('Using model:', model, provider ? `via provider: ${provider.name}` : 'direct API');
|
||||||
|
|
||||||
// Use AI to analyze tech stack
|
// Use AI to analyze tech stack
|
||||||
const techAnalysisPrompt = `Analyze this project and return ONLY a JSON object with the current technology stack.
|
const techAnalysisPrompt = `Analyze this project and return ONLY a JSON object with the current technology stack.
|
||||||
|
|
||||||
@@ -185,6 +204,8 @@ Return ONLY this JSON format, no other text:
|
|||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
readOnly: true,
|
readOnly: true,
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||||
|
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
onText: (text) => {
|
onText: (text) => {
|
||||||
logger.debug(`Tech analysis text: ${text.substring(0, 100)}`);
|
logger.debug(`Tech analysis text: ${text.substring(0, 100)}`);
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -117,9 +117,27 @@ export function createAuthRoutes(): Router {
|
|||||||
*
|
*
|
||||||
* Returns whether the current request is authenticated.
|
* Returns whether the current request is authenticated.
|
||||||
* Used by the UI to determine if login is needed.
|
* Used by the UI to determine if login is needed.
|
||||||
|
*
|
||||||
|
* If AUTOMAKER_AUTO_LOGIN=true is set, automatically creates a session
|
||||||
|
* for unauthenticated requests (useful for development).
|
||||||
*/
|
*/
|
||||||
router.get('/status', (req, res) => {
|
router.get('/status', async (req, res) => {
|
||||||
const authenticated = isRequestAuthenticated(req);
|
let authenticated = isRequestAuthenticated(req);
|
||||||
|
|
||||||
|
// Auto-login for development: create session automatically if enabled
|
||||||
|
// Only works in non-production environments as a safeguard
|
||||||
|
if (
|
||||||
|
!authenticated &&
|
||||||
|
process.env.AUTOMAKER_AUTO_LOGIN === 'true' &&
|
||||||
|
process.env.NODE_ENV !== 'production'
|
||||||
|
) {
|
||||||
|
const sessionToken = await createSession();
|
||||||
|
const cookieOptions = getSessionCookieOptions();
|
||||||
|
const cookieName = getSessionCookieName();
|
||||||
|
res.cookie(cookieName, sessionToken, cookieOptions);
|
||||||
|
authenticated = true;
|
||||||
|
}
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
authenticated,
|
authenticated,
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ import { validatePathParams } from '../../middleware/validate-paths.js';
|
|||||||
import { createStopFeatureHandler } from './routes/stop-feature.js';
|
import { createStopFeatureHandler } from './routes/stop-feature.js';
|
||||||
import { createStatusHandler } from './routes/status.js';
|
import { createStatusHandler } from './routes/status.js';
|
||||||
import { createRunFeatureHandler } from './routes/run-feature.js';
|
import { createRunFeatureHandler } from './routes/run-feature.js';
|
||||||
|
import { createStartHandler } from './routes/start.js';
|
||||||
|
import { createStopHandler } from './routes/stop.js';
|
||||||
import { createVerifyFeatureHandler } from './routes/verify-feature.js';
|
import { createVerifyFeatureHandler } from './routes/verify-feature.js';
|
||||||
import { createResumeFeatureHandler } from './routes/resume-feature.js';
|
import { createResumeFeatureHandler } from './routes/resume-feature.js';
|
||||||
import { createContextExistsHandler } from './routes/context-exists.js';
|
import { createContextExistsHandler } from './routes/context-exists.js';
|
||||||
@@ -22,6 +24,10 @@ import { createResumeInterruptedHandler } from './routes/resume-interrupted.js';
|
|||||||
export function createAutoModeRoutes(autoModeService: AutoModeService): Router {
|
export function createAutoModeRoutes(autoModeService: AutoModeService): Router {
|
||||||
const router = Router();
|
const router = Router();
|
||||||
|
|
||||||
|
// Auto loop control routes
|
||||||
|
router.post('/start', validatePathParams('projectPath'), createStartHandler(autoModeService));
|
||||||
|
router.post('/stop', validatePathParams('projectPath'), createStopHandler(autoModeService));
|
||||||
|
|
||||||
router.post('/stop-feature', createStopFeatureHandler(autoModeService));
|
router.post('/stop-feature', createStopFeatureHandler(autoModeService));
|
||||||
router.post('/status', validatePathParams('projectPath?'), createStatusHandler(autoModeService));
|
router.post('/status', validatePathParams('projectPath?'), createStatusHandler(autoModeService));
|
||||||
router.post(
|
router.post(
|
||||||
|
|||||||
@@ -26,6 +26,24 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check per-worktree capacity before starting
|
||||||
|
const capacity = await autoModeService.checkWorktreeCapacity(projectPath, featureId);
|
||||||
|
if (!capacity.hasCapacity) {
|
||||||
|
const worktreeDesc = capacity.branchName
|
||||||
|
? `worktree "${capacity.branchName}"`
|
||||||
|
: 'main worktree';
|
||||||
|
res.status(429).json({
|
||||||
|
success: false,
|
||||||
|
error: `Agent limit reached for ${worktreeDesc} (${capacity.currentAgents}/${capacity.maxAgents}). Wait for running tasks to complete or increase the limit.`,
|
||||||
|
details: {
|
||||||
|
currentAgents: capacity.currentAgents,
|
||||||
|
maxAgents: capacity.maxAgents,
|
||||||
|
branchName: capacity.branchName,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Start execution in background
|
// Start execution in background
|
||||||
// executeFeature derives workDir from feature.branchName
|
// executeFeature derives workDir from feature.branchName
|
||||||
autoModeService
|
autoModeService
|
||||||
|
|||||||
67
apps/server/src/routes/auto-mode/routes/start.ts
Normal file
67
apps/server/src/routes/auto-mode/routes/start.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
/**
|
||||||
|
* POST /start endpoint - Start auto mode loop for a project
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
const logger = createLogger('AutoMode');
|
||||||
|
|
||||||
|
export function createStartHandler(autoModeService: AutoModeService) {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { projectPath, branchName, maxConcurrency } = req.body as {
|
||||||
|
projectPath: string;
|
||||||
|
branchName?: string | null;
|
||||||
|
maxConcurrency?: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!projectPath) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'projectPath is required',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize branchName: undefined becomes null
|
||||||
|
const normalizedBranchName = branchName ?? null;
|
||||||
|
const worktreeDesc = normalizedBranchName
|
||||||
|
? `worktree ${normalizedBranchName}`
|
||||||
|
: 'main worktree';
|
||||||
|
|
||||||
|
// Check if already running
|
||||||
|
if (autoModeService.isAutoLoopRunningForProject(projectPath, normalizedBranchName)) {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: `Auto mode is already running for ${worktreeDesc}`,
|
||||||
|
alreadyRunning: true,
|
||||||
|
branchName: normalizedBranchName,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the auto loop for this project/worktree
|
||||||
|
const resolvedMaxConcurrency = await autoModeService.startAutoLoopForProject(
|
||||||
|
projectPath,
|
||||||
|
normalizedBranchName,
|
||||||
|
maxConcurrency
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`Started auto loop for ${worktreeDesc} in project: ${projectPath} with maxConcurrency: ${resolvedMaxConcurrency}`
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: `Auto mode started with max ${resolvedMaxConcurrency} concurrent features`,
|
||||||
|
branchName: normalizedBranchName,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Start auto mode failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,5 +1,8 @@
|
|||||||
/**
|
/**
|
||||||
* POST /status endpoint - Get auto mode status
|
* POST /status endpoint - Get auto mode status
|
||||||
|
*
|
||||||
|
* If projectPath is provided, returns per-project status including autoloop state.
|
||||||
|
* If no projectPath, returns global status for backward compatibility.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
@@ -9,10 +12,41 @@ import { getErrorMessage, logError } from '../common.js';
|
|||||||
export function createStatusHandler(autoModeService: AutoModeService) {
|
export function createStatusHandler(autoModeService: AutoModeService) {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
|
const { projectPath, branchName } = req.body as {
|
||||||
|
projectPath?: string;
|
||||||
|
branchName?: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
// If projectPath is provided, return per-project/worktree status
|
||||||
|
if (projectPath) {
|
||||||
|
// Normalize branchName: undefined becomes null
|
||||||
|
const normalizedBranchName = branchName ?? null;
|
||||||
|
const projectStatus = autoModeService.getStatusForProject(
|
||||||
|
projectPath,
|
||||||
|
normalizedBranchName
|
||||||
|
);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
isRunning: projectStatus.runningCount > 0,
|
||||||
|
isAutoLoopRunning: projectStatus.isAutoLoopRunning,
|
||||||
|
runningFeatures: projectStatus.runningFeatures,
|
||||||
|
runningCount: projectStatus.runningCount,
|
||||||
|
maxConcurrency: projectStatus.maxConcurrency,
|
||||||
|
projectPath,
|
||||||
|
branchName: normalizedBranchName,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to global status for backward compatibility
|
||||||
const status = autoModeService.getStatus();
|
const status = autoModeService.getStatus();
|
||||||
|
const activeProjects = autoModeService.getActiveAutoLoopProjects();
|
||||||
|
const activeWorktrees = autoModeService.getActiveAutoLoopWorktrees();
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
...status,
|
...status,
|
||||||
|
activeAutoLoopProjects: activeProjects,
|
||||||
|
activeAutoLoopWorktrees: activeWorktrees,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logError(error, 'Get status failed');
|
logError(error, 'Get status failed');
|
||||||
|
|||||||
66
apps/server/src/routes/auto-mode/routes/stop.ts
Normal file
66
apps/server/src/routes/auto-mode/routes/stop.ts
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
/**
|
||||||
|
* POST /stop endpoint - Stop auto mode loop for a project
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
const logger = createLogger('AutoMode');
|
||||||
|
|
||||||
|
export function createStopHandler(autoModeService: AutoModeService) {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { projectPath, branchName } = req.body as {
|
||||||
|
projectPath: string;
|
||||||
|
branchName?: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!projectPath) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'projectPath is required',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize branchName: undefined becomes null
|
||||||
|
const normalizedBranchName = branchName ?? null;
|
||||||
|
const worktreeDesc = normalizedBranchName
|
||||||
|
? `worktree ${normalizedBranchName}`
|
||||||
|
: 'main worktree';
|
||||||
|
|
||||||
|
// Check if running
|
||||||
|
if (!autoModeService.isAutoLoopRunningForProject(projectPath, normalizedBranchName)) {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: `Auto mode is not running for ${worktreeDesc}`,
|
||||||
|
wasRunning: false,
|
||||||
|
branchName: normalizedBranchName,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop the auto loop for this project/worktree
|
||||||
|
const runningCount = await autoModeService.stopAutoLoopForProject(
|
||||||
|
projectPath,
|
||||||
|
normalizedBranchName
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`Stopped auto loop for ${worktreeDesc} in project: ${projectPath}, ${runningCount} features still running`
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: 'Auto mode stopped',
|
||||||
|
runningFeaturesCount: runningCount,
|
||||||
|
branchName: normalizedBranchName,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Stop auto mode failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -100,11 +100,60 @@ export function getAbortController(): AbortController | null {
|
|||||||
return currentAbortController;
|
return currentAbortController;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getErrorMessage(error: unknown): string {
|
/**
|
||||||
if (error instanceof Error) {
|
* Map SDK/CLI errors to user-friendly messages
|
||||||
return error.message;
|
*/
|
||||||
|
export function mapBacklogPlanError(rawMessage: string): string {
|
||||||
|
// Claude Code spawn failures
|
||||||
|
if (
|
||||||
|
rawMessage.includes('Failed to spawn Claude Code process') ||
|
||||||
|
rawMessage.includes('spawn node ENOENT') ||
|
||||||
|
rawMessage.includes('Claude Code executable not found') ||
|
||||||
|
rawMessage.includes('Claude Code native binary not found')
|
||||||
|
) {
|
||||||
|
return 'Claude CLI could not be launched. Make sure the Claude CLI is installed and available in PATH, or check that Node.js is correctly installed. Try running "which claude" or "claude --version" in your terminal to verify.';
|
||||||
}
|
}
|
||||||
return String(error);
|
|
||||||
|
// Claude Code process crash
|
||||||
|
if (rawMessage.includes('Claude Code process exited')) {
|
||||||
|
return 'Claude exited unexpectedly. Try again. If it keeps happening, re-run `claude login` or update your API key in Setup.';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rate limiting
|
||||||
|
if (rawMessage.toLowerCase().includes('rate limit') || rawMessage.includes('429')) {
|
||||||
|
return 'Rate limited. Please wait a moment and try again.';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Network errors
|
||||||
|
if (
|
||||||
|
rawMessage.toLowerCase().includes('network') ||
|
||||||
|
rawMessage.toLowerCase().includes('econnrefused') ||
|
||||||
|
rawMessage.toLowerCase().includes('timeout')
|
||||||
|
) {
|
||||||
|
return 'Network error. Check your internet connection and try again.';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication errors
|
||||||
|
if (
|
||||||
|
rawMessage.toLowerCase().includes('not authenticated') ||
|
||||||
|
rawMessage.toLowerCase().includes('unauthorized') ||
|
||||||
|
rawMessage.includes('401')
|
||||||
|
) {
|
||||||
|
return 'Authentication failed. Please check your API key or run `claude login` to authenticate.';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return original message for unknown errors
|
||||||
|
return rawMessage;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getErrorMessage(error: unknown): string {
|
||||||
|
let rawMessage: string;
|
||||||
|
if (error instanceof Error) {
|
||||||
|
rawMessage = error.message;
|
||||||
|
} else {
|
||||||
|
rawMessage = String(error);
|
||||||
|
}
|
||||||
|
return mapBacklogPlanError(rawMessage);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function logError(error: unknown, context: string): void {
|
export function logError(error: unknown, context: string): void {
|
||||||
|
|||||||
@@ -25,7 +25,11 @@ import {
|
|||||||
saveBacklogPlan,
|
saveBacklogPlan,
|
||||||
} from './common.js';
|
} from './common.js';
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
import {
|
||||||
|
getAutoLoadClaudeMdSetting,
|
||||||
|
getPromptCustomization,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
|
} from '../../lib/settings-helpers.js';
|
||||||
|
|
||||||
const featureLoader = new FeatureLoader();
|
const featureLoader = new FeatureLoader();
|
||||||
|
|
||||||
@@ -117,18 +121,42 @@ export async function generateBacklogPlan(
|
|||||||
content: 'Generating plan with AI...',
|
content: 'Generating plan with AI...',
|
||||||
});
|
});
|
||||||
|
|
||||||
// Get the model to use from settings or provided override
|
// Get the model to use from settings or provided override with provider info
|
||||||
let effectiveModel = model;
|
let effectiveModel = model;
|
||||||
let thinkingLevel: ThinkingLevel | undefined;
|
let thinkingLevel: ThinkingLevel | undefined;
|
||||||
if (!effectiveModel) {
|
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
let credentials: import('@automaker/types').Credentials | undefined;
|
||||||
const phaseModelEntry =
|
|
||||||
settings?.phaseModels?.backlogPlanningModel || DEFAULT_PHASE_MODELS.backlogPlanningModel;
|
if (effectiveModel) {
|
||||||
const resolved = resolvePhaseModel(phaseModelEntry);
|
// Use explicit override - resolve model alias and get credentials
|
||||||
|
const resolved = resolvePhaseModel({ model: effectiveModel });
|
||||||
|
effectiveModel = resolved.model;
|
||||||
|
thinkingLevel = resolved.thinkingLevel;
|
||||||
|
credentials = await settingsService?.getCredentials();
|
||||||
|
} else if (settingsService) {
|
||||||
|
// Use settings-based model with provider info
|
||||||
|
const phaseResult = await getPhaseModelWithOverrides(
|
||||||
|
'backlogPlanningModel',
|
||||||
|
settingsService,
|
||||||
|
projectPath,
|
||||||
|
'[BacklogPlan]'
|
||||||
|
);
|
||||||
|
const resolved = resolvePhaseModel(phaseResult.phaseModel);
|
||||||
|
effectiveModel = resolved.model;
|
||||||
|
thinkingLevel = resolved.thinkingLevel;
|
||||||
|
claudeCompatibleProvider = phaseResult.provider;
|
||||||
|
credentials = phaseResult.credentials;
|
||||||
|
} else {
|
||||||
|
// Fallback to defaults
|
||||||
|
const resolved = resolvePhaseModel(DEFAULT_PHASE_MODELS.backlogPlanningModel);
|
||||||
effectiveModel = resolved.model;
|
effectiveModel = resolved.model;
|
||||||
thinkingLevel = resolved.thinkingLevel;
|
thinkingLevel = resolved.thinkingLevel;
|
||||||
}
|
}
|
||||||
logger.info('[BacklogPlan] Using model:', effectiveModel);
|
logger.info(
|
||||||
|
'[BacklogPlan] Using model:',
|
||||||
|
effectiveModel,
|
||||||
|
claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
|
||||||
|
);
|
||||||
|
|
||||||
const provider = ProviderFactory.getProviderForModel(effectiveModel);
|
const provider = ProviderFactory.getProviderForModel(effectiveModel);
|
||||||
// Strip provider prefix - providers expect bare model IDs
|
// Strip provider prefix - providers expect bare model IDs
|
||||||
@@ -173,6 +201,8 @@ ${userPrompt}`;
|
|||||||
settingSources: autoLoadClaudeMd ? ['user', 'project'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project'] : undefined,
|
||||||
readOnly: true, // Plan generation only generates text, doesn't write files
|
readOnly: true, // Plan generation only generates text, doesn't write files
|
||||||
thinkingLevel, // Pass thinking level for extended thinking
|
thinkingLevel, // Pass thinking level for extended thinking
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
});
|
});
|
||||||
|
|
||||||
let responseText = '';
|
let responseText = '';
|
||||||
|
|||||||
@@ -85,8 +85,9 @@ export function createApplyHandler() {
|
|||||||
if (!change.feature) continue;
|
if (!change.feature) continue;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Create the new feature
|
// Create the new feature - use the AI-generated ID if provided
|
||||||
const newFeature = await featureLoader.create(projectPath, {
|
const newFeature = await featureLoader.create(projectPath, {
|
||||||
|
id: change.feature.id, // Use descriptive ID from AI if provided
|
||||||
title: change.feature.title,
|
title: change.feature.title,
|
||||||
description: change.feature.description || '',
|
description: change.feature.description || '',
|
||||||
category: change.feature.category || 'Uncategorized',
|
category: change.feature.category || 'Uncategorized',
|
||||||
|
|||||||
@@ -53,13 +53,12 @@ export function createGenerateHandler(events: EventEmitter, settingsService?: Se
|
|||||||
setRunningState(true, abortController);
|
setRunningState(true, abortController);
|
||||||
|
|
||||||
// Start generation in background
|
// Start generation in background
|
||||||
|
// Note: generateBacklogPlan handles its own error event emission,
|
||||||
|
// so we only log here to avoid duplicate error toasts
|
||||||
generateBacklogPlan(projectPath, prompt, events, abortController, settingsService, model)
|
generateBacklogPlan(projectPath, prompt, events, abortController, settingsService, model)
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
|
// Just log - error event already emitted by generateBacklogPlan
|
||||||
logError(error, 'Generate backlog plan failed (background)');
|
logError(error, 'Generate backlog plan failed (background)');
|
||||||
events.emit('backlog-plan:event', {
|
|
||||||
type: 'backlog_plan_error',
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
})
|
})
|
||||||
.finally(() => {
|
.finally(() => {
|
||||||
setRunningState(false, null);
|
setRunningState(false, null);
|
||||||
|
|||||||
@@ -1,78 +0,0 @@
|
|||||||
/**
|
|
||||||
* Common utilities for code-review routes
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
|
||||||
|
|
||||||
const logger = createLogger('CodeReview');
|
|
||||||
|
|
||||||
// Re-export shared utilities
|
|
||||||
export { getErrorMessageShared as getErrorMessage };
|
|
||||||
export const logError = createLogError(logger);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Review state interface
|
|
||||||
*/
|
|
||||||
interface ReviewState {
|
|
||||||
isRunning: boolean;
|
|
||||||
abortController: AbortController | null;
|
|
||||||
projectPath: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Shared state for code review operations
|
|
||||||
* Using an object to avoid mutable `let` exports which can cause issues in ES modules
|
|
||||||
*/
|
|
||||||
const reviewState: ReviewState = {
|
|
||||||
isRunning: false,
|
|
||||||
abortController: null,
|
|
||||||
projectPath: null,
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if a review is currently running
|
|
||||||
*/
|
|
||||||
export function isRunning(): boolean {
|
|
||||||
return reviewState.isRunning;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current abort controller (for stopping reviews)
|
|
||||||
*/
|
|
||||||
export function getAbortController(): AbortController | null {
|
|
||||||
return reviewState.abortController;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current project path being reviewed
|
|
||||||
*/
|
|
||||||
export function getCurrentProjectPath(): string | null {
|
|
||||||
return reviewState.projectPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set the running state for code review operations
|
|
||||||
*/
|
|
||||||
export function setRunningState(
|
|
||||||
running: boolean,
|
|
||||||
controller: AbortController | null = null,
|
|
||||||
projectPath: string | null = null
|
|
||||||
): void {
|
|
||||||
reviewState.isRunning = running;
|
|
||||||
reviewState.abortController = controller;
|
|
||||||
reviewState.projectPath = projectPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current review status
|
|
||||||
*/
|
|
||||||
export function getReviewStatus(): {
|
|
||||||
isRunning: boolean;
|
|
||||||
projectPath: string | null;
|
|
||||||
} {
|
|
||||||
return {
|
|
||||||
isRunning: reviewState.isRunning,
|
|
||||||
projectPath: reviewState.projectPath,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
/**
|
|
||||||
* Code Review routes - HTTP API for triggering and managing code reviews
|
|
||||||
*
|
|
||||||
* Provides endpoints for:
|
|
||||||
* - Triggering code reviews on projects
|
|
||||||
* - Checking review status
|
|
||||||
* - Stopping in-progress reviews
|
|
||||||
*
|
|
||||||
* Uses the CodeReviewService for actual review execution with AI providers.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { Router } from 'express';
|
|
||||||
import type { CodeReviewService } from '../../services/code-review-service.js';
|
|
||||||
import { validatePathParams } from '../../middleware/validate-paths.js';
|
|
||||||
import { createTriggerHandler } from './routes/trigger.js';
|
|
||||||
import { createStatusHandler } from './routes/status.js';
|
|
||||||
import { createStopHandler } from './routes/stop.js';
|
|
||||||
import { createProvidersHandler } from './routes/providers.js';
|
|
||||||
|
|
||||||
export function createCodeReviewRoutes(codeReviewService: CodeReviewService): Router {
|
|
||||||
const router = Router();
|
|
||||||
|
|
||||||
// POST /trigger - Start a new code review
|
|
||||||
router.post(
|
|
||||||
'/trigger',
|
|
||||||
validatePathParams('projectPath'),
|
|
||||||
createTriggerHandler(codeReviewService)
|
|
||||||
);
|
|
||||||
|
|
||||||
// GET /status - Get current review status
|
|
||||||
router.get('/status', createStatusHandler());
|
|
||||||
|
|
||||||
// POST /stop - Stop current review
|
|
||||||
router.post('/stop', createStopHandler());
|
|
||||||
|
|
||||||
// GET /providers - Get available providers and their status
|
|
||||||
router.get('/providers', createProvidersHandler(codeReviewService));
|
|
||||||
|
|
||||||
return router;
|
|
||||||
}
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /providers endpoint - Get available code review providers
|
|
||||||
*
|
|
||||||
* Returns the status of all available AI providers that can be used for code reviews.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import type { CodeReviewService } from '../../../services/code-review-service.js';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
|
||||||
|
|
||||||
const logger = createLogger('CodeReview');
|
|
||||||
|
|
||||||
export function createProvidersHandler(codeReviewService: CodeReviewService) {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
logger.debug('========== /providers endpoint called ==========');
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Check if refresh is requested
|
|
||||||
const forceRefresh = req.query.refresh === 'true';
|
|
||||||
|
|
||||||
const providers = await codeReviewService.getProviderStatus(forceRefresh);
|
|
||||||
const bestProvider = await codeReviewService.getBestProvider();
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
providers,
|
|
||||||
recommended: bestProvider,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Providers handler exception');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /status endpoint - Get current code review status
|
|
||||||
*
|
|
||||||
* Returns whether a code review is currently running and which project.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { getReviewStatus, getErrorMessage, logError } from '../common.js';
|
|
||||||
|
|
||||||
const logger = createLogger('CodeReview');
|
|
||||||
|
|
||||||
export function createStatusHandler() {
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
logger.debug('========== /status endpoint called ==========');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const status = getReviewStatus();
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
...status,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Status handler exception');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /stop endpoint - Stop the current code review
|
|
||||||
*
|
|
||||||
* Aborts any running code review operation.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import {
|
|
||||||
isRunning,
|
|
||||||
getAbortController,
|
|
||||||
setRunningState,
|
|
||||||
getErrorMessage,
|
|
||||||
logError,
|
|
||||||
} from '../common.js';
|
|
||||||
|
|
||||||
const logger = createLogger('CodeReview');
|
|
||||||
|
|
||||||
export function createStopHandler() {
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
logger.info('========== /stop endpoint called ==========');
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (!isRunning()) {
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
message: 'No code review is currently running',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Abort the current operation
|
|
||||||
const abortController = getAbortController();
|
|
||||||
if (abortController) {
|
|
||||||
abortController.abort();
|
|
||||||
logger.info('Code review aborted');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reset state
|
|
||||||
setRunningState(false, null, null);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
message: 'Code review stopped',
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Stop handler exception');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,188 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /trigger endpoint - Trigger a code review
|
|
||||||
*
|
|
||||||
* Starts an asynchronous code review on the specified project.
|
|
||||||
* Progress updates are streamed via WebSocket events.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import type { CodeReviewService } from '../../../services/code-review-service.js';
|
|
||||||
import type { CodeReviewCategory, ThinkingLevel, ModelId } from '@automaker/types';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { isRunning, setRunningState, getErrorMessage, logError } from '../common.js';
|
|
||||||
|
|
||||||
const logger = createLogger('CodeReview');
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Maximum number of files allowed per review request
|
|
||||||
*/
|
|
||||||
const MAX_FILES_PER_REQUEST = 100;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Maximum length for baseRef parameter
|
|
||||||
*/
|
|
||||||
const MAX_BASE_REF_LENGTH = 256;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Valid categories for code review
|
|
||||||
*/
|
|
||||||
const VALID_CATEGORIES: CodeReviewCategory[] = [
|
|
||||||
'tech_stack',
|
|
||||||
'security',
|
|
||||||
'code_quality',
|
|
||||||
'implementation',
|
|
||||||
'architecture',
|
|
||||||
'performance',
|
|
||||||
'testing',
|
|
||||||
'documentation',
|
|
||||||
];
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Valid thinking levels
|
|
||||||
*/
|
|
||||||
const VALID_THINKING_LEVELS: ThinkingLevel[] = ['low', 'medium', 'high'];
|
|
||||||
|
|
||||||
interface TriggerRequestBody {
|
|
||||||
projectPath: string;
|
|
||||||
files?: string[];
|
|
||||||
baseRef?: string;
|
|
||||||
categories?: CodeReviewCategory[];
|
|
||||||
autoFix?: boolean;
|
|
||||||
model?: ModelId;
|
|
||||||
thinkingLevel?: ThinkingLevel;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate and sanitize the request body
|
|
||||||
*/
|
|
||||||
function validateRequestBody(body: TriggerRequestBody): { valid: boolean; error?: string } {
|
|
||||||
const { files, baseRef, categories, autoFix, thinkingLevel } = body;
|
|
||||||
|
|
||||||
// Validate files array
|
|
||||||
if (files !== undefined) {
|
|
||||||
if (!Array.isArray(files)) {
|
|
||||||
return { valid: false, error: 'files must be an array' };
|
|
||||||
}
|
|
||||||
if (files.length > MAX_FILES_PER_REQUEST) {
|
|
||||||
return { valid: false, error: `Maximum ${MAX_FILES_PER_REQUEST} files allowed per request` };
|
|
||||||
}
|
|
||||||
for (const file of files) {
|
|
||||||
if (typeof file !== 'string') {
|
|
||||||
return { valid: false, error: 'Each file must be a string' };
|
|
||||||
}
|
|
||||||
if (file.length > 500) {
|
|
||||||
return { valid: false, error: 'File path too long' };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate baseRef
|
|
||||||
if (baseRef !== undefined) {
|
|
||||||
if (typeof baseRef !== 'string') {
|
|
||||||
return { valid: false, error: 'baseRef must be a string' };
|
|
||||||
}
|
|
||||||
if (baseRef.length > MAX_BASE_REF_LENGTH) {
|
|
||||||
return { valid: false, error: 'baseRef is too long' };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate categories
|
|
||||||
if (categories !== undefined) {
|
|
||||||
if (!Array.isArray(categories)) {
|
|
||||||
return { valid: false, error: 'categories must be an array' };
|
|
||||||
}
|
|
||||||
for (const category of categories) {
|
|
||||||
if (!VALID_CATEGORIES.includes(category)) {
|
|
||||||
return { valid: false, error: `Invalid category: ${category}` };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate autoFix
|
|
||||||
if (autoFix !== undefined && typeof autoFix !== 'boolean') {
|
|
||||||
return { valid: false, error: 'autoFix must be a boolean' };
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate thinkingLevel
|
|
||||||
if (thinkingLevel !== undefined) {
|
|
||||||
if (!VALID_THINKING_LEVELS.includes(thinkingLevel)) {
|
|
||||||
return { valid: false, error: `Invalid thinkingLevel: ${thinkingLevel}` };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { valid: true };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createTriggerHandler(codeReviewService: CodeReviewService) {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
logger.info('========== /trigger endpoint called ==========');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = req.body as TriggerRequestBody;
|
|
||||||
const { projectPath, files, baseRef, categories, autoFix, model, thinkingLevel } = body;
|
|
||||||
|
|
||||||
// Validate required parameters
|
|
||||||
if (!projectPath) {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: 'projectPath is required',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// SECURITY: Validate all input parameters
|
|
||||||
const validation = validateRequestBody(body);
|
|
||||||
if (!validation.valid) {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: validation.error,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if a review is already running
|
|
||||||
if (isRunning()) {
|
|
||||||
res.status(409).json({
|
|
||||||
success: false,
|
|
||||||
error: 'A code review is already in progress',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set up abort controller for cancellation
|
|
||||||
const abortController = new AbortController();
|
|
||||||
setRunningState(true, abortController, projectPath);
|
|
||||||
|
|
||||||
// Start the review in the background
|
|
||||||
codeReviewService
|
|
||||||
.executeReview({
|
|
||||||
projectPath,
|
|
||||||
files,
|
|
||||||
baseRef,
|
|
||||||
categories,
|
|
||||||
autoFix,
|
|
||||||
model,
|
|
||||||
thinkingLevel,
|
|
||||||
abortController,
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
logError(error, 'Code review failed');
|
|
||||||
})
|
|
||||||
.finally(() => {
|
|
||||||
setRunningState(false, null, null);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Return immediate response
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
message: 'Code review started',
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Trigger handler exception');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -12,7 +12,6 @@
|
|||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { createLogger } from '@automaker/utils';
|
import { createLogger } from '@automaker/utils';
|
||||||
import { DEFAULT_PHASE_MODELS } from '@automaker/types';
|
|
||||||
import { PathNotAllowedError } from '@automaker/platform';
|
import { PathNotAllowedError } from '@automaker/platform';
|
||||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||||
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||||
@@ -22,6 +21,7 @@ import type { SettingsService } from '../../../services/settings-service.js';
|
|||||||
import {
|
import {
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
} from '../../../lib/settings-helpers.js';
|
} from '../../../lib/settings-helpers.js';
|
||||||
|
|
||||||
const logger = createLogger('DescribeFile');
|
const logger = createLogger('DescribeFile');
|
||||||
@@ -155,15 +155,23 @@ ${contentToAnalyze}`;
|
|||||||
'[DescribeFile]'
|
'[DescribeFile]'
|
||||||
);
|
);
|
||||||
|
|
||||||
// Get model from phase settings
|
// Get model from phase settings with provider info
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
const {
|
||||||
logger.info(`Raw phaseModels from settings:`, JSON.stringify(settings?.phaseModels, null, 2));
|
phaseModel: phaseModelEntry,
|
||||||
const phaseModelEntry =
|
provider,
|
||||||
settings?.phaseModels?.fileDescriptionModel || DEFAULT_PHASE_MODELS.fileDescriptionModel;
|
credentials,
|
||||||
logger.info(`fileDescriptionModel entry:`, JSON.stringify(phaseModelEntry));
|
} = await getPhaseModelWithOverrides(
|
||||||
|
'fileDescriptionModel',
|
||||||
|
settingsService,
|
||||||
|
cwd,
|
||||||
|
'[DescribeFile]'
|
||||||
|
);
|
||||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||||
|
|
||||||
logger.info(`Resolved model: ${model}, thinkingLevel: ${thinkingLevel}`);
|
logger.info(
|
||||||
|
`Resolved model: ${model}, thinkingLevel: ${thinkingLevel}`,
|
||||||
|
provider ? `via provider: ${provider.name}` : 'direct API'
|
||||||
|
);
|
||||||
|
|
||||||
// Use simpleQuery - provider abstraction handles routing to correct provider
|
// Use simpleQuery - provider abstraction handles routing to correct provider
|
||||||
const result = await simpleQuery({
|
const result = await simpleQuery({
|
||||||
@@ -175,6 +183,8 @@ ${contentToAnalyze}`;
|
|||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
readOnly: true, // File description only reads, doesn't write
|
readOnly: true, // File description only reads, doesn't write
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||||
|
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
});
|
});
|
||||||
|
|
||||||
const description = result.text;
|
const description = result.text;
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { createLogger, readImageAsBase64 } from '@automaker/utils';
|
import { createLogger, readImageAsBase64 } from '@automaker/utils';
|
||||||
import { DEFAULT_PHASE_MODELS, isCursorModel } from '@automaker/types';
|
import { isCursorModel } from '@automaker/types';
|
||||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||||
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||||
import * as secureFs from '../../../lib/secure-fs.js';
|
import * as secureFs from '../../../lib/secure-fs.js';
|
||||||
@@ -22,6 +22,7 @@ import type { SettingsService } from '../../../services/settings-service.js';
|
|||||||
import {
|
import {
|
||||||
getAutoLoadClaudeMdSetting,
|
getAutoLoadClaudeMdSetting,
|
||||||
getPromptCustomization,
|
getPromptCustomization,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
} from '../../../lib/settings-helpers.js';
|
} from '../../../lib/settings-helpers.js';
|
||||||
|
|
||||||
const logger = createLogger('DescribeImage');
|
const logger = createLogger('DescribeImage');
|
||||||
@@ -273,13 +274,23 @@ export function createDescribeImageHandler(
|
|||||||
'[DescribeImage]'
|
'[DescribeImage]'
|
||||||
);
|
);
|
||||||
|
|
||||||
// Get model from phase settings
|
// Get model from phase settings with provider info
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
const {
|
||||||
const phaseModelEntry =
|
phaseModel: phaseModelEntry,
|
||||||
settings?.phaseModels?.imageDescriptionModel || DEFAULT_PHASE_MODELS.imageDescriptionModel;
|
provider,
|
||||||
|
credentials,
|
||||||
|
} = await getPhaseModelWithOverrides(
|
||||||
|
'imageDescriptionModel',
|
||||||
|
settingsService,
|
||||||
|
cwd,
|
||||||
|
'[DescribeImage]'
|
||||||
|
);
|
||||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||||
|
|
||||||
logger.info(`[${requestId}] Using model: ${model}`);
|
logger.info(
|
||||||
|
`[${requestId}] Using model: ${model}`,
|
||||||
|
provider ? `via provider: ${provider.name}` : 'direct API'
|
||||||
|
);
|
||||||
|
|
||||||
// Get customized prompts from settings
|
// Get customized prompts from settings
|
||||||
const prompts = await getPromptCustomization(settingsService, '[DescribeImage]');
|
const prompts = await getPromptCustomization(settingsService, '[DescribeImage]');
|
||||||
@@ -325,6 +336,8 @@ export function createDescribeImageHandler(
|
|||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
readOnly: true, // Image description only reads, doesn't write
|
readOnly: true, // Image description only reads, doesn't write
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||||
|
claudeCompatibleProvider: provider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
});
|
});
|
||||||
|
|
||||||
logger.info(`[${requestId}] simpleQuery completed in ${Date.now() - queryStart}ms`);
|
logger.info(`[${requestId}] simpleQuery completed in ${Date.now() - queryStart}ms`);
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import { resolveModelString } from '@automaker/model-resolver';
|
|||||||
import { CLAUDE_MODEL_MAP, type ThinkingLevel } from '@automaker/types';
|
import { CLAUDE_MODEL_MAP, type ThinkingLevel } from '@automaker/types';
|
||||||
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
import { simpleQuery } from '../../../providers/simple-query-service.js';
|
||||||
import type { SettingsService } from '../../../services/settings-service.js';
|
import type { SettingsService } from '../../../services/settings-service.js';
|
||||||
import { getPromptCustomization } from '../../../lib/settings-helpers.js';
|
import { getPromptCustomization, getProviderByModelId } from '../../../lib/settings-helpers.js';
|
||||||
import {
|
import {
|
||||||
buildUserPrompt,
|
buildUserPrompt,
|
||||||
isValidEnhancementMode,
|
isValidEnhancementMode,
|
||||||
@@ -33,6 +33,8 @@ interface EnhanceRequestBody {
|
|||||||
model?: string;
|
model?: string;
|
||||||
/** Optional thinking level for Claude models */
|
/** Optional thinking level for Claude models */
|
||||||
thinkingLevel?: ThinkingLevel;
|
thinkingLevel?: ThinkingLevel;
|
||||||
|
/** Optional project path for per-project Claude API profile */
|
||||||
|
projectPath?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -62,7 +64,7 @@ export function createEnhanceHandler(
|
|||||||
): (req: Request, res: Response) => Promise<void> {
|
): (req: Request, res: Response) => Promise<void> {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { originalText, enhancementMode, model, thinkingLevel } =
|
const { originalText, enhancementMode, model, thinkingLevel, projectPath } =
|
||||||
req.body as EnhanceRequestBody;
|
req.body as EnhanceRequestBody;
|
||||||
|
|
||||||
// Validate required fields
|
// Validate required fields
|
||||||
@@ -121,8 +123,32 @@ export function createEnhanceHandler(
|
|||||||
// Build the user prompt with few-shot examples
|
// Build the user prompt with few-shot examples
|
||||||
const userPrompt = buildUserPrompt(validMode, trimmedText, true);
|
const userPrompt = buildUserPrompt(validMode, trimmedText, true);
|
||||||
|
|
||||||
// Resolve the model - use the passed model, default to sonnet for quality
|
// Check if the model is a provider model (like "GLM-4.5-Air")
|
||||||
const resolvedModel = resolveModelString(model, CLAUDE_MODEL_MAP.sonnet);
|
// If so, get the provider config and resolved Claude model
|
||||||
|
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||||
|
let providerResolvedModel: string | undefined;
|
||||||
|
let credentials = await settingsService?.getCredentials();
|
||||||
|
|
||||||
|
if (model && settingsService) {
|
||||||
|
const providerResult = await getProviderByModelId(
|
||||||
|
model,
|
||||||
|
settingsService,
|
||||||
|
'[EnhancePrompt]'
|
||||||
|
);
|
||||||
|
if (providerResult.provider) {
|
||||||
|
claudeCompatibleProvider = providerResult.provider;
|
||||||
|
providerResolvedModel = providerResult.resolvedModel;
|
||||||
|
credentials = providerResult.credentials;
|
||||||
|
logger.info(
|
||||||
|
`Using provider "${providerResult.provider.name}" for model "${model}"` +
|
||||||
|
(providerResolvedModel ? ` -> resolved to "${providerResolvedModel}"` : '')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve the model - use provider resolved model, passed model, or default to sonnet
|
||||||
|
const resolvedModel =
|
||||||
|
providerResolvedModel || resolveModelString(model, CLAUDE_MODEL_MAP.sonnet);
|
||||||
|
|
||||||
logger.debug(`Using model: ${resolvedModel}`);
|
logger.debug(`Using model: ${resolvedModel}`);
|
||||||
|
|
||||||
@@ -137,6 +163,8 @@ export function createEnhanceHandler(
|
|||||||
allowedTools: [],
|
allowedTools: [],
|
||||||
thinkingLevel,
|
thinkingLevel,
|
||||||
readOnly: true, // Prompt enhancement only generates text, doesn't write files
|
readOnly: true, // Prompt enhancement only generates text, doesn't write files
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
});
|
});
|
||||||
|
|
||||||
const enhancedText = result.text;
|
const enhancedText = result.text;
|
||||||
|
|||||||
@@ -16,6 +16,8 @@ import { createBulkDeleteHandler } from './routes/bulk-delete.js';
|
|||||||
import { createDeleteHandler } from './routes/delete.js';
|
import { createDeleteHandler } from './routes/delete.js';
|
||||||
import { createAgentOutputHandler, createRawOutputHandler } from './routes/agent-output.js';
|
import { createAgentOutputHandler, createRawOutputHandler } from './routes/agent-output.js';
|
||||||
import { createGenerateTitleHandler } from './routes/generate-title.js';
|
import { createGenerateTitleHandler } from './routes/generate-title.js';
|
||||||
|
import { createExportHandler } from './routes/export.js';
|
||||||
|
import { createImportHandler, createConflictCheckHandler } from './routes/import.js';
|
||||||
|
|
||||||
export function createFeaturesRoutes(
|
export function createFeaturesRoutes(
|
||||||
featureLoader: FeatureLoader,
|
featureLoader: FeatureLoader,
|
||||||
@@ -46,6 +48,13 @@ export function createFeaturesRoutes(
|
|||||||
router.post('/agent-output', createAgentOutputHandler(featureLoader));
|
router.post('/agent-output', createAgentOutputHandler(featureLoader));
|
||||||
router.post('/raw-output', createRawOutputHandler(featureLoader));
|
router.post('/raw-output', createRawOutputHandler(featureLoader));
|
||||||
router.post('/generate-title', createGenerateTitleHandler(settingsService));
|
router.post('/generate-title', createGenerateTitleHandler(settingsService));
|
||||||
|
router.post('/export', validatePathParams('projectPath'), createExportHandler(featureLoader));
|
||||||
|
router.post('/import', validatePathParams('projectPath'), createImportHandler(featureLoader));
|
||||||
|
router.post(
|
||||||
|
'/check-conflicts',
|
||||||
|
validatePathParams('projectPath'),
|
||||||
|
createConflictCheckHandler(featureLoader)
|
||||||
|
);
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
}
|
}
|
||||||
|
|||||||
96
apps/server/src/routes/features/routes/export.ts
Normal file
96
apps/server/src/routes/features/routes/export.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
/**
|
||||||
|
* POST /export endpoint - Export features to JSON or YAML format
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import type { FeatureLoader } from '../../../services/feature-loader.js';
|
||||||
|
import {
|
||||||
|
getFeatureExportService,
|
||||||
|
type ExportFormat,
|
||||||
|
type BulkExportOptions,
|
||||||
|
} from '../../../services/feature-export-service.js';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
interface ExportRequest {
|
||||||
|
projectPath: string;
|
||||||
|
/** Feature IDs to export. If empty/undefined, exports all features */
|
||||||
|
featureIds?: string[];
|
||||||
|
/** Export format: 'json' or 'yaml' */
|
||||||
|
format?: ExportFormat;
|
||||||
|
/** Whether to include description history */
|
||||||
|
includeHistory?: boolean;
|
||||||
|
/** Whether to include plan spec */
|
||||||
|
includePlanSpec?: boolean;
|
||||||
|
/** Filter by category */
|
||||||
|
category?: string;
|
||||||
|
/** Filter by status */
|
||||||
|
status?: string;
|
||||||
|
/** Pretty print output */
|
||||||
|
prettyPrint?: boolean;
|
||||||
|
/** Optional metadata to include */
|
||||||
|
metadata?: {
|
||||||
|
projectName?: string;
|
||||||
|
projectPath?: string;
|
||||||
|
branch?: string;
|
||||||
|
[key: string]: unknown;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createExportHandler(featureLoader: FeatureLoader) {
|
||||||
|
const exportService = getFeatureExportService();
|
||||||
|
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const {
|
||||||
|
projectPath,
|
||||||
|
featureIds,
|
||||||
|
format = 'json',
|
||||||
|
includeHistory = true,
|
||||||
|
includePlanSpec = true,
|
||||||
|
category,
|
||||||
|
status,
|
||||||
|
prettyPrint = true,
|
||||||
|
metadata,
|
||||||
|
} = req.body as ExportRequest;
|
||||||
|
|
||||||
|
if (!projectPath) {
|
||||||
|
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate format
|
||||||
|
if (format !== 'json' && format !== 'yaml') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'format must be "json" or "yaml"',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const options: BulkExportOptions = {
|
||||||
|
format,
|
||||||
|
includeHistory,
|
||||||
|
includePlanSpec,
|
||||||
|
category,
|
||||||
|
status,
|
||||||
|
featureIds,
|
||||||
|
prettyPrint,
|
||||||
|
metadata,
|
||||||
|
};
|
||||||
|
|
||||||
|
const exportData = await exportService.exportFeatures(projectPath, options);
|
||||||
|
|
||||||
|
// Return the export data as a string in the response
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: exportData,
|
||||||
|
format,
|
||||||
|
contentType: format === 'json' ? 'application/json' : 'application/x-yaml',
|
||||||
|
filename: `features-export.${format === 'json' ? 'json' : 'yaml'}`,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Export features failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -16,6 +16,7 @@ const logger = createLogger('GenerateTitle');
|
|||||||
|
|
||||||
interface GenerateTitleRequestBody {
|
interface GenerateTitleRequestBody {
|
||||||
description: string;
|
description: string;
|
||||||
|
projectPath?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface GenerateTitleSuccessResponse {
|
interface GenerateTitleSuccessResponse {
|
||||||
@@ -33,7 +34,7 @@ export function createGenerateTitleHandler(
|
|||||||
): (req: Request, res: Response) => Promise<void> {
|
): (req: Request, res: Response) => Promise<void> {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { description } = req.body as GenerateTitleRequestBody;
|
const { description, projectPath } = req.body as GenerateTitleRequestBody;
|
||||||
|
|
||||||
if (!description || typeof description !== 'string') {
|
if (!description || typeof description !== 'string') {
|
||||||
const response: GenerateTitleErrorResponse = {
|
const response: GenerateTitleErrorResponse = {
|
||||||
@@ -60,6 +61,9 @@ export function createGenerateTitleHandler(
|
|||||||
const prompts = await getPromptCustomization(settingsService, '[GenerateTitle]');
|
const prompts = await getPromptCustomization(settingsService, '[GenerateTitle]');
|
||||||
const systemPrompt = prompts.titleGeneration.systemPrompt;
|
const systemPrompt = prompts.titleGeneration.systemPrompt;
|
||||||
|
|
||||||
|
// Get credentials for API calls (uses hardcoded haiku model, no phase setting)
|
||||||
|
const credentials = await settingsService?.getCredentials();
|
||||||
|
|
||||||
const userPrompt = `Generate a concise title for this feature:\n\n${trimmedDescription}`;
|
const userPrompt = `Generate a concise title for this feature:\n\n${trimmedDescription}`;
|
||||||
|
|
||||||
// Use simpleQuery - provider abstraction handles all the streaming/extraction
|
// Use simpleQuery - provider abstraction handles all the streaming/extraction
|
||||||
@@ -69,6 +73,7 @@ export function createGenerateTitleHandler(
|
|||||||
cwd: process.cwd(),
|
cwd: process.cwd(),
|
||||||
maxTurns: 1,
|
maxTurns: 1,
|
||||||
allowedTools: [],
|
allowedTools: [],
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
});
|
});
|
||||||
|
|
||||||
const title = result.text;
|
const title = result.text;
|
||||||
|
|||||||
210
apps/server/src/routes/features/routes/import.ts
Normal file
210
apps/server/src/routes/features/routes/import.ts
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
/**
|
||||||
|
* POST /import endpoint - Import features from JSON or YAML format
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import type { FeatureLoader } from '../../../services/feature-loader.js';
|
||||||
|
import type { FeatureImportResult, Feature, FeatureExport } from '@automaker/types';
|
||||||
|
import { getFeatureExportService } from '../../../services/feature-export-service.js';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
interface ImportRequest {
|
||||||
|
projectPath: string;
|
||||||
|
/** Raw JSON or YAML string containing feature data */
|
||||||
|
data: string;
|
||||||
|
/** Whether to overwrite existing features with same ID */
|
||||||
|
overwrite?: boolean;
|
||||||
|
/** Whether to preserve branch info from imported features */
|
||||||
|
preserveBranchInfo?: boolean;
|
||||||
|
/** Optional category to assign to all imported features */
|
||||||
|
targetCategory?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ConflictCheckRequest {
|
||||||
|
projectPath: string;
|
||||||
|
/** Raw JSON or YAML string containing feature data */
|
||||||
|
data: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ConflictInfo {
|
||||||
|
featureId: string;
|
||||||
|
title?: string;
|
||||||
|
existingTitle?: string;
|
||||||
|
hasConflict: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createImportHandler(featureLoader: FeatureLoader) {
|
||||||
|
const exportService = getFeatureExportService();
|
||||||
|
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const {
|
||||||
|
projectPath,
|
||||||
|
data,
|
||||||
|
overwrite = false,
|
||||||
|
preserveBranchInfo = false,
|
||||||
|
targetCategory,
|
||||||
|
} = req.body as ImportRequest;
|
||||||
|
|
||||||
|
if (!projectPath) {
|
||||||
|
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!data) {
|
||||||
|
res.status(400).json({ success: false, error: 'data is required' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect format and parse the data
|
||||||
|
const format = exportService.detectFormat(data);
|
||||||
|
if (!format) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid data format. Expected valid JSON or YAML.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = exportService.parseImportData(data);
|
||||||
|
if (!parsed) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to parse import data. Ensure it is valid JSON or YAML.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine if this is a single feature or bulk import
|
||||||
|
const isBulkImport =
|
||||||
|
'features' in parsed && Array.isArray((parsed as { features: unknown }).features);
|
||||||
|
|
||||||
|
let results: FeatureImportResult[];
|
||||||
|
|
||||||
|
if (isBulkImport) {
|
||||||
|
// Bulk import
|
||||||
|
results = await exportService.importFeatures(projectPath, data, {
|
||||||
|
overwrite,
|
||||||
|
preserveBranchInfo,
|
||||||
|
targetCategory,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Single feature import - we know it's not a bulk export at this point
|
||||||
|
// It must be either a Feature or FeatureExport
|
||||||
|
const singleData = parsed as Feature | FeatureExport;
|
||||||
|
|
||||||
|
const result = await exportService.importFeature(projectPath, {
|
||||||
|
data: singleData,
|
||||||
|
overwrite,
|
||||||
|
preserveBranchInfo,
|
||||||
|
targetCategory,
|
||||||
|
});
|
||||||
|
results = [result];
|
||||||
|
}
|
||||||
|
|
||||||
|
const successCount = results.filter((r) => r.success).length;
|
||||||
|
const failureCount = results.filter((r) => !r.success).length;
|
||||||
|
const allSuccessful = failureCount === 0;
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: allSuccessful,
|
||||||
|
importedCount: successCount,
|
||||||
|
failedCount: failureCount,
|
||||||
|
results,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Import features failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create handler for checking conflicts before import
|
||||||
|
*/
|
||||||
|
export function createConflictCheckHandler(featureLoader: FeatureLoader) {
|
||||||
|
const exportService = getFeatureExportService();
|
||||||
|
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { projectPath, data } = req.body as ConflictCheckRequest;
|
||||||
|
|
||||||
|
if (!projectPath) {
|
||||||
|
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!data) {
|
||||||
|
res.status(400).json({ success: false, error: 'data is required' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the import data
|
||||||
|
const format = exportService.detectFormat(data);
|
||||||
|
if (!format) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid data format. Expected valid JSON or YAML.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = exportService.parseImportData(data);
|
||||||
|
if (!parsed) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to parse import data.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract features from the data using type guards
|
||||||
|
let featuresToCheck: Array<{ id: string; title?: string }> = [];
|
||||||
|
|
||||||
|
if (exportService.isBulkExport(parsed)) {
|
||||||
|
// Bulk export format
|
||||||
|
featuresToCheck = parsed.features.map((f) => ({
|
||||||
|
id: f.feature.id,
|
||||||
|
title: f.feature.title,
|
||||||
|
}));
|
||||||
|
} else if (exportService.isFeatureExport(parsed)) {
|
||||||
|
// Single FeatureExport format
|
||||||
|
featuresToCheck = [
|
||||||
|
{
|
||||||
|
id: parsed.feature.id,
|
||||||
|
title: parsed.feature.title,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
} else if (exportService.isRawFeature(parsed)) {
|
||||||
|
// Raw Feature format
|
||||||
|
featuresToCheck = [{ id: parsed.id, title: parsed.title }];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check each feature for conflicts in parallel
|
||||||
|
const conflicts: ConflictInfo[] = await Promise.all(
|
||||||
|
featuresToCheck.map(async (feature) => {
|
||||||
|
const existing = await featureLoader.get(projectPath, feature.id);
|
||||||
|
return {
|
||||||
|
featureId: feature.id,
|
||||||
|
title: feature.title,
|
||||||
|
existingTitle: existing?.title,
|
||||||
|
hasConflict: !!existing,
|
||||||
|
};
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
const hasConflicts = conflicts.some((c) => c.hasConflict);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
hasConflicts,
|
||||||
|
conflicts,
|
||||||
|
totalFeatures: featuresToCheck.length,
|
||||||
|
conflictCount: conflicts.filter((c) => c.hasConflict).length,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Conflict check failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,5 +1,12 @@
|
|||||||
/**
|
/**
|
||||||
* GET /image endpoint - Serve image files
|
* GET /image endpoint - Serve image files
|
||||||
|
*
|
||||||
|
* Requires authentication via auth middleware:
|
||||||
|
* - apiKey query parameter (Electron mode)
|
||||||
|
* - token query parameter (web mode)
|
||||||
|
* - session cookie (web mode)
|
||||||
|
* - X-API-Key header (Electron mode)
|
||||||
|
* - X-Session-Token header (web mode)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
|
|||||||
@@ -34,7 +34,11 @@ import {
|
|||||||
ValidationComment,
|
ValidationComment,
|
||||||
ValidationLinkedPR,
|
ValidationLinkedPR,
|
||||||
} from './validation-schema.js';
|
} from './validation-schema.js';
|
||||||
import { getPromptCustomization } from '../../../lib/settings-helpers.js';
|
import {
|
||||||
|
getPromptCustomization,
|
||||||
|
getAutoLoadClaudeMdSetting,
|
||||||
|
getProviderByModelId,
|
||||||
|
} from '../../../lib/settings-helpers.js';
|
||||||
import {
|
import {
|
||||||
trySetValidationRunning,
|
trySetValidationRunning,
|
||||||
clearValidationStatus,
|
clearValidationStatus,
|
||||||
@@ -43,7 +47,6 @@ import {
|
|||||||
logger,
|
logger,
|
||||||
} from './validation-common.js';
|
} from './validation-common.js';
|
||||||
import type { SettingsService } from '../../../services/settings-service.js';
|
import type { SettingsService } from '../../../services/settings-service.js';
|
||||||
import { getAutoLoadClaudeMdSetting } from '../../../lib/settings-helpers.js';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Request body for issue validation
|
* Request body for issue validation
|
||||||
@@ -164,12 +167,33 @@ ${basePrompt}`;
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`Using model: ${model}`);
|
// Check if the model is a provider model (like "GLM-4.5-Air")
|
||||||
|
// If so, get the provider config and resolved Claude model
|
||||||
|
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||||
|
let providerResolvedModel: string | undefined;
|
||||||
|
let credentials = await settingsService?.getCredentials();
|
||||||
|
|
||||||
|
if (settingsService) {
|
||||||
|
const providerResult = await getProviderByModelId(model, settingsService, '[ValidateIssue]');
|
||||||
|
if (providerResult.provider) {
|
||||||
|
claudeCompatibleProvider = providerResult.provider;
|
||||||
|
providerResolvedModel = providerResult.resolvedModel;
|
||||||
|
credentials = providerResult.credentials;
|
||||||
|
logger.info(
|
||||||
|
`Using provider "${providerResult.provider.name}" for model "${model}"` +
|
||||||
|
(providerResolvedModel ? ` -> resolved to "${providerResolvedModel}"` : '')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use provider resolved model if available, otherwise use original model
|
||||||
|
const effectiveModel = providerResolvedModel || (model as string);
|
||||||
|
logger.info(`Using model: ${effectiveModel}`);
|
||||||
|
|
||||||
// Use streamingQuery with event callbacks
|
// Use streamingQuery with event callbacks
|
||||||
const result = await streamingQuery({
|
const result = await streamingQuery({
|
||||||
prompt: finalPrompt,
|
prompt: finalPrompt,
|
||||||
model: model as string,
|
model: effectiveModel,
|
||||||
cwd: projectPath,
|
cwd: projectPath,
|
||||||
systemPrompt: useStructuredOutput ? issueValidationSystemPrompt : undefined,
|
systemPrompt: useStructuredOutput ? issueValidationSystemPrompt : undefined,
|
||||||
abortController,
|
abortController,
|
||||||
@@ -177,6 +201,8 @@ ${basePrompt}`;
|
|||||||
reasoningEffort: effectiveReasoningEffort,
|
reasoningEffort: effectiveReasoningEffort,
|
||||||
readOnly: true, // Issue validation only reads code, doesn't write
|
readOnly: true, // Issue validation only reads code, doesn't write
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
outputFormat: useStructuredOutput
|
outputFormat: useStructuredOutput
|
||||||
? {
|
? {
|
||||||
type: 'json_schema',
|
type: 'json_schema',
|
||||||
|
|||||||
143
apps/server/src/routes/provider-usage/index.ts
Normal file
143
apps/server/src/routes/provider-usage/index.ts
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
/**
|
||||||
|
* Provider Usage Routes
|
||||||
|
*
|
||||||
|
* API endpoints for fetching usage data from all AI providers.
|
||||||
|
*
|
||||||
|
* Endpoints:
|
||||||
|
* - GET /api/provider-usage - Get usage for all enabled providers
|
||||||
|
* - GET /api/provider-usage/:providerId - Get usage for a specific provider
|
||||||
|
* - GET /api/provider-usage/availability - Check availability of all providers
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Router, Request, Response } from 'express';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { UsageProviderId } from '@automaker/types';
|
||||||
|
import { ProviderUsageTracker } from '../../services/provider-usage-tracker.js';
|
||||||
|
|
||||||
|
const logger = createLogger('ProviderUsageRoutes');
|
||||||
|
|
||||||
|
// Valid provider IDs accepted by the /:providerId route; any other value
// is rejected with HTTP 400 by the handler below.
const VALID_PROVIDER_IDS: UsageProviderId[] = [
  'claude',
  'codex',
  'cursor',
  'gemini',
  'copilot',
  'opencode',
  'minimax',
  'glm',
];
|
||||||
|
|
||||||
|
export function createProviderUsageRoutes(tracker: ProviderUsageTracker): Router {
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/provider-usage
|
||||||
|
* Fetch usage for all enabled providers
|
||||||
|
*/
|
||||||
|
router.get('/', async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const forceRefresh = req.query.refresh === 'true';
|
||||||
|
const usage = await tracker.fetchAllUsage(forceRefresh);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: usage,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('Error fetching all provider usage:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/provider-usage/availability
|
||||||
|
* Check which providers are available
|
||||||
|
*/
|
||||||
|
router.get('/availability', async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const availability = await tracker.checkAvailability();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: availability,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('Error checking provider availability:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/provider-usage/:providerId
|
||||||
|
* Fetch usage for a specific provider
|
||||||
|
*/
|
||||||
|
router.get('/:providerId', async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const providerId = req.params.providerId as UsageProviderId;
|
||||||
|
|
||||||
|
// Validate provider ID
|
||||||
|
if (!VALID_PROVIDER_IDS.includes(providerId)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: `Invalid provider ID: ${providerId}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if provider is enabled
|
||||||
|
if (!tracker.isProviderEnabled(providerId)) {
|
||||||
|
res.status(200).json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
providerId,
|
||||||
|
providerName: providerId,
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
error: 'Provider is disabled',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const forceRefresh = req.query.refresh === 'true';
|
||||||
|
const usage = await tracker.fetchProviderUsage(providerId, forceRefresh);
|
||||||
|
|
||||||
|
if (!usage) {
|
||||||
|
res.status(200).json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
providerId,
|
||||||
|
providerName: providerId,
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
error: 'Failed to fetch usage data',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: usage,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error(`Error fetching usage for ${req.params.providerId}:`, error);
|
||||||
|
|
||||||
|
// Return 200 with error in data to avoid triggering logout
|
||||||
|
res.status(200).json({
|
||||||
|
success: false,
|
||||||
|
error: message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return router;
|
||||||
|
}
|
||||||
@@ -45,18 +45,24 @@ export function createUpdateGlobalHandler(settingsService: SettingsService) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Minimal debug logging to help diagnose accidental wipes.
|
// Minimal debug logging to help diagnose accidental wipes.
|
||||||
if ('projects' in updates || 'theme' in updates || 'localStorageMigrated' in updates) {
|
|
||||||
const projectsLen = Array.isArray((updates as any).projects)
|
const projectsLen = Array.isArray((updates as any).projects)
|
||||||
? (updates as any).projects.length
|
? (updates as any).projects.length
|
||||||
: undefined;
|
: undefined;
|
||||||
|
const trashedLen = Array.isArray((updates as any).trashedProjects)
|
||||||
|
? (updates as any).trashedProjects.length
|
||||||
|
: undefined;
|
||||||
logger.info(
|
logger.info(
|
||||||
`Update global settings request: projects=${projectsLen ?? 'n/a'}, theme=${
|
`[SERVER_SETTINGS_UPDATE] Request received: projects=${projectsLen ?? 'n/a'}, trashedProjects=${trashedLen ?? 'n/a'}, theme=${
|
||||||
(updates as any).theme ?? 'n/a'
|
(updates as any).theme ?? 'n/a'
|
||||||
}, localStorageMigrated=${(updates as any).localStorageMigrated ?? 'n/a'}`
|
}, localStorageMigrated=${(updates as any).localStorageMigrated ?? 'n/a'}`
|
||||||
);
|
);
|
||||||
}
|
|
||||||
|
|
||||||
|
logger.info('[SERVER_SETTINGS_UPDATE] Calling updateGlobalSettings...');
|
||||||
const settings = await settingsService.updateGlobalSettings(updates);
|
const settings = await settingsService.updateGlobalSettings(updates);
|
||||||
|
logger.info(
|
||||||
|
'[SERVER_SETTINGS_UPDATE] Update complete, projects count:',
|
||||||
|
settings.projects?.length ?? 0
|
||||||
|
);
|
||||||
|
|
||||||
// Apply server log level if it was updated
|
// Apply server log level if it was updated
|
||||||
if ('serverLogLevel' in updates && updates.serverLogLevel) {
|
if ('serverLogLevel' in updates && updates.serverLogLevel) {
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { Router } from 'express';
|
import { Router } from 'express';
|
||||||
import { createStatusHandler } from './routes/status.js';
|
|
||||||
import { createClaudeStatusHandler } from './routes/claude-status.js';
|
import { createClaudeStatusHandler } from './routes/claude-status.js';
|
||||||
import { createInstallClaudeHandler } from './routes/install-claude.js';
|
import { createInstallClaudeHandler } from './routes/install-claude.js';
|
||||||
import { createAuthClaudeHandler } from './routes/auth-claude.js';
|
import { createAuthClaudeHandler } from './routes/auth-claude.js';
|
||||||
@@ -13,10 +12,6 @@ import { createApiKeysHandler } from './routes/api-keys.js';
|
|||||||
import { createPlatformHandler } from './routes/platform.js';
|
import { createPlatformHandler } from './routes/platform.js';
|
||||||
import { createVerifyClaudeAuthHandler } from './routes/verify-claude-auth.js';
|
import { createVerifyClaudeAuthHandler } from './routes/verify-claude-auth.js';
|
||||||
import { createVerifyCodexAuthHandler } from './routes/verify-codex-auth.js';
|
import { createVerifyCodexAuthHandler } from './routes/verify-codex-auth.js';
|
||||||
import { createVerifyCodeRabbitAuthHandler } from './routes/verify-coderabbit-auth.js';
|
|
||||||
import { createCodeRabbitStatusHandler } from './routes/coderabbit-status.js';
|
|
||||||
import { createAuthCodeRabbitHandler } from './routes/auth-coderabbit.js';
|
|
||||||
import { createDeauthCodeRabbitHandler } from './routes/deauth-coderabbit.js';
|
|
||||||
import { createGhStatusHandler } from './routes/gh-status.js';
|
import { createGhStatusHandler } from './routes/gh-status.js';
|
||||||
import { createCursorStatusHandler } from './routes/cursor-status.js';
|
import { createCursorStatusHandler } from './routes/cursor-status.js';
|
||||||
import { createCodexStatusHandler } from './routes/codex-status.js';
|
import { createCodexStatusHandler } from './routes/codex-status.js';
|
||||||
@@ -29,6 +24,9 @@ import { createDeauthCursorHandler } from './routes/deauth-cursor.js';
|
|||||||
import { createAuthOpencodeHandler } from './routes/auth-opencode.js';
|
import { createAuthOpencodeHandler } from './routes/auth-opencode.js';
|
||||||
import { createDeauthOpencodeHandler } from './routes/deauth-opencode.js';
|
import { createDeauthOpencodeHandler } from './routes/deauth-opencode.js';
|
||||||
import { createOpencodeStatusHandler } from './routes/opencode-status.js';
|
import { createOpencodeStatusHandler } from './routes/opencode-status.js';
|
||||||
|
import { createGeminiStatusHandler } from './routes/gemini-status.js';
|
||||||
|
import { createAuthGeminiHandler } from './routes/auth-gemini.js';
|
||||||
|
import { createDeauthGeminiHandler } from './routes/deauth-gemini.js';
|
||||||
import {
|
import {
|
||||||
createGetOpencodeModelsHandler,
|
createGetOpencodeModelsHandler,
|
||||||
createRefreshOpencodeModelsHandler,
|
createRefreshOpencodeModelsHandler,
|
||||||
@@ -49,9 +47,6 @@ import {
|
|||||||
export function createSetupRoutes(): Router {
|
export function createSetupRoutes(): Router {
|
||||||
const router = Router();
|
const router = Router();
|
||||||
|
|
||||||
// Unified CLI status endpoint
|
|
||||||
router.get('/status', createStatusHandler());
|
|
||||||
|
|
||||||
router.get('/claude-status', createClaudeStatusHandler());
|
router.get('/claude-status', createClaudeStatusHandler());
|
||||||
router.post('/install-claude', createInstallClaudeHandler());
|
router.post('/install-claude', createInstallClaudeHandler());
|
||||||
router.post('/auth-claude', createAuthClaudeHandler());
|
router.post('/auth-claude', createAuthClaudeHandler());
|
||||||
@@ -62,7 +57,6 @@ export function createSetupRoutes(): Router {
|
|||||||
router.get('/platform', createPlatformHandler());
|
router.get('/platform', createPlatformHandler());
|
||||||
router.post('/verify-claude-auth', createVerifyClaudeAuthHandler());
|
router.post('/verify-claude-auth', createVerifyClaudeAuthHandler());
|
||||||
router.post('/verify-codex-auth', createVerifyCodexAuthHandler());
|
router.post('/verify-codex-auth', createVerifyCodexAuthHandler());
|
||||||
router.post('/verify-coderabbit-auth', createVerifyCodeRabbitAuthHandler());
|
|
||||||
router.get('/gh-status', createGhStatusHandler());
|
router.get('/gh-status', createGhStatusHandler());
|
||||||
|
|
||||||
// Cursor CLI routes
|
// Cursor CLI routes
|
||||||
@@ -81,10 +75,10 @@ export function createSetupRoutes(): Router {
|
|||||||
router.post('/auth-opencode', createAuthOpencodeHandler());
|
router.post('/auth-opencode', createAuthOpencodeHandler());
|
||||||
router.post('/deauth-opencode', createDeauthOpencodeHandler());
|
router.post('/deauth-opencode', createDeauthOpencodeHandler());
|
||||||
|
|
||||||
// CodeRabbit CLI routes
|
// Gemini CLI routes
|
||||||
router.get('/coderabbit-status', createCodeRabbitStatusHandler());
|
router.get('/gemini-status', createGeminiStatusHandler());
|
||||||
router.post('/auth-coderabbit', createAuthCodeRabbitHandler());
|
router.post('/auth-gemini', createAuthGeminiHandler());
|
||||||
router.post('/deauth-coderabbit', createDeauthCodeRabbitHandler());
|
router.post('/deauth-gemini', createDeauthGeminiHandler());
|
||||||
|
|
||||||
// OpenCode Dynamic Model Discovery routes
|
// OpenCode Dynamic Model Discovery routes
|
||||||
router.get('/opencode/models', createGetOpencodeModelsHandler());
|
router.get('/opencode/models', createGetOpencodeModelsHandler());
|
||||||
|
|||||||
@@ -1,80 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /auth-coderabbit endpoint - Authenticate CodeRabbit CLI via OAuth
|
|
||||||
*
|
|
||||||
* CodeRabbit CLI requires interactive authentication:
|
|
||||||
* 1. Run `cr auth login`
|
|
||||||
* 2. Browser opens with OAuth flow
|
|
||||||
* 3. After browser auth, CLI shows a token
|
|
||||||
* 4. User must press Enter to confirm
|
|
||||||
*
|
|
||||||
* Since step 4 requires interactive input, we can't fully automate this.
|
|
||||||
* Instead, we provide the command for the user to run manually.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { execSync } from 'child_process';
|
|
||||||
import { logError, getErrorMessage } from '../common.js';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
/**
 * Find the CodeRabbit CLI command (coderabbit or cr)
 *
 * Probes PATH via `which` (or `where` on Windows) for each known alias and
 * returns the first resolved executable path, or null when neither alias
 * is installed.
 */
function findCodeRabbitCommand(): string | null {
  const commands = ['coderabbit', 'cr'];
  for (const command of commands) {
    try {
      const whichCommand = process.platform === 'win32' ? 'where' : 'which';
      const result = execSync(`${whichCommand} ${command}`, {
        encoding: 'utf8',
        timeout: 2000,
      }).trim();
      if (result) {
        // `where` can print multiple matches, one per line; keep the first.
        return result.split('\n')[0];
      }
    } catch {
      // Command not found, try next
    }
  }
  return null;
}
|
|
||||||
|
|
||||||
/**
 * Creates handler for POST /api/setup/auth-coderabbit.
 *
 * CodeRabbit's OAuth flow ends with an interactive "press Enter" step that
 * cannot be automated; this endpoint clears the app-side disconnect marker
 * and returns the exact login command for the user to run manually.
 */
export function createAuthCodeRabbitHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
    try {
      // Remove the disconnected marker file to reconnect the app to the CLI
      const markerPath = path.join(process.cwd(), '.automaker', '.coderabbit-disconnected');
      if (fs.existsSync(markerPath)) {
        fs.unlinkSync(markerPath);
      }

      // Find CodeRabbit CLI
      const cliPath = findCodeRabbitCommand();
      if (!cliPath) {
        res.status(400).json({
          success: false,
          error: 'CodeRabbit CLI is not installed. Please install it first.',
        });
        return;
      }

      // CodeRabbit CLI requires interactive input (pressing Enter after OAuth)
      // We can't automate this, so we return the command for the user to run
      const command = cliPath.includes('coderabbit') ? 'coderabbit auth login' : 'cr auth login';

      res.json({
        success: true,
        requiresManualAuth: true,
        command,
        message: `Please run "${command}" in your terminal to authenticate. After completing OAuth in your browser, press Enter in the terminal to confirm.`,
      });
    } catch (error) {
      logError(error, 'Auth CodeRabbit failed');
      res.status(500).json({
        success: false,
        error: getErrorMessage(error),
        message: 'Failed to initiate CodeRabbit authentication',
      });
    }
  };
}
|
|
||||||
42
apps/server/src/routes/setup/routes/auth-gemini.ts
Normal file
42
apps/server/src/routes/setup/routes/auth-gemini.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* POST /auth-gemini endpoint - Connect Gemini CLI to the app
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
import * as fs from 'fs/promises';
|
||||||
|
import * as path from 'path';
|
||||||
|
|
||||||
|
const DISCONNECTED_MARKER_FILE = '.gemini-disconnected';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates handler for POST /api/setup/auth-gemini
|
||||||
|
* Removes the disconnection marker to allow Gemini CLI to be used
|
||||||
|
*/
|
||||||
|
export function createAuthGeminiHandler() {
|
||||||
|
return async (_req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const projectRoot = process.cwd();
|
||||||
|
const automakerDir = path.join(projectRoot, '.automaker');
|
||||||
|
const markerPath = path.join(automakerDir, DISCONNECTED_MARKER_FILE);
|
||||||
|
|
||||||
|
// Remove the disconnection marker if it exists
|
||||||
|
try {
|
||||||
|
await fs.unlink(markerPath);
|
||||||
|
} catch {
|
||||||
|
// File doesn't exist, nothing to remove
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: 'Gemini CLI connected to app',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Auth Gemini failed');
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,240 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /coderabbit-status endpoint - Get CodeRabbit CLI installation and auth status
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { spawn, execSync } from 'child_process';
|
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
const DISCONNECTED_MARKER_FILE = '.coderabbit-disconnected';
|
|
||||||
|
|
||||||
function isCodeRabbitDisconnectedFromApp(): boolean {
|
|
||||||
try {
|
|
||||||
const projectRoot = process.cwd();
|
|
||||||
const markerPath = path.join(projectRoot, '.automaker', DISCONNECTED_MARKER_FILE);
|
|
||||||
return fs.existsSync(markerPath);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Find the CodeRabbit CLI command (coderabbit or cr)
|
|
||||||
*/
|
|
||||||
function findCodeRabbitCommand(): string | null {
|
|
||||||
const commands = ['coderabbit', 'cr'];
|
|
||||||
for (const command of commands) {
|
|
||||||
try {
|
|
||||||
const whichCommand = process.platform === 'win32' ? 'where' : 'which';
|
|
||||||
const result = execSync(`${whichCommand} ${command}`, {
|
|
||||||
encoding: 'utf8',
|
|
||||||
timeout: 2000,
|
|
||||||
}).trim();
|
|
||||||
if (result) {
|
|
||||||
return result.split('\n')[0];
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// Command not found, try next
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get CodeRabbit CLI version
|
|
||||||
*/
|
|
||||||
async function getCodeRabbitVersion(command: string): Promise<string | null> {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
const child = spawn(command, ['--version'], {
|
|
||||||
stdio: 'pipe',
|
|
||||||
timeout: 5000,
|
|
||||||
});
|
|
||||||
|
|
||||||
let stdout = '';
|
|
||||||
child.stdout?.on('data', (data) => {
|
|
||||||
stdout += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('close', (code) => {
|
|
||||||
if (code === 0 && stdout) {
|
|
||||||
resolve(stdout.trim());
|
|
||||||
} else {
|
|
||||||
resolve(null);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('error', () => {
|
|
||||||
resolve(null);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Parsed result of `coderabbit auth status`. */
interface CodeRabbitAuthInfo {
  // True when the CLI output reports "Logged in"
  authenticated: boolean;
  // 'oauth' when authenticated, 'none' otherwise
  method: 'oauth' | 'none';
  // Fields below are parsed from the CLI's status report when present
  username?: string;
  email?: string;
  organization?: string;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check CodeRabbit CLI authentication status
|
|
||||||
* Parses output like:
|
|
||||||
* ```
|
|
||||||
* CodeRabbit CLI Status
|
|
||||||
* ✅ Authentication: Logged in
|
|
||||||
* User Information:
|
|
||||||
* 👤 Name: Kacper
|
|
||||||
* 📧 Email: kacperlachowiczwp.pl@wp.pl
|
|
||||||
* 🔧 Username: Shironex
|
|
||||||
* Organization Information:
|
|
||||||
* 🏢 Name: Anime-World-SPZOO
|
|
||||||
* ```
|
|
||||||
*/
|
|
||||||
async function getCodeRabbitAuthStatus(command: string): Promise<CodeRabbitAuthInfo> {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
const child = spawn(command, ['auth', 'status'], {
|
|
||||||
stdio: 'pipe',
|
|
||||||
timeout: 10000,
|
|
||||||
});
|
|
||||||
|
|
||||||
let stdout = '';
|
|
||||||
let stderr = '';
|
|
||||||
|
|
||||||
child.stdout?.on('data', (data) => {
|
|
||||||
stdout += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.stderr?.on('data', (data) => {
|
|
||||||
stderr += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('close', (code) => {
|
|
||||||
const output = stdout + stderr;
|
|
||||||
|
|
||||||
// Check for "Logged in" in Authentication line
|
|
||||||
const isAuthenticated =
|
|
||||||
code === 0 &&
|
|
||||||
(output.includes('Logged in') || output.includes('logged in')) &&
|
|
||||||
!output.toLowerCase().includes('not logged in');
|
|
||||||
|
|
||||||
if (isAuthenticated) {
|
|
||||||
// Parse the structured output format
|
|
||||||
// Username: look for "Username: <value>" line
|
|
||||||
const usernameMatch = output.match(/Username:\s*(\S+)/i);
|
|
||||||
// Email: look for "Email: <value>" line
|
|
||||||
const emailMatch = output.match(/Email:\s*(\S+@\S+)/i);
|
|
||||||
// Organization: look for "Name: <value>" under Organization Information
|
|
||||||
// The org name appears after "Organization Information:" section
|
|
||||||
const orgSection = output.split(/Organization Information:/i)[1];
|
|
||||||
const orgMatch = orgSection?.match(/Name:\s*(.+?)(?:\n|$)/i);
|
|
||||||
|
|
||||||
resolve({
|
|
||||||
authenticated: true,
|
|
||||||
method: 'oauth',
|
|
||||||
username: usernameMatch?.[1]?.trim(),
|
|
||||||
email: emailMatch?.[1]?.trim(),
|
|
||||||
organization: orgMatch?.[1]?.trim(),
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
resolve({
|
|
||||||
authenticated: false,
|
|
||||||
method: 'none',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('error', () => {
|
|
||||||
resolve({
|
|
||||||
authenticated: false,
|
|
||||||
method: 'none',
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates handler for GET /api/setup/coderabbit-status
|
|
||||||
* Returns CodeRabbit CLI installation and authentication status
|
|
||||||
*/
|
|
||||||
export function createCodeRabbitStatusHandler() {
|
|
||||||
const installCommand = 'npm install -g coderabbit';
|
|
||||||
const loginCommand = 'coderabbit auth login';
|
|
||||||
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
// Check if user has manually disconnected from the app
|
|
||||||
if (isCodeRabbitDisconnectedFromApp()) {
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
installed: true,
|
|
||||||
version: null,
|
|
||||||
path: null,
|
|
||||||
auth: {
|
|
||||||
authenticated: false,
|
|
||||||
method: 'none',
|
|
||||||
},
|
|
||||||
recommendation: 'CodeRabbit CLI is disconnected. Click Sign In to reconnect.',
|
|
||||||
installCommand,
|
|
||||||
loginCommand,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find CodeRabbit CLI
|
|
||||||
const cliPath = findCodeRabbitCommand();
|
|
||||||
|
|
||||||
if (!cliPath) {
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
installed: false,
|
|
||||||
version: null,
|
|
||||||
path: null,
|
|
||||||
auth: {
|
|
||||||
authenticated: false,
|
|
||||||
method: 'none',
|
|
||||||
},
|
|
||||||
recommendation: 'Install CodeRabbit CLI to enable AI-powered code reviews.',
|
|
||||||
installCommand,
|
|
||||||
loginCommand,
|
|
||||||
installCommands: {
|
|
||||||
macos: 'curl -fsSL https://coderabbit.ai/install | bash',
|
|
||||||
npm: installCommand,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get version
|
|
||||||
const version = await getCodeRabbitVersion(cliPath);
|
|
||||||
|
|
||||||
// Get auth status
|
|
||||||
const authStatus = await getCodeRabbitAuthStatus(cliPath);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
installed: true,
|
|
||||||
version,
|
|
||||||
path: cliPath,
|
|
||||||
auth: authStatus,
|
|
||||||
recommendation: authStatus.authenticated
|
|
||||||
? undefined
|
|
||||||
: 'Sign in to CodeRabbit to enable AI-powered code reviews.',
|
|
||||||
installCommand,
|
|
||||||
loginCommand,
|
|
||||||
installCommands: {
|
|
||||||
macos: 'curl -fsSL https://coderabbit.ai/install | bash',
|
|
||||||
npm: installCommand,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Get CodeRabbit status failed');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /deauth-coderabbit endpoint - Sign out from CodeRabbit CLI
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { spawn, execSync } from 'child_process';
|
|
||||||
import { logError, getErrorMessage } from '../common.js';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Find the CodeRabbit CLI command (coderabbit or cr)
|
|
||||||
*/
|
|
||||||
function findCodeRabbitCommand(): string | null {
|
|
||||||
const commands = ['coderabbit', 'cr'];
|
|
||||||
for (const command of commands) {
|
|
||||||
try {
|
|
||||||
const whichCommand = process.platform === 'win32' ? 'where' : 'which';
|
|
||||||
const result = execSync(`${whichCommand} ${command}`, {
|
|
||||||
encoding: 'utf8',
|
|
||||||
timeout: 2000,
|
|
||||||
}).trim();
|
|
||||||
if (result) {
|
|
||||||
return result.split('\n')[0];
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
// Command not found, try next
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createDeauthCodeRabbitHandler() {
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
// Find CodeRabbit CLI
|
|
||||||
const cliPath = findCodeRabbitCommand();
|
|
||||||
|
|
||||||
if (cliPath) {
|
|
||||||
// Try to run the CLI logout command
|
|
||||||
const logoutResult = await new Promise<{ success: boolean; error?: string }>((resolve) => {
|
|
||||||
const child = spawn(cliPath, ['auth', 'logout'], {
|
|
||||||
stdio: 'pipe',
|
|
||||||
timeout: 10000,
|
|
||||||
});
|
|
||||||
|
|
||||||
let stderr = '';
|
|
||||||
child.stderr?.on('data', (data) => {
|
|
||||||
stderr += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('close', (code) => {
|
|
||||||
if (code === 0) {
|
|
||||||
resolve({ success: true });
|
|
||||||
} else {
|
|
||||||
resolve({ success: false, error: stderr || 'Logout command failed' });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('error', (err) => {
|
|
||||||
resolve({ success: false, error: err.message });
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!logoutResult.success) {
|
|
||||||
// CLI logout failed, create marker file as fallback
|
|
||||||
const automakerDir = path.join(process.cwd(), '.automaker');
|
|
||||||
const markerPath = path.join(automakerDir, '.coderabbit-disconnected');
|
|
||||||
|
|
||||||
if (!fs.existsSync(automakerDir)) {
|
|
||||||
fs.mkdirSync(automakerDir, { recursive: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
fs.writeFileSync(
|
|
||||||
markerPath,
|
|
||||||
JSON.stringify({
|
|
||||||
disconnectedAt: new Date().toISOString(),
|
|
||||||
message: 'CodeRabbit CLI is disconnected from the app',
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// CLI not installed, just create marker file
|
|
||||||
const automakerDir = path.join(process.cwd(), '.automaker');
|
|
||||||
const markerPath = path.join(automakerDir, '.coderabbit-disconnected');
|
|
||||||
|
|
||||||
if (!fs.existsSync(automakerDir)) {
|
|
||||||
fs.mkdirSync(automakerDir, { recursive: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
fs.writeFileSync(
|
|
||||||
markerPath,
|
|
||||||
JSON.stringify({
|
|
||||||
disconnectedAt: new Date().toISOString(),
|
|
||||||
message: 'CodeRabbit CLI is disconnected from the app',
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
message: 'Successfully signed out from CodeRabbit CLI',
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Deauth CodeRabbit failed');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
message: 'Failed to sign out from CodeRabbit CLI',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
42
apps/server/src/routes/setup/routes/deauth-gemini.ts
Normal file
42
apps/server/src/routes/setup/routes/deauth-gemini.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* POST /deauth-gemini endpoint - Disconnect Gemini CLI from the app
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
import * as fs from 'fs/promises';
|
||||||
|
import * as path from 'path';
|
||||||
|
|
||||||
|
const DISCONNECTED_MARKER_FILE = '.gemini-disconnected';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates handler for POST /api/setup/deauth-gemini
|
||||||
|
* Creates a marker file to disconnect Gemini CLI from the app
|
||||||
|
*/
|
||||||
|
export function createDeauthGeminiHandler() {
|
||||||
|
return async (_req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const projectRoot = process.cwd();
|
||||||
|
const automakerDir = path.join(projectRoot, '.automaker');
|
||||||
|
|
||||||
|
// Ensure .automaker directory exists
|
||||||
|
await fs.mkdir(automakerDir, { recursive: true });
|
||||||
|
|
||||||
|
const markerPath = path.join(automakerDir, DISCONNECTED_MARKER_FILE);
|
||||||
|
|
||||||
|
// Create the disconnection marker
|
||||||
|
await fs.writeFile(markerPath, 'Gemini CLI disconnected from app');
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: 'Gemini CLI disconnected from app',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Deauth Gemini failed');
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
79
apps/server/src/routes/setup/routes/gemini-status.ts
Normal file
79
apps/server/src/routes/setup/routes/gemini-status.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
/**
|
||||||
|
* GET /gemini-status endpoint - Get Gemini CLI installation and auth status
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { GeminiProvider } from '../../../providers/gemini-provider.js';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
import * as fs from 'fs/promises';
|
||||||
|
import * as path from 'path';
|
||||||
|
|
||||||
|
const DISCONNECTED_MARKER_FILE = '.gemini-disconnected';
|
||||||
|
|
||||||
|
async function isGeminiDisconnectedFromApp(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const projectRoot = process.cwd();
|
||||||
|
const markerPath = path.join(projectRoot, '.automaker', DISCONNECTED_MARKER_FILE);
|
||||||
|
await fs.access(markerPath);
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates handler for GET /api/setup/gemini-status
|
||||||
|
* Returns Gemini CLI installation and authentication status
|
||||||
|
*/
|
||||||
|
export function createGeminiStatusHandler() {
|
||||||
|
const installCommand = 'npm install -g @google/gemini-cli';
|
||||||
|
const loginCommand = 'gemini';
|
||||||
|
|
||||||
|
return async (_req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
// Check if user has manually disconnected from the app
|
||||||
|
if (await isGeminiDisconnectedFromApp()) {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
installed: true,
|
||||||
|
version: null,
|
||||||
|
path: null,
|
||||||
|
auth: {
|
||||||
|
authenticated: false,
|
||||||
|
method: 'none',
|
||||||
|
hasApiKey: false,
|
||||||
|
},
|
||||||
|
installCommand,
|
||||||
|
loginCommand,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const provider = new GeminiProvider();
|
||||||
|
const status = await provider.detectInstallation();
|
||||||
|
const auth = await provider.checkAuth();
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
installed: status.installed,
|
||||||
|
version: status.version || null,
|
||||||
|
path: status.path || null,
|
||||||
|
auth: {
|
||||||
|
authenticated: auth.authenticated,
|
||||||
|
method: auth.method,
|
||||||
|
hasApiKey: auth.hasApiKey || false,
|
||||||
|
hasEnvApiKey: auth.hasEnvApiKey || false,
|
||||||
|
error: auth.error,
|
||||||
|
},
|
||||||
|
installCommand,
|
||||||
|
loginCommand,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Get Gemini status failed');
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: getErrorMessage(error),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,249 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /status endpoint - Get unified CLI availability status
|
|
||||||
*
|
|
||||||
* Returns the installation and authentication status of all supported CLIs
|
|
||||||
* in a single response. This is useful for quickly determining which
|
|
||||||
* providers are available without making multiple API calls.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { getClaudeStatus } from '../get-claude-status.js';
|
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
|
||||||
import { CursorProvider } from '../../../providers/cursor-provider.js';
|
|
||||||
import { CodexProvider } from '../../../providers/codex-provider.js';
|
|
||||||
import { OpencodeProvider } from '../../../providers/opencode-provider.js';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if a CLI has been manually disconnected from the app
|
|
||||||
*/
|
|
||||||
function isCliDisconnected(cliName: string): boolean {
|
|
||||||
try {
|
|
||||||
const projectRoot = process.cwd();
|
|
||||||
const markerPath = path.join(projectRoot, '.automaker', `.${cliName}-disconnected`);
|
|
||||||
return fs.existsSync(markerPath);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * CLI status response for a single provider
 */
interface CliStatusResponse {
  installed: boolean; // CLI binary was found on this machine
  version: string | null; // reported version, null when unknown or not installed
  path: string | null; // resolved binary path, null when not installed
  auth: {
    authenticated: boolean; // forced to false when `disconnected` is true
    method: string; // e.g. 'none', 'api_key_env', 'cli_authenticated'
  };
  disconnected: boolean; // user manually disconnected this CLI from the app
}
|
|
||||||
|
|
||||||
/**
 * Unified status response for all CLIs
 */
interface UnifiedStatusResponse {
  success: boolean;
  timestamp: string; // ISO-8601 time the statuses were sampled
  clis: {
    claude: CliStatusResponse | null;
    cursor: CliStatusResponse | null;
    codex: CliStatusResponse | null;
    opencode: CliStatusResponse | null;
  };
  availableProviders: string[]; // providers that are both installed and authenticated
  hasAnyAuthenticated: boolean; // true when availableProviders is non-empty
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get detailed Claude CLI status
|
|
||||||
*/
|
|
||||||
async function getClaudeCliStatus(): Promise<CliStatusResponse> {
|
|
||||||
const disconnected = isCliDisconnected('claude');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const status = await getClaudeStatus();
|
|
||||||
return {
|
|
||||||
installed: status.installed,
|
|
||||||
version: status.version || null,
|
|
||||||
path: status.path || null,
|
|
||||||
auth: {
|
|
||||||
authenticated: disconnected ? false : status.auth.authenticated,
|
|
||||||
method: disconnected ? 'none' : status.auth.method,
|
|
||||||
},
|
|
||||||
disconnected,
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return {
|
|
||||||
installed: false,
|
|
||||||
version: null,
|
|
||||||
path: null,
|
|
||||||
auth: { authenticated: false, method: 'none' },
|
|
||||||
disconnected,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get detailed Cursor CLI status
|
|
||||||
*/
|
|
||||||
async function getCursorCliStatus(): Promise<CliStatusResponse> {
|
|
||||||
const disconnected = isCliDisconnected('cursor');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const provider = new CursorProvider();
|
|
||||||
const [installed, version, auth] = await Promise.all([
|
|
||||||
provider.isInstalled(),
|
|
||||||
provider.getVersion(),
|
|
||||||
provider.checkAuth(),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const cliPath = installed ? provider.getCliPath() : null;
|
|
||||||
|
|
||||||
return {
|
|
||||||
installed,
|
|
||||||
version: version || null,
|
|
||||||
path: cliPath,
|
|
||||||
auth: {
|
|
||||||
authenticated: disconnected ? false : auth.authenticated,
|
|
||||||
method: disconnected ? 'none' : auth.method,
|
|
||||||
},
|
|
||||||
disconnected,
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return {
|
|
||||||
installed: false,
|
|
||||||
version: null,
|
|
||||||
path: null,
|
|
||||||
auth: { authenticated: false, method: 'none' },
|
|
||||||
disconnected,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get detailed Codex CLI status
|
|
||||||
*/
|
|
||||||
async function getCodexCliStatus(): Promise<CliStatusResponse> {
|
|
||||||
const disconnected = isCliDisconnected('codex');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const provider = new CodexProvider();
|
|
||||||
const status = await provider.detectInstallation();
|
|
||||||
|
|
||||||
let authMethod = 'none';
|
|
||||||
if (!disconnected && status.authenticated) {
|
|
||||||
authMethod = status.hasApiKey ? 'api_key_env' : 'cli_authenticated';
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
installed: status.installed,
|
|
||||||
version: status.version || null,
|
|
||||||
path: status.path || null,
|
|
||||||
auth: {
|
|
||||||
authenticated: disconnected ? false : status.authenticated || false,
|
|
||||||
method: authMethod,
|
|
||||||
},
|
|
||||||
disconnected,
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return {
|
|
||||||
installed: false,
|
|
||||||
version: null,
|
|
||||||
path: null,
|
|
||||||
auth: { authenticated: false, method: 'none' },
|
|
||||||
disconnected,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get detailed OpenCode CLI status
|
|
||||||
*/
|
|
||||||
async function getOpencodeCliStatus(): Promise<CliStatusResponse> {
|
|
||||||
try {
|
|
||||||
const provider = new OpencodeProvider();
|
|
||||||
const status = await provider.detectInstallation();
|
|
||||||
|
|
||||||
let authMethod = 'none';
|
|
||||||
if (status.authenticated) {
|
|
||||||
authMethod = status.hasApiKey ? 'api_key_env' : 'cli_authenticated';
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
installed: status.installed,
|
|
||||||
version: status.version || null,
|
|
||||||
path: status.path || null,
|
|
||||||
auth: {
|
|
||||||
authenticated: status.authenticated || false,
|
|
||||||
method: authMethod,
|
|
||||||
},
|
|
||||||
disconnected: false, // OpenCode doesn't have disconnect feature
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return {
|
|
||||||
installed: false,
|
|
||||||
version: null,
|
|
||||||
path: null,
|
|
||||||
auth: { authenticated: false, method: 'none' },
|
|
||||||
disconnected: false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates handler for GET /api/setup/status
|
|
||||||
* Returns unified CLI availability status for all providers
|
|
||||||
*/
|
|
||||||
export function createStatusHandler() {
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
// Fetch all CLI statuses in parallel for performance
|
|
||||||
const [claude, cursor, codex, opencode] = await Promise.all([
|
|
||||||
getClaudeCliStatus(),
|
|
||||||
getCursorCliStatus(),
|
|
||||||
getCodexCliStatus(),
|
|
||||||
getOpencodeCliStatus(),
|
|
||||||
]);
|
|
||||||
|
|
||||||
// Determine which providers are available (installed and authenticated)
|
|
||||||
const availableProviders: string[] = [];
|
|
||||||
if (claude.installed && claude.auth.authenticated) {
|
|
||||||
availableProviders.push('claude');
|
|
||||||
}
|
|
||||||
if (cursor.installed && cursor.auth.authenticated) {
|
|
||||||
availableProviders.push('cursor');
|
|
||||||
}
|
|
||||||
if (codex.installed && codex.auth.authenticated) {
|
|
||||||
availableProviders.push('codex');
|
|
||||||
}
|
|
||||||
if (opencode.installed && opencode.auth.authenticated) {
|
|
||||||
availableProviders.push('opencode');
|
|
||||||
}
|
|
||||||
|
|
||||||
const response: UnifiedStatusResponse = {
|
|
||||||
success: true,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
clis: {
|
|
||||||
claude,
|
|
||||||
cursor,
|
|
||||||
codex,
|
|
||||||
opencode,
|
|
||||||
},
|
|
||||||
availableProviders,
|
|
||||||
hasAnyAuthenticated: availableProviders.length > 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
res.json(response);
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Get unified CLI status failed');
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,163 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /verify-coderabbit-auth endpoint - Verify CodeRabbit authentication
|
|
||||||
* Validates API key format and optionally tests the connection
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { spawn } from 'child_process';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { AuthRateLimiter, validateApiKey } from '../../../lib/auth-utils.js';
|
|
||||||
|
|
||||||
const logger = createLogger('Setup');
|
|
||||||
const rateLimiter = new AuthRateLimiter();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Test CodeRabbit CLI authentication by running a simple command
|
|
||||||
*/
|
|
||||||
async function testCodeRabbitCli(
|
|
||||||
apiKey?: string
|
|
||||||
): Promise<{ authenticated: boolean; error?: string }> {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
// Set up environment with API key if provided
|
|
||||||
const env = { ...process.env };
|
|
||||||
if (apiKey) {
|
|
||||||
env.CODERABBIT_API_KEY = apiKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to run coderabbit auth status to verify auth
|
|
||||||
const child = spawn('coderabbit', ['auth', 'status'], {
|
|
||||||
stdio: ['pipe', 'pipe', 'pipe'],
|
|
||||||
env,
|
|
||||||
timeout: 10000,
|
|
||||||
});
|
|
||||||
|
|
||||||
let stdout = '';
|
|
||||||
let stderr = '';
|
|
||||||
|
|
||||||
child.stdout?.on('data', (data) => {
|
|
||||||
stdout += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.stderr?.on('data', (data) => {
|
|
||||||
stderr += data.toString();
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('close', (code) => {
|
|
||||||
if (code === 0) {
|
|
||||||
// Check output for authentication status
|
|
||||||
const output = stdout.toLowerCase() + stderr.toLowerCase();
|
|
||||||
if (
|
|
||||||
output.includes('authenticated') ||
|
|
||||||
output.includes('logged in') ||
|
|
||||||
output.includes('valid')
|
|
||||||
) {
|
|
||||||
resolve({ authenticated: true });
|
|
||||||
} else if (output.includes('not authenticated') || output.includes('not logged in')) {
|
|
||||||
resolve({ authenticated: false, error: 'CodeRabbit CLI is not authenticated.' });
|
|
||||||
} else {
|
|
||||||
// Command succeeded, assume authenticated
|
|
||||||
resolve({ authenticated: true });
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Command failed
|
|
||||||
const errorMsg = stderr || stdout || 'CodeRabbit CLI authentication check failed.';
|
|
||||||
resolve({ authenticated: false, error: errorMsg.trim() });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
child.on('error', (err) => {
|
|
||||||
// CodeRabbit CLI not installed or other error
|
|
||||||
resolve({ authenticated: false, error: `CodeRabbit CLI error: ${err.message}` });
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate CodeRabbit API key format
|
|
||||||
* CodeRabbit API keys typically start with 'cr-'
|
|
||||||
*/
|
|
||||||
function validateCodeRabbitKey(apiKey: string): { isValid: boolean; error?: string } {
|
|
||||||
if (!apiKey || apiKey.trim().length === 0) {
|
|
||||||
return { isValid: false, error: 'API key cannot be empty.' };
|
|
||||||
}
|
|
||||||
|
|
||||||
// CodeRabbit API keys typically start with 'cr-'
|
|
||||||
if (!apiKey.startsWith('cr-')) {
|
|
||||||
return {
|
|
||||||
isValid: false,
|
|
||||||
error: 'Invalid CodeRabbit API key format. Keys should start with "cr-".',
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (apiKey.length < 10) {
|
|
||||||
return { isValid: false, error: 'API key is too short.' };
|
|
||||||
}
|
|
||||||
|
|
||||||
return { isValid: true };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createVerifyCodeRabbitAuthHandler() {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { authMethod, apiKey } = req.body as {
|
|
||||||
authMethod?: 'cli' | 'api_key';
|
|
||||||
apiKey?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
// Rate limiting to prevent abuse
|
|
||||||
const clientIp = req.ip || req.socket.remoteAddress || 'unknown';
|
|
||||||
if (!rateLimiter.canAttempt(clientIp)) {
|
|
||||||
const resetTime = rateLimiter.getResetTime(clientIp);
|
|
||||||
res.status(429).json({
|
|
||||||
success: false,
|
|
||||||
authenticated: false,
|
|
||||||
error: 'Too many authentication attempts. Please try again later.',
|
|
||||||
resetTime,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[Setup] Verifying CodeRabbit authentication using method: ${authMethod || 'auto'}${apiKey ? ' (with provided key)' : ''}`
|
|
||||||
);
|
|
||||||
|
|
||||||
// For API key verification
|
|
||||||
if (authMethod === 'api_key' && apiKey) {
|
|
||||||
// Validate key format
|
|
||||||
const validation = validateCodeRabbitKey(apiKey);
|
|
||||||
if (!validation.isValid) {
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
authenticated: false,
|
|
||||||
error: validation.error,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test the CLI with the provided API key
|
|
||||||
const result = await testCodeRabbitCli(apiKey);
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
authenticated: result.authenticated,
|
|
||||||
error: result.error,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// For CLI auth or auto detection
|
|
||||||
const result = await testCodeRabbitCli();
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
authenticated: result.authenticated,
|
|
||||||
error: result.error,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[Setup] Verify CodeRabbit auth endpoint error:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
authenticated: false,
|
|
||||||
error: error instanceof Error ? error.message : 'Verification failed',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
/**
|
|
||||||
* Common utilities and state for suggestions routes
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
|
|
||||||
|
|
||||||
const logger = createLogger('Suggestions');
|
|
||||||
|
|
||||||
// Shared state for tracking generation status - private
|
|
||||||
let isRunning = false;
|
|
||||||
let currentAbortController: AbortController | null = null;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current running state
|
|
||||||
*/
|
|
||||||
export function getSuggestionsStatus(): {
|
|
||||||
isRunning: boolean;
|
|
||||||
currentAbortController: AbortController | null;
|
|
||||||
} {
|
|
||||||
return { isRunning, currentAbortController };
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set the running state and abort controller
|
|
||||||
*/
|
|
||||||
export function setRunningState(running: boolean, controller: AbortController | null = null): void {
|
|
||||||
isRunning = running;
|
|
||||||
currentAbortController = controller;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Re-export shared utilities
|
|
||||||
export { getErrorMessageShared as getErrorMessage };
|
|
||||||
export const logError = createLogError(logger);
|
|
||||||
@@ -1,296 +0,0 @@
|
|||||||
/**
|
|
||||||
* Business logic for generating suggestions
|
|
||||||
*
|
|
||||||
* Model is configurable via phaseModels.suggestionsModel in settings
|
|
||||||
* (AI Suggestions in the UI). Supports both Claude and Cursor models.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { EventEmitter } from '../../lib/events.js';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import { DEFAULT_PHASE_MODELS, isCursorModel, type ThinkingLevel } from '@automaker/types';
|
|
||||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
|
||||||
import { extractJsonWithArray } from '../../lib/json-extractor.js';
|
|
||||||
import { streamingQuery } from '../../providers/simple-query-service.js';
|
|
||||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
|
||||||
import { getAppSpecPath } from '@automaker/platform';
|
|
||||||
import * as secureFs from '../../lib/secure-fs.js';
|
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
|
||||||
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
|
|
||||||
|
|
||||||
const logger = createLogger('Suggestions');
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Extract implemented features from app_spec.txt XML content
|
|
||||||
*
|
|
||||||
* Note: This uses regex-based parsing which is sufficient for our controlled
|
|
||||||
* XML structure. If more complex XML parsing is needed in the future, consider
|
|
||||||
* using a library like 'fast-xml-parser' or 'xml2js'.
|
|
||||||
*/
|
|
||||||
function extractImplementedFeatures(specContent: string): string[] {
|
|
||||||
const features: string[] = [];
|
|
||||||
|
|
||||||
// Match <implemented_features>...</implemented_features> section
|
|
||||||
const implementedMatch = specContent.match(
|
|
||||||
/<implemented_features>([\s\S]*?)<\/implemented_features>/
|
|
||||||
);
|
|
||||||
|
|
||||||
if (implementedMatch) {
|
|
||||||
const implementedSection = implementedMatch[1];
|
|
||||||
|
|
||||||
// Extract feature names from <name>...</name> tags using matchAll
|
|
||||||
const nameRegex = /<name>(.*?)<\/name>/g;
|
|
||||||
const matches = implementedSection.matchAll(nameRegex);
|
|
||||||
|
|
||||||
for (const match of matches) {
|
|
||||||
features.push(match[1].trim());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return features;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Load existing context (app spec and backlog features) to avoid duplicates
|
|
||||||
*/
|
|
||||||
async function loadExistingContext(projectPath: string): Promise<string> {
|
|
||||||
let context = '';
|
|
||||||
|
|
||||||
// 1. Read app_spec.txt for implemented features
|
|
||||||
try {
|
|
||||||
const appSpecPath = getAppSpecPath(projectPath);
|
|
||||||
const specContent = (await secureFs.readFile(appSpecPath, 'utf-8')) as string;
|
|
||||||
|
|
||||||
if (specContent && specContent.trim().length > 0) {
|
|
||||||
const implementedFeatures = extractImplementedFeatures(specContent);
|
|
||||||
|
|
||||||
if (implementedFeatures.length > 0) {
|
|
||||||
context += '\n\n=== ALREADY IMPLEMENTED FEATURES ===\n';
|
|
||||||
context += 'These features are already implemented in the codebase:\n';
|
|
||||||
context += implementedFeatures.map((feature) => `- ${feature}`).join('\n') + '\n';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// app_spec.txt doesn't exist or can't be read - that's okay
|
|
||||||
logger.debug('No app_spec.txt found or error reading it:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Load existing features from backlog
|
|
||||||
try {
|
|
||||||
const featureLoader = new FeatureLoader();
|
|
||||||
const features = await featureLoader.getAll(projectPath);
|
|
||||||
|
|
||||||
if (features.length > 0) {
|
|
||||||
context += '\n\n=== EXISTING FEATURES IN BACKLOG ===\n';
|
|
||||||
context += 'These features are already planned or in progress:\n';
|
|
||||||
context +=
|
|
||||||
features
|
|
||||||
.map((feature) => {
|
|
||||||
const status = feature.status || 'pending';
|
|
||||||
const title = feature.title || feature.description?.substring(0, 50) || 'Untitled';
|
|
||||||
return `- ${title} (${status})`;
|
|
||||||
})
|
|
||||||
.join('\n') + '\n';
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// Features directory doesn't exist or can't be read - that's okay
|
|
||||||
logger.debug('No features found or error loading them:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
return context;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * JSON Schema for suggestions output
 *
 * Describes the object the model must return: a `suggestions` array of
 * entries, each with category/description/priority/reasoning.
 */
const suggestionsSchema = {
  type: 'object',
  properties: {
    suggestions: {
      type: 'array',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string' },
          category: { type: 'string' },
          description: { type: 'string' },
          // priority is constrained to the 1..3 range by the schema
          priority: {
            type: 'number',
            minimum: 1,
            maximum: 3,
          },
          reasoning: { type: 'string' },
        },
        // note: `id` is not required, only the four fields below
        required: ['category', 'description', 'priority', 'reasoning'],
      },
    },
  },
  required: ['suggestions'],
  additionalProperties: false,
};
|
|
||||||
|
|
||||||
export async function generateSuggestions(
|
|
||||||
projectPath: string,
|
|
||||||
suggestionType: string,
|
|
||||||
events: EventEmitter,
|
|
||||||
abortController: AbortController,
|
|
||||||
settingsService?: SettingsService,
|
|
||||||
modelOverride?: string,
|
|
||||||
thinkingLevelOverride?: ThinkingLevel
|
|
||||||
): Promise<void> {
|
|
||||||
// Get customized prompts from settings
|
|
||||||
const prompts = await getPromptCustomization(settingsService, '[Suggestions]');
|
|
||||||
|
|
||||||
// Map suggestion types to their prompts
|
|
||||||
const typePrompts: Record<string, string> = {
|
|
||||||
features: prompts.suggestions.featuresPrompt,
|
|
||||||
refactoring: prompts.suggestions.refactoringPrompt,
|
|
||||||
security: prompts.suggestions.securityPrompt,
|
|
||||||
performance: prompts.suggestions.performancePrompt,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Load existing context to avoid duplicates
|
|
||||||
const existingContext = await loadExistingContext(projectPath);
|
|
||||||
|
|
||||||
const prompt = `${typePrompts[suggestionType] || typePrompts.features}
|
|
||||||
${existingContext}
|
|
||||||
|
|
||||||
${existingContext ? '\nIMPORTANT: Do NOT suggest features that are already implemented or already in the backlog above. Focus on NEW ideas that complement what already exists.\n' : ''}
|
|
||||||
${prompts.suggestions.baseTemplate}`;
|
|
||||||
|
|
||||||
// Don't send initial message - let the agent output speak for itself
|
|
||||||
// The first agent message will be captured as an info entry
|
|
||||||
|
|
||||||
// Load autoLoadClaudeMd setting
|
|
||||||
const autoLoadClaudeMd = await getAutoLoadClaudeMdSetting(
|
|
||||||
projectPath,
|
|
||||||
settingsService,
|
|
||||||
'[Suggestions]'
|
|
||||||
);
|
|
||||||
|
|
||||||
// Get model from phase settings (AI Suggestions = suggestionsModel)
|
|
||||||
// Use override if provided, otherwise fall back to settings
|
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
|
||||||
let model: string;
|
|
||||||
let thinkingLevel: ThinkingLevel | undefined;
|
|
||||||
|
|
||||||
if (modelOverride) {
|
|
||||||
// Use explicit override - resolve the model string
|
|
||||||
const resolved = resolvePhaseModel({
|
|
||||||
model: modelOverride,
|
|
||||||
thinkingLevel: thinkingLevelOverride,
|
|
||||||
});
|
|
||||||
model = resolved.model;
|
|
||||||
thinkingLevel = resolved.thinkingLevel;
|
|
||||||
} else {
|
|
||||||
// Use settings-based model
|
|
||||||
const phaseModelEntry =
|
|
||||||
settings?.phaseModels?.suggestionsModel || DEFAULT_PHASE_MODELS.suggestionsModel;
|
|
||||||
const resolved = resolvePhaseModel(phaseModelEntry);
|
|
||||||
model = resolved.model;
|
|
||||||
thinkingLevel = resolved.thinkingLevel;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('[Suggestions] Using model:', model);
|
|
||||||
|
|
||||||
let responseText = '';
|
|
||||||
|
|
||||||
// Determine if we should use structured output (Claude supports it, Cursor doesn't)
|
|
||||||
const useStructuredOutput = !isCursorModel(model);
|
|
||||||
|
|
||||||
// Build the final prompt - for Cursor, include JSON schema instructions
|
|
||||||
let finalPrompt = prompt;
|
|
||||||
if (!useStructuredOutput) {
|
|
||||||
finalPrompt = `${prompt}
|
|
||||||
|
|
||||||
CRITICAL INSTRUCTIONS:
|
|
||||||
1. DO NOT write any files. Return the JSON in your response only.
|
|
||||||
2. After analyzing the project, respond with ONLY a JSON object - no explanations, no markdown, just raw JSON.
|
|
||||||
3. The JSON must match this exact schema:
|
|
||||||
|
|
||||||
${JSON.stringify(suggestionsSchema, null, 2)}
|
|
||||||
|
|
||||||
Your entire response should be valid JSON starting with { and ending with }. No text before or after.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use streamingQuery with event callbacks
|
|
||||||
const result = await streamingQuery({
|
|
||||||
prompt: finalPrompt,
|
|
||||||
model,
|
|
||||||
cwd: projectPath,
|
|
||||||
maxTurns: 250,
|
|
||||||
allowedTools: ['Read', 'Glob', 'Grep'],
|
|
||||||
abortController,
|
|
||||||
thinkingLevel,
|
|
||||||
readOnly: true, // Suggestions only reads code, doesn't write
|
|
||||||
settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
|
|
||||||
outputFormat: useStructuredOutput
|
|
||||||
? {
|
|
||||||
type: 'json_schema',
|
|
||||||
schema: suggestionsSchema,
|
|
||||||
}
|
|
||||||
: undefined,
|
|
||||||
onText: (text) => {
|
|
||||||
responseText += text;
|
|
||||||
events.emit('suggestions:event', {
|
|
||||||
type: 'suggestions_progress',
|
|
||||||
content: text,
|
|
||||||
});
|
|
||||||
},
|
|
||||||
onToolUse: (tool, input) => {
|
|
||||||
events.emit('suggestions:event', {
|
|
||||||
type: 'suggestions_tool',
|
|
||||||
tool,
|
|
||||||
input,
|
|
||||||
});
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// Use structured output if available, otherwise fall back to parsing text
|
|
||||||
try {
|
|
||||||
let structuredOutput: { suggestions: Array<Record<string, unknown>> } | null = null;
|
|
||||||
|
|
||||||
if (result.structured_output) {
|
|
||||||
structuredOutput = result.structured_output as {
|
|
||||||
suggestions: Array<Record<string, unknown>>;
|
|
||||||
};
|
|
||||||
logger.debug('Received structured output:', structuredOutput);
|
|
||||||
} else if (responseText) {
|
|
||||||
// Fallback: try to parse from text using shared extraction utility
|
|
||||||
logger.warn('No structured output received, attempting to parse from text');
|
|
||||||
structuredOutput = extractJsonWithArray<{ suggestions: Array<Record<string, unknown>> }>(
|
|
||||||
responseText,
|
|
||||||
'suggestions',
|
|
||||||
{ logger }
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (structuredOutput && structuredOutput.suggestions) {
|
|
||||||
// Use structured output directly
|
|
||||||
events.emit('suggestions:event', {
|
|
||||||
type: 'suggestions_complete',
|
|
||||||
suggestions: structuredOutput.suggestions.map((s: Record<string, unknown>, i: number) => ({
|
|
||||||
...s,
|
|
||||||
id: s.id || `suggestion-${Date.now()}-${i}`,
|
|
||||||
})),
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
throw new Error('No valid JSON found in response');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// Log the parsing error for debugging
|
|
||||||
logger.error('Failed to parse suggestions JSON from AI response:', error);
|
|
||||||
// Return generic suggestions if parsing fails
|
|
||||||
events.emit('suggestions:event', {
|
|
||||||
type: 'suggestions_complete',
|
|
||||||
suggestions: [
|
|
||||||
{
|
|
||||||
id: `suggestion-${Date.now()}-0`,
|
|
||||||
category: 'Analysis',
|
|
||||||
description: 'Review the AI analysis output for insights',
|
|
||||||
priority: 1,
|
|
||||||
reasoning: 'The AI provided analysis but suggestions need manual review',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
/**
|
|
||||||
* Suggestions routes - HTTP API for AI-powered feature suggestions
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { Router } from 'express';
|
|
||||||
import type { EventEmitter } from '../../lib/events.js';
|
|
||||||
import { validatePathParams } from '../../middleware/validate-paths.js';
|
|
||||||
import { createGenerateHandler } from './routes/generate.js';
|
|
||||||
import { createStopHandler } from './routes/stop.js';
|
|
||||||
import { createStatusHandler } from './routes/status.js';
|
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
|
||||||
|
|
||||||
export function createSuggestionsRoutes(
|
|
||||||
events: EventEmitter,
|
|
||||||
settingsService?: SettingsService
|
|
||||||
): Router {
|
|
||||||
const router = Router();
|
|
||||||
|
|
||||||
router.post(
|
|
||||||
'/generate',
|
|
||||||
validatePathParams('projectPath'),
|
|
||||||
createGenerateHandler(events, settingsService)
|
|
||||||
);
|
|
||||||
router.post('/stop', createStopHandler());
|
|
||||||
router.get('/status', createStatusHandler());
|
|
||||||
|
|
||||||
return router;
|
|
||||||
}
|
|
||||||
@@ -1,75 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /generate endpoint - Generate suggestions
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import type { EventEmitter } from '../../../lib/events.js';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
|
||||||
import type { ThinkingLevel } from '@automaker/types';
|
|
||||||
import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
|
|
||||||
import { generateSuggestions } from '../generate-suggestions.js';
|
|
||||||
import type { SettingsService } from '../../../services/settings-service.js';
|
|
||||||
|
|
||||||
const logger = createLogger('Suggestions');
|
|
||||||
|
|
||||||
export function createGenerateHandler(events: EventEmitter, settingsService?: SettingsService) {
|
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const {
|
|
||||||
projectPath,
|
|
||||||
suggestionType = 'features',
|
|
||||||
model,
|
|
||||||
thinkingLevel,
|
|
||||||
} = req.body as {
|
|
||||||
projectPath: string;
|
|
||||||
suggestionType?: string;
|
|
||||||
model?: string;
|
|
||||||
thinkingLevel?: ThinkingLevel;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!projectPath) {
|
|
||||||
res.status(400).json({ success: false, error: 'projectPath required' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { isRunning } = getSuggestionsStatus();
|
|
||||||
if (isRunning) {
|
|
||||||
res.json({
|
|
||||||
success: false,
|
|
||||||
error: 'Suggestions generation is already running',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
setRunningState(true);
|
|
||||||
const abortController = new AbortController();
|
|
||||||
setRunningState(true, abortController);
|
|
||||||
|
|
||||||
// Start generation in background
|
|
||||||
generateSuggestions(
|
|
||||||
projectPath,
|
|
||||||
suggestionType,
|
|
||||||
events,
|
|
||||||
abortController,
|
|
||||||
settingsService,
|
|
||||||
model,
|
|
||||||
thinkingLevel
|
|
||||||
)
|
|
||||||
.catch((error) => {
|
|
||||||
logError(error, 'Generate suggestions failed (background)');
|
|
||||||
events.emit('suggestions:event', {
|
|
||||||
type: 'suggestions_error',
|
|
||||||
error: getErrorMessage(error),
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.finally(() => {
|
|
||||||
setRunningState(false, null);
|
|
||||||
});
|
|
||||||
|
|
||||||
res.json({ success: true });
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Generate suggestions failed');
|
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /status endpoint - Get status
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { getSuggestionsStatus, getErrorMessage, logError } from '../common.js';
|
|
||||||
|
|
||||||
export function createStatusHandler() {
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { isRunning } = getSuggestionsStatus();
|
|
||||||
res.json({ success: true, isRunning });
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Get status failed');
|
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /stop endpoint - Stop suggestions generation
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
|
||||||
import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
|
|
||||||
|
|
||||||
export function createStopHandler() {
|
|
||||||
return async (_req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { currentAbortController } = getSuggestionsStatus();
|
|
||||||
if (currentAbortController) {
|
|
||||||
currentAbortController.abort();
|
|
||||||
}
|
|
||||||
setRunningState(false, null);
|
|
||||||
res.json({ success: true });
|
|
||||||
} catch (error) {
|
|
||||||
logError(error, 'Stop suggestions failed');
|
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -29,18 +29,31 @@ import {
|
|||||||
createGetAvailableEditorsHandler,
|
createGetAvailableEditorsHandler,
|
||||||
createRefreshEditorsHandler,
|
createRefreshEditorsHandler,
|
||||||
} from './routes/open-in-editor.js';
|
} from './routes/open-in-editor.js';
|
||||||
|
import {
|
||||||
|
createOpenInTerminalHandler,
|
||||||
|
createGetAvailableTerminalsHandler,
|
||||||
|
createGetDefaultTerminalHandler,
|
||||||
|
createRefreshTerminalsHandler,
|
||||||
|
createOpenInExternalTerminalHandler,
|
||||||
|
} from './routes/open-in-terminal.js';
|
||||||
import { createInitGitHandler } from './routes/init-git.js';
|
import { createInitGitHandler } from './routes/init-git.js';
|
||||||
import { createMigrateHandler } from './routes/migrate.js';
|
import { createMigrateHandler } from './routes/migrate.js';
|
||||||
import { createStartDevHandler } from './routes/start-dev.js';
|
import { createStartDevHandler } from './routes/start-dev.js';
|
||||||
import { createStopDevHandler } from './routes/stop-dev.js';
|
import { createStopDevHandler } from './routes/stop-dev.js';
|
||||||
import { createListDevServersHandler } from './routes/list-dev-servers.js';
|
import { createListDevServersHandler } from './routes/list-dev-servers.js';
|
||||||
import { createGetDevServerLogsHandler } from './routes/dev-server-logs.js';
|
import { createGetDevServerLogsHandler } from './routes/dev-server-logs.js';
|
||||||
|
import { createStartTestsHandler } from './routes/start-tests.js';
|
||||||
|
import { createStopTestsHandler } from './routes/stop-tests.js';
|
||||||
|
import { createGetTestLogsHandler } from './routes/test-logs.js';
|
||||||
import {
|
import {
|
||||||
createGetInitScriptHandler,
|
createGetInitScriptHandler,
|
||||||
createPutInitScriptHandler,
|
createPutInitScriptHandler,
|
||||||
createDeleteInitScriptHandler,
|
createDeleteInitScriptHandler,
|
||||||
createRunInitScriptHandler,
|
createRunInitScriptHandler,
|
||||||
} from './routes/init-script.js';
|
} from './routes/init-script.js';
|
||||||
|
import { createDiscardChangesHandler } from './routes/discard-changes.js';
|
||||||
|
import { createListRemotesHandler } from './routes/list-remotes.js';
|
||||||
|
import { createAddRemoteHandler } from './routes/add-remote.js';
|
||||||
import type { SettingsService } from '../../services/settings-service.js';
|
import type { SettingsService } from '../../services/settings-service.js';
|
||||||
|
|
||||||
export function createWorktreeRoutes(
|
export function createWorktreeRoutes(
|
||||||
@@ -97,15 +110,31 @@ export function createWorktreeRoutes(
|
|||||||
);
|
);
|
||||||
router.post('/switch-branch', requireValidWorktree, createSwitchBranchHandler());
|
router.post('/switch-branch', requireValidWorktree, createSwitchBranchHandler());
|
||||||
router.post('/open-in-editor', validatePathParams('worktreePath'), createOpenInEditorHandler());
|
router.post('/open-in-editor', validatePathParams('worktreePath'), createOpenInEditorHandler());
|
||||||
|
router.post(
|
||||||
|
'/open-in-terminal',
|
||||||
|
validatePathParams('worktreePath'),
|
||||||
|
createOpenInTerminalHandler()
|
||||||
|
);
|
||||||
router.get('/default-editor', createGetDefaultEditorHandler());
|
router.get('/default-editor', createGetDefaultEditorHandler());
|
||||||
router.get('/available-editors', createGetAvailableEditorsHandler());
|
router.get('/available-editors', createGetAvailableEditorsHandler());
|
||||||
router.post('/refresh-editors', createRefreshEditorsHandler());
|
router.post('/refresh-editors', createRefreshEditorsHandler());
|
||||||
|
|
||||||
|
// External terminal routes
|
||||||
|
router.get('/available-terminals', createGetAvailableTerminalsHandler());
|
||||||
|
router.get('/default-terminal', createGetDefaultTerminalHandler());
|
||||||
|
router.post('/refresh-terminals', createRefreshTerminalsHandler());
|
||||||
|
router.post(
|
||||||
|
'/open-in-external-terminal',
|
||||||
|
validatePathParams('worktreePath'),
|
||||||
|
createOpenInExternalTerminalHandler()
|
||||||
|
);
|
||||||
|
|
||||||
router.post('/init-git', validatePathParams('projectPath'), createInitGitHandler());
|
router.post('/init-git', validatePathParams('projectPath'), createInitGitHandler());
|
||||||
router.post('/migrate', createMigrateHandler());
|
router.post('/migrate', createMigrateHandler());
|
||||||
router.post(
|
router.post(
|
||||||
'/start-dev',
|
'/start-dev',
|
||||||
validatePathParams('projectPath', 'worktreePath'),
|
validatePathParams('projectPath', 'worktreePath'),
|
||||||
createStartDevHandler()
|
createStartDevHandler(settingsService)
|
||||||
);
|
);
|
||||||
router.post('/stop-dev', createStopDevHandler());
|
router.post('/stop-dev', createStopDevHandler());
|
||||||
router.post('/list-dev-servers', createListDevServersHandler());
|
router.post('/list-dev-servers', createListDevServersHandler());
|
||||||
@@ -115,6 +144,15 @@ export function createWorktreeRoutes(
|
|||||||
createGetDevServerLogsHandler()
|
createGetDevServerLogsHandler()
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Test runner routes
|
||||||
|
router.post(
|
||||||
|
'/start-tests',
|
||||||
|
validatePathParams('worktreePath', 'projectPath?'),
|
||||||
|
createStartTestsHandler(settingsService)
|
||||||
|
);
|
||||||
|
router.post('/stop-tests', createStopTestsHandler());
|
||||||
|
router.get('/test-logs', validatePathParams('worktreePath?'), createGetTestLogsHandler());
|
||||||
|
|
||||||
// Init script routes
|
// Init script routes
|
||||||
router.get('/init-script', createGetInitScriptHandler());
|
router.get('/init-script', createGetInitScriptHandler());
|
||||||
router.put('/init-script', validatePathParams('projectPath'), createPutInitScriptHandler());
|
router.put('/init-script', validatePathParams('projectPath'), createPutInitScriptHandler());
|
||||||
@@ -125,5 +163,29 @@ export function createWorktreeRoutes(
|
|||||||
createRunInitScriptHandler(events)
|
createRunInitScriptHandler(events)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Discard changes route
|
||||||
|
router.post(
|
||||||
|
'/discard-changes',
|
||||||
|
validatePathParams('worktreePath'),
|
||||||
|
requireGitRepoOnly,
|
||||||
|
createDiscardChangesHandler()
|
||||||
|
);
|
||||||
|
|
||||||
|
// List remotes route
|
||||||
|
router.post(
|
||||||
|
'/list-remotes',
|
||||||
|
validatePathParams('worktreePath'),
|
||||||
|
requireValidWorktree,
|
||||||
|
createListRemotesHandler()
|
||||||
|
);
|
||||||
|
|
||||||
|
// Add remote route
|
||||||
|
router.post(
|
||||||
|
'/add-remote',
|
||||||
|
validatePathParams('worktreePath'),
|
||||||
|
requireGitRepoOnly,
|
||||||
|
createAddRemoteHandler()
|
||||||
|
);
|
||||||
|
|
||||||
return router;
|
return router;
|
||||||
}
|
}
|
||||||
|
|||||||
166
apps/server/src/routes/worktree/routes/add-remote.ts
Normal file
166
apps/server/src/routes/worktree/routes/add-remote.ts
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
/**
|
||||||
|
* POST /add-remote endpoint - Add a new remote to a git repository
|
||||||
|
*
|
||||||
|
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||||
|
* the requireValidWorktree middleware in index.ts
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { execFile } from 'child_process';
|
||||||
|
import { promisify } from 'util';
|
||||||
|
import { getErrorMessage, logWorktreeError } from '../common.js';
|
||||||
|
|
||||||
|
const execFileAsync = promisify(execFile);
|
||||||
|
|
||||||
|
/** Maximum allowed length for remote names */
|
||||||
|
const MAX_REMOTE_NAME_LENGTH = 250;
|
||||||
|
|
||||||
|
/** Maximum allowed length for remote URLs */
|
||||||
|
const MAX_REMOTE_URL_LENGTH = 2048;
|
||||||
|
|
||||||
|
/** Timeout for git fetch operations (30 seconds) */
|
||||||
|
const FETCH_TIMEOUT_MS = 30000;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate remote name - must be alphanumeric with dashes/underscores
|
||||||
|
* Git remote names have similar restrictions to branch names
|
||||||
|
*/
|
||||||
|
function isValidRemoteName(name: string): boolean {
|
||||||
|
// Remote names should be alphanumeric, may contain dashes, underscores, periods
|
||||||
|
// Cannot start with a dash or period, cannot be empty
|
||||||
|
if (!name || name.length === 0 || name.length > MAX_REMOTE_NAME_LENGTH) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return /^[a-zA-Z0-9][a-zA-Z0-9._-]*$/.test(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate remote URL - basic validation for git remote URLs
|
||||||
|
* Supports HTTPS, SSH, and git:// protocols
|
||||||
|
*/
|
||||||
|
function isValidRemoteUrl(url: string): boolean {
|
||||||
|
if (!url || url.length === 0 || url.length > MAX_REMOTE_URL_LENGTH) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// Support common git URL formats:
|
||||||
|
// - https://github.com/user/repo.git
|
||||||
|
// - git@github.com:user/repo.git
|
||||||
|
// - git://github.com/user/repo.git
|
||||||
|
// - ssh://git@github.com/user/repo.git
|
||||||
|
const httpsPattern = /^https?:\/\/.+/;
|
||||||
|
const sshPattern = /^[a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+:.+/;
|
||||||
|
const gitProtocolPattern = /^git:\/\/.+/;
|
||||||
|
const sshProtocolPattern = /^ssh:\/\/.+/;
|
||||||
|
|
||||||
|
return (
|
||||||
|
httpsPattern.test(url) ||
|
||||||
|
sshPattern.test(url) ||
|
||||||
|
gitProtocolPattern.test(url) ||
|
||||||
|
sshProtocolPattern.test(url)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createAddRemoteHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { worktreePath, remoteName, remoteUrl } = req.body as {
|
||||||
|
worktreePath: string;
|
||||||
|
remoteName: string;
|
||||||
|
remoteUrl: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Validate required fields
|
||||||
|
const requiredFields = { worktreePath, remoteName, remoteUrl };
|
||||||
|
for (const [key, value] of Object.entries(requiredFields)) {
|
||||||
|
if (!value) {
|
||||||
|
res.status(400).json({ success: false, error: `${key} required` });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate remote name
|
||||||
|
if (!isValidRemoteName(remoteName)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error:
|
||||||
|
'Invalid remote name. Must start with alphanumeric character and contain only letters, numbers, dashes, underscores, or periods.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate remote URL
|
||||||
|
if (!isValidRemoteUrl(remoteUrl)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid remote URL. Must be a valid git URL (HTTPS, SSH, or git:// protocol).',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if remote already exists
|
||||||
|
try {
|
||||||
|
const { stdout: existingRemotes } = await execFileAsync('git', ['remote'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
const remoteNames = existingRemotes
|
||||||
|
.trim()
|
||||||
|
.split('\n')
|
||||||
|
.filter((r) => r.trim());
|
||||||
|
if (remoteNames.includes(remoteName)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: `Remote '${remoteName}' already exists`,
|
||||||
|
code: 'REMOTE_EXISTS',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// If git remote fails, continue with adding the remote. Log for debugging.
|
||||||
|
logWorktreeError(
|
||||||
|
error,
|
||||||
|
'Checking for existing remotes failed, proceeding to add.',
|
||||||
|
worktreePath
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the remote using execFile with array arguments to prevent command injection
|
||||||
|
await execFileAsync('git', ['remote', 'add', remoteName, remoteUrl], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Optionally fetch from the new remote to get its branches
|
||||||
|
let fetchSucceeded = false;
|
||||||
|
try {
|
||||||
|
await execFileAsync('git', ['fetch', remoteName, '--quiet'], {
|
||||||
|
cwd: worktreePath,
|
||||||
|
timeout: FETCH_TIMEOUT_MS,
|
||||||
|
});
|
||||||
|
fetchSucceeded = true;
|
||||||
|
} catch (fetchError) {
|
||||||
|
// Fetch failed (maybe offline or invalid URL), but remote was added successfully
|
||||||
|
logWorktreeError(
|
||||||
|
fetchError,
|
||||||
|
`Fetch from new remote '${remoteName}' failed (remote added successfully)`,
|
||||||
|
worktreePath
|
||||||
|
);
|
||||||
|
fetchSucceeded = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
remoteName,
|
||||||
|
remoteUrl,
|
||||||
|
fetched: fetchSucceeded,
|
||||||
|
message: fetchSucceeded
|
||||||
|
? `Successfully added remote '${remoteName}' and fetched its branches`
|
||||||
|
: `Successfully added remote '${remoteName}' (fetch failed - you may need to fetch manually)`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const worktreePath = req.body?.worktreePath;
|
||||||
|
logWorktreeError(error, 'Add remote failed', worktreePath);
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -39,7 +39,10 @@ export function createDiffsHandler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Git worktrees are stored in project directory
|
// Git worktrees are stored in project directory
|
||||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
// Sanitize featureId the same way it's sanitized when creating worktrees
|
||||||
|
// (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
|
||||||
|
const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
|
||||||
|
const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Check if worktree exists
|
// Check if worktree exists
|
||||||
|
|||||||
112
apps/server/src/routes/worktree/routes/discard-changes.ts
Normal file
112
apps/server/src/routes/worktree/routes/discard-changes.ts
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
/**
|
||||||
|
* POST /discard-changes endpoint - Discard all uncommitted changes in a worktree
|
||||||
|
*
|
||||||
|
* This performs a destructive operation that:
|
||||||
|
* 1. Resets staged changes (git reset HEAD)
|
||||||
|
* 2. Discards modified tracked files (git checkout .)
|
||||||
|
* 3. Removes untracked files and directories (git clean -fd)
|
||||||
|
*
|
||||||
|
* Note: Git repository validation (isGitRepo) is handled by
|
||||||
|
* the requireGitRepoOnly middleware in index.ts
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { exec } from 'child_process';
|
||||||
|
import { promisify } from 'util';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
|
export function createDiscardChangesHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { worktreePath } = req.body as {
|
||||||
|
worktreePath: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!worktreePath) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath required',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for uncommitted changes first
|
||||||
|
const { stdout: status } = await execAsync('git status --porcelain', {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!status.trim()) {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
discarded: false,
|
||||||
|
message: 'No changes to discard',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count the files that will be affected
|
||||||
|
const lines = status.trim().split('\n').filter(Boolean);
|
||||||
|
const fileCount = lines.length;
|
||||||
|
|
||||||
|
// Get branch name before discarding
|
||||||
|
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
const branchName = branchOutput.trim();
|
||||||
|
|
||||||
|
// Discard all changes:
|
||||||
|
// 1. Reset any staged changes
|
||||||
|
await execAsync('git reset HEAD', { cwd: worktreePath }).catch(() => {
|
||||||
|
// Ignore errors - might fail if there's nothing staged
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2. Discard changes in tracked files
|
||||||
|
await execAsync('git checkout .', { cwd: worktreePath }).catch(() => {
|
||||||
|
// Ignore errors - might fail if there are no tracked changes
|
||||||
|
});
|
||||||
|
|
||||||
|
// 3. Remove untracked files and directories
|
||||||
|
await execAsync('git clean -fd', { cwd: worktreePath }).catch(() => {
|
||||||
|
// Ignore errors - might fail if there are no untracked files
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify all changes were discarded
|
||||||
|
const { stdout: finalStatus } = await execAsync('git status --porcelain', {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (finalStatus.trim()) {
|
||||||
|
// Some changes couldn't be discarded (possibly ignored files or permission issues)
|
||||||
|
const remainingCount = finalStatus.trim().split('\n').filter(Boolean).length;
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
discarded: true,
|
||||||
|
filesDiscarded: fileCount - remainingCount,
|
||||||
|
filesRemaining: remainingCount,
|
||||||
|
branch: branchName,
|
||||||
|
message: `Discarded ${fileCount - remainingCount} files, ${remainingCount} files could not be removed`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
discarded: true,
|
||||||
|
filesDiscarded: fileCount,
|
||||||
|
filesRemaining: 0,
|
||||||
|
branch: branchName,
|
||||||
|
message: `Discarded ${fileCount} ${fileCount === 1 ? 'file' : 'files'}`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Discard changes failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -37,7 +37,10 @@ export function createFileDiffHandler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Git worktrees are stored in project directory
|
// Git worktrees are stored in project directory
|
||||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
// Sanitize featureId the same way it's sanitized when creating worktrees
|
||||||
|
// (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
|
||||||
|
const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
|
||||||
|
const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await secureFs.access(worktreePath);
|
await secureFs.access(worktreePath);
|
||||||
|
|||||||
@@ -10,14 +10,14 @@ import { exec } from 'child_process';
|
|||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
import { existsSync } from 'fs';
|
import { existsSync } from 'fs';
|
||||||
import { join } from 'path';
|
import { join } from 'path';
|
||||||
import { query } from '@anthropic-ai/claude-agent-sdk';
|
|
||||||
import { createLogger } from '@automaker/utils';
|
import { createLogger } from '@automaker/utils';
|
||||||
import { DEFAULT_PHASE_MODELS, isCursorModel, stripProviderPrefix } from '@automaker/types';
|
import { isCursorModel, stripProviderPrefix } from '@automaker/types';
|
||||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||||
import { mergeCommitMessagePrompts } from '@automaker/prompts';
|
import { mergeCommitMessagePrompts } from '@automaker/prompts';
|
||||||
import { ProviderFactory } from '../../../providers/provider-factory.js';
|
import { ProviderFactory } from '../../../providers/provider-factory.js';
|
||||||
import type { SettingsService } from '../../../services/settings-service.js';
|
import type { SettingsService } from '../../../services/settings-service.js';
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
import { getPhaseModelWithOverrides } from '../../../lib/settings-helpers.js';
|
||||||
|
|
||||||
const logger = createLogger('GenerateCommitMessage');
|
const logger = createLogger('GenerateCommitMessage');
|
||||||
const execAsync = promisify(exec);
|
const execAsync = promisify(exec);
|
||||||
@@ -74,33 +74,6 @@ interface GenerateCommitMessageErrorResponse {
|
|||||||
error: string;
|
error: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function extractTextFromStream(
|
|
||||||
stream: AsyncIterable<{
|
|
||||||
type: string;
|
|
||||||
subtype?: string;
|
|
||||||
result?: string;
|
|
||||||
message?: {
|
|
||||||
content?: Array<{ type: string; text?: string }>;
|
|
||||||
};
|
|
||||||
}>
|
|
||||||
): Promise<string> {
|
|
||||||
let responseText = '';
|
|
||||||
|
|
||||||
for await (const msg of stream) {
|
|
||||||
if (msg.type === 'assistant' && msg.message?.content) {
|
|
||||||
for (const block of msg.message.content) {
|
|
||||||
if (block.type === 'text' && block.text) {
|
|
||||||
responseText += block.text;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if (msg.type === 'result' && msg.subtype === 'success') {
|
|
||||||
responseText = msg.result || responseText;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return responseText;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createGenerateCommitMessageHandler(
|
export function createGenerateCommitMessageHandler(
|
||||||
settingsService?: SettingsService
|
settingsService?: SettingsService
|
||||||
): (req: Request, res: Response) => Promise<void> {
|
): (req: Request, res: Response) => Promise<void> {
|
||||||
@@ -184,67 +157,68 @@ export function createGenerateCommitMessageHandler(
|
|||||||
|
|
||||||
const userPrompt = `Generate a commit message for these changes:\n\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
|
const userPrompt = `Generate a commit message for these changes:\n\n\`\`\`diff\n${truncatedDiff}\n\`\`\``;
|
||||||
|
|
||||||
// Get model from phase settings
|
// Get model from phase settings with provider info
|
||||||
const settings = await settingsService?.getGlobalSettings();
|
const {
|
||||||
const phaseModelEntry =
|
phaseModel: phaseModelEntry,
|
||||||
settings?.phaseModels?.commitMessageModel || DEFAULT_PHASE_MODELS.commitMessageModel;
|
provider: claudeCompatibleProvider,
|
||||||
const { model } = resolvePhaseModel(phaseModelEntry);
|
credentials,
|
||||||
|
} = await getPhaseModelWithOverrides(
|
||||||
|
'commitMessageModel',
|
||||||
|
settingsService,
|
||||||
|
worktreePath,
|
||||||
|
'[GenerateCommitMessage]'
|
||||||
|
);
|
||||||
|
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||||
|
|
||||||
logger.info(`Using model for commit message: ${model}`);
|
logger.info(
|
||||||
|
`Using model for commit message: ${model}`,
|
||||||
|
claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
|
||||||
|
);
|
||||||
|
|
||||||
// Get the effective system prompt (custom or default)
|
// Get the effective system prompt (custom or default)
|
||||||
const systemPrompt = await getSystemPrompt(settingsService);
|
const systemPrompt = await getSystemPrompt(settingsService);
|
||||||
|
|
||||||
let message: string;
|
// Get provider for the model type
|
||||||
|
const aiProvider = ProviderFactory.getProviderForModel(model);
|
||||||
// Route to appropriate provider based on model type
|
|
||||||
if (isCursorModel(model)) {
|
|
||||||
// Use Cursor provider for Cursor models
|
|
||||||
logger.info(`Using Cursor provider for model: ${model}`);
|
|
||||||
|
|
||||||
const provider = ProviderFactory.getProviderForModel(model);
|
|
||||||
const bareModel = stripProviderPrefix(model);
|
const bareModel = stripProviderPrefix(model);
|
||||||
|
|
||||||
const cursorPrompt = `${systemPrompt}\n\n${userPrompt}`;
|
// For Cursor models, combine prompts since Cursor doesn't support systemPrompt separation
|
||||||
|
const effectivePrompt = isCursorModel(model)
|
||||||
|
? `${systemPrompt}\n\n${userPrompt}`
|
||||||
|
: userPrompt;
|
||||||
|
const effectiveSystemPrompt = isCursorModel(model) ? undefined : systemPrompt;
|
||||||
|
|
||||||
|
logger.info(`Using ${aiProvider.getName()} provider for model: ${model}`);
|
||||||
|
|
||||||
let responseText = '';
|
let responseText = '';
|
||||||
const cursorStream = provider.executeQuery({
|
const stream = aiProvider.executeQuery({
|
||||||
prompt: cursorPrompt,
|
prompt: effectivePrompt,
|
||||||
model: bareModel,
|
model: bareModel,
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
|
systemPrompt: effectiveSystemPrompt,
|
||||||
maxTurns: 1,
|
maxTurns: 1,
|
||||||
allowedTools: [],
|
allowedTools: [],
|
||||||
readOnly: true,
|
readOnly: true,
|
||||||
|
thinkingLevel, // Pass thinking level for extended thinking support
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
});
|
});
|
||||||
|
|
||||||
// Wrap with timeout to prevent indefinite hangs
|
// Wrap with timeout to prevent indefinite hangs
|
||||||
for await (const msg of withTimeout(cursorStream, AI_TIMEOUT_MS)) {
|
for await (const msg of withTimeout(stream, AI_TIMEOUT_MS)) {
|
||||||
if (msg.type === 'assistant' && msg.message?.content) {
|
if (msg.type === 'assistant' && msg.message?.content) {
|
||||||
for (const block of msg.message.content) {
|
for (const block of msg.message.content) {
|
||||||
if (block.type === 'text' && block.text) {
|
if (block.type === 'text' && block.text) {
|
||||||
responseText += block.text;
|
responseText += block.text;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else if (msg.type === 'result' && msg.subtype === 'success' && msg.result) {
|
||||||
|
// Use result if available (some providers return final text here)
|
||||||
|
responseText = msg.result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
message = responseText.trim();
|
const message = responseText.trim();
|
||||||
} else {
|
|
||||||
// Use Claude SDK for Claude models
|
|
||||||
const stream = query({
|
|
||||||
prompt: userPrompt,
|
|
||||||
options: {
|
|
||||||
model,
|
|
||||||
systemPrompt,
|
|
||||||
maxTurns: 1,
|
|
||||||
allowedTools: [],
|
|
||||||
permissionMode: 'default',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// Wrap with timeout to prevent indefinite hangs
|
|
||||||
message = await extractTextFromStream(withTimeout(stream, AI_TIMEOUT_MS));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!message || message.trim().length === 0) {
|
if (!message || message.trim().length === 0) {
|
||||||
logger.warn('Received empty response from model');
|
logger.warn('Received empty response from model');
|
||||||
|
|||||||
@@ -28,7 +28,10 @@ export function createInfoHandler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Check if worktree exists (git worktrees are stored in project directory)
|
// Check if worktree exists (git worktrees are stored in project directory)
|
||||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
// Sanitize featureId the same way it's sanitized when creating worktrees
|
||||||
|
// (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
|
||||||
|
const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
|
||||||
|
const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
|
||||||
try {
|
try {
|
||||||
await secureFs.access(worktreePath);
|
await secureFs.access(worktreePath);
|
||||||
const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||||
|
|||||||
@@ -110,9 +110,22 @@ export function createListBranchesHandler() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get ahead/behind count for current branch
|
// Check if any remotes are configured for this repository
|
||||||
|
let hasAnyRemotes = false;
|
||||||
|
try {
|
||||||
|
const { stdout: remotesOutput } = await execAsync('git remote', {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
hasAnyRemotes = remotesOutput.trim().length > 0;
|
||||||
|
} catch {
|
||||||
|
// If git remote fails, assume no remotes
|
||||||
|
hasAnyRemotes = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get ahead/behind count for current branch and check if remote branch exists
|
||||||
let aheadCount = 0;
|
let aheadCount = 0;
|
||||||
let behindCount = 0;
|
let behindCount = 0;
|
||||||
|
let hasRemoteBranch = false;
|
||||||
try {
|
try {
|
||||||
// First check if there's a remote tracking branch
|
// First check if there's a remote tracking branch
|
||||||
const { stdout: upstreamOutput } = await execAsync(
|
const { stdout: upstreamOutput } = await execAsync(
|
||||||
@@ -121,6 +134,7 @@ export function createListBranchesHandler() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (upstreamOutput.trim()) {
|
if (upstreamOutput.trim()) {
|
||||||
|
hasRemoteBranch = true;
|
||||||
const { stdout: aheadBehindOutput } = await execAsync(
|
const { stdout: aheadBehindOutput } = await execAsync(
|
||||||
`git rev-list --left-right --count ${currentBranch}@{upstream}...HEAD`,
|
`git rev-list --left-right --count ${currentBranch}@{upstream}...HEAD`,
|
||||||
{ cwd: worktreePath }
|
{ cwd: worktreePath }
|
||||||
@@ -130,7 +144,18 @@ export function createListBranchesHandler() {
|
|||||||
behindCount = behind || 0;
|
behindCount = behind || 0;
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
// No upstream branch set, that's okay
|
// No upstream branch set - check if the branch exists on any remote
|
||||||
|
try {
|
||||||
|
// Check if there's a matching branch on origin (most common remote)
|
||||||
|
const { stdout: remoteBranchOutput } = await execAsync(
|
||||||
|
`git ls-remote --heads origin ${currentBranch}`,
|
||||||
|
{ cwd: worktreePath, timeout: 5000 }
|
||||||
|
);
|
||||||
|
hasRemoteBranch = remoteBranchOutput.trim().length > 0;
|
||||||
|
} catch {
|
||||||
|
// No remote branch found or origin doesn't exist
|
||||||
|
hasRemoteBranch = false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
@@ -140,6 +165,8 @@ export function createListBranchesHandler() {
|
|||||||
branches,
|
branches,
|
||||||
aheadCount,
|
aheadCount,
|
||||||
behindCount,
|
behindCount,
|
||||||
|
hasRemoteBranch,
|
||||||
|
hasAnyRemotes,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
127
apps/server/src/routes/worktree/routes/list-remotes.ts
Normal file
127
apps/server/src/routes/worktree/routes/list-remotes.ts
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
/**
|
||||||
|
* POST /list-remotes endpoint - List all remotes and their branches
|
||||||
|
*
|
||||||
|
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||||
|
* the requireValidWorktree middleware in index.ts
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { exec } from 'child_process';
|
||||||
|
import { promisify } from 'util';
|
||||||
|
import { getErrorMessage, logWorktreeError } from '../common.js';
|
||||||
|
|
||||||
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
|
interface RemoteBranch {
|
||||||
|
name: string;
|
||||||
|
fullRef: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface RemoteInfo {
|
||||||
|
name: string;
|
||||||
|
url: string;
|
||||||
|
branches: RemoteBranch[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createListRemotesHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { worktreePath } = req.body as {
|
||||||
|
worktreePath: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!worktreePath) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath required',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get list of remotes
|
||||||
|
const { stdout: remotesOutput } = await execAsync('git remote -v', {
|
||||||
|
cwd: worktreePath,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Parse remotes (each remote appears twice - once for fetch, once for push)
|
||||||
|
const remotesSet = new Map<string, string>();
|
||||||
|
remotesOutput
|
||||||
|
.trim()
|
||||||
|
.split('\n')
|
||||||
|
.filter((line) => line.trim())
|
||||||
|
.forEach((line) => {
|
||||||
|
const match = line.match(/^(\S+)\s+(\S+)\s+\(fetch\)$/);
|
||||||
|
if (match) {
|
||||||
|
remotesSet.set(match[1], match[2]);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch latest from all remotes (silently, don't fail if offline)
|
||||||
|
try {
|
||||||
|
await execAsync('git fetch --all --quiet', {
|
||||||
|
cwd: worktreePath,
|
||||||
|
timeout: 15000, // 15 second timeout
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// Ignore fetch errors - we'll use cached remote refs
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all remote branches
|
||||||
|
const { stdout: remoteBranchesOutput } = await execAsync(
|
||||||
|
'git branch -r --format="%(refname:short)"',
|
||||||
|
{ cwd: worktreePath }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Group branches by remote
|
||||||
|
const remotesBranches = new Map<string, RemoteBranch[]>();
|
||||||
|
remotesSet.forEach((_, remoteName) => {
|
||||||
|
remotesBranches.set(remoteName, []);
|
||||||
|
});
|
||||||
|
|
||||||
|
remoteBranchesOutput
|
||||||
|
.trim()
|
||||||
|
.split('\n')
|
||||||
|
.filter((line) => line.trim())
|
||||||
|
.forEach((line) => {
|
||||||
|
const cleanLine = line.trim().replace(/^['"]|['"]$/g, '');
|
||||||
|
// Skip HEAD pointers like "origin/HEAD"
|
||||||
|
if (cleanLine.includes('/HEAD')) return;
|
||||||
|
|
||||||
|
// Parse remote name from branch ref (e.g., "origin/main" -> "origin")
|
||||||
|
const slashIndex = cleanLine.indexOf('/');
|
||||||
|
if (slashIndex === -1) return;
|
||||||
|
|
||||||
|
const remoteName = cleanLine.substring(0, slashIndex);
|
||||||
|
const branchName = cleanLine.substring(slashIndex + 1);
|
||||||
|
|
||||||
|
if (remotesBranches.has(remoteName)) {
|
||||||
|
remotesBranches.get(remoteName)!.push({
|
||||||
|
name: branchName,
|
||||||
|
fullRef: cleanLine,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Build final result
|
||||||
|
const remotes: RemoteInfo[] = [];
|
||||||
|
remotesSet.forEach((url, name) => {
|
||||||
|
remotes.push({
|
||||||
|
name,
|
||||||
|
url,
|
||||||
|
branches: remotesBranches.get(name) || [],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
remotes,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const worktreePath = req.body?.worktreePath;
|
||||||
|
logWorktreeError(error, 'List remotes failed', worktreePath);
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -368,6 +368,13 @@ export function createListHandler() {
|
|||||||
: new Map<string, WorktreePRInfo>();
|
: new Map<string, WorktreePRInfo>();
|
||||||
|
|
||||||
for (const worktree of worktrees) {
|
for (const worktree of worktrees) {
|
||||||
|
// Skip PR assignment for the main worktree - it's not meaningful to show
|
||||||
|
// PRs on the main branch tab, and can be confusing if someone created
|
||||||
|
// a PR from main to another branch
|
||||||
|
if (worktree.isMain) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
const metadata = allMetadata.get(worktree.branch);
|
const metadata = allMetadata.get(worktree.branch);
|
||||||
const githubPR = githubPRs.get(worktree.branch);
|
const githubPR = githubPRs.get(worktree.branch);
|
||||||
|
|
||||||
@@ -387,8 +394,8 @@ export function createListHandler() {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else if (metadata?.pr) {
|
} else if (metadata?.pr && metadata.pr.state === 'OPEN') {
|
||||||
// Fall back to stored metadata (for PRs not in recent GitHub response)
|
// Fall back to stored metadata only if the PR is still OPEN
|
||||||
worktree.pr = metadata.pr;
|
worktree.pr = metadata.pr;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
/**
|
/**
|
||||||
* POST /merge endpoint - Merge feature (merge worktree branch into main)
|
* POST /merge endpoint - Merge feature (merge worktree branch into a target branch)
|
||||||
|
*
|
||||||
|
* Allows merging a worktree branch into any target branch (defaults to 'main').
|
||||||
*
|
*
|
||||||
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
* Note: Git repository validation (isGitRepo, hasCommits) is handled by
|
||||||
* the requireValidProject middleware in index.ts
|
* the requireValidProject middleware in index.ts
|
||||||
@@ -8,18 +10,21 @@
|
|||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
import { exec } from 'child_process';
|
import { exec } from 'child_process';
|
||||||
import { promisify } from 'util';
|
import { promisify } from 'util';
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
|
||||||
const execAsync = promisify(exec);
|
const execAsync = promisify(exec);
|
||||||
|
const logger = createLogger('Worktree');
|
||||||
|
|
||||||
export function createMergeHandler() {
|
export function createMergeHandler() {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { projectPath, branchName, worktreePath, options } = req.body as {
|
const { projectPath, branchName, worktreePath, targetBranch, options } = req.body as {
|
||||||
projectPath: string;
|
projectPath: string;
|
||||||
branchName: string;
|
branchName: string;
|
||||||
worktreePath: string;
|
worktreePath: string;
|
||||||
options?: { squash?: boolean; message?: string };
|
targetBranch?: string; // Branch to merge into (defaults to 'main')
|
||||||
|
options?: { squash?: boolean; message?: string; deleteWorktreeAndBranch?: boolean };
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!projectPath || !branchName || !worktreePath) {
|
if (!projectPath || !branchName || !worktreePath) {
|
||||||
@@ -30,7 +35,10 @@ export function createMergeHandler() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate branch exists
|
// Determine the target branch (default to 'main')
|
||||||
|
const mergeTo = targetBranch || 'main';
|
||||||
|
|
||||||
|
// Validate source branch exists
|
||||||
try {
|
try {
|
||||||
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
|
await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath });
|
||||||
} catch {
|
} catch {
|
||||||
@@ -41,12 +49,44 @@ export function createMergeHandler() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge the feature branch
|
// Validate target branch exists
|
||||||
|
try {
|
||||||
|
await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath });
|
||||||
|
} catch {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: `Target branch "${mergeTo}" does not exist`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge the feature branch into the target branch
|
||||||
const mergeCmd = options?.squash
|
const mergeCmd = options?.squash
|
||||||
? `git merge --squash ${branchName}`
|
? `git merge --squash ${branchName}`
|
||||||
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName}`}"`;
|
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`;
|
||||||
|
|
||||||
|
try {
|
||||||
await execAsync(mergeCmd, { cwd: projectPath });
|
await execAsync(mergeCmd, { cwd: projectPath });
|
||||||
|
} catch (mergeError: unknown) {
|
||||||
|
// Check if this is a merge conflict
|
||||||
|
const err = mergeError as { stdout?: string; stderr?: string; message?: string };
|
||||||
|
const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
|
||||||
|
const hasConflicts =
|
||||||
|
output.includes('CONFLICT') || output.includes('Automatic merge failed');
|
||||||
|
|
||||||
|
if (hasConflicts) {
|
||||||
|
// Return conflict-specific error message that frontend can detect
|
||||||
|
res.status(409).json({
|
||||||
|
success: false,
|
||||||
|
error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. Please resolve conflicts manually.`,
|
||||||
|
hasConflicts: true,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Re-throw non-conflict errors to be handled by outer catch
|
||||||
|
throw mergeError;
|
||||||
|
}
|
||||||
|
|
||||||
// If squash merge, need to commit
|
// If squash merge, need to commit
|
||||||
if (options?.squash) {
|
if (options?.squash) {
|
||||||
@@ -55,17 +95,46 @@ export function createMergeHandler() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clean up worktree and branch
|
// Optionally delete the worktree and branch after merging
|
||||||
|
let worktreeDeleted = false;
|
||||||
|
let branchDeleted = false;
|
||||||
|
|
||||||
|
if (options?.deleteWorktreeAndBranch) {
|
||||||
|
// Remove the worktree
|
||||||
try {
|
try {
|
||||||
await execAsync(`git worktree remove "${worktreePath}" --force`, {
|
await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath);
|
||||||
cwd: projectPath,
|
worktreeDeleted = true;
|
||||||
});
|
|
||||||
await execAsync(`git branch -D ${branchName}`, { cwd: projectPath });
|
|
||||||
} catch {
|
} catch {
|
||||||
// Cleanup errors are non-fatal
|
// Try with prune if remove fails
|
||||||
|
try {
|
||||||
|
await execGitCommand(['worktree', 'prune'], projectPath);
|
||||||
|
worktreeDeleted = true;
|
||||||
|
} catch {
|
||||||
|
logger.warn(`Failed to remove worktree: ${worktreePath}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({ success: true, mergedBranch: branchName });
|
// Delete the branch (but not main/master)
|
||||||
|
if (branchName !== 'main' && branchName !== 'master') {
|
||||||
|
if (!isValidBranchName(branchName)) {
|
||||||
|
logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`);
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
await execGitCommand(['branch', '-D', branchName], projectPath);
|
||||||
|
branchDeleted = true;
|
||||||
|
} catch {
|
||||||
|
logger.warn(`Failed to delete branch: ${branchName}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
mergedBranch: branchName,
|
||||||
|
targetBranch: mergeTo,
|
||||||
|
deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined,
|
||||||
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logError(error, 'Merge worktree failed');
|
logError(error, 'Merge worktree failed');
|
||||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
|||||||
181
apps/server/src/routes/worktree/routes/open-in-terminal.ts
Normal file
181
apps/server/src/routes/worktree/routes/open-in-terminal.ts
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
/**
|
||||||
|
* Terminal endpoints for opening worktree directories in terminals
|
||||||
|
*
|
||||||
|
* POST /open-in-terminal - Open in system default terminal (integrated)
|
||||||
|
* GET /available-terminals - List all available external terminals
|
||||||
|
* GET /default-terminal - Get the default external terminal
|
||||||
|
* POST /refresh-terminals - Clear terminal cache and re-detect
|
||||||
|
* POST /open-in-external-terminal - Open a directory in an external terminal
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { isAbsolute } from 'path';
|
||||||
|
import {
|
||||||
|
openInTerminal,
|
||||||
|
clearTerminalCache,
|
||||||
|
detectAllTerminals,
|
||||||
|
detectDefaultTerminal,
|
||||||
|
openInExternalTerminal,
|
||||||
|
} from '@automaker/platform';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
const logger = createLogger('open-in-terminal');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handler to open in system default terminal (integrated terminal behavior)
|
||||||
|
*/
|
||||||
|
export function createOpenInTerminalHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { worktreePath } = req.body as {
|
||||||
|
worktreePath: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!worktreePath || typeof worktreePath !== 'string') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath required and must be a string',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Security: Validate that worktreePath is an absolute path
|
||||||
|
if (!isAbsolute(worktreePath)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath must be an absolute path',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the platform utility to open in terminal
|
||||||
|
const result = await openInTerminal(worktreePath);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
message: `Opened terminal in ${worktreePath}`,
|
||||||
|
terminalName: result.terminalName,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Open in terminal failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handler to get all available external terminals
|
||||||
|
*/
|
||||||
|
export function createGetAvailableTerminalsHandler() {
|
||||||
|
return async (_req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const terminals = await detectAllTerminals();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
terminals,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Get available terminals failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handler to get the default external terminal
|
||||||
|
*/
|
||||||
|
export function createGetDefaultTerminalHandler() {
|
||||||
|
return async (_req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const terminal = await detectDefaultTerminal();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: terminal
|
||||||
|
? {
|
||||||
|
terminalId: terminal.id,
|
||||||
|
terminalName: terminal.name,
|
||||||
|
terminalCommand: terminal.command,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Get default terminal failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handler to refresh the terminal cache and re-detect available terminals
|
||||||
|
* Useful when the user has installed/uninstalled terminals
|
||||||
|
*/
|
||||||
|
export function createRefreshTerminalsHandler() {
|
||||||
|
return async (_req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
// Clear the cache
|
||||||
|
clearTerminalCache();
|
||||||
|
|
||||||
|
// Re-detect terminals (this will repopulate the cache)
|
||||||
|
const terminals = await detectAllTerminals();
|
||||||
|
|
||||||
|
logger.info(`Terminal cache refreshed, found ${terminals.length} terminals`);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
terminals,
|
||||||
|
message: `Found ${terminals.length} available external terminals`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Refresh terminals failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handler to open a directory in an external terminal
|
||||||
|
*/
|
||||||
|
export function createOpenInExternalTerminalHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { worktreePath, terminalId } = req.body as {
|
||||||
|
worktreePath: string;
|
||||||
|
terminalId?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!worktreePath || typeof worktreePath !== 'string') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath required and must be a string',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isAbsolute(worktreePath)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath must be an absolute path',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await openInExternalTerminal(worktreePath, terminalId);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
message: `Opened ${worktreePath} in ${result.terminalName}`,
|
||||||
|
terminalName: result.terminalName,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Open in external terminal failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -15,9 +15,10 @@ const execAsync = promisify(exec);
|
|||||||
export function createPushHandler() {
|
export function createPushHandler() {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { worktreePath, force } = req.body as {
|
const { worktreePath, force, remote } = req.body as {
|
||||||
worktreePath: string;
|
worktreePath: string;
|
||||||
force?: boolean;
|
force?: boolean;
|
||||||
|
remote?: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!worktreePath) {
|
if (!worktreePath) {
|
||||||
@@ -34,15 +35,18 @@ export function createPushHandler() {
|
|||||||
});
|
});
|
||||||
const branchName = branchOutput.trim();
|
const branchName = branchOutput.trim();
|
||||||
|
|
||||||
|
// Use specified remote or default to 'origin'
|
||||||
|
const targetRemote = remote || 'origin';
|
||||||
|
|
||||||
// Push the branch
|
// Push the branch
|
||||||
const forceFlag = force ? '--force' : '';
|
const forceFlag = force ? '--force' : '';
|
||||||
try {
|
try {
|
||||||
await execAsync(`git push -u origin ${branchName} ${forceFlag}`, {
|
await execAsync(`git push -u ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
});
|
});
|
||||||
} catch {
|
} catch {
|
||||||
// Try setting upstream
|
// Try setting upstream
|
||||||
await execAsync(`git push --set-upstream origin ${branchName} ${forceFlag}`, {
|
await execAsync(`git push --set-upstream ${targetRemote} ${branchName} ${forceFlag}`, {
|
||||||
cwd: worktreePath,
|
cwd: worktreePath,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -52,7 +56,7 @@ export function createPushHandler() {
|
|||||||
result: {
|
result: {
|
||||||
branch: branchName,
|
branch: branchName,
|
||||||
pushed: true,
|
pushed: true,
|
||||||
message: `Successfully pushed ${branchName} to origin`,
|
message: `Successfully pushed ${branchName} to ${targetRemote}`,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -1,16 +1,22 @@
|
|||||||
/**
|
/**
|
||||||
* POST /start-dev endpoint - Start a dev server for a worktree
|
* POST /start-dev endpoint - Start a dev server for a worktree
|
||||||
*
|
*
|
||||||
* Spins up a development server (npm run dev) in the worktree directory
|
* Spins up a development server in the worktree directory on a unique port,
|
||||||
* on a unique port, allowing preview of the worktree's changes without
|
* allowing preview of the worktree's changes without affecting the main dev server.
|
||||||
* affecting the main dev server.
|
*
|
||||||
|
* If a custom devCommand is configured in project settings, it will be used.
|
||||||
|
* Otherwise, auto-detection based on package manager (npm/yarn/pnpm/bun run dev) is used.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import type { Request, Response } from 'express';
|
import type { Request, Response } from 'express';
|
||||||
|
import type { SettingsService } from '../../../services/settings-service.js';
|
||||||
import { getDevServerService } from '../../../services/dev-server-service.js';
|
import { getDevServerService } from '../../../services/dev-server-service.js';
|
||||||
import { getErrorMessage, logError } from '../common.js';
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
|
||||||
export function createStartDevHandler() {
|
const logger = createLogger('start-dev');
|
||||||
|
|
||||||
|
export function createStartDevHandler(settingsService?: SettingsService) {
|
||||||
return async (req: Request, res: Response): Promise<void> => {
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const { projectPath, worktreePath } = req.body as {
|
const { projectPath, worktreePath } = req.body as {
|
||||||
@@ -34,8 +40,25 @@ export function createStartDevHandler() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get custom dev command from project settings (if configured)
|
||||||
|
let customCommand: string | undefined;
|
||||||
|
if (settingsService) {
|
||||||
|
const projectSettings = await settingsService.getProjectSettings(projectPath);
|
||||||
|
const devCommand = projectSettings?.devCommand?.trim();
|
||||||
|
if (devCommand) {
|
||||||
|
customCommand = devCommand;
|
||||||
|
logger.debug(`Using custom dev command from project settings: ${customCommand}`);
|
||||||
|
} else {
|
||||||
|
logger.debug('No custom dev command configured, using auto-detection');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const devServerService = getDevServerService();
|
const devServerService = getDevServerService();
|
||||||
const result = await devServerService.startDevServer(projectPath, worktreePath);
|
const result = await devServerService.startDevServer(
|
||||||
|
projectPath,
|
||||||
|
worktreePath,
|
||||||
|
customCommand
|
||||||
|
);
|
||||||
|
|
||||||
if (result.success && result.result) {
|
if (result.success && result.result) {
|
||||||
res.json({
|
res.json({
|
||||||
|
|||||||
92
apps/server/src/routes/worktree/routes/start-tests.ts
Normal file
92
apps/server/src/routes/worktree/routes/start-tests.ts
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
/**
|
||||||
|
* POST /start-tests endpoint - Start tests for a worktree
|
||||||
|
*
|
||||||
|
* Runs the test command configured in project settings.
|
||||||
|
* If no testCommand is configured, returns an error.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import type { SettingsService } from '../../../services/settings-service.js';
|
||||||
|
import { getTestRunnerService } from '../../../services/test-runner-service.js';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
export function createStartTestsHandler(settingsService?: SettingsService) {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const body = req.body;
|
||||||
|
|
||||||
|
// Validate request body
|
||||||
|
if (!body || typeof body !== 'object') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Request body must be an object',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const worktreePath = typeof body.worktreePath === 'string' ? body.worktreePath : undefined;
|
||||||
|
const projectPath = typeof body.projectPath === 'string' ? body.projectPath : undefined;
|
||||||
|
const testFile = typeof body.testFile === 'string' ? body.testFile : undefined;
|
||||||
|
|
||||||
|
if (!worktreePath) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'worktreePath is required and must be a string',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get project settings to find the test command
|
||||||
|
// Use projectPath if provided, otherwise use worktreePath
|
||||||
|
const settingsPath = projectPath || worktreePath;
|
||||||
|
|
||||||
|
if (!settingsService) {
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Settings service not available',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectSettings = await settingsService.getProjectSettings(settingsPath);
|
||||||
|
const testCommand = projectSettings?.testCommand;
|
||||||
|
|
||||||
|
if (!testCommand) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error:
|
||||||
|
'No test command configured. Please configure a test command in Project Settings > Testing Configuration.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testRunnerService = getTestRunnerService();
|
||||||
|
const result = await testRunnerService.startTests(worktreePath, {
|
||||||
|
command: testCommand,
|
||||||
|
testFile,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result.success && result.result) {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
sessionId: result.result.sessionId,
|
||||||
|
worktreePath: result.result.worktreePath,
|
||||||
|
command: result.result.command,
|
||||||
|
status: result.result.status,
|
||||||
|
testFile: result.result.testFile,
|
||||||
|
message: result.result.message,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: result.error || 'Failed to start tests',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Start tests failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -28,7 +28,10 @@ export function createStatusHandler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Git worktrees are stored in project directory
|
// Git worktrees are stored in project directory
|
||||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
// Sanitize featureId the same way it's sanitized when creating worktrees
|
||||||
|
// (see create.ts: branchName.replace(/[^a-zA-Z0-9_-]/g, '-'))
|
||||||
|
const sanitizedFeatureId = featureId.replace(/[^a-zA-Z0-9_-]/g, '-');
|
||||||
|
const worktreePath = path.join(projectPath, '.worktrees', sanitizedFeatureId);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await secureFs.access(worktreePath);
|
await secureFs.access(worktreePath);
|
||||||
|
|||||||
58
apps/server/src/routes/worktree/routes/stop-tests.ts
Normal file
58
apps/server/src/routes/worktree/routes/stop-tests.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
/**
|
||||||
|
* POST /stop-tests endpoint - Stop a running test session
|
||||||
|
*
|
||||||
|
* Stops the test runner process for a specific session,
|
||||||
|
* cancelling any ongoing tests and freeing up resources.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { getTestRunnerService } from '../../../services/test-runner-service.js';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
export function createStopTestsHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const body = req.body;
|
||||||
|
|
||||||
|
// Validate request body
|
||||||
|
if (!body || typeof body !== 'object') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Request body must be an object',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionId = typeof body.sessionId === 'string' ? body.sessionId : undefined;
|
||||||
|
|
||||||
|
if (!sessionId) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'sessionId is required and must be a string',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testRunnerService = getTestRunnerService();
|
||||||
|
const result = await testRunnerService.stopTests(sessionId);
|
||||||
|
|
||||||
|
if (result.success && result.result) {
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
sessionId: result.result.sessionId,
|
||||||
|
message: result.result.message,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: result.error || 'Failed to stop tests',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Stop tests failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
160
apps/server/src/routes/worktree/routes/test-logs.ts
Normal file
160
apps/server/src/routes/worktree/routes/test-logs.ts
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
/**
|
||||||
|
* GET /test-logs endpoint - Get buffered logs for a test runner session
|
||||||
|
*
|
||||||
|
* Returns the scrollback buffer containing historical log output for a test run.
|
||||||
|
* Used by clients to populate the log panel on initial connection
|
||||||
|
* before subscribing to real-time updates via WebSocket.
|
||||||
|
*
|
||||||
|
* Query parameters:
|
||||||
|
* - worktreePath: Path to the worktree (optional if sessionId provided)
|
||||||
|
* - sessionId: Specific test session ID (optional, uses active session if not provided)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { Request, Response } from 'express';
|
||||||
|
import { getTestRunnerService } from '../../../services/test-runner-service.js';
|
||||||
|
import { getErrorMessage, logError } from '../common.js';
|
||||||
|
|
||||||
|
interface SessionInfo {
|
||||||
|
sessionId: string;
|
||||||
|
worktreePath?: string;
|
||||||
|
command?: string;
|
||||||
|
testFile?: string;
|
||||||
|
exitCode?: number | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface OutputResult {
|
||||||
|
sessionId: string;
|
||||||
|
status: string;
|
||||||
|
output: string;
|
||||||
|
startedAt: string;
|
||||||
|
finishedAt?: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildLogsResponse(session: SessionInfo, output: OutputResult) {
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
result: {
|
||||||
|
sessionId: session.sessionId,
|
||||||
|
worktreePath: session.worktreePath,
|
||||||
|
command: session.command,
|
||||||
|
status: output.status,
|
||||||
|
testFile: session.testFile,
|
||||||
|
logs: output.output,
|
||||||
|
startedAt: output.startedAt,
|
||||||
|
finishedAt: output.finishedAt,
|
||||||
|
exitCode: session.exitCode ?? null,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createGetTestLogsHandler() {
|
||||||
|
return async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const { worktreePath, sessionId } = req.query as {
|
||||||
|
worktreePath?: string;
|
||||||
|
sessionId?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const testRunnerService = getTestRunnerService();
|
||||||
|
|
||||||
|
// If sessionId is provided, get logs for that specific session
|
||||||
|
if (sessionId) {
|
||||||
|
const result = testRunnerService.getSessionOutput(sessionId);
|
||||||
|
|
||||||
|
if (result.success && result.result) {
|
||||||
|
const session = testRunnerService.getSession(sessionId);
|
||||||
|
res.json(
|
||||||
|
buildLogsResponse(
|
||||||
|
{
|
||||||
|
sessionId: result.result.sessionId,
|
||||||
|
worktreePath: session?.worktreePath,
|
||||||
|
command: session?.command,
|
||||||
|
testFile: session?.testFile,
|
||||||
|
exitCode: session?.exitCode,
|
||||||
|
},
|
||||||
|
result.result
|
||||||
|
)
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
res.status(404).json({
|
||||||
|
success: false,
|
||||||
|
error: result.error || 'Failed to get test logs',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If worktreePath is provided, get logs for the active session
|
||||||
|
if (worktreePath) {
|
||||||
|
const activeSession = testRunnerService.getActiveSession(worktreePath);
|
||||||
|
|
||||||
|
if (activeSession) {
|
||||||
|
const result = testRunnerService.getSessionOutput(activeSession.id);
|
||||||
|
|
||||||
|
if (result.success && result.result) {
|
||||||
|
res.json(
|
||||||
|
buildLogsResponse(
|
||||||
|
{
|
||||||
|
sessionId: activeSession.id,
|
||||||
|
worktreePath: activeSession.worktreePath,
|
||||||
|
command: activeSession.command,
|
||||||
|
testFile: activeSession.testFile,
|
||||||
|
exitCode: activeSession.exitCode,
|
||||||
|
},
|
||||||
|
result.result
|
||||||
|
)
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
res.status(404).json({
|
||||||
|
success: false,
|
||||||
|
error: result.error || 'Failed to get test logs',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No active session - check for most recent session for this worktree
|
||||||
|
const sessions = testRunnerService.listSessions(worktreePath);
|
||||||
|
if (sessions.result.sessions.length > 0) {
|
||||||
|
// Get the most recent session (list is not sorted, so find it)
|
||||||
|
const mostRecent = sessions.result.sessions.reduce((latest, current) => {
|
||||||
|
const latestTime = new Date(latest.startedAt).getTime();
|
||||||
|
const currentTime = new Date(current.startedAt).getTime();
|
||||||
|
return currentTime > latestTime ? current : latest;
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = testRunnerService.getSessionOutput(mostRecent.sessionId);
|
||||||
|
if (result.success && result.result) {
|
||||||
|
res.json(
|
||||||
|
buildLogsResponse(
|
||||||
|
{
|
||||||
|
sessionId: mostRecent.sessionId,
|
||||||
|
worktreePath: mostRecent.worktreePath,
|
||||||
|
command: mostRecent.command,
|
||||||
|
testFile: mostRecent.testFile,
|
||||||
|
exitCode: mostRecent.exitCode,
|
||||||
|
},
|
||||||
|
result.result
|
||||||
|
)
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.status(404).json({
|
||||||
|
success: false,
|
||||||
|
error: 'No test sessions found for this worktree',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Neither sessionId nor worktreePath provided
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Either worktreePath or sessionId query parameter is required',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logError(error, 'Get test logs failed');
|
||||||
|
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -29,6 +29,7 @@ import {
|
|||||||
getSkillsConfiguration,
|
getSkillsConfiguration,
|
||||||
getSubagentsConfiguration,
|
getSubagentsConfiguration,
|
||||||
getCustomSubagents,
|
getCustomSubagents,
|
||||||
|
getProviderByModelId,
|
||||||
} from '../lib/settings-helpers.js';
|
} from '../lib/settings-helpers.js';
|
||||||
|
|
||||||
interface Message {
|
interface Message {
|
||||||
@@ -274,6 +275,30 @@ export class AgentService {
|
|||||||
? await getCustomSubagents(this.settingsService, effectiveWorkDir)
|
? await getCustomSubagents(this.settingsService, effectiveWorkDir)
|
||||||
: undefined;
|
: undefined;
|
||||||
|
|
||||||
|
// Get credentials for API calls
|
||||||
|
const credentials = await this.settingsService?.getCredentials();
|
||||||
|
|
||||||
|
// Try to find a provider for the model (if it's a provider model like "GLM-4.7")
|
||||||
|
// This allows users to select provider models in the Agent Runner UI
|
||||||
|
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||||
|
let providerResolvedModel: string | undefined;
|
||||||
|
const requestedModel = model || session.model;
|
||||||
|
if (requestedModel && this.settingsService) {
|
||||||
|
const providerResult = await getProviderByModelId(
|
||||||
|
requestedModel,
|
||||||
|
this.settingsService,
|
||||||
|
'[AgentService]'
|
||||||
|
);
|
||||||
|
if (providerResult.provider) {
|
||||||
|
claudeCompatibleProvider = providerResult.provider;
|
||||||
|
providerResolvedModel = providerResult.resolvedModel;
|
||||||
|
this.logger.info(
|
||||||
|
`[AgentService] Using provider "${providerResult.provider.name}" for model "${requestedModel}"` +
|
||||||
|
(providerResolvedModel ? ` -> resolved to "${providerResolvedModel}"` : '')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) and memory files
|
// Load project context files (CLAUDE.md, CODE_QUALITY.md, etc.) and memory files
|
||||||
// Use the user's message as task context for smart memory selection
|
// Use the user's message as task context for smart memory selection
|
||||||
const contextResult = await loadContextFiles({
|
const contextResult = await loadContextFiles({
|
||||||
@@ -299,10 +324,16 @@ export class AgentService {
|
|||||||
// Use thinking level and reasoning effort from request, or fall back to session's stored values
|
// Use thinking level and reasoning effort from request, or fall back to session's stored values
|
||||||
const effectiveThinkingLevel = thinkingLevel ?? session.thinkingLevel;
|
const effectiveThinkingLevel = thinkingLevel ?? session.thinkingLevel;
|
||||||
const effectiveReasoningEffort = reasoningEffort ?? session.reasoningEffort;
|
const effectiveReasoningEffort = reasoningEffort ?? session.reasoningEffort;
|
||||||
|
|
||||||
|
// When using a provider model, use the resolved Claude model (from mapsToClaudeModel)
|
||||||
|
// e.g., "GLM-4.5-Air" -> "claude-haiku-4-5"
|
||||||
|
const modelForSdk = providerResolvedModel || model;
|
||||||
|
const sessionModelForSdk = providerResolvedModel ? undefined : session.model;
|
||||||
|
|
||||||
const sdkOptions = createChatOptions({
|
const sdkOptions = createChatOptions({
|
||||||
cwd: effectiveWorkDir,
|
cwd: effectiveWorkDir,
|
||||||
model: model,
|
model: modelForSdk,
|
||||||
sessionModel: session.model,
|
sessionModel: sessionModelForSdk,
|
||||||
systemPrompt: combinedSystemPrompt,
|
systemPrompt: combinedSystemPrompt,
|
||||||
abortController: session.abortController!,
|
abortController: session.abortController!,
|
||||||
autoLoadClaudeMd,
|
autoLoadClaudeMd,
|
||||||
@@ -378,6 +409,8 @@ export class AgentService {
|
|||||||
agents: customSubagents, // Pass custom subagents for task delegation
|
agents: customSubagents, // Pass custom subagents for task delegation
|
||||||
thinkingLevel: effectiveThinkingLevel, // Pass thinking level for Claude models
|
thinkingLevel: effectiveThinkingLevel, // Pass thinking level for Claude models
|
||||||
reasoningEffort: effectiveReasoningEffort, // Pass reasoning effort for Codex models
|
reasoningEffort: effectiveReasoningEffort, // Pass reasoning effort for Codex models
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration (GLM, MiniMax, etc.)
|
||||||
};
|
};
|
||||||
|
|
||||||
// Build prompt content with images
|
// Build prompt content with images
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -468,10 +468,41 @@ export class ClaudeUsageService {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Strip ANSI escape codes from text
|
* Strip ANSI escape codes from text
|
||||||
|
* Handles CSI, OSC, and other common ANSI sequences
|
||||||
*/
|
*/
|
||||||
private stripAnsiCodes(text: string): string {
|
private stripAnsiCodes(text: string): string {
|
||||||
|
// First strip ANSI sequences (colors, etc) and handle CR
|
||||||
// eslint-disable-next-line no-control-regex
|
// eslint-disable-next-line no-control-regex
|
||||||
return text.replace(/\x1B\[[0-9;]*[A-Za-z]/g, '');
|
let clean = text
|
||||||
|
// CSI sequences: ESC [ ... (letter or @)
|
||||||
|
.replace(/\x1B\[[0-9;?]*[A-Za-z@]/g, '')
|
||||||
|
// OSC sequences: ESC ] ... terminated by BEL, ST, or another ESC
|
||||||
|
.replace(/\x1B\][^\x07\x1B]*(?:\x07|\x1B\\)?/g, '')
|
||||||
|
// Other ESC sequences: ESC (letter)
|
||||||
|
.replace(/\x1B[A-Za-z]/g, '')
|
||||||
|
// Carriage returns: replace with newline to avoid concatenation
|
||||||
|
.replace(/\r\n/g, '\n')
|
||||||
|
.replace(/\r/g, '\n');
|
||||||
|
|
||||||
|
// Handle backspaces (\x08) by applying them
|
||||||
|
// If we encounter a backspace, remove the character before it
|
||||||
|
while (clean.includes('\x08')) {
|
||||||
|
clean = clean.replace(/[^\x08]\x08/, '');
|
||||||
|
clean = clean.replace(/^\x08+/, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Explicitly strip known "Synchronized Output" and "Window Title" garbage
|
||||||
|
// even if ESC is missing (seen in some environments)
|
||||||
|
clean = clean
|
||||||
|
.replace(/\[\?2026[hl]/g, '') // CSI ? 2026 h/l
|
||||||
|
.replace(/\]0;[^\x07]*\x07/g, '') // OSC 0; Title BEL
|
||||||
|
.replace(/\]0;.*?(\[\?|$)/g, ''); // OSC 0; Title ... (unterminated or hit next sequence)
|
||||||
|
|
||||||
|
// Strip remaining non-printable control characters (except newline \n)
|
||||||
|
// ASCII 0-8, 11-31, 127
|
||||||
|
clean = clean.replace(/[\x00-\x08\x0B-\x1F\x7F]/g, '');
|
||||||
|
|
||||||
|
return clean;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -550,7 +581,7 @@ export class ClaudeUsageService {
|
|||||||
sectionLabel: string,
|
sectionLabel: string,
|
||||||
type: string
|
type: string
|
||||||
): { percentage: number; resetTime: string; resetText: string } {
|
): { percentage: number; resetTime: string; resetText: string } {
|
||||||
let percentage = 0;
|
let percentage: number | null = null;
|
||||||
let resetTime = this.getDefaultResetTime(type);
|
let resetTime = this.getDefaultResetTime(type);
|
||||||
let resetText = '';
|
let resetText = '';
|
||||||
|
|
||||||
@@ -564,7 +595,7 @@ export class ClaudeUsageService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (sectionIndex === -1) {
|
if (sectionIndex === -1) {
|
||||||
return { percentage, resetTime, resetText };
|
return { percentage: 0, resetTime, resetText };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look at the lines following the section header (within a window of 5 lines)
|
// Look at the lines following the section header (within a window of 5 lines)
|
||||||
@@ -572,7 +603,8 @@ export class ClaudeUsageService {
|
|||||||
|
|
||||||
for (const line of searchWindow) {
|
for (const line of searchWindow) {
|
||||||
// Extract percentage - only take the first match (avoid picking up next section's data)
|
// Extract percentage - only take the first match (avoid picking up next section's data)
|
||||||
if (percentage === 0) {
|
// Use null to track "not found" since 0% is a valid percentage (100% left = 0% used)
|
||||||
|
if (percentage === null) {
|
||||||
const percentMatch = line.match(/(\d{1,3})\s*%\s*(left|used|remaining)/i);
|
const percentMatch = line.match(/(\d{1,3})\s*%\s*(left|used|remaining)/i);
|
||||||
if (percentMatch) {
|
if (percentMatch) {
|
||||||
const value = parseInt(percentMatch[1], 10);
|
const value = parseInt(percentMatch[1], 10);
|
||||||
@@ -584,18 +616,31 @@ export class ClaudeUsageService {
|
|||||||
|
|
||||||
// Extract reset time - only take the first match
|
// Extract reset time - only take the first match
|
||||||
if (!resetText && line.toLowerCase().includes('reset')) {
|
if (!resetText && line.toLowerCase().includes('reset')) {
|
||||||
resetText = line;
|
// Only extract the part starting from "Resets" (or "Reset") to avoid garbage prefixes
|
||||||
|
const match = line.match(/(Resets?.*)$/i);
|
||||||
|
// If regex fails despite 'includes', likely a complex string issues - verify match before using line
|
||||||
|
// Only fallback to line if it's reasonably short/clean, otherwise skip it to avoid showing garbage
|
||||||
|
if (match) {
|
||||||
|
resetText = match[1];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse the reset time if we found one
|
// Parse the reset time if we found one
|
||||||
if (resetText) {
|
if (resetText) {
|
||||||
|
// Clean up resetText: remove percentage info if it was matched on the same line
|
||||||
|
// e.g. "46%used Resets5:59pm" -> " Resets5:59pm"
|
||||||
|
resetText = resetText.replace(/(\d{1,3})\s*%\s*(left|used|remaining)/i, '').trim();
|
||||||
|
|
||||||
|
// Ensure space after "Resets" if missing (e.g. "Resets5:59pm" -> "Resets 5:59pm")
|
||||||
|
resetText = resetText.replace(/(resets?)(\d)/i, '$1 $2');
|
||||||
|
|
||||||
resetTime = this.parseResetTime(resetText, type);
|
resetTime = this.parseResetTime(resetText, type);
|
||||||
// Strip timezone like "(Asia/Dubai)" from the display text
|
// Strip timezone like "(Asia/Dubai)" from the display text
|
||||||
resetText = resetText.replace(/\s*\([A-Za-z_\/]+\)\s*$/, '').trim();
|
resetText = resetText.replace(/\s*\([A-Za-z_\/]+\)\s*$/, '').trim();
|
||||||
}
|
}
|
||||||
|
|
||||||
return { percentage, resetTime, resetText };
|
return { percentage: percentage ?? 0, resetTime, resetText };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -624,7 +669,7 @@ export class ClaudeUsageService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Try to parse simple time-only format: "Resets 11am" or "Resets 3pm"
|
// Try to parse simple time-only format: "Resets 11am" or "Resets 3pm"
|
||||||
const simpleTimeMatch = text.match(/resets\s+(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i);
|
const simpleTimeMatch = text.match(/resets\s*(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i);
|
||||||
if (simpleTimeMatch) {
|
if (simpleTimeMatch) {
|
||||||
let hours = parseInt(simpleTimeMatch[1], 10);
|
let hours = parseInt(simpleTimeMatch[1], 10);
|
||||||
const minutes = simpleTimeMatch[2] ? parseInt(simpleTimeMatch[2], 10) : 0;
|
const minutes = simpleTimeMatch[2] ? parseInt(simpleTimeMatch[2], 10) : 0;
|
||||||
@@ -649,8 +694,11 @@ export class ClaudeUsageService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Try to parse date format: "Resets Dec 22 at 8pm" or "Resets Jan 15, 3:30pm"
|
// Try to parse date format: "Resets Dec 22 at 8pm" or "Resets Jan 15, 3:30pm"
|
||||||
|
// The regex explicitly matches only valid 3-letter month abbreviations to avoid
|
||||||
|
// matching words like "Resets" when there's no space separator.
|
||||||
|
// Optional "resets\s*" prefix handles cases with or without space after "Resets"
|
||||||
const dateMatch = text.match(
|
const dateMatch = text.match(
|
||||||
/([A-Za-z]{3,})\s+(\d{1,2})(?:\s+at\s+|\s*,?\s*)(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i
|
/(?:resets\s*)?(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+(\d{1,2})(?:\s+at\s+|\s*,?\s*)(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i
|
||||||
);
|
);
|
||||||
if (dateMatch) {
|
if (dateMatch) {
|
||||||
const monthName = dateMatch[1];
|
const monthName = dateMatch[1];
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
288
apps/server/src/services/copilot-usage-service.ts
Normal file
288
apps/server/src/services/copilot-usage-service.ts
Normal file
@@ -0,0 +1,288 @@
|
|||||||
|
/**
|
||||||
|
* GitHub Copilot Usage Service
|
||||||
|
*
|
||||||
|
* Fetches usage data from GitHub's Copilot API using GitHub OAuth.
|
||||||
|
* Based on CodexBar reference implementation.
|
||||||
|
*
|
||||||
|
* Authentication methods:
|
||||||
|
* 1. GitHub CLI token (~/.config/gh/hosts.yml)
|
||||||
|
* 2. GitHub OAuth device flow (stored in config)
|
||||||
|
*
|
||||||
|
* API Endpoints:
|
||||||
|
* - GET https://api.github.com/copilot_internal/user - Quota and plan info
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as os from 'os';
|
||||||
|
import { execSync } from 'child_process';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { CopilotProviderUsage, UsageWindow } from '@automaker/types';
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotUsage');
|
||||||
|
|
||||||
|
// GitHub API endpoint for Copilot
|
||||||
|
const COPILOT_USER_ENDPOINT = 'https://api.github.com/copilot_internal/user';
|
||||||
|
|
||||||
|
interface CopilotQuotaSnapshot {
|
||||||
|
percentageUsed?: number;
|
||||||
|
percentageRemaining?: number;
|
||||||
|
limit?: number;
|
||||||
|
used?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CopilotUserResponse {
|
||||||
|
copilotPlan?: string;
|
||||||
|
copilot_plan?: string;
|
||||||
|
quotaSnapshots?: {
|
||||||
|
premiumInteractions?: CopilotQuotaSnapshot;
|
||||||
|
chat?: CopilotQuotaSnapshot;
|
||||||
|
};
|
||||||
|
plan?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class CopilotUsageService {
|
||||||
|
private cachedToken: string | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if GitHub Copilot credentials are available
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
const token = await this.getGitHubToken();
|
||||||
|
return !!token;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get GitHub token from various sources
|
||||||
|
*/
|
||||||
|
private async getGitHubToken(): Promise<string | null> {
|
||||||
|
if (this.cachedToken) {
|
||||||
|
return this.cachedToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Check environment variable
|
||||||
|
if (process.env.GITHUB_TOKEN) {
|
||||||
|
this.cachedToken = process.env.GITHUB_TOKEN;
|
||||||
|
return this.cachedToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Check GH_TOKEN (GitHub CLI uses this)
|
||||||
|
if (process.env.GH_TOKEN) {
|
||||||
|
this.cachedToken = process.env.GH_TOKEN;
|
||||||
|
return this.cachedToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Try to get token from GitHub CLI
|
||||||
|
try {
|
||||||
|
const token = execSync('gh auth token', {
|
||||||
|
encoding: 'utf8',
|
||||||
|
timeout: 5000,
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe'],
|
||||||
|
}).trim();
|
||||||
|
|
||||||
|
if (token) {
|
||||||
|
this.cachedToken = token;
|
||||||
|
return this.cachedToken;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
logger.debug('Failed to get token from gh CLI');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Check GitHub CLI hosts.yml file
|
||||||
|
const ghHostsPath = path.join(os.homedir(), '.config', 'gh', 'hosts.yml');
|
||||||
|
if (fs.existsSync(ghHostsPath)) {
|
||||||
|
try {
|
||||||
|
const content = fs.readFileSync(ghHostsPath, 'utf8');
|
||||||
|
// Simple YAML parsing for oauth_token
|
||||||
|
const match = content.match(/oauth_token:\s*(.+)/);
|
||||||
|
if (match) {
|
||||||
|
this.cachedToken = match[1].trim();
|
||||||
|
return this.cachedToken;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug('Failed to read gh hosts.yml:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Check CodexBar config (for users who also use CodexBar)
|
||||||
|
const codexbarConfigPath = path.join(os.homedir(), '.codexbar', 'config.json');
|
||||||
|
if (fs.existsSync(codexbarConfigPath)) {
|
||||||
|
try {
|
||||||
|
const content = fs.readFileSync(codexbarConfigPath, 'utf8');
|
||||||
|
const config = JSON.parse(content);
|
||||||
|
if (config.github?.oauth_token) {
|
||||||
|
this.cachedToken = config.github.oauth_token;
|
||||||
|
return this.cachedToken;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug('Failed to read CodexBar config:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make an authenticated request to GitHub Copilot API
|
||||||
|
*/
|
||||||
|
private async makeRequest<T>(url: string): Promise<T | null> {
|
||||||
|
const token = await this.getGitHubToken();
|
||||||
|
if (!token) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `token ${token}`,
|
||||||
|
Accept: 'application/json',
|
||||||
|
'User-Agent': 'automaker/1.0',
|
||||||
|
// Copilot-specific headers (from CodexBar reference)
|
||||||
|
'Editor-Version': 'vscode/1.96.2',
|
||||||
|
'Editor-Plugin-Version': 'copilot-chat/0.26.7',
|
||||||
|
'X-Github-Api-Version': '2025-04-01',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 401 || response.status === 403) {
|
||||||
|
// Clear cached token on auth failure
|
||||||
|
this.cachedToken = null;
|
||||||
|
logger.warn('GitHub Copilot API authentication failed');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (response.status === 404) {
|
||||||
|
// User may not have Copilot access
|
||||||
|
logger.info('GitHub Copilot not available for this user');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
logger.error(`GitHub Copilot API error: ${response.status} ${response.statusText}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.json()) as T;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to fetch from GitHub Copilot API:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage data from GitHub Copilot
|
||||||
|
*/
|
||||||
|
async fetchUsageData(): Promise<CopilotProviderUsage> {
|
||||||
|
logger.info('[fetchUsageData] Starting GitHub Copilot usage fetch...');
|
||||||
|
|
||||||
|
const baseUsage: CopilotProviderUsage = {
|
||||||
|
providerId: 'copilot',
|
||||||
|
providerName: 'GitHub Copilot',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if token is available
|
||||||
|
const hasToken = await this.getGitHubToken();
|
||||||
|
if (!hasToken) {
|
||||||
|
baseUsage.error = 'GitHub authentication not available';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch Copilot user data
|
||||||
|
const userResponse = await this.makeRequest<CopilotUserResponse>(COPILOT_USER_ENDPOINT);
|
||||||
|
if (!userResponse) {
|
||||||
|
baseUsage.error = 'Failed to fetch GitHub Copilot usage data';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
baseUsage.available = true;
|
||||||
|
|
||||||
|
// Parse quota snapshots
|
||||||
|
const quotas = userResponse.quotaSnapshots;
|
||||||
|
if (quotas) {
|
||||||
|
// Premium interactions quota
|
||||||
|
if (quotas.premiumInteractions) {
|
||||||
|
const premium = quotas.premiumInteractions;
|
||||||
|
const usedPercent =
|
||||||
|
premium.percentageUsed !== undefined
|
||||||
|
? premium.percentageUsed
|
||||||
|
: premium.percentageRemaining !== undefined
|
||||||
|
? 100 - premium.percentageRemaining
|
||||||
|
: 0;
|
||||||
|
|
||||||
|
const premiumWindow: UsageWindow = {
|
||||||
|
name: 'Premium Interactions',
|
||||||
|
usedPercent,
|
||||||
|
resetsAt: '', // GitHub doesn't provide reset time
|
||||||
|
resetText: 'Resets monthly',
|
||||||
|
limit: premium.limit,
|
||||||
|
used: premium.used,
|
||||||
|
};
|
||||||
|
|
||||||
|
baseUsage.primary = premiumWindow;
|
||||||
|
baseUsage.premiumInteractions = premiumWindow;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Chat quota
|
||||||
|
if (quotas.chat) {
|
||||||
|
const chat = quotas.chat;
|
||||||
|
const usedPercent =
|
||||||
|
chat.percentageUsed !== undefined
|
||||||
|
? chat.percentageUsed
|
||||||
|
: chat.percentageRemaining !== undefined
|
||||||
|
? 100 - chat.percentageRemaining
|
||||||
|
: 0;
|
||||||
|
|
||||||
|
const chatWindow: UsageWindow = {
|
||||||
|
name: 'Chat',
|
||||||
|
usedPercent,
|
||||||
|
resetsAt: '',
|
||||||
|
resetText: 'Resets monthly',
|
||||||
|
limit: chat.limit,
|
||||||
|
used: chat.used,
|
||||||
|
};
|
||||||
|
|
||||||
|
baseUsage.secondary = chatWindow;
|
||||||
|
baseUsage.chatQuota = chatWindow;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse plan type
|
||||||
|
const planType = userResponse.copilotPlan || userResponse.copilot_plan || userResponse.plan;
|
||||||
|
if (planType) {
|
||||||
|
baseUsage.copilotPlan = planType;
|
||||||
|
baseUsage.plan = {
|
||||||
|
type: planType,
|
||||||
|
displayName: this.formatPlanName(planType),
|
||||||
|
isPaid: planType.toLowerCase() !== 'free',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[fetchUsageData] ✓ GitHub Copilot usage: Premium=${baseUsage.premiumInteractions?.usedPercent || 0}%, ` +
|
||||||
|
`Chat=${baseUsage.chatQuota?.usedPercent || 0}%, Plan=${planType || 'unknown'}`
|
||||||
|
);
|
||||||
|
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format plan name for display
|
||||||
|
*/
|
||||||
|
private formatPlanName(plan: string): string {
|
||||||
|
const planMap: Record<string, string> = {
|
||||||
|
free: 'Free',
|
||||||
|
individual: 'Individual',
|
||||||
|
business: 'Business',
|
||||||
|
enterprise: 'Enterprise',
|
||||||
|
};
|
||||||
|
return planMap[plan.toLowerCase()] || plan;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear cached token
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cachedToken = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
331
apps/server/src/services/cursor-usage-service.ts
Normal file
331
apps/server/src/services/cursor-usage-service.ts
Normal file
@@ -0,0 +1,331 @@
|
|||||||
|
/**
|
||||||
|
* Cursor Usage Service
|
||||||
|
*
|
||||||
|
* Fetches usage data from Cursor's API using session cookies or access token.
|
||||||
|
* Based on CodexBar reference implementation.
|
||||||
|
*
|
||||||
|
* Authentication methods (in priority order):
|
||||||
|
* 1. Cached session cookie from browser import
|
||||||
|
* 2. Access token from credentials file
|
||||||
|
*
|
||||||
|
* API Endpoints:
|
||||||
|
* - GET https://cursor.com/api/usage-summary - Plan usage, on-demand, billing dates
|
||||||
|
* - GET https://cursor.com/api/auth/me - User email and name
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as os from 'os';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { CursorProviderUsage, UsageWindow } from '@automaker/types';
|
||||||
|
|
||||||
|
const logger = createLogger('CursorUsage');

// Cursor API endpoints
const CURSOR_API_BASE = 'https://cursor.com/api';
// Plan usage, on-demand usage, and billing cycle dates
const USAGE_SUMMARY_ENDPOINT = `${CURSOR_API_BASE}/usage-summary`;
// Authenticated user info (email, name, plan)
const AUTH_ME_ENDPOINT = `${CURSOR_API_BASE}/auth/me`;

// Session cookie names used by Cursor, checked in this order when
// reading a saved session file.
const SESSION_COOKIE_NAMES = [
  'WorkosCursorSessionToken',
  '__Secure-next-auth.session-token',
  'next-auth.session-token',
];
|
||||||
|
|
||||||
|
/**
 * Shape of the usage-summary API response (only the fields we consume).
 */
interface CursorUsageSummary {
  /** Usage against the included plan quota. */
  planUsage?: {
    /** Percent of the plan quota consumed. */
    percent: number;
    /** ISO timestamp when the plan quota resets, if provided. */
    resetAt?: string;
  };
  /** Pay-as-you-go usage beyond the plan. */
  onDemandUsage?: {
    /** Percent of the on-demand budget consumed. */
    percent: number;
    /** On-demand spend in USD, if provided. */
    costUsd?: number;
  };
  /** ISO timestamp when the current billing cycle ends. */
  billingCycleEnd?: string;
  /** Plan identifier (e.g. "free", "pro"). */
  plan?: string;
}
|
||||||
|
|
||||||
|
/**
 * Shape of the auth/me API response (only the fields we consume).
 */
interface CursorAuthMe {
  /** Account email address. */
  email?: string;
  /** Display name. */
  name?: string;
  /** Plan identifier. */
  plan?: string;
}
|
||||||
|
|
||||||
|
export class CursorUsageService {
|
||||||
|
private cachedSessionCookie: string | null = null;
|
||||||
|
private cachedAccessToken: string | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if Cursor credentials are available
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
return await this.hasValidCredentials();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if we have valid Cursor credentials
|
||||||
|
*/
|
||||||
|
private async hasValidCredentials(): Promise<boolean> {
|
||||||
|
const token = await this.getAccessToken();
|
||||||
|
return !!token;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get access token from credentials file
|
||||||
|
*/
|
||||||
|
private async getAccessToken(): Promise<string | null> {
|
||||||
|
if (this.cachedAccessToken) {
|
||||||
|
return this.cachedAccessToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check environment variable first
|
||||||
|
if (process.env.CURSOR_ACCESS_TOKEN) {
|
||||||
|
this.cachedAccessToken = process.env.CURSOR_ACCESS_TOKEN;
|
||||||
|
return this.cachedAccessToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check credentials files
|
||||||
|
const credentialPaths = [
|
||||||
|
path.join(os.homedir(), '.cursor', 'credentials.json'),
|
||||||
|
path.join(os.homedir(), '.config', 'cursor', 'credentials.json'),
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const credPath of credentialPaths) {
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(credPath)) {
|
||||||
|
const content = fs.readFileSync(credPath, 'utf8');
|
||||||
|
const creds = JSON.parse(content);
|
||||||
|
if (creds.accessToken) {
|
||||||
|
this.cachedAccessToken = creds.accessToken;
|
||||||
|
return this.cachedAccessToken;
|
||||||
|
}
|
||||||
|
if (creds.token) {
|
||||||
|
this.cachedAccessToken = creds.token;
|
||||||
|
return this.cachedAccessToken;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug(`Failed to read credentials from ${credPath}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get session cookie for API calls
|
||||||
|
* Returns a cookie string like "WorkosCursorSessionToken=xxx"
|
||||||
|
*/
|
||||||
|
private async getSessionCookie(): Promise<string | null> {
|
||||||
|
if (this.cachedSessionCookie) {
|
||||||
|
return this.cachedSessionCookie;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for cookie in environment
|
||||||
|
if (process.env.CURSOR_SESSION_COOKIE) {
|
||||||
|
this.cachedSessionCookie = process.env.CURSOR_SESSION_COOKIE;
|
||||||
|
return this.cachedSessionCookie;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for saved session file
|
||||||
|
const sessionPath = path.join(os.homedir(), '.cursor', 'session.json');
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(sessionPath)) {
|
||||||
|
const content = fs.readFileSync(sessionPath, 'utf8');
|
||||||
|
const session = JSON.parse(content);
|
||||||
|
for (const cookieName of SESSION_COOKIE_NAMES) {
|
||||||
|
if (session[cookieName]) {
|
||||||
|
this.cachedSessionCookie = `${cookieName}=${session[cookieName]}`;
|
||||||
|
return this.cachedSessionCookie;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug('Failed to read session file:', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make an authenticated request to Cursor API
|
||||||
|
*/
|
||||||
|
private async makeRequest<T>(url: string): Promise<T | null> {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
Accept: 'application/json',
|
||||||
|
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36',
|
||||||
|
};
|
||||||
|
|
||||||
|
// Try access token first
|
||||||
|
const accessToken = await this.getAccessToken();
|
||||||
|
if (accessToken) {
|
||||||
|
headers['Authorization'] = `Bearer ${accessToken}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try session cookie as fallback
|
||||||
|
const sessionCookie = await this.getSessionCookie();
|
||||||
|
if (sessionCookie) {
|
||||||
|
headers['Cookie'] = sessionCookie;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!accessToken && !sessionCookie) {
|
||||||
|
logger.warn('No Cursor credentials available for API request');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 401 || response.status === 403) {
|
||||||
|
// Clear cached credentials on auth failure
|
||||||
|
this.cachedAccessToken = null;
|
||||||
|
this.cachedSessionCookie = null;
|
||||||
|
logger.warn('Cursor API authentication failed');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
logger.error(`Cursor API error: ${response.status} ${response.statusText}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.json()) as T;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to fetch from Cursor API:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage data from Cursor
|
||||||
|
*/
|
||||||
|
async fetchUsageData(): Promise<CursorProviderUsage> {
|
||||||
|
logger.info('[fetchUsageData] Starting Cursor usage fetch...');
|
||||||
|
|
||||||
|
const baseUsage: CursorProviderUsage = {
|
||||||
|
providerId: 'cursor',
|
||||||
|
providerName: 'Cursor',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if credentials are available
|
||||||
|
const hasCredentials = await this.hasValidCredentials();
|
||||||
|
if (!hasCredentials) {
|
||||||
|
baseUsage.error = 'Cursor credentials not available';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch usage summary
|
||||||
|
const usageSummary = await this.makeRequest<CursorUsageSummary>(USAGE_SUMMARY_ENDPOINT);
|
||||||
|
if (!usageSummary) {
|
||||||
|
baseUsage.error = 'Failed to fetch Cursor usage data';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
baseUsage.available = true;
|
||||||
|
|
||||||
|
// Parse plan usage
|
||||||
|
if (usageSummary.planUsage) {
|
||||||
|
const planWindow: UsageWindow = {
|
||||||
|
name: 'Plan Usage',
|
||||||
|
usedPercent: usageSummary.planUsage.percent || 0,
|
||||||
|
resetsAt: usageSummary.planUsage.resetAt || '',
|
||||||
|
resetText: usageSummary.planUsage.resetAt
|
||||||
|
? this.formatResetTime(usageSummary.planUsage.resetAt)
|
||||||
|
: '',
|
||||||
|
};
|
||||||
|
baseUsage.primary = planWindow;
|
||||||
|
baseUsage.planUsage = planWindow;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse on-demand usage
|
||||||
|
if (usageSummary.onDemandUsage) {
|
||||||
|
const onDemandWindow: UsageWindow = {
|
||||||
|
name: 'On-Demand Usage',
|
||||||
|
usedPercent: usageSummary.onDemandUsage.percent || 0,
|
||||||
|
resetsAt: usageSummary.billingCycleEnd || '',
|
||||||
|
resetText: usageSummary.billingCycleEnd
|
||||||
|
? this.formatResetTime(usageSummary.billingCycleEnd)
|
||||||
|
: '',
|
||||||
|
};
|
||||||
|
baseUsage.secondary = onDemandWindow;
|
||||||
|
baseUsage.onDemandUsage = onDemandWindow;
|
||||||
|
|
||||||
|
if (usageSummary.onDemandUsage.costUsd !== undefined) {
|
||||||
|
baseUsage.onDemandCostUsd = usageSummary.onDemandUsage.costUsd;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse billing cycle end
|
||||||
|
if (usageSummary.billingCycleEnd) {
|
||||||
|
baseUsage.billingCycleEnd = usageSummary.billingCycleEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse plan type
|
||||||
|
if (usageSummary.plan) {
|
||||||
|
baseUsage.plan = {
|
||||||
|
type: usageSummary.plan,
|
||||||
|
displayName: this.formatPlanName(usageSummary.plan),
|
||||||
|
isPaid: usageSummary.plan.toLowerCase() !== 'free',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[fetchUsageData] ✓ Cursor usage: Plan=${baseUsage.planUsage?.usedPercent || 0}%, ` +
|
||||||
|
`OnDemand=${baseUsage.onDemandUsage?.usedPercent || 0}%`
|
||||||
|
);
|
||||||
|
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format reset time as human-readable string
|
||||||
|
*/
|
||||||
|
private formatResetTime(resetAt: string): string {
|
||||||
|
try {
|
||||||
|
const date = new Date(resetAt);
|
||||||
|
const now = new Date();
|
||||||
|
const diff = date.getTime() - now.getTime();
|
||||||
|
|
||||||
|
if (diff < 0) return 'Expired';
|
||||||
|
|
||||||
|
const hours = Math.floor(diff / 3600000);
|
||||||
|
const days = Math.floor(hours / 24);
|
||||||
|
|
||||||
|
if (days > 0) {
|
||||||
|
return `Resets in ${days}d`;
|
||||||
|
}
|
||||||
|
if (hours > 0) {
|
||||||
|
return `Resets in ${hours}h`;
|
||||||
|
}
|
||||||
|
return 'Resets soon';
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format plan name for display
|
||||||
|
*/
|
||||||
|
private formatPlanName(plan: string): string {
|
||||||
|
const planMap: Record<string, string> = {
|
||||||
|
free: 'Free',
|
||||||
|
pro: 'Pro',
|
||||||
|
business: 'Business',
|
||||||
|
enterprise: 'Enterprise',
|
||||||
|
};
|
||||||
|
return planMap[plan.toLowerCase()] || plan;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear cached credentials (useful for logout)
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cachedAccessToken = null;
|
||||||
|
this.cachedSessionCookie = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -273,12 +273,56 @@ class DevServerService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
  /**
   * Parse a custom command string into a command and its arguments.
   * Handles quoted strings with spaces (e.g., "my command" arg1 arg2);
   * both double and single quotes are recognized.
   */
  private parseCustomCommand(command: string): { cmd: string; args: string[] } {
    const tokens: string[] = [];
    let current = '';      // token currently being accumulated
    let inQuote = false;   // true while inside a quoted region
    let quoteChar = '';    // which quote character opened the region

    for (let i = 0; i < command.length; i++) {
      const char = command[i];

      if (inQuote) {
        if (char === quoteChar) {
          // Closing quote ends the quoted region but keeps accumulating,
          // so foo"bar baz" parses as a single token.
          inQuote = false;
        } else {
          current += char;
        }
      } else if (char === '"' || char === "'") {
        inQuote = true;
        quoteChar = char;
      } else if (char === ' ') {
        // Space outside quotes terminates the current token (runs of
        // spaces produce no empty tokens).
        if (current) {
          tokens.push(current);
          current = '';
        }
      } else {
        current += char;
      }
    }

    // Flush the final token, if any.
    if (current) {
      tokens.push(current);
    }
    // NOTE(review): an unterminated quote is silently accepted, and an
    // empty quoted token ("") is dropped entirely — confirm both are
    // acceptable for custom dev-server commands.

    const [cmd, ...args] = tokens;
    return { cmd: cmd || '', args };
  }
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start a dev server for a worktree
|
* Start a dev server for a worktree
|
||||||
|
* @param projectPath - The project root path
|
||||||
|
* @param worktreePath - The worktree directory path
|
||||||
|
* @param customCommand - Optional custom command to run instead of auto-detected dev command
|
||||||
*/
|
*/
|
||||||
async startDevServer(
|
async startDevServer(
|
||||||
projectPath: string,
|
projectPath: string,
|
||||||
worktreePath: string
|
worktreePath: string,
|
||||||
|
customCommand?: string
|
||||||
): Promise<{
|
): Promise<{
|
||||||
success: boolean;
|
success: boolean;
|
||||||
result?: {
|
result?: {
|
||||||
@@ -311,7 +355,24 @@ class DevServerService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for package.json
|
// Determine the dev command to use
|
||||||
|
let devCommand: { cmd: string; args: string[] };
|
||||||
|
|
||||||
|
// Normalize custom command: trim whitespace and treat empty strings as undefined
|
||||||
|
const normalizedCustomCommand = customCommand?.trim();
|
||||||
|
|
||||||
|
if (normalizedCustomCommand) {
|
||||||
|
// Use the provided custom command
|
||||||
|
devCommand = this.parseCustomCommand(normalizedCustomCommand);
|
||||||
|
if (!devCommand.cmd) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid custom command: command cannot be empty',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
logger.debug(`Using custom command: ${normalizedCustomCommand}`);
|
||||||
|
} else {
|
||||||
|
// Check for package.json when auto-detecting
|
||||||
const packageJsonPath = path.join(worktreePath, 'package.json');
|
const packageJsonPath = path.join(worktreePath, 'package.json');
|
||||||
if (!(await this.fileExists(packageJsonPath))) {
|
if (!(await this.fileExists(packageJsonPath))) {
|
||||||
return {
|
return {
|
||||||
@@ -320,14 +381,16 @@ class DevServerService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get dev command
|
// Get dev command from package manager detection
|
||||||
const devCommand = await this.getDevCommand(worktreePath);
|
const detectedCommand = await this.getDevCommand(worktreePath);
|
||||||
if (!devCommand) {
|
if (!detectedCommand) {
|
||||||
return {
|
return {
|
||||||
success: false,
|
success: false,
|
||||||
error: `Could not determine dev command for: ${worktreePath}`,
|
error: `Could not determine dev command for: ${worktreePath}`,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
devCommand = detectedCommand;
|
||||||
|
}
|
||||||
|
|
||||||
// Find available port
|
// Find available port
|
||||||
let port: number;
|
let port: number;
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ import { createLogger } from '@automaker/utils';
|
|||||||
import type { EventEmitter } from '../lib/events.js';
|
import type { EventEmitter } from '../lib/events.js';
|
||||||
import type { SettingsService } from './settings-service.js';
|
import type { SettingsService } from './settings-service.js';
|
||||||
import type { EventHistoryService } from './event-history-service.js';
|
import type { EventHistoryService } from './event-history-service.js';
|
||||||
|
import type { FeatureLoader } from './feature-loader.js';
|
||||||
import type {
|
import type {
|
||||||
EventHook,
|
EventHook,
|
||||||
EventHookTrigger,
|
EventHookTrigger,
|
||||||
@@ -57,6 +58,7 @@ interface HookContext {
|
|||||||
interface AutoModeEventPayload {
|
interface AutoModeEventPayload {
|
||||||
type?: string;
|
type?: string;
|
||||||
featureId?: string;
|
featureId?: string;
|
||||||
|
featureName?: string;
|
||||||
passes?: boolean;
|
passes?: boolean;
|
||||||
message?: string;
|
message?: string;
|
||||||
error?: string;
|
error?: string;
|
||||||
@@ -83,19 +85,22 @@ export class EventHookService {
|
|||||||
private emitter: EventEmitter | null = null;
|
private emitter: EventEmitter | null = null;
|
||||||
private settingsService: SettingsService | null = null;
|
private settingsService: SettingsService | null = null;
|
||||||
private eventHistoryService: EventHistoryService | null = null;
|
private eventHistoryService: EventHistoryService | null = null;
|
||||||
|
private featureLoader: FeatureLoader | null = null;
|
||||||
private unsubscribe: (() => void) | null = null;
|
private unsubscribe: (() => void) | null = null;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Initialize the service with event emitter, settings service, and event history service
|
* Initialize the service with event emitter, settings service, event history service, and feature loader
|
||||||
*/
|
*/
|
||||||
initialize(
|
initialize(
|
||||||
emitter: EventEmitter,
|
emitter: EventEmitter,
|
||||||
settingsService: SettingsService,
|
settingsService: SettingsService,
|
||||||
eventHistoryService?: EventHistoryService
|
eventHistoryService?: EventHistoryService,
|
||||||
|
featureLoader?: FeatureLoader
|
||||||
): void {
|
): void {
|
||||||
this.emitter = emitter;
|
this.emitter = emitter;
|
||||||
this.settingsService = settingsService;
|
this.settingsService = settingsService;
|
||||||
this.eventHistoryService = eventHistoryService || null;
|
this.eventHistoryService = eventHistoryService || null;
|
||||||
|
this.featureLoader = featureLoader || null;
|
||||||
|
|
||||||
// Subscribe to events
|
// Subscribe to events
|
||||||
this.unsubscribe = emitter.subscribe((type, payload) => {
|
this.unsubscribe = emitter.subscribe((type, payload) => {
|
||||||
@@ -120,6 +125,7 @@ export class EventHookService {
|
|||||||
this.emitter = null;
|
this.emitter = null;
|
||||||
this.settingsService = null;
|
this.settingsService = null;
|
||||||
this.eventHistoryService = null;
|
this.eventHistoryService = null;
|
||||||
|
this.featureLoader = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -149,9 +155,23 @@ export class EventHookService {
|
|||||||
|
|
||||||
if (!trigger) return;
|
if (!trigger) return;
|
||||||
|
|
||||||
|
// Load feature name if we have featureId but no featureName
|
||||||
|
let featureName: string | undefined = undefined;
|
||||||
|
if (payload.featureId && payload.projectPath && this.featureLoader) {
|
||||||
|
try {
|
||||||
|
const feature = await this.featureLoader.get(payload.projectPath, payload.featureId);
|
||||||
|
if (feature?.title) {
|
||||||
|
featureName = feature.title;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`Failed to load feature ${payload.featureId} for event hook:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Build context for variable substitution
|
// Build context for variable substitution
|
||||||
const context: HookContext = {
|
const context: HookContext = {
|
||||||
featureId: payload.featureId,
|
featureId: payload.featureId,
|
||||||
|
featureName: payload.featureName,
|
||||||
projectPath: payload.projectPath,
|
projectPath: payload.projectPath,
|
||||||
projectName: payload.projectPath ? this.extractProjectName(payload.projectPath) : undefined,
|
projectName: payload.projectPath ? this.extractProjectName(payload.projectPath) : undefined,
|
||||||
error: payload.error || payload.message,
|
error: payload.error || payload.message,
|
||||||
@@ -313,6 +333,7 @@ export class EventHookService {
|
|||||||
eventType: context.eventType,
|
eventType: context.eventType,
|
||||||
timestamp: context.timestamp,
|
timestamp: context.timestamp,
|
||||||
featureId: context.featureId,
|
featureId: context.featureId,
|
||||||
|
featureName: context.featureName,
|
||||||
projectPath: context.projectPath,
|
projectPath: context.projectPath,
|
||||||
projectName: context.projectName,
|
projectName: context.projectName,
|
||||||
error: context.error,
|
error: context.error,
|
||||||
|
|||||||
540
apps/server/src/services/feature-export-service.ts
Normal file
540
apps/server/src/services/feature-export-service.ts
Normal file
@@ -0,0 +1,540 @@
|
|||||||
|
/**
|
||||||
|
* Feature Export Service - Handles exporting and importing features in JSON/YAML formats
|
||||||
|
*
|
||||||
|
* Provides functionality to:
|
||||||
|
* - Export single features to JSON or YAML format
|
||||||
|
* - Export multiple features (bulk export)
|
||||||
|
* - Import features from JSON or YAML data
|
||||||
|
* - Validate import data for compatibility
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import { stringify as yamlStringify, parse as yamlParse } from 'yaml';
|
||||||
|
import type { Feature, FeatureExport, FeatureImport, FeatureImportResult } from '@automaker/types';
|
||||||
|
import { FeatureLoader } from './feature-loader.js';
|
||||||
|
|
||||||
|
const logger = createLogger('FeatureExportService');

/** Current export format version, embedded in every export envelope. */
export const FEATURE_EXPORT_VERSION = '1.0.0';

/** Supported export serialization formats. */
export type ExportFormat = 'json' | 'yaml';
|
||||||
|
|
||||||
|
/** Options for exporting a single feature. */
export interface ExportOptions {
  /** Format to export in (default: 'json') */
  format?: ExportFormat;
  /** Whether to include description history (default: true) */
  includeHistory?: boolean;
  /** Whether to include plan spec (default: true) */
  includePlanSpec?: boolean;
  /** Optional metadata to include in the export envelope */
  metadata?: {
    projectName?: string;
    projectPath?: string;
    branch?: string;
    /** Arbitrary additional metadata entries */
    [key: string]: unknown;
  };
  /** Who/what is performing the export */
  exportedBy?: string;
  /** Pretty print output (default: true) */
  prettyPrint?: boolean;
}
|
||||||
|
|
||||||
|
/** Options for bulk export; filters are combined with AND. */
export interface BulkExportOptions extends ExportOptions {
  /** Only export features with this category */
  category?: string;
  /** Only export features with this status */
  status?: string;
  /** Feature IDs to include (if not specified, exports all) */
  featureIds?: string[];
}
|
||||||
|
|
||||||
|
/** Envelope produced by a bulk export. */
export interface BulkExportResult {
  /** Export format version */
  version: string;
  /** ISO date string when the export was created */
  exportedAt: string;
  /** Number of features exported */
  count: number;
  /** The exported features */
  features: FeatureExport[];
  /** Export metadata */
  metadata?: {
    projectName?: string;
    projectPath?: string;
    branch?: string;
    /** Arbitrary additional metadata entries */
    [key: string]: unknown;
  };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* FeatureExportService - Manages feature export and import operations
|
||||||
|
*/
|
||||||
|
export class FeatureExportService {
|
||||||
|
  /** Loader used to read features from disk. */
  private featureLoader: FeatureLoader;

  /**
   * @param featureLoader - Optional loader override (useful for testing);
   *   defaults to a fresh FeatureLoader instance.
   */
  constructor(featureLoader?: FeatureLoader) {
    this.featureLoader = featureLoader || new FeatureLoader();
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Export a single feature to the specified format
|
||||||
|
*
|
||||||
|
* @param projectPath - Path to the project
|
||||||
|
* @param featureId - ID of the feature to export
|
||||||
|
* @param options - Export options
|
||||||
|
* @returns Promise resolving to the exported feature string
|
||||||
|
*/
|
||||||
|
async exportFeature(
|
||||||
|
projectPath: string,
|
||||||
|
featureId: string,
|
||||||
|
options: ExportOptions = {}
|
||||||
|
): Promise<string> {
|
||||||
|
const feature = await this.featureLoader.get(projectPath, featureId);
|
||||||
|
if (!feature) {
|
||||||
|
throw new Error(`Feature ${featureId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.exportFeatureData(feature, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * Export feature data to the specified format (without fetching from disk).
   *
   * @param feature - The feature to export
   * @param options - Export options
   * @returns The exported feature string
   */
  exportFeatureData(feature: Feature, options: ExportOptions = {}): string {
    // Apply defaults: JSON, everything included, pretty-printed.
    const {
      format = 'json',
      includeHistory = true,
      includePlanSpec = true,
      metadata,
      exportedBy,
      prettyPrint = true,
    } = options;

    // Prepare feature data, optionally excluding some fields
    const featureData = this.prepareFeatureForExport(feature, {
      includeHistory,
      includePlanSpec,
    });

    // Build the versioned envelope; optional fields are omitted entirely
    // (rather than set to undefined) via conditional spreads.
    const exportData: FeatureExport = {
      version: FEATURE_EXPORT_VERSION,
      feature: featureData,
      exportedAt: new Date().toISOString(),
      ...(exportedBy ? { exportedBy } : {}),
      ...(metadata ? { metadata } : {}),
    };

    return this.serialize(exportData, format, prettyPrint);
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Export multiple features to the specified format
|
||||||
|
*
|
||||||
|
* @param projectPath - Path to the project
|
||||||
|
* @param options - Bulk export options
|
||||||
|
* @returns Promise resolving to the exported features string
|
||||||
|
*/
|
||||||
|
async exportFeatures(projectPath: string, options: BulkExportOptions = {}): Promise<string> {
|
||||||
|
const {
|
||||||
|
format = 'json',
|
||||||
|
category,
|
||||||
|
status,
|
||||||
|
featureIds,
|
||||||
|
includeHistory = true,
|
||||||
|
includePlanSpec = true,
|
||||||
|
metadata,
|
||||||
|
prettyPrint = true,
|
||||||
|
} = options;
|
||||||
|
|
||||||
|
// Get all features
|
||||||
|
let features = await this.featureLoader.getAll(projectPath);
|
||||||
|
|
||||||
|
// Apply filters
|
||||||
|
if (featureIds && featureIds.length > 0) {
|
||||||
|
const idSet = new Set(featureIds);
|
||||||
|
features = features.filter((f) => idSet.has(f.id));
|
||||||
|
}
|
||||||
|
if (category) {
|
||||||
|
features = features.filter((f) => f.category === category);
|
||||||
|
}
|
||||||
|
if (status) {
|
||||||
|
features = features.filter((f) => f.status === status);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate timestamp once for consistent export time across all features
|
||||||
|
const exportedAt = new Date().toISOString();
|
||||||
|
|
||||||
|
// Prepare feature exports
|
||||||
|
const featureExports: FeatureExport[] = features.map((feature) => ({
|
||||||
|
version: FEATURE_EXPORT_VERSION,
|
||||||
|
feature: this.prepareFeatureForExport(feature, { includeHistory, includePlanSpec }),
|
||||||
|
exportedAt,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const bulkExport: BulkExportResult = {
|
||||||
|
version: FEATURE_EXPORT_VERSION,
|
||||||
|
exportedAt,
|
||||||
|
count: featureExports.length,
|
||||||
|
features: featureExports,
|
||||||
|
...(metadata ? { metadata } : {}),
|
||||||
|
};
|
||||||
|
|
||||||
|
logger.info(`Exported ${featureExports.length} features from ${projectPath}`);
|
||||||
|
|
||||||
|
return this.serialize(bulkExport, format, prettyPrint);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import a feature from JSON or YAML data
|
||||||
|
*
|
||||||
|
* @param projectPath - Path to the project
|
||||||
|
* @param importData - Import configuration
|
||||||
|
* @returns Promise resolving to the import result
|
||||||
|
*/
|
||||||
|
async importFeature(
|
||||||
|
projectPath: string,
|
||||||
|
importData: FeatureImport
|
||||||
|
): Promise<FeatureImportResult> {
|
||||||
|
const warnings: string[] = [];
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Extract feature from data (handle both raw Feature and wrapped FeatureExport)
|
||||||
|
const feature = this.extractFeatureFromImport(importData.data);
|
||||||
|
if (!feature) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
importedAt: new Date().toISOString(),
|
||||||
|
errors: ['Invalid import data: could not extract feature'],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate required fields
|
||||||
|
const validationErrors = this.validateFeature(feature);
|
||||||
|
if (validationErrors.length > 0) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
importedAt: new Date().toISOString(),
|
||||||
|
errors: validationErrors,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the feature ID to use
|
||||||
|
const featureId = importData.newId || feature.id || this.featureLoader.generateFeatureId();
|
||||||
|
|
||||||
|
// Check for existing feature
|
||||||
|
const existingFeature = await this.featureLoader.get(projectPath, featureId);
|
||||||
|
if (existingFeature && !importData.overwrite) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
importedAt: new Date().toISOString(),
|
||||||
|
errors: [`Feature with ID ${featureId} already exists. Set overwrite: true to replace.`],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare feature for import
|
||||||
|
const featureToImport: Feature = {
|
||||||
|
...feature,
|
||||||
|
id: featureId,
|
||||||
|
// Optionally override category
|
||||||
|
...(importData.targetCategory ? { category: importData.targetCategory } : {}),
|
||||||
|
// Clear branch info if not preserving
|
||||||
|
...(importData.preserveBranchInfo ? {} : { branchName: undefined }),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Clear runtime-specific fields that shouldn't be imported
|
||||||
|
delete featureToImport.titleGenerating;
|
||||||
|
delete featureToImport.error;
|
||||||
|
|
||||||
|
// Handle image paths - they won't be valid after import
|
||||||
|
if (featureToImport.imagePaths && featureToImport.imagePaths.length > 0) {
|
||||||
|
warnings.push(
|
||||||
|
`Feature had ${featureToImport.imagePaths.length} image path(s) that were cleared during import. Images must be re-attached.`
|
||||||
|
);
|
||||||
|
featureToImport.imagePaths = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle text file paths - they won't be valid after import
|
||||||
|
if (featureToImport.textFilePaths && featureToImport.textFilePaths.length > 0) {
|
||||||
|
warnings.push(
|
||||||
|
`Feature had ${featureToImport.textFilePaths.length} text file path(s) that were cleared during import. Files must be re-attached.`
|
||||||
|
);
|
||||||
|
featureToImport.textFilePaths = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create or update the feature
|
||||||
|
if (existingFeature) {
|
||||||
|
await this.featureLoader.update(projectPath, featureId, featureToImport);
|
||||||
|
logger.info(`Updated feature ${featureId} via import`);
|
||||||
|
} else {
|
||||||
|
await this.featureLoader.create(projectPath, featureToImport);
|
||||||
|
logger.info(`Created feature ${featureId} via import`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
featureId,
|
||||||
|
importedAt: new Date().toISOString(),
|
||||||
|
warnings: warnings.length > 0 ? warnings : undefined,
|
||||||
|
wasOverwritten: !!existingFeature,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to import feature:', error);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
importedAt: new Date().toISOString(),
|
||||||
|
errors: [`Import failed: ${error instanceof Error ? error.message : String(error)}`],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import multiple features from JSON or YAML data
|
||||||
|
*
|
||||||
|
* @param projectPath - Path to the project
|
||||||
|
* @param data - Raw JSON or YAML string, or parsed data
|
||||||
|
* @param options - Import options applied to all features
|
||||||
|
* @returns Promise resolving to array of import results
|
||||||
|
*/
|
||||||
|
async importFeatures(
|
||||||
|
projectPath: string,
|
||||||
|
data: string | BulkExportResult,
|
||||||
|
options: Omit<FeatureImport, 'data'> = {}
|
||||||
|
): Promise<FeatureImportResult[]> {
|
||||||
|
let bulkData: BulkExportResult;
|
||||||
|
|
||||||
|
// Parse if string
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
const parsed = this.parseImportData(data);
|
||||||
|
if (!parsed || !this.isBulkExport(parsed)) {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
importedAt: new Date().toISOString(),
|
||||||
|
errors: ['Invalid bulk import data: expected BulkExportResult format'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
bulkData = parsed as BulkExportResult;
|
||||||
|
} else {
|
||||||
|
bulkData = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import each feature
|
||||||
|
const results: FeatureImportResult[] = [];
|
||||||
|
for (const featureExport of bulkData.features) {
|
||||||
|
const result = await this.importFeature(projectPath, {
|
||||||
|
data: featureExport,
|
||||||
|
...options,
|
||||||
|
});
|
||||||
|
results.push(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
const successCount = results.filter((r) => r.success).length;
|
||||||
|
logger.info(`Bulk import complete: ${successCount}/${results.length} features imported`);
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse import data from JSON or YAML string
|
||||||
|
*
|
||||||
|
* @param data - Raw JSON or YAML string
|
||||||
|
* @returns Parsed data or null if parsing fails
|
||||||
|
*/
|
||||||
|
parseImportData(data: string): Feature | FeatureExport | BulkExportResult | null {
|
||||||
|
const trimmed = data.trim();
|
||||||
|
|
||||||
|
// Try JSON first
|
||||||
|
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||||
|
try {
|
||||||
|
return JSON.parse(trimmed);
|
||||||
|
} catch {
|
||||||
|
// Fall through to YAML
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try YAML
|
||||||
|
try {
|
||||||
|
return yamlParse(trimmed);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to parse import data:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detect the format of import data
|
||||||
|
*
|
||||||
|
* @param data - Raw string data
|
||||||
|
* @returns Detected format or null if unknown
|
||||||
|
*/
|
||||||
|
detectFormat(data: string): ExportFormat | null {
|
||||||
|
const trimmed = data.trim();
|
||||||
|
|
||||||
|
// JSON detection
|
||||||
|
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||||
|
try {
|
||||||
|
JSON.parse(trimmed);
|
||||||
|
return 'json';
|
||||||
|
} catch {
|
||||||
|
// Not valid JSON
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// YAML detection (if it parses and wasn't JSON)
|
||||||
|
try {
|
||||||
|
yamlParse(trimmed);
|
||||||
|
return 'yaml';
|
||||||
|
} catch {
|
||||||
|
// Not valid YAML either
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prepare a feature for export by optionally removing fields
|
||||||
|
*/
|
||||||
|
private prepareFeatureForExport(
|
||||||
|
feature: Feature,
|
||||||
|
options: { includeHistory?: boolean; includePlanSpec?: boolean }
|
||||||
|
): Feature {
|
||||||
|
const { includeHistory = true, includePlanSpec = true } = options;
|
||||||
|
|
||||||
|
// Clone to avoid modifying original
|
||||||
|
const exported: Feature = { ...feature };
|
||||||
|
|
||||||
|
// Remove transient fields that shouldn't be exported
|
||||||
|
delete exported.titleGenerating;
|
||||||
|
delete exported.error;
|
||||||
|
|
||||||
|
// Optionally exclude history
|
||||||
|
if (!includeHistory) {
|
||||||
|
delete exported.descriptionHistory;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Optionally exclude plan spec
|
||||||
|
if (!includePlanSpec) {
|
||||||
|
delete exported.planSpec;
|
||||||
|
}
|
||||||
|
|
||||||
|
return exported;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract a Feature from import data (handles both raw and wrapped formats)
|
||||||
|
*/
|
||||||
|
private extractFeatureFromImport(data: Feature | FeatureExport): Feature | null {
|
||||||
|
if (!data || typeof data !== 'object') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's a FeatureExport wrapper
|
||||||
|
if ('version' in data && 'feature' in data && 'exportedAt' in data) {
|
||||||
|
const exportData = data as FeatureExport;
|
||||||
|
return exportData.feature;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assume it's a raw Feature
|
||||||
|
return data as Feature;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if parsed data is a bulk export
|
||||||
|
*/
|
||||||
|
isBulkExport(data: unknown): data is BulkExportResult {
|
||||||
|
if (!data || typeof data !== 'object') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const obj = data as Record<string, unknown>;
|
||||||
|
return 'version' in obj && 'features' in obj && Array.isArray(obj.features);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if parsed data is a single FeatureExport
|
||||||
|
*/
|
||||||
|
isFeatureExport(data: unknown): data is FeatureExport {
|
||||||
|
if (!data || typeof data !== 'object') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const obj = data as Record<string, unknown>;
|
||||||
|
return (
|
||||||
|
'version' in obj &&
|
||||||
|
'feature' in obj &&
|
||||||
|
'exportedAt' in obj &&
|
||||||
|
typeof obj.feature === 'object' &&
|
||||||
|
obj.feature !== null &&
|
||||||
|
'id' in (obj.feature as Record<string, unknown>)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if parsed data is a raw Feature
|
||||||
|
*/
|
||||||
|
isRawFeature(data: unknown): data is Feature {
|
||||||
|
if (!data || typeof data !== 'object') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const obj = data as Record<string, unknown>;
|
||||||
|
// A raw feature has 'id' but not the 'version' + 'feature' wrapper of FeatureExport
|
||||||
|
return 'id' in obj && !('feature' in obj && 'version' in obj);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate a feature has required fields
|
||||||
|
*/
|
||||||
|
private validateFeature(feature: Feature): string[] {
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
if (!feature.description && !feature.title) {
|
||||||
|
errors.push('Feature must have at least a title or description');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!feature.category) {
|
||||||
|
errors.push('Feature must have a category');
|
||||||
|
}
|
||||||
|
|
||||||
|
return errors;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Serialize export data to string (handles both single feature and bulk exports)
|
||||||
|
*/
|
||||||
|
private serialize<T extends FeatureExport | BulkExportResult>(
|
||||||
|
data: T,
|
||||||
|
format: ExportFormat,
|
||||||
|
prettyPrint: boolean
|
||||||
|
): string {
|
||||||
|
if (format === 'yaml') {
|
||||||
|
return yamlStringify(data, {
|
||||||
|
indent: 2,
|
||||||
|
lineWidth: 120,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return prettyPrint ? JSON.stringify(data, null, 2) : JSON.stringify(data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Singleton instance
|
||||||
|
let featureExportServiceInstance: FeatureExportService | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the singleton feature export service instance
|
||||||
|
*/
|
||||||
|
export function getFeatureExportService(): FeatureExportService {
|
||||||
|
if (!featureExportServiceInstance) {
|
||||||
|
featureExportServiceInstance = new FeatureExportService();
|
||||||
|
}
|
||||||
|
return featureExportServiceInstance;
|
||||||
|
}
|
||||||
362
apps/server/src/services/gemini-usage-service.ts
Normal file
362
apps/server/src/services/gemini-usage-service.ts
Normal file
@@ -0,0 +1,362 @@
|
|||||||
|
/**
|
||||||
|
* Gemini Usage Service
|
||||||
|
*
|
||||||
|
* Fetches usage data from Google's Gemini/Cloud Code API using OAuth credentials.
|
||||||
|
* Based on CodexBar reference implementation.
|
||||||
|
*
|
||||||
|
* Authentication methods:
|
||||||
|
* 1. OAuth credentials from ~/.gemini/oauth_creds.json
|
||||||
|
* 2. API key (limited - only supports API calls, not quota info)
|
||||||
|
*
|
||||||
|
* API Endpoints:
|
||||||
|
* - POST https://cloudcode-pa.googleapis.com/v1internal:retrieveUserQuota - Quota info
|
||||||
|
* - POST https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist - Tier detection
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as os from 'os';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { GeminiProviderUsage, UsageWindow } from '@automaker/types';
|
||||||
|
|
||||||
|
const logger = createLogger('GeminiUsage');
|
||||||
|
|
||||||
|
// Gemini API endpoints
|
||||||
|
const QUOTA_ENDPOINT = 'https://cloudcode-pa.googleapis.com/v1internal:retrieveUserQuota';
|
||||||
|
const CODE_ASSIST_ENDPOINT = 'https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist';
|
||||||
|
const TOKEN_REFRESH_ENDPOINT = 'https://oauth2.googleapis.com/token';
|
||||||
|
|
||||||
|
// Gemini CLI client credentials (from Gemini CLI installation)
|
||||||
|
// These are embedded in the Gemini CLI and are public
|
||||||
|
const GEMINI_CLIENT_ID =
|
||||||
|
'764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';
|
||||||
|
const GEMINI_CLIENT_SECRET = 'd-FL95Q19q7MQmFpd7hHD0Ty';
|
||||||
|
|
||||||
|
/** Shape of the OAuth credential file written by the Gemini CLI (~/.gemini/oauth_creds.json). */
interface GeminiOAuthCreds {
  // Bearer token sent on API requests
  access_token: string;
  // Long-lived token used to obtain fresh access tokens
  refresh_token: string;
  // OpenID Connect ID token; not always present
  id_token?: string;
  // Access-token expiry as a Unix epoch timestamp in milliseconds (compared against Date.now())
  expiry_date: number;
}
|
||||||
|
|
||||||
|
/** Response body of the retrieveUserQuota endpoint (only the fields this service reads). */
interface GeminiQuotaResponse {
  quotas?: Array<{
    // Fraction of the quota still remaining (0..1); used% is derived as 1 - remainingFraction
    remainingFraction: number;
    // Timestamp string for when the quota window resets (parsed with new Date())
    resetTime: string;
    // Model the quota applies to — presumably a model identifier; TODO confirm values
    modelId?: string;
  }>;
}
|
||||||
|
|
||||||
|
/** Response body of the loadCodeAssist endpoint, used for tier detection. */
interface GeminiCodeAssistResponse {
  // Tier identifier; known values handled downstream: 'standard-tier', 'free-tier', 'legacy-tier'
  tier?: string;
  claims?: {
    // Hosted domain claim — presence indicates a Google Workspace account
    hd?: string;
  };
}
|
||||||
|
|
||||||
|
export class GeminiUsageService {
|
||||||
|
private cachedCreds: GeminiOAuthCreds | null = null;
|
||||||
|
private settingsPath = path.join(os.homedir(), '.gemini', 'settings.json');
|
||||||
|
private credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if Gemini credentials are available
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
const creds = await this.getOAuthCreds();
|
||||||
|
return !!creds;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get authentication type from settings
|
||||||
|
*/
|
||||||
|
private getAuthType(): string | null {
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(this.settingsPath)) {
|
||||||
|
const content = fs.readFileSync(this.settingsPath, 'utf8');
|
||||||
|
const settings = JSON.parse(content);
|
||||||
|
return settings.auth_type || settings.authType || null;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug('Failed to read Gemini settings:', error);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get OAuth credentials from file
|
||||||
|
*/
|
||||||
|
private async getOAuthCreds(): Promise<GeminiOAuthCreds | null> {
|
||||||
|
// Check auth type - only oauth-personal supports quota API
|
||||||
|
const authType = this.getAuthType();
|
||||||
|
if (authType && authType !== 'oauth-personal') {
|
||||||
|
logger.debug(`Gemini auth type is ${authType}, not oauth-personal - quota API not available`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check cached credentials
|
||||||
|
if (this.cachedCreds) {
|
||||||
|
// Check if expired
|
||||||
|
if (this.cachedCreds.expiry_date > Date.now()) {
|
||||||
|
return this.cachedCreds;
|
||||||
|
}
|
||||||
|
// Try to refresh
|
||||||
|
const refreshed = await this.refreshToken(this.cachedCreds.refresh_token);
|
||||||
|
if (refreshed) {
|
||||||
|
this.cachedCreds = refreshed;
|
||||||
|
return this.cachedCreds;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load from file
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(this.credsPath)) {
|
||||||
|
const content = fs.readFileSync(this.credsPath, 'utf8');
|
||||||
|
const creds = JSON.parse(content) as GeminiOAuthCreds;
|
||||||
|
|
||||||
|
// Check if expired
|
||||||
|
if (creds.expiry_date && creds.expiry_date <= Date.now()) {
|
||||||
|
// Try to refresh
|
||||||
|
if (creds.refresh_token) {
|
||||||
|
const refreshed = await this.refreshToken(creds.refresh_token);
|
||||||
|
if (refreshed) {
|
||||||
|
this.cachedCreds = refreshed;
|
||||||
|
// Save refreshed credentials
|
||||||
|
this.saveCreds(refreshed);
|
||||||
|
return this.cachedCreds;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
logger.warn('Gemini OAuth token expired and refresh failed');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.cachedCreds = creds;
|
||||||
|
return this.cachedCreds;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug('Failed to read Gemini OAuth credentials:', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh OAuth token
|
||||||
|
*/
|
||||||
|
private async refreshToken(refreshToken: string): Promise<GeminiOAuthCreds | null> {
|
||||||
|
try {
|
||||||
|
const response = await fetch(TOKEN_REFRESH_ENDPOINT, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/x-www-form-urlencoded',
|
||||||
|
},
|
||||||
|
body: new URLSearchParams({
|
||||||
|
client_id: GEMINI_CLIENT_ID,
|
||||||
|
client_secret: GEMINI_CLIENT_SECRET,
|
||||||
|
refresh_token: refreshToken,
|
||||||
|
grant_type: 'refresh_token',
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
logger.error(`Token refresh failed: ${response.status}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as {
|
||||||
|
access_token: string;
|
||||||
|
expires_in: number;
|
||||||
|
id_token?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
access_token: data.access_token,
|
||||||
|
refresh_token: refreshToken,
|
||||||
|
id_token: data.id_token,
|
||||||
|
expiry_date: Date.now() + data.expires_in * 1000,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to refresh Gemini token:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save credentials to file
|
||||||
|
*/
|
||||||
|
private saveCreds(creds: GeminiOAuthCreds): void {
|
||||||
|
try {
|
||||||
|
const dir = path.dirname(this.credsPath);
|
||||||
|
if (!fs.existsSync(dir)) {
|
||||||
|
fs.mkdirSync(dir, { recursive: true });
|
||||||
|
}
|
||||||
|
fs.writeFileSync(this.credsPath, JSON.stringify(creds, null, 2));
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to save Gemini credentials:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make an authenticated request to Gemini API
|
||||||
|
*/
|
||||||
|
private async makeRequest<T>(url: string, body?: unknown): Promise<T | null> {
|
||||||
|
const creds = await this.getOAuthCreds();
|
||||||
|
if (!creds) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${creds.access_token}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: body ? JSON.stringify(body) : undefined,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 401 || response.status === 403) {
|
||||||
|
// Clear cached credentials on auth failure
|
||||||
|
this.cachedCreds = null;
|
||||||
|
logger.warn('Gemini API authentication failed');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
logger.error(`Gemini API error: ${response.status} ${response.statusText}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.json()) as T;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to fetch from Gemini API:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage data from Gemini
|
||||||
|
*/
|
||||||
|
async fetchUsageData(): Promise<GeminiProviderUsage> {
|
||||||
|
logger.info('[fetchUsageData] Starting Gemini usage fetch...');
|
||||||
|
|
||||||
|
const baseUsage: GeminiProviderUsage = {
|
||||||
|
providerId: 'gemini',
|
||||||
|
providerName: 'Gemini',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if credentials are available
|
||||||
|
const creds = await this.getOAuthCreds();
|
||||||
|
if (!creds) {
|
||||||
|
baseUsage.error = 'Gemini OAuth credentials not available';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch quota information
|
||||||
|
const quotaResponse = await this.makeRequest<GeminiQuotaResponse>(QUOTA_ENDPOINT, {
|
||||||
|
projectId: '-', // Use default project
|
||||||
|
});
|
||||||
|
|
||||||
|
if (quotaResponse?.quotas && quotaResponse.quotas.length > 0) {
|
||||||
|
baseUsage.available = true;
|
||||||
|
|
||||||
|
const primaryQuota = quotaResponse.quotas[0];
|
||||||
|
|
||||||
|
// Convert remaining fraction to used percent
|
||||||
|
const usedPercent = Math.round((1 - (primaryQuota.remainingFraction || 0)) * 100);
|
||||||
|
|
||||||
|
const quotaWindow: UsageWindow = {
|
||||||
|
name: 'Quota',
|
||||||
|
usedPercent,
|
||||||
|
resetsAt: primaryQuota.resetTime || '',
|
||||||
|
resetText: primaryQuota.resetTime ? this.formatResetTime(primaryQuota.resetTime) : '',
|
||||||
|
};
|
||||||
|
|
||||||
|
baseUsage.primary = quotaWindow;
|
||||||
|
baseUsage.remainingFraction = primaryQuota.remainingFraction;
|
||||||
|
baseUsage.modelId = primaryQuota.modelId;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch tier information
|
||||||
|
const codeAssistResponse = await this.makeRequest<GeminiCodeAssistResponse>(
|
||||||
|
CODE_ASSIST_ENDPOINT,
|
||||||
|
{
|
||||||
|
metadata: {
|
||||||
|
ide: 'automaker',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (codeAssistResponse?.tier) {
|
||||||
|
baseUsage.tierType = codeAssistResponse.tier;
|
||||||
|
|
||||||
|
// Determine plan info from tier
|
||||||
|
const tierMap: Record<string, { type: string; displayName: string; isPaid: boolean }> = {
|
||||||
|
'standard-tier': { type: 'paid', displayName: 'Paid', isPaid: true },
|
||||||
|
'free-tier': {
|
||||||
|
type: codeAssistResponse.claims?.hd ? 'workspace' : 'free',
|
||||||
|
displayName: codeAssistResponse.claims?.hd ? 'Workspace' : 'Free',
|
||||||
|
isPaid: false,
|
||||||
|
},
|
||||||
|
'legacy-tier': { type: 'legacy', displayName: 'Legacy', isPaid: false },
|
||||||
|
};
|
||||||
|
|
||||||
|
const tierInfo = tierMap[codeAssistResponse.tier] || {
|
||||||
|
type: codeAssistResponse.tier,
|
||||||
|
displayName: codeAssistResponse.tier,
|
||||||
|
isPaid: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
baseUsage.plan = tierInfo;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (baseUsage.available) {
|
||||||
|
logger.info(
|
||||||
|
`[fetchUsageData] ✓ Gemini usage: ${baseUsage.primary?.usedPercent || 0}% used, ` +
|
||||||
|
`tier=${baseUsage.tierType || 'unknown'}`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
baseUsage.error = 'Failed to fetch Gemini quota data';
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format reset time as human-readable string
|
||||||
|
*/
|
||||||
|
private formatResetTime(resetAt: string): string {
|
||||||
|
try {
|
||||||
|
const date = new Date(resetAt);
|
||||||
|
const now = new Date();
|
||||||
|
const diff = date.getTime() - now.getTime();
|
||||||
|
|
||||||
|
if (diff < 0) return 'Expired';
|
||||||
|
|
||||||
|
const minutes = Math.floor(diff / 60000);
|
||||||
|
const hours = Math.floor(minutes / 60);
|
||||||
|
const days = Math.floor(hours / 24);
|
||||||
|
|
||||||
|
if (days > 0) {
|
||||||
|
return `Resets in ${days}d ${hours % 24}h`;
|
||||||
|
}
|
||||||
|
if (hours > 0) {
|
||||||
|
return `Resets in ${hours}h ${minutes % 60}m`;
|
||||||
|
}
|
||||||
|
if (minutes > 0) {
|
||||||
|
return `Resets in ${minutes}m`;
|
||||||
|
}
|
||||||
|
return 'Resets soon';
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear cached credentials
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cachedCreds = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
140
apps/server/src/services/glm-usage-service.ts
Normal file
140
apps/server/src/services/glm-usage-service.ts
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
/**
|
||||||
|
* GLM (z.AI) Usage Service
|
||||||
|
*
|
||||||
|
* Fetches usage data from z.AI's API.
|
||||||
|
* GLM is a Claude-compatible provider offered by z.AI.
|
||||||
|
*
|
||||||
|
* Authentication:
|
||||||
|
* - API Token from provider config or GLM_API_KEY environment variable
|
||||||
|
*
|
||||||
|
* Note: z.AI's API may not expose a dedicated usage endpoint.
|
||||||
|
* This service checks for API availability and reports basic status.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { GLMProviderUsage, ClaudeCompatibleProvider } from '@automaker/types';
|
||||||
|
|
||||||
|
const logger = createLogger('GLMUsage');
|
||||||
|
|
||||||
|
// GLM API base (z.AI)
|
||||||
|
const GLM_API_BASE = 'https://api.z.ai';
|
||||||
|
|
||||||
|
export class GLMUsageService {
|
||||||
|
private providerConfig: ClaudeCompatibleProvider | null = null;
|
||||||
|
private cachedApiKey: string | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the provider config (called from settings)
|
||||||
|
*/
|
||||||
|
setProviderConfig(config: ClaudeCompatibleProvider | null): void {
|
||||||
|
this.providerConfig = config;
|
||||||
|
this.cachedApiKey = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if GLM is available
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
const apiKey = this.getApiKey();
|
||||||
|
return !!apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get API key from various sources
|
||||||
|
*/
|
||||||
|
private getApiKey(): string | null {
|
||||||
|
if (this.cachedApiKey) {
|
||||||
|
return this.cachedApiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Check environment variable
|
||||||
|
if (process.env.GLM_API_KEY) {
|
||||||
|
this.cachedApiKey = process.env.GLM_API_KEY;
|
||||||
|
return this.cachedApiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Check provider config
|
||||||
|
if (this.providerConfig?.apiKey) {
|
||||||
|
this.cachedApiKey = this.providerConfig.apiKey;
|
||||||
|
return this.cachedApiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage data from GLM
|
||||||
|
*
|
||||||
|
* Note: z.AI may not have a public usage API.
|
||||||
|
* This returns basic availability status.
|
||||||
|
*/
|
||||||
|
async fetchUsageData(): Promise<GLMProviderUsage> {
|
||||||
|
logger.info('[fetchUsageData] Starting GLM usage fetch...');
|
||||||
|
|
||||||
|
const baseUsage: GLMProviderUsage = {
|
||||||
|
providerId: 'glm',
|
||||||
|
providerName: 'z.AI GLM',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const apiKey = this.getApiKey();
|
||||||
|
if (!apiKey) {
|
||||||
|
baseUsage.error = 'GLM API key not available';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// GLM/z.AI is available if we have an API key
|
||||||
|
// z.AI doesn't appear to have a public usage endpoint
|
||||||
|
baseUsage.available = true;
|
||||||
|
|
||||||
|
// Check if API key is valid by making a simple request
|
||||||
|
try {
|
||||||
|
const baseUrl = this.providerConfig?.baseUrl || GLM_API_BASE;
|
||||||
|
const response = await fetch(`${baseUrl}/api/anthropic/v1/messages`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${apiKey}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'anthropic-version': '2023-06-01',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
model: 'GLM-4.7',
|
||||||
|
max_tokens: 1,
|
||||||
|
messages: [{ role: 'user', content: 'hi' }],
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
// We just want to check if auth works, not actually make a request
|
||||||
|
// A 400 with invalid request is fine - it means auth worked
|
||||||
|
if (response.status === 401 || response.status === 403) {
|
||||||
|
baseUsage.available = false;
|
||||||
|
baseUsage.error = 'GLM API authentication failed';
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Network error or other issue - still mark as available since we have the key
|
||||||
|
logger.debug('GLM API check failed (may be fine):', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: z.AI doesn't appear to expose usage metrics via API
|
||||||
|
// Users should check their z.AI dashboard for detailed usage
|
||||||
|
if (baseUsage.available) {
|
||||||
|
baseUsage.plan = {
|
||||||
|
type: 'api',
|
||||||
|
displayName: 'API Access',
|
||||||
|
isPaid: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[fetchUsageData] GLM available: ${baseUsage.available}`);
|
||||||
|
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear cached credentials
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cachedApiKey = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -39,9 +39,13 @@ import { ProviderFactory } from '../providers/provider-factory.js';
|
|||||||
import type { SettingsService } from './settings-service.js';
|
import type { SettingsService } from './settings-service.js';
|
||||||
import type { FeatureLoader } from './feature-loader.js';
|
import type { FeatureLoader } from './feature-loader.js';
|
||||||
import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js';
|
import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js';
|
||||||
import { resolveModelString } from '@automaker/model-resolver';
|
import { resolveModelString, resolvePhaseModel } from '@automaker/model-resolver';
|
||||||
import { stripProviderPrefix } from '@automaker/types';
|
import { stripProviderPrefix } from '@automaker/types';
|
||||||
import { getPromptCustomization } from '../lib/settings-helpers.js';
|
import {
|
||||||
|
getPromptCustomization,
|
||||||
|
getProviderByModelId,
|
||||||
|
getPhaseModelWithOverrides,
|
||||||
|
} from '../lib/settings-helpers.js';
|
||||||
|
|
||||||
const logger = createLogger('IdeationService');
|
const logger = createLogger('IdeationService');
|
||||||
|
|
||||||
@@ -208,7 +212,27 @@ export class IdeationService {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Resolve model alias to canonical identifier (with prefix)
|
// Resolve model alias to canonical identifier (with prefix)
|
||||||
const modelId = resolveModelString(options?.model ?? 'sonnet');
|
let modelId = resolveModelString(options?.model ?? 'sonnet');
|
||||||
|
|
||||||
|
// Try to find a provider for this model (e.g., GLM, MiniMax models)
|
||||||
|
let claudeCompatibleProvider: import('@automaker/types').ClaudeCompatibleProvider | undefined;
|
||||||
|
let credentials = await this.settingsService?.getCredentials();
|
||||||
|
|
||||||
|
if (this.settingsService && options?.model) {
|
||||||
|
const providerResult = await getProviderByModelId(
|
||||||
|
options.model,
|
||||||
|
this.settingsService,
|
||||||
|
'[IdeationService]'
|
||||||
|
);
|
||||||
|
if (providerResult.provider) {
|
||||||
|
claudeCompatibleProvider = providerResult.provider;
|
||||||
|
// Use resolved model from provider if available (maps to Claude model)
|
||||||
|
if (providerResult.resolvedModel) {
|
||||||
|
modelId = providerResult.resolvedModel;
|
||||||
|
}
|
||||||
|
credentials = providerResult.credentials ?? credentials;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Create SDK options
|
// Create SDK options
|
||||||
const sdkOptions = createChatOptions({
|
const sdkOptions = createChatOptions({
|
||||||
@@ -232,6 +256,8 @@ export class IdeationService {
|
|||||||
maxTurns: 1, // Single turn for ideation
|
maxTurns: 1, // Single turn for ideation
|
||||||
abortController: activeSession.abortController!,
|
abortController: activeSession.abortController!,
|
||||||
conversationHistory: conversationHistory.length > 0 ? conversationHistory : undefined,
|
conversationHistory: conversationHistory.length > 0 ? conversationHistory : undefined,
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
};
|
};
|
||||||
|
|
||||||
const stream = provider.executeQuery(executeOptions);
|
const stream = provider.executeQuery(executeOptions);
|
||||||
@@ -662,8 +688,24 @@ export class IdeationService {
|
|||||||
existingWorkContext
|
existingWorkContext
|
||||||
);
|
);
|
||||||
|
|
||||||
// Resolve model alias to canonical identifier (with prefix)
|
// Get model from phase settings with provider info (ideationModel)
|
||||||
const modelId = resolveModelString('sonnet');
|
const phaseResult = await getPhaseModelWithOverrides(
|
||||||
|
'ideationModel',
|
||||||
|
this.settingsService,
|
||||||
|
projectPath,
|
||||||
|
'[IdeationService]'
|
||||||
|
);
|
||||||
|
const resolved = resolvePhaseModel(phaseResult.phaseModel);
|
||||||
|
// resolvePhaseModel already resolves model aliases internally - no need to call resolveModelString again
|
||||||
|
const modelId = resolved.model;
|
||||||
|
const claudeCompatibleProvider = phaseResult.provider;
|
||||||
|
const credentials = phaseResult.credentials;
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
'generateSuggestions using model:',
|
||||||
|
modelId,
|
||||||
|
claudeCompatibleProvider ? `via provider: ${claudeCompatibleProvider.name}` : 'direct API'
|
||||||
|
);
|
||||||
|
|
||||||
// Create SDK options
|
// Create SDK options
|
||||||
const sdkOptions = createChatOptions({
|
const sdkOptions = createChatOptions({
|
||||||
@@ -688,6 +730,9 @@ export class IdeationService {
|
|||||||
// Disable all tools - we just want text generation, not codebase analysis
|
// Disable all tools - we just want text generation, not codebase analysis
|
||||||
allowedTools: [],
|
allowedTools: [],
|
||||||
abortController: new AbortController(),
|
abortController: new AbortController(),
|
||||||
|
readOnly: true, // Suggestions only need to return JSON, never write files
|
||||||
|
claudeCompatibleProvider, // Pass provider for alternative endpoint configuration
|
||||||
|
credentials, // Pass credentials for resolving 'credentials' apiKeySource
|
||||||
};
|
};
|
||||||
|
|
||||||
const stream = provider.executeQuery(executeOptions);
|
const stream = provider.executeQuery(executeOptions);
|
||||||
|
|||||||
260
apps/server/src/services/minimax-usage-service.ts
Normal file
260
apps/server/src/services/minimax-usage-service.ts
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
/**
|
||||||
|
* MiniMax Usage Service
|
||||||
|
*
|
||||||
|
* Fetches usage data from MiniMax's coding plan API.
|
||||||
|
* Based on CodexBar reference implementation.
|
||||||
|
*
|
||||||
|
* Authentication methods:
|
||||||
|
* 1. API Token (MINIMAX_API_KEY environment variable or provider config)
|
||||||
|
* 2. Cookie-based authentication (from platform login)
|
||||||
|
*
|
||||||
|
* API Endpoints:
|
||||||
|
* - GET https://api.minimax.io/v1/coding_plan/remains - Token-based usage
|
||||||
|
* - GET https://platform.minimax.io/v1/api/openplatform/coding_plan/remains - Fallback
|
||||||
|
*
|
||||||
|
* For China mainland: platform.minimaxi.com
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { MiniMaxProviderUsage, UsageWindow, ClaudeCompatibleProvider } from '@automaker/types';
|
||||||
|
|
||||||
|
const logger = createLogger('MiniMaxUsage');
|
||||||
|
|
||||||
|
// MiniMax API endpoints
|
||||||
|
const MINIMAX_API_BASE = 'https://api.minimax.io';
|
||||||
|
const MINIMAX_PLATFORM_BASE = 'https://platform.minimax.io';
|
||||||
|
const MINIMAX_CHINA_BASE = 'https://platform.minimaxi.com';
|
||||||
|
|
||||||
|
const CODING_PLAN_ENDPOINT = '/v1/coding_plan/remains';
|
||||||
|
const PLATFORM_CODING_PLAN_ENDPOINT = '/v1/api/openplatform/coding_plan/remains';
|
||||||
|
|
||||||
|
interface MiniMaxCodingPlanResponse {
|
||||||
|
base_resp?: {
|
||||||
|
status_code?: number;
|
||||||
|
status_msg?: string;
|
||||||
|
};
|
||||||
|
model_remains?: Array<{
|
||||||
|
model: string;
|
||||||
|
used: number;
|
||||||
|
total: number;
|
||||||
|
}>;
|
||||||
|
remains_time?: number; // Seconds until reset
|
||||||
|
start_time?: string;
|
||||||
|
end_time?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class MiniMaxUsageService {
|
||||||
|
private providerConfig: ClaudeCompatibleProvider | null = null;
|
||||||
|
private cachedApiKey: string | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the provider config (called from settings)
|
||||||
|
*/
|
||||||
|
setProviderConfig(config: ClaudeCompatibleProvider | null): void {
|
||||||
|
this.providerConfig = config;
|
||||||
|
this.cachedApiKey = null; // Clear cache when config changes
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if MiniMax is available
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
const apiKey = this.getApiKey();
|
||||||
|
return !!apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get API key from various sources
|
||||||
|
*/
|
||||||
|
private getApiKey(): string | null {
|
||||||
|
if (this.cachedApiKey) {
|
||||||
|
return this.cachedApiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Check environment variable
|
||||||
|
if (process.env.MINIMAX_API_KEY) {
|
||||||
|
this.cachedApiKey = process.env.MINIMAX_API_KEY;
|
||||||
|
return this.cachedApiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Check provider config
|
||||||
|
if (this.providerConfig?.apiKey) {
|
||||||
|
this.cachedApiKey = this.providerConfig.apiKey;
|
||||||
|
return this.cachedApiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine if we should use China endpoint
|
||||||
|
*/
|
||||||
|
private isChina(): boolean {
|
||||||
|
if (this.providerConfig?.baseUrl) {
|
||||||
|
return this.providerConfig.baseUrl.includes('minimaxi.com');
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make an authenticated request to MiniMax API
|
||||||
|
*/
|
||||||
|
private async makeRequest<T>(url: string): Promise<T | null> {
|
||||||
|
const apiKey = this.getApiKey();
|
||||||
|
if (!apiKey) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${apiKey}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 401 || response.status === 403) {
|
||||||
|
this.cachedApiKey = null;
|
||||||
|
logger.warn('MiniMax API authentication failed');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
logger.error(`MiniMax API error: ${response.status} ${response.statusText}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.json()) as T;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to fetch from MiniMax API:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage data from MiniMax
|
||||||
|
*/
|
||||||
|
async fetchUsageData(): Promise<MiniMaxProviderUsage> {
|
||||||
|
logger.info('[fetchUsageData] Starting MiniMax usage fetch...');
|
||||||
|
|
||||||
|
const baseUsage: MiniMaxProviderUsage = {
|
||||||
|
providerId: 'minimax',
|
||||||
|
providerName: 'MiniMax',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const apiKey = this.getApiKey();
|
||||||
|
if (!apiKey) {
|
||||||
|
baseUsage.error = 'MiniMax API key not available';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the correct endpoint
|
||||||
|
const isChina = this.isChina();
|
||||||
|
const baseUrl = isChina ? MINIMAX_CHINA_BASE : MINIMAX_API_BASE;
|
||||||
|
const endpoint = `${baseUrl}${CODING_PLAN_ENDPOINT}`;
|
||||||
|
|
||||||
|
// Fetch coding plan data
|
||||||
|
let codingPlan = await this.makeRequest<MiniMaxCodingPlanResponse>(endpoint);
|
||||||
|
|
||||||
|
// Try fallback endpoint if primary fails
|
||||||
|
if (!codingPlan) {
|
||||||
|
const platformBase = isChina ? MINIMAX_CHINA_BASE : MINIMAX_PLATFORM_BASE;
|
||||||
|
const fallbackEndpoint = `${platformBase}${PLATFORM_CODING_PLAN_ENDPOINT}`;
|
||||||
|
codingPlan = await this.makeRequest<MiniMaxCodingPlanResponse>(fallbackEndpoint);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!codingPlan) {
|
||||||
|
baseUsage.error = 'Failed to fetch MiniMax usage data';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for error response
|
||||||
|
if (codingPlan.base_resp?.status_code && codingPlan.base_resp.status_code !== 0) {
|
||||||
|
baseUsage.error = codingPlan.base_resp.status_msg || 'MiniMax API error';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
baseUsage.available = true;
|
||||||
|
|
||||||
|
// Parse model remains
|
||||||
|
if (codingPlan.model_remains && codingPlan.model_remains.length > 0) {
|
||||||
|
let totalUsed = 0;
|
||||||
|
let totalLimit = 0;
|
||||||
|
|
||||||
|
for (const model of codingPlan.model_remains) {
|
||||||
|
totalUsed += model.used;
|
||||||
|
totalLimit += model.total;
|
||||||
|
}
|
||||||
|
|
||||||
|
const usedPercent = totalLimit > 0 ? Math.round((totalUsed / totalLimit) * 100) : 0;
|
||||||
|
|
||||||
|
// Calculate reset time
|
||||||
|
const resetsAt = codingPlan.remains_time
|
||||||
|
? new Date(Date.now() + codingPlan.remains_time * 1000).toISOString()
|
||||||
|
: codingPlan.end_time || '';
|
||||||
|
|
||||||
|
const usageWindow: UsageWindow = {
|
||||||
|
name: 'Coding Plan',
|
||||||
|
usedPercent,
|
||||||
|
resetsAt,
|
||||||
|
resetText: resetsAt ? this.formatResetTime(resetsAt) : '',
|
||||||
|
used: totalUsed,
|
||||||
|
limit: totalLimit,
|
||||||
|
};
|
||||||
|
|
||||||
|
baseUsage.primary = usageWindow;
|
||||||
|
baseUsage.tokenRemains = totalLimit - totalUsed;
|
||||||
|
baseUsage.totalTokens = totalLimit;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse plan times
|
||||||
|
if (codingPlan.start_time) {
|
||||||
|
baseUsage.planStartTime = codingPlan.start_time;
|
||||||
|
}
|
||||||
|
if (codingPlan.end_time) {
|
||||||
|
baseUsage.planEndTime = codingPlan.end_time;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[fetchUsageData] ✓ MiniMax usage: ${baseUsage.primary?.usedPercent || 0}% used, ` +
|
||||||
|
`${baseUsage.tokenRemains || 0} tokens remaining`
|
||||||
|
);
|
||||||
|
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format reset time as human-readable string
|
||||||
|
*/
|
||||||
|
private formatResetTime(resetAt: string): string {
|
||||||
|
try {
|
||||||
|
const date = new Date(resetAt);
|
||||||
|
const now = new Date();
|
||||||
|
const diff = date.getTime() - now.getTime();
|
||||||
|
|
||||||
|
if (diff < 0) return 'Expired';
|
||||||
|
|
||||||
|
const hours = Math.floor(diff / 3600000);
|
||||||
|
const days = Math.floor(hours / 24);
|
||||||
|
|
||||||
|
if (days > 0) {
|
||||||
|
return `Resets in ${days}d`;
|
||||||
|
}
|
||||||
|
if (hours > 0) {
|
||||||
|
return `Resets in ${hours}h`;
|
||||||
|
}
|
||||||
|
return 'Resets soon';
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear cached credentials
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cachedApiKey = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
144
apps/server/src/services/opencode-usage-service.ts
Normal file
144
apps/server/src/services/opencode-usage-service.ts
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
/**
|
||||||
|
* OpenCode Usage Service
|
||||||
|
*
|
||||||
|
* Fetches usage data from OpenCode's server API.
|
||||||
|
* Based on CodexBar reference implementation.
|
||||||
|
*
|
||||||
|
* Note: OpenCode usage tracking is limited as they use a proprietary
|
||||||
|
* server function API that requires browser cookies for authentication.
|
||||||
|
* This service provides basic status checking based on local config.
|
||||||
|
*
|
||||||
|
* API Endpoints (require browser cookies):
|
||||||
|
* - POST https://opencode.ai/_server - Server functions
|
||||||
|
* - workspaces: Get workspace info
|
||||||
|
* - subscription.get: Get usage data
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as os from 'os';
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type { OpenCodeProviderUsage, UsageWindow } from '@automaker/types';
|
||||||
|
|
||||||
|
const logger = createLogger('OpenCodeUsage');
|
||||||
|
|
||||||
|
// OpenCode config locations
|
||||||
|
const OPENCODE_CONFIG_PATHS = [
|
||||||
|
path.join(os.homedir(), '.opencode', 'config.json'),
|
||||||
|
path.join(os.homedir(), '.config', 'opencode', 'config.json'),
|
||||||
|
];
|
||||||
|
|
||||||
|
interface OpenCodeConfig {
|
||||||
|
workspaceId?: string;
|
||||||
|
email?: string;
|
||||||
|
authenticated?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface OpenCodeUsageData {
|
||||||
|
rollingUsage?: {
|
||||||
|
usagePercent: number;
|
||||||
|
resetInSec: number;
|
||||||
|
};
|
||||||
|
weeklyUsage?: {
|
||||||
|
usagePercent: number;
|
||||||
|
resetInSec: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export class OpenCodeUsageService {
|
||||||
|
private cachedConfig: OpenCodeConfig | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if OpenCode is available
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
const config = this.getConfig();
|
||||||
|
return !!config?.authenticated;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get OpenCode config from disk
|
||||||
|
*/
|
||||||
|
private getConfig(): OpenCodeConfig | null {
|
||||||
|
if (this.cachedConfig) {
|
||||||
|
return this.cachedConfig;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check environment variable for workspace ID
|
||||||
|
if (process.env.OPENCODE_WORKSPACE_ID) {
|
||||||
|
this.cachedConfig = {
|
||||||
|
workspaceId: process.env.OPENCODE_WORKSPACE_ID,
|
||||||
|
authenticated: true,
|
||||||
|
};
|
||||||
|
return this.cachedConfig;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check config files
|
||||||
|
for (const configPath of OPENCODE_CONFIG_PATHS) {
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(configPath)) {
|
||||||
|
const content = fs.readFileSync(configPath, 'utf8');
|
||||||
|
const config = JSON.parse(content) as OpenCodeConfig;
|
||||||
|
this.cachedConfig = config;
|
||||||
|
return this.cachedConfig;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.debug(`Failed to read OpenCode config from ${configPath}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage data from OpenCode
|
||||||
|
*
|
||||||
|
* Note: OpenCode's usage API requires browser cookies which we don't have access to.
|
||||||
|
* This implementation returns basic availability status.
|
||||||
|
* For full usage tracking, users should check the OpenCode dashboard.
|
||||||
|
*/
|
||||||
|
async fetchUsageData(): Promise<OpenCodeProviderUsage> {
|
||||||
|
logger.info('[fetchUsageData] Starting OpenCode usage fetch...');
|
||||||
|
|
||||||
|
const baseUsage: OpenCodeProviderUsage = {
|
||||||
|
providerId: 'opencode',
|
||||||
|
providerName: 'OpenCode',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const config = this.getConfig();
|
||||||
|
if (!config) {
|
||||||
|
baseUsage.error = 'OpenCode not configured';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!config.authenticated) {
|
||||||
|
baseUsage.error = 'OpenCode not authenticated';
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
// OpenCode is available but we can't get detailed usage without browser cookies
|
||||||
|
baseUsage.available = true;
|
||||||
|
baseUsage.workspaceId = config.workspaceId;
|
||||||
|
|
||||||
|
// Note: Full usage tracking requires browser cookie authentication
|
||||||
|
// which is not available in a server-side context.
|
||||||
|
// Users should check the OpenCode dashboard for detailed usage.
|
||||||
|
baseUsage.error =
|
||||||
|
'Usage details require browser authentication. Check opencode.ai for details.';
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[fetchUsageData] OpenCode available, workspace: ${config.workspaceId || 'unknown'}`
|
||||||
|
);
|
||||||
|
|
||||||
|
return baseUsage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear cached config
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cachedConfig = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -234,51 +234,75 @@ export class PipelineService {
|
|||||||
*
|
*
|
||||||
* Determines what status a feature should transition to based on current status.
|
* Determines what status a feature should transition to based on current status.
|
||||||
* Flow: in_progress -> pipeline_step_0 -> pipeline_step_1 -> ... -> final status
|
* Flow: in_progress -> pipeline_step_0 -> pipeline_step_1 -> ... -> final status
|
||||||
|
* Steps in the excludedStepIds array will be skipped.
|
||||||
*
|
*
|
||||||
* @param currentStatus - Current feature status
|
* @param currentStatus - Current feature status
|
||||||
* @param config - Pipeline configuration (or null if no pipeline)
|
* @param config - Pipeline configuration (or null if no pipeline)
|
||||||
* @param skipTests - Whether to skip tests (affects final status)
|
* @param skipTests - Whether to skip tests (affects final status)
|
||||||
|
* @param excludedStepIds - Optional array of step IDs to skip
|
||||||
* @returns The next status in the pipeline flow
|
* @returns The next status in the pipeline flow
|
||||||
*/
|
*/
|
||||||
getNextStatus(
|
getNextStatus(
|
||||||
currentStatus: FeatureStatusWithPipeline,
|
currentStatus: FeatureStatusWithPipeline,
|
||||||
config: PipelineConfig | null,
|
config: PipelineConfig | null,
|
||||||
skipTests: boolean
|
skipTests: boolean,
|
||||||
|
excludedStepIds?: string[]
|
||||||
): FeatureStatusWithPipeline {
|
): FeatureStatusWithPipeline {
|
||||||
const steps = config?.steps || [];
|
const steps = config?.steps || [];
|
||||||
|
const exclusions = new Set(excludedStepIds || []);
|
||||||
|
|
||||||
// Sort steps by order
|
// Sort steps by order and filter out excluded steps
|
||||||
const sortedSteps = [...steps].sort((a, b) => a.order - b.order);
|
const sortedSteps = [...steps]
|
||||||
|
.sort((a, b) => a.order - b.order)
|
||||||
|
.filter((step) => !exclusions.has(step.id));
|
||||||
|
|
||||||
// If no pipeline steps, use original logic
|
// If no pipeline steps (or all excluded), use original logic
|
||||||
if (sortedSteps.length === 0) {
|
if (sortedSteps.length === 0) {
|
||||||
if (currentStatus === 'in_progress') {
|
// If coming from in_progress or already in a pipeline step, go to final status
|
||||||
|
if (currentStatus === 'in_progress' || currentStatus.startsWith('pipeline_')) {
|
||||||
return skipTests ? 'waiting_approval' : 'verified';
|
return skipTests ? 'waiting_approval' : 'verified';
|
||||||
}
|
}
|
||||||
return currentStatus;
|
return currentStatus;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Coming from in_progress -> go to first pipeline step
|
// Coming from in_progress -> go to first non-excluded pipeline step
|
||||||
if (currentStatus === 'in_progress') {
|
if (currentStatus === 'in_progress') {
|
||||||
return `pipeline_${sortedSteps[0].id}`;
|
return `pipeline_${sortedSteps[0].id}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Coming from a pipeline step -> go to next step or final status
|
// Coming from a pipeline step -> go to next non-excluded step or final status
|
||||||
if (currentStatus.startsWith('pipeline_')) {
|
if (currentStatus.startsWith('pipeline_')) {
|
||||||
const currentStepId = currentStatus.replace('pipeline_', '');
|
const currentStepId = currentStatus.replace('pipeline_', '');
|
||||||
const currentIndex = sortedSteps.findIndex((s) => s.id === currentStepId);
|
const currentIndex = sortedSteps.findIndex((s) => s.id === currentStepId);
|
||||||
|
|
||||||
if (currentIndex === -1) {
|
if (currentIndex === -1) {
|
||||||
// Step not found, go to final status
|
// Current step not found in filtered list (might be excluded or invalid)
|
||||||
|
// Find next valid step after this one from the original sorted list
|
||||||
|
const allSortedSteps = [...steps].sort((a, b) => a.order - b.order);
|
||||||
|
const originalIndex = allSortedSteps.findIndex((s) => s.id === currentStepId);
|
||||||
|
|
||||||
|
if (originalIndex === -1) {
|
||||||
|
// Step truly doesn't exist, go to final status
|
||||||
|
return skipTests ? 'waiting_approval' : 'verified';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the next non-excluded step after the current one
|
||||||
|
for (let i = originalIndex + 1; i < allSortedSteps.length; i++) {
|
||||||
|
if (!exclusions.has(allSortedSteps[i].id)) {
|
||||||
|
return `pipeline_${allSortedSteps[i].id}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// No more non-excluded steps, go to final status
|
||||||
return skipTests ? 'waiting_approval' : 'verified';
|
return skipTests ? 'waiting_approval' : 'verified';
|
||||||
}
|
}
|
||||||
|
|
||||||
if (currentIndex < sortedSteps.length - 1) {
|
if (currentIndex < sortedSteps.length - 1) {
|
||||||
// Go to next step
|
// Go to next non-excluded step
|
||||||
return `pipeline_${sortedSteps[currentIndex + 1].id}`;
|
return `pipeline_${sortedSteps[currentIndex + 1].id}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Last step completed, go to final status
|
// Last non-excluded step completed, go to final status
|
||||||
return skipTests ? 'waiting_approval' : 'verified';
|
return skipTests ? 'waiting_approval' : 'verified';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
447
apps/server/src/services/provider-usage-tracker.ts
Normal file
447
apps/server/src/services/provider-usage-tracker.ts
Normal file
@@ -0,0 +1,447 @@
|
|||||||
|
/**
|
||||||
|
* Provider Usage Tracker
|
||||||
|
*
|
||||||
|
* Unified service that aggregates usage data from all supported AI providers.
|
||||||
|
* Manages caching, polling, and coordination of individual usage services.
|
||||||
|
*
|
||||||
|
* Supported providers:
|
||||||
|
* - Claude (via ClaudeUsageService)
|
||||||
|
* - Codex (via CodexUsageService)
|
||||||
|
* - Cursor (via CursorUsageService)
|
||||||
|
* - Gemini (via GeminiUsageService)
|
||||||
|
* - GitHub Copilot (via CopilotUsageService)
|
||||||
|
* - OpenCode (via OpenCodeUsageService)
|
||||||
|
* - MiniMax (via MiniMaxUsageService)
|
||||||
|
* - GLM (via GLMUsageService)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@automaker/utils';
|
||||||
|
import type {
|
||||||
|
UsageProviderId,
|
||||||
|
ProviderUsage,
|
||||||
|
AllProvidersUsage,
|
||||||
|
ClaudeProviderUsage,
|
||||||
|
CodexProviderUsage,
|
||||||
|
ClaudeCompatibleProvider,
|
||||||
|
} from '@automaker/types';
|
||||||
|
import { ClaudeUsageService } from './claude-usage-service.js';
|
||||||
|
import { CodexUsageService, type CodexUsageData } from './codex-usage-service.js';
|
||||||
|
import { CursorUsageService } from './cursor-usage-service.js';
|
||||||
|
import { GeminiUsageService } from './gemini-usage-service.js';
|
||||||
|
import { CopilotUsageService } from './copilot-usage-service.js';
|
||||||
|
import { OpenCodeUsageService } from './opencode-usage-service.js';
|
||||||
|
import { MiniMaxUsageService } from './minimax-usage-service.js';
|
||||||
|
import { GLMUsageService } from './glm-usage-service.js';
|
||||||
|
import type { ClaudeUsage } from '../routes/claude/types.js';
|
||||||
|
|
||||||
|
const logger = createLogger('ProviderUsageTracker');
|
||||||
|
|
||||||
|
// Cache TTL in milliseconds (1 minute)
|
||||||
|
const CACHE_TTL_MS = 60 * 1000;
|
||||||
|
|
||||||
|
interface CachedUsage {
|
||||||
|
data: ProviderUsage;
|
||||||
|
fetchedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class ProviderUsageTracker {
|
||||||
|
private claudeService: ClaudeUsageService;
|
||||||
|
private codexService: CodexUsageService;
|
||||||
|
private cursorService: CursorUsageService;
|
||||||
|
private geminiService: GeminiUsageService;
|
||||||
|
private copilotService: CopilotUsageService;
|
||||||
|
private opencodeService: OpenCodeUsageService;
|
||||||
|
private minimaxService: MiniMaxUsageService;
|
||||||
|
private glmService: GLMUsageService;
|
||||||
|
|
||||||
|
private cache: Map<UsageProviderId, CachedUsage> = new Map();
|
||||||
|
private enabledProviders: Set<UsageProviderId> = new Set([
|
||||||
|
'claude',
|
||||||
|
'codex',
|
||||||
|
'cursor',
|
||||||
|
'gemini',
|
||||||
|
'copilot',
|
||||||
|
'opencode',
|
||||||
|
'minimax',
|
||||||
|
'glm',
|
||||||
|
]);
|
||||||
|
|
||||||
|
constructor(codexService?: CodexUsageService) {
|
||||||
|
this.claudeService = new ClaudeUsageService();
|
||||||
|
this.codexService = codexService || new CodexUsageService();
|
||||||
|
this.cursorService = new CursorUsageService();
|
||||||
|
this.geminiService = new GeminiUsageService();
|
||||||
|
this.copilotService = new CopilotUsageService();
|
||||||
|
this.opencodeService = new OpenCodeUsageService();
|
||||||
|
this.minimaxService = new MiniMaxUsageService();
|
||||||
|
this.glmService = new GLMUsageService();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set enabled providers (called when settings change)
|
||||||
|
*/
|
||||||
|
setEnabledProviders(providers: UsageProviderId[]): void {
|
||||||
|
this.enabledProviders = new Set(providers);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update custom provider configs (MiniMax, GLM)
|
||||||
|
*/
|
||||||
|
updateCustomProviderConfigs(providers: ClaudeCompatibleProvider[]): void {
|
||||||
|
const minimaxConfig = providers.find(
|
||||||
|
(p) => p.providerType === 'minimax' && p.enabled !== false
|
||||||
|
);
|
||||||
|
const glmConfig = providers.find((p) => p.providerType === 'glm' && p.enabled !== false);
|
||||||
|
|
||||||
|
this.minimaxService.setProviderConfig(minimaxConfig || null);
|
||||||
|
this.glmService.setProviderConfig(glmConfig || null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a provider is enabled
|
||||||
|
*/
|
||||||
|
isProviderEnabled(providerId: UsageProviderId): boolean {
|
||||||
|
return this.enabledProviders.has(providerId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if cached data is still fresh
|
||||||
|
*/
|
||||||
|
private isCacheFresh(providerId: UsageProviderId): boolean {
|
||||||
|
const cached = this.cache.get(providerId);
|
||||||
|
if (!cached) return false;
|
||||||
|
return Date.now() - cached.fetchedAt < CACHE_TTL_MS;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get cached data for a provider
|
||||||
|
*/
|
||||||
|
private getCached(providerId: UsageProviderId): ProviderUsage | null {
|
||||||
|
const cached = this.cache.get(providerId);
|
||||||
|
return cached?.data || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set cached data for a provider
|
||||||
|
*/
|
||||||
|
private setCached(providerId: UsageProviderId, data: ProviderUsage): void {
|
||||||
|
this.cache.set(providerId, {
|
||||||
|
data,
|
||||||
|
fetchedAt: Date.now(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Convert Claude usage to the unified ProviderUsage format.
 *
 * The session and weekly windows are exposed twice — as the generic
 * primary/secondary slots and under their explicit sessionWindow/weeklyWindow
 * names — so each window literal is built once and copied into both slots.
 */
private convertClaudeUsage(usage: ClaudeUsage): ClaudeProviderUsage {
  const sessionWindow = {
    name: 'Session (5-hour)',
    usedPercent: usage.sessionPercentage,
    resetsAt: usage.sessionResetTime,
    resetText: usage.sessionResetText,
  };
  const weeklyWindow = {
    name: 'Weekly (All Models)',
    usedPercent: usage.weeklyPercentage,
    resetsAt: usage.weeklyResetTime,
    resetText: usage.weeklyResetText,
  };
  const sonnetWindow = {
    name: 'Weekly (Sonnet)',
    usedPercent: usage.sonnetWeeklyPercentage,
    // Sonnet shares the weekly reset timestamp but has its own reset text.
    resetsAt: usage.weeklyResetTime,
    resetText: usage.sonnetResetText,
  };

  return {
    providerId: 'claude',
    providerName: 'Claude',
    available: true,
    lastUpdated: usage.lastUpdated,
    userTimezone: usage.userTimezone,
    primary: { ...sessionWindow },
    secondary: { ...weeklyWindow },
    sessionWindow,
    weeklyWindow,
    sonnetWindow,
    // Cost is optional: only present when the upstream data reports a value.
    cost:
      usage.costUsed !== null
        ? {
            used: usage.costUsed,
            limit: usage.costLimit,
            currency: usage.costCurrency || 'USD',
          }
        : undefined,
  };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert Codex usage to unified format
|
||||||
|
*/
|
||||||
|
private convertCodexUsage(usage: CodexUsageData): CodexProviderUsage {
|
||||||
|
const result: CodexProviderUsage = {
|
||||||
|
providerId: 'codex',
|
||||||
|
providerName: 'Codex',
|
||||||
|
available: true,
|
||||||
|
lastUpdated: usage.lastUpdated,
|
||||||
|
planType: usage.rateLimits?.planType,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (usage.rateLimits?.primary) {
|
||||||
|
result.primary = {
|
||||||
|
name: `${usage.rateLimits.primary.windowDurationMins}min Window`,
|
||||||
|
usedPercent: usage.rateLimits.primary.usedPercent,
|
||||||
|
resetsAt: new Date(usage.rateLimits.primary.resetsAt * 1000).toISOString(),
|
||||||
|
resetText: this.formatResetTime(usage.rateLimits.primary.resetsAt * 1000),
|
||||||
|
windowDurationMins: usage.rateLimits.primary.windowDurationMins,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (usage.rateLimits?.secondary) {
|
||||||
|
result.secondary = {
|
||||||
|
name: `${usage.rateLimits.secondary.windowDurationMins}min Window`,
|
||||||
|
usedPercent: usage.rateLimits.secondary.usedPercent,
|
||||||
|
resetsAt: new Date(usage.rateLimits.secondary.resetsAt * 1000).toISOString(),
|
||||||
|
resetText: this.formatResetTime(usage.rateLimits.secondary.resetsAt * 1000),
|
||||||
|
windowDurationMins: usage.rateLimits.secondary.windowDurationMins,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (usage.rateLimits?.planType) {
|
||||||
|
result.plan = {
|
||||||
|
type: usage.rateLimits.planType,
|
||||||
|
displayName:
|
||||||
|
usage.rateLimits.planType.charAt(0).toUpperCase() + usage.rateLimits.planType.slice(1),
|
||||||
|
isPaid: usage.rateLimits.planType !== 'free',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format reset time as human-readable string
|
||||||
|
*/
|
||||||
|
private formatResetTime(resetAtMs: number): string {
|
||||||
|
const diff = resetAtMs - Date.now();
|
||||||
|
if (diff < 0) return 'Expired';
|
||||||
|
|
||||||
|
const minutes = Math.floor(diff / 60000);
|
||||||
|
const hours = Math.floor(minutes / 60);
|
||||||
|
const days = Math.floor(hours / 24);
|
||||||
|
|
||||||
|
if (days > 0) return `Resets in ${days}d ${hours % 24}h`;
|
||||||
|
if (hours > 0) return `Resets in ${hours}h ${minutes % 60}m`;
|
||||||
|
if (minutes > 0) return `Resets in ${minutes}m`;
|
||||||
|
return 'Resets soon';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage for a specific provider
|
||||||
|
*/
|
||||||
|
async fetchProviderUsage(
|
||||||
|
providerId: UsageProviderId,
|
||||||
|
forceRefresh = false
|
||||||
|
): Promise<ProviderUsage | null> {
|
||||||
|
// Check cache first
|
||||||
|
if (!forceRefresh && this.isCacheFresh(providerId)) {
|
||||||
|
return this.getCached(providerId);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
let usage: ProviderUsage | null = null;
|
||||||
|
|
||||||
|
switch (providerId) {
|
||||||
|
case 'claude': {
|
||||||
|
if (await this.claudeService.isAvailable()) {
|
||||||
|
const claudeUsage = await this.claudeService.fetchUsageData();
|
||||||
|
usage = this.convertClaudeUsage(claudeUsage);
|
||||||
|
} else {
|
||||||
|
usage = {
|
||||||
|
providerId: 'claude',
|
||||||
|
providerName: 'Claude',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
error: 'Claude CLI not available',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'codex': {
|
||||||
|
if (await this.codexService.isAvailable()) {
|
||||||
|
const codexUsage = await this.codexService.fetchUsageData();
|
||||||
|
usage = this.convertCodexUsage(codexUsage);
|
||||||
|
} else {
|
||||||
|
usage = {
|
||||||
|
providerId: 'codex',
|
||||||
|
providerName: 'Codex',
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
error: 'Codex CLI not available',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'cursor': {
|
||||||
|
usage = await this.cursorService.fetchUsageData();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'gemini': {
|
||||||
|
usage = await this.geminiService.fetchUsageData();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'copilot': {
|
||||||
|
usage = await this.copilotService.fetchUsageData();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'opencode': {
|
||||||
|
usage = await this.opencodeService.fetchUsageData();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'minimax': {
|
||||||
|
usage = await this.minimaxService.fetchUsageData();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'glm': {
|
||||||
|
usage = await this.glmService.fetchUsageData();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (usage) {
|
||||||
|
this.setCached(providerId, usage);
|
||||||
|
}
|
||||||
|
|
||||||
|
return usage;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to fetch usage for ${providerId}:`, error);
|
||||||
|
return {
|
||||||
|
providerId,
|
||||||
|
providerName: this.getProviderName(providerId),
|
||||||
|
available: false,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error',
|
||||||
|
} as ProviderUsage;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get provider display name
|
||||||
|
*/
|
||||||
|
private getProviderName(providerId: UsageProviderId): string {
|
||||||
|
const names: Record<UsageProviderId, string> = {
|
||||||
|
claude: 'Claude',
|
||||||
|
codex: 'Codex',
|
||||||
|
cursor: 'Cursor',
|
||||||
|
gemini: 'Gemini',
|
||||||
|
copilot: 'GitHub Copilot',
|
||||||
|
opencode: 'OpenCode',
|
||||||
|
minimax: 'MiniMax',
|
||||||
|
glm: 'z.AI GLM',
|
||||||
|
};
|
||||||
|
return names[providerId] || providerId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch usage for all enabled providers
|
||||||
|
*/
|
||||||
|
async fetchAllUsage(forceRefresh = false): Promise<AllProvidersUsage> {
|
||||||
|
const providers: Partial<Record<UsageProviderId, ProviderUsage>> = {};
|
||||||
|
const errors: Array<{ providerId: UsageProviderId; message: string }> = [];
|
||||||
|
|
||||||
|
// Fetch all enabled providers in parallel
|
||||||
|
const enabledList = Array.from(this.enabledProviders);
|
||||||
|
const results = await Promise.allSettled(
|
||||||
|
enabledList.map((providerId) => this.fetchProviderUsage(providerId, forceRefresh))
|
||||||
|
);
|
||||||
|
|
||||||
|
results.forEach((result, index) => {
|
||||||
|
const providerId = enabledList[index];
|
||||||
|
|
||||||
|
if (result.status === 'fulfilled' && result.value) {
|
||||||
|
providers[providerId] = result.value;
|
||||||
|
if (result.value.error) {
|
||||||
|
errors.push({
|
||||||
|
providerId,
|
||||||
|
message: result.value.error,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else if (result.status === 'rejected') {
|
||||||
|
errors.push({
|
||||||
|
providerId,
|
||||||
|
message: result.reason?.message || 'Unknown error',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
providers,
|
||||||
|
lastUpdated: new Date().toISOString(),
|
||||||
|
errors,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check availability for all providers
|
||||||
|
*/
|
||||||
|
async checkAvailability(): Promise<Record<UsageProviderId, boolean>> {
|
||||||
|
const availability: Record<string, boolean> = {};
|
||||||
|
|
||||||
|
const checks = await Promise.allSettled([
|
||||||
|
this.claudeService.isAvailable(),
|
||||||
|
this.codexService.isAvailable(),
|
||||||
|
this.cursorService.isAvailable(),
|
||||||
|
this.geminiService.isAvailable(),
|
||||||
|
this.copilotService.isAvailable(),
|
||||||
|
this.opencodeService.isAvailable(),
|
||||||
|
this.minimaxService.isAvailable(),
|
||||||
|
this.glmService.isAvailable(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const providerIds: UsageProviderId[] = [
|
||||||
|
'claude',
|
||||||
|
'codex',
|
||||||
|
'cursor',
|
||||||
|
'gemini',
|
||||||
|
'copilot',
|
||||||
|
'opencode',
|
||||||
|
'minimax',
|
||||||
|
'glm',
|
||||||
|
];
|
||||||
|
|
||||||
|
checks.forEach((result, index) => {
|
||||||
|
availability[providerIds[index]] =
|
||||||
|
result.status === 'fulfilled' ? result.value : false;
|
||||||
|
});
|
||||||
|
|
||||||
|
return availability as Record<UsageProviderId, boolean>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all caches
|
||||||
|
*/
|
||||||
|
clearCache(): void {
|
||||||
|
this.cache.clear();
|
||||||
|
this.claudeService = new ClaudeUsageService(); // Reset Claude service
|
||||||
|
this.cursorService.clearCache();
|
||||||
|
this.geminiService.clearCache();
|
||||||
|
this.copilotService.clearCache();
|
||||||
|
this.opencodeService.clearCache();
|
||||||
|
this.minimaxService.clearCache();
|
||||||
|
this.glmService.clearCache();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,6 +9,9 @@
|
|||||||
|
|
||||||
import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
|
import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
|
||||||
import * as secureFs from '../lib/secure-fs.js';
|
import * as secureFs from '../lib/secure-fs.js';
|
||||||
|
import os from 'os';
|
||||||
|
import path from 'path';
|
||||||
|
import fs from 'fs/promises';
|
||||||
|
|
||||||
import {
|
import {
|
||||||
getGlobalSettingsPath,
|
getGlobalSettingsPath,
|
||||||
@@ -28,6 +31,9 @@ import type {
|
|||||||
WorktreeInfo,
|
WorktreeInfo,
|
||||||
PhaseModelConfig,
|
PhaseModelConfig,
|
||||||
PhaseModelEntry,
|
PhaseModelEntry,
|
||||||
|
ClaudeApiProfile,
|
||||||
|
ClaudeCompatibleProvider,
|
||||||
|
ProviderModel,
|
||||||
} from '../types/settings.js';
|
} from '../types/settings.js';
|
||||||
import {
|
import {
|
||||||
DEFAULT_GLOBAL_SETTINGS,
|
DEFAULT_GLOBAL_SETTINGS,
|
||||||
@@ -38,6 +44,12 @@ import {
|
|||||||
CREDENTIALS_VERSION,
|
CREDENTIALS_VERSION,
|
||||||
PROJECT_SETTINGS_VERSION,
|
PROJECT_SETTINGS_VERSION,
|
||||||
} from '../types/settings.js';
|
} from '../types/settings.js';
|
||||||
|
import {
|
||||||
|
DEFAULT_MAX_CONCURRENCY,
|
||||||
|
migrateModelId,
|
||||||
|
migrateCursorModelIds,
|
||||||
|
migrateOpencodeModelIds,
|
||||||
|
} from '@automaker/types';
|
||||||
|
|
||||||
const logger = createLogger('SettingsService');
|
const logger = createLogger('SettingsService');
|
||||||
|
|
||||||
@@ -124,10 +136,14 @@ export class SettingsService {
|
|||||||
// Migrate legacy enhancementModel/validationModel to phaseModels
|
// Migrate legacy enhancementModel/validationModel to phaseModels
|
||||||
const migratedPhaseModels = this.migratePhaseModels(settings);
|
const migratedPhaseModels = this.migratePhaseModels(settings);
|
||||||
|
|
||||||
|
// Migrate model IDs to canonical format
|
||||||
|
const migratedModelSettings = this.migrateModelSettings(settings);
|
||||||
|
|
||||||
// Apply any missing defaults (for backwards compatibility)
|
// Apply any missing defaults (for backwards compatibility)
|
||||||
let result: GlobalSettings = {
|
let result: GlobalSettings = {
|
||||||
...DEFAULT_GLOBAL_SETTINGS,
|
...DEFAULT_GLOBAL_SETTINGS,
|
||||||
...settings,
|
...settings,
|
||||||
|
...migratedModelSettings,
|
||||||
keyboardShortcuts: {
|
keyboardShortcuts: {
|
||||||
...DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts,
|
...DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts,
|
||||||
...settings.keyboardShortcuts,
|
...settings.keyboardShortcuts,
|
||||||
@@ -158,6 +174,63 @@ export class SettingsService {
|
|||||||
needsSave = true;
|
needsSave = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Migration v4 -> v5: Auto-create "Direct Anthropic" profile for existing users
|
||||||
|
// If user has an Anthropic API key in credentials but no profiles, create a
|
||||||
|
// "Direct Anthropic" profile that references the credentials and set it as active.
|
||||||
|
if (storedVersion < 5) {
|
||||||
|
try {
|
||||||
|
const credentials = await this.getCredentials();
|
||||||
|
const hasAnthropicKey = !!credentials.apiKeys?.anthropic;
|
||||||
|
const hasNoProfiles = !result.claudeApiProfiles || result.claudeApiProfiles.length === 0;
|
||||||
|
const hasNoActiveProfile = !result.activeClaudeApiProfileId;
|
||||||
|
|
||||||
|
if (hasAnthropicKey && hasNoProfiles && hasNoActiveProfile) {
|
||||||
|
const directAnthropicProfile = {
|
||||||
|
id: `profile-${Date.now()}-direct-anthropic`,
|
||||||
|
name: 'Direct Anthropic',
|
||||||
|
baseUrl: 'https://api.anthropic.com',
|
||||||
|
apiKeySource: 'credentials' as const,
|
||||||
|
useAuthToken: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
result.claudeApiProfiles = [directAnthropicProfile];
|
||||||
|
result.activeClaudeApiProfileId = directAnthropicProfile.id;
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
'Migration v4->v5: Created "Direct Anthropic" profile using existing credentials'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(
|
||||||
|
'Migration v4->v5: Could not check credentials for auto-profile creation:',
|
||||||
|
error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
needsSave = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migration v5 -> v6: Convert claudeApiProfiles to claudeCompatibleProviders
|
||||||
|
// The new system uses a models[] array instead of modelMappings, and removes
|
||||||
|
// the "active profile" concept - models are selected directly in phase model configs.
|
||||||
|
if (storedVersion < 6) {
|
||||||
|
const legacyProfiles = settings.claudeApiProfiles || [];
|
||||||
|
if (
|
||||||
|
legacyProfiles.length > 0 &&
|
||||||
|
(!result.claudeCompatibleProviders || result.claudeCompatibleProviders.length === 0)
|
||||||
|
) {
|
||||||
|
logger.info(
|
||||||
|
`Migration v5->v6: Converting ${legacyProfiles.length} Claude API profile(s) to compatible providers`
|
||||||
|
);
|
||||||
|
result.claudeCompatibleProviders = this.migrateProfilesToProviders(legacyProfiles);
|
||||||
|
}
|
||||||
|
// Remove the deprecated activeClaudeApiProfileId field
|
||||||
|
if (result.activeClaudeApiProfileId) {
|
||||||
|
logger.info('Migration v5->v6: Removing deprecated activeClaudeApiProfileId');
|
||||||
|
delete result.activeClaudeApiProfileId;
|
||||||
|
}
|
||||||
|
needsSave = true;
|
||||||
|
}
|
||||||
|
|
||||||
// Update version if any migration occurred
|
// Update version if any migration occurred
|
||||||
if (needsSave) {
|
if (needsSave) {
|
||||||
result.version = SETTINGS_VERSION;
|
result.version = SETTINGS_VERSION;
|
||||||
@@ -223,19 +296,203 @@ export class SettingsService {
|
|||||||
* Convert a phase model value to PhaseModelEntry format
|
* Convert a phase model value to PhaseModelEntry format
|
||||||
*
|
*
|
||||||
* Handles migration from string format (v2) to object format (v3).
|
* Handles migration from string format (v2) to object format (v3).
|
||||||
* - String values like 'sonnet' become { model: 'sonnet' }
|
* Also migrates legacy model IDs to canonical prefixed format.
|
||||||
* - Object values are returned as-is (with type assertion)
|
* - String values like 'sonnet' become { model: 'claude-sonnet' }
|
||||||
|
* - Object values have their model ID migrated if needed
|
||||||
*
|
*
|
||||||
* @param value - Phase model value (string or PhaseModelEntry)
|
* @param value - Phase model value (string or PhaseModelEntry)
|
||||||
* @returns PhaseModelEntry object
|
* @returns PhaseModelEntry object with canonical model ID
|
||||||
*/
|
*/
|
||||||
private toPhaseModelEntry(value: string | PhaseModelEntry): PhaseModelEntry {
|
private toPhaseModelEntry(value: string | PhaseModelEntry): PhaseModelEntry {
|
||||||
if (typeof value === 'string') {
|
if (typeof value === 'string') {
|
||||||
// v2 format: just a model string
|
// v2 format: just a model string - migrate to canonical ID
|
||||||
return { model: value as PhaseModelEntry['model'] };
|
return { model: migrateModelId(value) as PhaseModelEntry['model'] };
|
||||||
}
|
}
|
||||||
// v3 format: already a PhaseModelEntry object
|
// v3 format: PhaseModelEntry object - migrate model ID if needed
|
||||||
return value;
|
return {
|
||||||
|
...value,
|
||||||
|
model: migrateModelId(value.model) as PhaseModelEntry['model'],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Migrate ClaudeApiProfiles to ClaudeCompatibleProviders.
 *
 * Converts the legacy profile format (with modelMappings) to the new
 * provider format (with models[] array). Each model mapping entry becomes
 * a ProviderModel with the matching tier assignment.
 *
 * @param profiles - Legacy ClaudeApiProfile array
 * @returns Array of ClaudeCompatibleProvider
 */
private migrateProfilesToProviders(profiles: ClaudeApiProfile[]): ClaudeCompatibleProvider[] {
  // The three tiers were previously handled by three copy-pasted blocks;
  // a single loop keeps the conversion logic in one place.
  const tiers = ['haiku', 'sonnet', 'opus'] as const;

  return profiles.map((profile): ClaudeCompatibleProvider => {
    const models: ProviderModel[] = [];

    for (const tier of tiers) {
      const mappedId = profile.modelMappings?.[tier];
      if (mappedId) {
        models.push({
          id: mappedId,
          displayName: this.inferModelDisplayName(mappedId, tier),
          mapsToClaudeModel: tier,
        });
      }
    }

    return {
      id: profile.id,
      name: profile.name,
      // Provider type is inferred from the profile's base URL / name.
      providerType: this.inferProviderType(profile),
      enabled: true,
      baseUrl: profile.baseUrl,
      apiKeySource: profile.apiKeySource ?? 'inline',
      apiKey: profile.apiKey,
      useAuthToken: profile.useAuthToken,
      timeoutMs: profile.timeoutMs,
      disableNonessentialTraffic: profile.disableNonessentialTraffic,
      models,
    };
  });
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Infer a display name for a model based on its ID and tier
|
||||||
|
*
|
||||||
|
* @param modelId - The raw model ID
|
||||||
|
* @param tier - The tier hint (haiku/sonnet/opus)
|
||||||
|
* @returns A user-friendly display name
|
||||||
|
*/
|
||||||
|
private inferModelDisplayName(modelId: string, tier: 'haiku' | 'sonnet' | 'opus'): string {
|
||||||
|
// Common patterns in model IDs
|
||||||
|
const lowerModelId = modelId.toLowerCase();
|
||||||
|
|
||||||
|
// GLM models
|
||||||
|
if (lowerModelId.includes('glm')) {
|
||||||
|
return modelId.replace(/-/g, ' ').replace(/glm/i, 'GLM');
|
||||||
|
}
|
||||||
|
|
||||||
|
// MiniMax models
|
||||||
|
if (lowerModelId.includes('minimax')) {
|
||||||
|
return modelId.replace(/-/g, ' ').replace(/minimax/i, 'MiniMax');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Claude models via OpenRouter or similar
|
||||||
|
if (lowerModelId.includes('claude')) {
|
||||||
|
return modelId;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default: use model ID as display name with tier in parentheses
|
||||||
|
return `${modelId} (${tier})`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Infer provider type from profile configuration
|
||||||
|
*
|
||||||
|
* @param profile - The legacy profile
|
||||||
|
* @returns The inferred provider type
|
||||||
|
*/
|
||||||
|
private inferProviderType(profile: ClaudeApiProfile): ClaudeCompatibleProvider['providerType'] {
|
||||||
|
const baseUrl = profile.baseUrl.toLowerCase();
|
||||||
|
const name = profile.name.toLowerCase();
|
||||||
|
|
||||||
|
// Check URL patterns
|
||||||
|
if (baseUrl.includes('z.ai') || baseUrl.includes('zhipuai')) {
|
||||||
|
return 'glm';
|
||||||
|
}
|
||||||
|
if (baseUrl.includes('minimax')) {
|
||||||
|
return 'minimax';
|
||||||
|
}
|
||||||
|
if (baseUrl.includes('openrouter')) {
|
||||||
|
return 'openrouter';
|
||||||
|
}
|
||||||
|
if (baseUrl.includes('anthropic.com')) {
|
||||||
|
return 'anthropic';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check name patterns
|
||||||
|
if (name.includes('glm') || name.includes('zhipu')) {
|
||||||
|
return 'glm';
|
||||||
|
}
|
||||||
|
if (name.includes('minimax')) {
|
||||||
|
return 'minimax';
|
||||||
|
}
|
||||||
|
if (name.includes('openrouter')) {
|
||||||
|
return 'openrouter';
|
||||||
|
}
|
||||||
|
if (name.includes('anthropic') || name.includes('direct')) {
|
||||||
|
return 'anthropic';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to custom
|
||||||
|
return 'custom';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Migrate model-related settings to canonical format
|
||||||
|
*
|
||||||
|
* Migrates:
|
||||||
|
* - enabledCursorModels: legacy IDs to cursor- prefixed
|
||||||
|
* - enabledOpencodeModels: legacy slash format to dash format
|
||||||
|
* - cursorDefaultModel: legacy ID to cursor- prefixed
|
||||||
|
*
|
||||||
|
* @param settings - Settings to migrate
|
||||||
|
* @returns Settings with migrated model IDs
|
||||||
|
*/
|
||||||
|
private migrateModelSettings(settings: Partial<GlobalSettings>): Partial<GlobalSettings> {
|
||||||
|
const migrated: Partial<GlobalSettings> = { ...settings };
|
||||||
|
|
||||||
|
// Migrate Cursor models
|
||||||
|
if (settings.enabledCursorModels) {
|
||||||
|
migrated.enabledCursorModels = migrateCursorModelIds(
|
||||||
|
settings.enabledCursorModels as string[]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate Cursor default model
|
||||||
|
if (settings.cursorDefaultModel) {
|
||||||
|
const migratedDefault = migrateCursorModelIds([settings.cursorDefaultModel as string]);
|
||||||
|
if (migratedDefault.length > 0) {
|
||||||
|
migrated.cursorDefaultModel = migratedDefault[0];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate OpenCode models
|
||||||
|
if (settings.enabledOpencodeModels) {
|
||||||
|
migrated.enabledOpencodeModels = migrateOpencodeModelIds(
|
||||||
|
settings.enabledOpencodeModels as string[]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate OpenCode default model
|
||||||
|
if (settings.opencodeDefaultModel) {
|
||||||
|
const migratedDefault = migrateOpencodeModelIds([settings.opencodeDefaultModel as string]);
|
||||||
|
if (migratedDefault.length > 0) {
|
||||||
|
migrated.opencodeDefaultModel = migratedDefault[0];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return migrated;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -273,13 +530,39 @@ export class SettingsService {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const currentProjectsLen = Array.isArray(current.projects) ? current.projects.length : 0;
|
const currentProjectsLen = Array.isArray(current.projects) ? current.projects.length : 0;
|
||||||
|
// Check if this is a legitimate project removal (moved to trash) vs accidental wipe
|
||||||
|
const newTrashedProjectsLen = Array.isArray(sanitizedUpdates.trashedProjects)
|
||||||
|
? sanitizedUpdates.trashedProjects.length
|
||||||
|
: Array.isArray(current.trashedProjects)
|
||||||
|
? current.trashedProjects.length
|
||||||
|
: 0;
|
||||||
|
|
||||||
if (
|
if (
|
||||||
Array.isArray(sanitizedUpdates.projects) &&
|
Array.isArray(sanitizedUpdates.projects) &&
|
||||||
sanitizedUpdates.projects.length === 0 &&
|
sanitizedUpdates.projects.length === 0 &&
|
||||||
currentProjectsLen > 0
|
currentProjectsLen > 0
|
||||||
) {
|
) {
|
||||||
|
// Only treat as accidental wipe if trashedProjects is also empty
|
||||||
|
// (If projects are moved to trash, they appear in trashedProjects)
|
||||||
|
if (newTrashedProjectsLen === 0) {
|
||||||
|
logger.warn(
|
||||||
|
'[WIPE_PROTECTION] Attempted to set projects to empty array with no trash! Ignoring update.',
|
||||||
|
{
|
||||||
|
currentProjectsLen,
|
||||||
|
newProjectsLen: 0,
|
||||||
|
newTrashedProjectsLen,
|
||||||
|
currentProjects: current.projects?.map((p) => p.name),
|
||||||
|
}
|
||||||
|
);
|
||||||
attemptedProjectWipe = true;
|
attemptedProjectWipe = true;
|
||||||
delete sanitizedUpdates.projects;
|
delete sanitizedUpdates.projects;
|
||||||
|
} else {
|
||||||
|
logger.info('[LEGITIMATE_REMOVAL] Removing all projects to trash', {
|
||||||
|
currentProjectsLen,
|
||||||
|
newProjectsLen: 0,
|
||||||
|
movedToTrash: newTrashedProjectsLen,
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ignoreEmptyArrayOverwrite('trashedProjects');
|
ignoreEmptyArrayOverwrite('trashedProjects');
|
||||||
@@ -287,18 +570,29 @@ export class SettingsService {
|
|||||||
ignoreEmptyArrayOverwrite('recentFolders');
|
ignoreEmptyArrayOverwrite('recentFolders');
|
||||||
ignoreEmptyArrayOverwrite('mcpServers');
|
ignoreEmptyArrayOverwrite('mcpServers');
|
||||||
ignoreEmptyArrayOverwrite('enabledCursorModels');
|
ignoreEmptyArrayOverwrite('enabledCursorModels');
|
||||||
|
ignoreEmptyArrayOverwrite('claudeApiProfiles');
|
||||||
|
// Note: claudeCompatibleProviders intentionally NOT guarded - users should be able to delete all providers
|
||||||
|
|
||||||
// Empty object overwrite guard
|
// Empty object overwrite guard
|
||||||
|
const ignoreEmptyObjectOverwrite = <K extends keyof GlobalSettings>(key: K): void => {
|
||||||
|
const nextVal = sanitizedUpdates[key] as unknown;
|
||||||
|
const curVal = current[key] as unknown;
|
||||||
if (
|
if (
|
||||||
sanitizedUpdates.lastSelectedSessionByProject &&
|
nextVal &&
|
||||||
typeof sanitizedUpdates.lastSelectedSessionByProject === 'object' &&
|
typeof nextVal === 'object' &&
|
||||||
!Array.isArray(sanitizedUpdates.lastSelectedSessionByProject) &&
|
!Array.isArray(nextVal) &&
|
||||||
Object.keys(sanitizedUpdates.lastSelectedSessionByProject).length === 0 &&
|
Object.keys(nextVal).length === 0 &&
|
||||||
current.lastSelectedSessionByProject &&
|
curVal &&
|
||||||
Object.keys(current.lastSelectedSessionByProject).length > 0
|
typeof curVal === 'object' &&
|
||||||
|
!Array.isArray(curVal) &&
|
||||||
|
Object.keys(curVal).length > 0
|
||||||
) {
|
) {
|
||||||
delete sanitizedUpdates.lastSelectedSessionByProject;
|
delete sanitizedUpdates[key];
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ignoreEmptyObjectOverwrite('lastSelectedSessionByProject');
|
||||||
|
ignoreEmptyObjectOverwrite('autoModeByWorktree');
|
||||||
|
|
||||||
// If a request attempted to wipe projects, also ignore theme changes in that same request.
|
// If a request attempted to wipe projects, also ignore theme changes in that same request.
|
||||||
if (attemptedProjectWipe) {
|
if (attemptedProjectWipe) {
|
||||||
@@ -327,6 +621,21 @@ export class SettingsService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Deep merge autoModeByWorktree if provided (preserves other worktree entries)
|
||||||
|
if (sanitizedUpdates.autoModeByWorktree) {
|
||||||
|
type WorktreeEntry = { maxConcurrency: number; branchName: string | null };
|
||||||
|
const mergedAutoModeByWorktree: Record<string, WorktreeEntry> = {
|
||||||
|
...current.autoModeByWorktree,
|
||||||
|
};
|
||||||
|
for (const [key, value] of Object.entries(sanitizedUpdates.autoModeByWorktree)) {
|
||||||
|
mergedAutoModeByWorktree[key] = {
|
||||||
|
...mergedAutoModeByWorktree[key],
|
||||||
|
...value,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
updated.autoModeByWorktree = mergedAutoModeByWorktree;
|
||||||
|
}
|
||||||
|
|
||||||
await writeSettingsJson(settingsPath, updated);
|
await writeSettingsJson(settingsPath, updated);
|
||||||
logger.info('Global settings updated');
|
logger.info('Global settings updated');
|
||||||
|
|
||||||
@@ -420,7 +729,6 @@ export class SettingsService {
|
|||||||
anthropic: { configured: boolean; masked: string };
|
anthropic: { configured: boolean; masked: string };
|
||||||
google: { configured: boolean; masked: string };
|
google: { configured: boolean; masked: string };
|
||||||
openai: { configured: boolean; masked: string };
|
openai: { configured: boolean; masked: string };
|
||||||
coderabbit: { configured: boolean; masked: string };
|
|
||||||
}> {
|
}> {
|
||||||
const credentials = await this.getCredentials();
|
const credentials = await this.getCredentials();
|
||||||
|
|
||||||
@@ -442,10 +750,6 @@ export class SettingsService {
|
|||||||
configured: !!credentials.apiKeys.openai,
|
configured: !!credentials.apiKeys.openai,
|
||||||
masked: maskKey(credentials.apiKeys.openai),
|
masked: maskKey(credentials.apiKeys.openai),
|
||||||
},
|
},
|
||||||
coderabbit: {
|
|
||||||
configured: !!credentials.apiKeys.coderabbit,
|
|
||||||
masked: maskKey(credentials.apiKeys.coderabbit),
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -517,6 +821,51 @@ export class SettingsService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Handle activeClaudeApiProfileId special cases:
|
||||||
|
// - "__USE_GLOBAL__" marker means delete the key (use global setting)
|
||||||
|
// - null means explicit "Direct Anthropic API"
|
||||||
|
// - string means specific profile ID
|
||||||
|
if (
|
||||||
|
'activeClaudeApiProfileId' in updates &&
|
||||||
|
updates.activeClaudeApiProfileId === '__USE_GLOBAL__'
|
||||||
|
) {
|
||||||
|
delete updated.activeClaudeApiProfileId;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle phaseModelOverrides special cases:
|
||||||
|
// - "__CLEAR__" marker means delete the key (use global settings for all phases)
|
||||||
|
// - object means partial overrides for specific phases
|
||||||
|
if (
|
||||||
|
'phaseModelOverrides' in updates &&
|
||||||
|
(updates as Record<string, unknown>).phaseModelOverrides === '__CLEAR__'
|
||||||
|
) {
|
||||||
|
delete updated.phaseModelOverrides;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle defaultFeatureModel special cases:
|
||||||
|
// - "__CLEAR__" marker means delete the key (use global setting)
|
||||||
|
// - object means project-specific override
|
||||||
|
if (
|
||||||
|
'defaultFeatureModel' in updates &&
|
||||||
|
(updates as Record<string, unknown>).defaultFeatureModel === '__CLEAR__'
|
||||||
|
) {
|
||||||
|
delete updated.defaultFeatureModel;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle devCommand special cases:
|
||||||
|
// - null means delete the key (use auto-detection)
|
||||||
|
// - string means custom command
|
||||||
|
if ('devCommand' in updates && updates.devCommand === null) {
|
||||||
|
delete updated.devCommand;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle testCommand special cases:
|
||||||
|
// - null means delete the key (use auto-detection)
|
||||||
|
// - string means custom command
|
||||||
|
if ('testCommand' in updates && updates.testCommand === null) {
|
||||||
|
delete updated.testCommand;
|
||||||
|
}
|
||||||
|
|
||||||
await writeSettingsJson(settingsPath, updated);
|
await writeSettingsJson(settingsPath, updated);
|
||||||
logger.info(`Project settings updated for ${projectPath}`);
|
logger.info(`Project settings updated for ${projectPath}`);
|
||||||
|
|
||||||
@@ -602,7 +951,7 @@ export class SettingsService {
|
|||||||
theme: (appState.theme as GlobalSettings['theme']) || 'dark',
|
theme: (appState.theme as GlobalSettings['theme']) || 'dark',
|
||||||
sidebarOpen: appState.sidebarOpen !== undefined ? (appState.sidebarOpen as boolean) : true,
|
sidebarOpen: appState.sidebarOpen !== undefined ? (appState.sidebarOpen as boolean) : true,
|
||||||
chatHistoryOpen: (appState.chatHistoryOpen as boolean) || false,
|
chatHistoryOpen: (appState.chatHistoryOpen as boolean) || false,
|
||||||
maxConcurrency: (appState.maxConcurrency as number) || 3,
|
maxConcurrency: (appState.maxConcurrency as number) || DEFAULT_MAX_CONCURRENCY,
|
||||||
defaultSkipTests:
|
defaultSkipTests:
|
||||||
appState.defaultSkipTests !== undefined ? (appState.defaultSkipTests as boolean) : true,
|
appState.defaultSkipTests !== undefined ? (appState.defaultSkipTests as boolean) : true,
|
||||||
enableDependencyBlocking:
|
enableDependencyBlocking:
|
||||||
@@ -663,14 +1012,12 @@ export class SettingsService {
|
|||||||
anthropic?: string;
|
anthropic?: string;
|
||||||
google?: string;
|
google?: string;
|
||||||
openai?: string;
|
openai?: string;
|
||||||
coderabbit?: string;
|
|
||||||
};
|
};
|
||||||
await this.updateCredentials({
|
await this.updateCredentials({
|
||||||
apiKeys: {
|
apiKeys: {
|
||||||
anthropic: apiKeys.anthropic || '',
|
anthropic: apiKeys.anthropic || '',
|
||||||
google: apiKeys.google || '',
|
google: apiKeys.google || '',
|
||||||
openai: apiKeys.openai || '',
|
openai: apiKeys.openai || '',
|
||||||
coderabbit: apiKeys.coderabbit || '',
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
migratedCredentials = true;
|
migratedCredentials = true;
|
||||||
@@ -773,4 +1120,203 @@ export class SettingsService {
|
|||||||
getDataDir(): string {
|
getDataDir(): string {
|
||||||
return this.dataDir;
|
return this.dataDir;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the legacy Electron userData directory path
|
||||||
|
*
|
||||||
|
* Returns the platform-specific path where Electron previously stored settings
|
||||||
|
* before the migration to shared data directories.
|
||||||
|
*
|
||||||
|
* @returns Absolute path to legacy userData directory
|
||||||
|
*/
|
||||||
|
private getLegacyElectronUserDataPath(): string {
|
||||||
|
const homeDir = os.homedir();
|
||||||
|
|
||||||
|
switch (process.platform) {
|
||||||
|
case 'darwin':
|
||||||
|
// macOS: ~/Library/Application Support/Automaker
|
||||||
|
return path.join(homeDir, 'Library', 'Application Support', 'Automaker');
|
||||||
|
case 'win32':
|
||||||
|
// Windows: %APPDATA%\Automaker
|
||||||
|
return path.join(
|
||||||
|
process.env.APPDATA || path.join(homeDir, 'AppData', 'Roaming'),
|
||||||
|
'Automaker'
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
// Linux and others: ~/.config/Automaker
|
||||||
|
return path.join(process.env.XDG_CONFIG_HOME || path.join(homeDir, '.config'), 'Automaker');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Migrate entire data directory from legacy Electron userData location to new shared data directory
|
||||||
|
*
|
||||||
|
* This handles the migration from when Electron stored data in the platform-specific
|
||||||
|
* userData directory (e.g., ~/.config/Automaker) to the new shared ./data directory.
|
||||||
|
*
|
||||||
|
* Migration only occurs if:
|
||||||
|
* 1. The new location does NOT have settings.json
|
||||||
|
* 2. The legacy location DOES have settings.json
|
||||||
|
*
|
||||||
|
* Migrates all files and directories including:
|
||||||
|
* - settings.json (global settings)
|
||||||
|
* - credentials.json (API keys)
|
||||||
|
* - sessions-metadata.json (chat session metadata)
|
||||||
|
* - agent-sessions/ (conversation histories)
|
||||||
|
* - Any other files in the data directory
|
||||||
|
*
|
||||||
|
* @returns Promise resolving to migration result
|
||||||
|
*/
|
||||||
|
async migrateFromLegacyElectronPath(): Promise<{
|
||||||
|
migrated: boolean;
|
||||||
|
migratedFiles: string[];
|
||||||
|
legacyPath: string;
|
||||||
|
errors: string[];
|
||||||
|
}> {
|
||||||
|
const legacyPath = this.getLegacyElectronUserDataPath();
|
||||||
|
const migratedFiles: string[] = [];
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
// Skip if legacy path is the same as current data dir (no migration needed)
|
||||||
|
if (path.resolve(legacyPath) === path.resolve(this.dataDir)) {
|
||||||
|
logger.debug('Legacy path same as current data dir, skipping migration');
|
||||||
|
return { migrated: false, migratedFiles, legacyPath, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Checking for legacy data migration from: ${legacyPath}`);
|
||||||
|
logger.info(`Current data directory: ${this.dataDir}`);
|
||||||
|
|
||||||
|
// Check if new settings already exist
|
||||||
|
const newSettingsPath = getGlobalSettingsPath(this.dataDir);
|
||||||
|
let newSettingsExist = false;
|
||||||
|
try {
|
||||||
|
await fs.access(newSettingsPath);
|
||||||
|
newSettingsExist = true;
|
||||||
|
} catch {
|
||||||
|
// New settings don't exist, migration may be needed
|
||||||
|
}
|
||||||
|
|
||||||
|
if (newSettingsExist) {
|
||||||
|
logger.debug('Settings already exist in new location, skipping migration');
|
||||||
|
return { migrated: false, migratedFiles, legacyPath, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if legacy directory exists and has settings
|
||||||
|
const legacySettingsPath = path.join(legacyPath, 'settings.json');
|
||||||
|
let legacySettingsExist = false;
|
||||||
|
try {
|
||||||
|
await fs.access(legacySettingsPath);
|
||||||
|
legacySettingsExist = true;
|
||||||
|
} catch {
|
||||||
|
// Legacy settings don't exist
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!legacySettingsExist) {
|
||||||
|
logger.debug('No legacy settings found, skipping migration');
|
||||||
|
return { migrated: false, migratedFiles, legacyPath, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform migration of specific application data files only
|
||||||
|
// (not Electron internal caches like Code Cache, GPU Cache, etc.)
|
||||||
|
logger.info('Found legacy data directory, migrating application data to new location...');
|
||||||
|
|
||||||
|
// Ensure new data directory exists
|
||||||
|
try {
|
||||||
|
await ensureDataDir(this.dataDir);
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to create data directory: ${error}`;
|
||||||
|
logger.error(msg);
|
||||||
|
errors.push(msg);
|
||||||
|
return { migrated: false, migratedFiles, legacyPath, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only migrate specific application data files/directories
|
||||||
|
const itemsToMigrate = [
|
||||||
|
'settings.json',
|
||||||
|
'credentials.json',
|
||||||
|
'sessions-metadata.json',
|
||||||
|
'agent-sessions',
|
||||||
|
'.api-key',
|
||||||
|
'.sessions',
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const item of itemsToMigrate) {
|
||||||
|
const srcPath = path.join(legacyPath, item);
|
||||||
|
const destPath = path.join(this.dataDir, item);
|
||||||
|
|
||||||
|
// Check if source exists
|
||||||
|
try {
|
||||||
|
await fs.access(srcPath);
|
||||||
|
} catch {
|
||||||
|
// Source doesn't exist, skip
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if destination already exists
|
||||||
|
try {
|
||||||
|
await fs.access(destPath);
|
||||||
|
logger.debug(`Skipping ${item} - already exists in destination`);
|
||||||
|
continue;
|
||||||
|
} catch {
|
||||||
|
// Destination doesn't exist, proceed with copy
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy file or directory
|
||||||
|
try {
|
||||||
|
const stat = await fs.stat(srcPath);
|
||||||
|
if (stat.isDirectory()) {
|
||||||
|
await this.copyDirectory(srcPath, destPath);
|
||||||
|
migratedFiles.push(item + '/');
|
||||||
|
logger.info(`Migrated directory: ${item}/`);
|
||||||
|
} else {
|
||||||
|
const content = await fs.readFile(srcPath);
|
||||||
|
await fs.writeFile(destPath, content);
|
||||||
|
migratedFiles.push(item);
|
||||||
|
logger.info(`Migrated file: ${item}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to migrate ${item}: ${error}`;
|
||||||
|
logger.error(msg);
|
||||||
|
errors.push(msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (migratedFiles.length > 0) {
|
||||||
|
logger.info(
|
||||||
|
`Migration complete. Migrated ${migratedFiles.length} item(s): ${migratedFiles.join(', ')}`
|
||||||
|
);
|
||||||
|
logger.info(`Legacy path: ${legacyPath}`);
|
||||||
|
logger.info(`New path: ${this.dataDir}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
migrated: migratedFiles.length > 0,
|
||||||
|
migratedFiles,
|
||||||
|
legacyPath,
|
||||||
|
errors,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recursively copy a directory from source to destination
|
||||||
|
*
|
||||||
|
* @param srcDir - Source directory path
|
||||||
|
* @param destDir - Destination directory path
|
||||||
|
*/
|
||||||
|
private async copyDirectory(srcDir: string, destDir: string): Promise<void> {
|
||||||
|
await fs.mkdir(destDir, { recursive: true });
|
||||||
|
const entries = await fs.readdir(srcDir, { withFileTypes: true });
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
const srcPath = path.join(srcDir, entry.name);
|
||||||
|
const destPath = path.join(destDir, entry.name);
|
||||||
|
|
||||||
|
if (entry.isDirectory()) {
|
||||||
|
await this.copyDirectory(srcPath, destPath);
|
||||||
|
} else if (entry.isFile()) {
|
||||||
|
const content = await fs.readFile(srcPath);
|
||||||
|
await fs.writeFile(destPath, content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user