Mirror of https://github.com/AutoMaker-Org/automaker.git, synced 2026-01-29 22:02:02 +00:00
style: fix formatting with Prettier
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
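The changes below are consistent with a Prettier setup that prefers single quotes and a wider print width (multi-line requires, argument lists, and call chains get joined onto one line). A minimal sketch of such a .prettierrc, with both option values assumed rather than taken from the repository:

{
  "singleQuote": true,
  "printWidth": 100
}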
@@ -21,4 +21,4 @@
"mcp__puppeteer__puppeteer_evaluate"
]
}
}
}
.github/scripts/upload-to-r2.js (vendored), 179 changed lines
@@ -1,15 +1,11 @@
const {
S3Client,
PutObjectCommand,
GetObjectCommand,
} = require("@aws-sdk/client-s3");
const fs = require("fs");
const path = require("path");
const https = require("https");
const { pipeline } = require("stream/promises");
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
const fs = require('fs');
const path = require('path');
const https = require('https');
const { pipeline } = require('stream/promises');

const s3Client = new S3Client({
region: "auto",
region: 'auto',
endpoint: process.env.R2_ENDPOINT,
credentials: {
accessKeyId: process.env.R2_ACCESS_KEY_ID,
@@ -28,14 +24,14 @@ async function fetchExistingReleases() {
|
||||
const response = await s3Client.send(
|
||||
new GetObjectCommand({
|
||||
Bucket: BUCKET,
|
||||
Key: "releases.json",
|
||||
Key: 'releases.json',
|
||||
})
|
||||
);
|
||||
const body = await response.Body.transformToString();
|
||||
return JSON.parse(body);
|
||||
} catch (error) {
|
||||
if (error.name === "NoSuchKey" || error.$metadata?.httpStatusCode === 404) {
|
||||
console.log("No existing releases.json found, creating new one");
|
||||
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
||||
console.log('No existing releases.json found, creating new one');
|
||||
return { latestVersion: null, releases: [] };
|
||||
}
|
||||
throw error;
|
||||
@@ -85,7 +81,7 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
|
||||
resolve({
|
||||
accessible: false,
|
||||
statusCode,
|
||||
error: "Redirect without location header",
|
||||
error: 'Redirect without location header',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -93,18 +89,16 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
|
||||
return https
|
||||
.get(redirectUrl, { timeout: 10000 }, (redirectResponse) => {
|
||||
const redirectStatus = redirectResponse.statusCode;
|
||||
const contentType =
|
||||
redirectResponse.headers["content-type"] || "";
|
||||
const contentType = redirectResponse.headers['content-type'] || '';
|
||||
// Check if it's actually a file (zip/tar.gz) and not HTML
|
||||
const isFile =
|
||||
contentType.includes("application/zip") ||
|
||||
contentType.includes("application/gzip") ||
|
||||
contentType.includes("application/x-gzip") ||
|
||||
contentType.includes("application/x-tar") ||
|
||||
redirectUrl.includes(".zip") ||
|
||||
redirectUrl.includes(".tar.gz");
|
||||
const isGood =
|
||||
redirectStatus >= 200 && redirectStatus < 300 && isFile;
|
||||
contentType.includes('application/zip') ||
|
||||
contentType.includes('application/gzip') ||
|
||||
contentType.includes('application/x-gzip') ||
|
||||
contentType.includes('application/x-tar') ||
|
||||
redirectUrl.includes('.zip') ||
|
||||
redirectUrl.includes('.tar.gz');
|
||||
const isGood = redirectStatus >= 200 && redirectStatus < 300 && isFile;
|
||||
redirectResponse.destroy();
|
||||
resolve({
|
||||
accessible: isGood,
|
||||
@@ -113,38 +107,38 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
|
||||
contentType,
|
||||
});
|
||||
})
|
||||
.on("error", (error) => {
|
||||
.on('error', (error) => {
|
||||
resolve({
|
||||
accessible: false,
|
||||
statusCode,
|
||||
error: error.message,
|
||||
});
|
||||
})
|
||||
.on("timeout", function () {
|
||||
.on('timeout', function () {
|
||||
this.destroy();
|
||||
resolve({
|
||||
accessible: false,
|
||||
statusCode,
|
||||
error: "Timeout following redirect",
|
||||
error: 'Timeout following redirect',
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Check if status is good (200-299 range) and it's actually a file
|
||||
const contentType = response.headers["content-type"] || "";
|
||||
const contentType = response.headers['content-type'] || '';
|
||||
const isFile =
|
||||
contentType.includes("application/zip") ||
|
||||
contentType.includes("application/gzip") ||
|
||||
contentType.includes("application/x-gzip") ||
|
||||
contentType.includes("application/x-tar") ||
|
||||
url.includes(".zip") ||
|
||||
url.includes(".tar.gz");
|
||||
contentType.includes('application/zip') ||
|
||||
contentType.includes('application/gzip') ||
|
||||
contentType.includes('application/x-gzip') ||
|
||||
contentType.includes('application/x-tar') ||
|
||||
url.includes('.zip') ||
|
||||
url.includes('.tar.gz');
|
||||
const isGood = statusCode >= 200 && statusCode < 300 && isFile;
|
||||
response.destroy();
|
||||
resolve({ accessible: isGood, statusCode, contentType });
|
||||
});
|
||||
|
||||
request.on("error", (error) => {
|
||||
request.on('error', (error) => {
|
||||
resolve({
|
||||
accessible: false,
|
||||
statusCode: null,
|
||||
@@ -152,12 +146,12 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
|
||||
});
|
||||
});
|
||||
|
||||
request.on("timeout", () => {
|
||||
request.on('timeout', () => {
|
||||
request.destroy();
|
||||
resolve({
|
||||
accessible: false,
|
||||
statusCode: null,
|
||||
error: "Request timeout",
|
||||
error: 'Request timeout',
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -168,22 +162,14 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
|
||||
`✓ URL ${url} is now accessible after ${attempt} retries (status: ${result.statusCode})`
|
||||
);
|
||||
} else {
|
||||
console.log(
|
||||
`✓ URL ${url} is accessible (status: ${result.statusCode})`
|
||||
);
|
||||
console.log(`✓ URL ${url} is accessible (status: ${result.statusCode})`);
|
||||
}
|
||||
return result.finalUrl || url; // Return the final URL (after redirects) if available
|
||||
} else {
|
||||
const errorMsg = result.error ? ` - ${result.error}` : "";
|
||||
const statusMsg = result.statusCode
|
||||
? ` (status: ${result.statusCode})`
|
||||
: "";
|
||||
const contentTypeMsg = result.contentType
|
||||
? ` [content-type: ${result.contentType}]`
|
||||
: "";
|
||||
console.log(
|
||||
`✗ URL ${url} not accessible${statusMsg}${contentTypeMsg}${errorMsg}`
|
||||
);
|
||||
const errorMsg = result.error ? ` - ${result.error}` : '';
|
||||
const statusMsg = result.statusCode ? ` (status: ${result.statusCode})` : '';
|
||||
const contentTypeMsg = result.contentType ? ` [content-type: ${result.contentType}]` : '';
|
||||
console.log(`✗ URL ${url} not accessible${statusMsg}${contentTypeMsg}${errorMsg}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`✗ URL ${url} check failed: ${error.message}`);
|
||||
@@ -191,9 +177,7 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
|
||||
|
||||
if (attempt < maxRetries - 1) {
|
||||
const delay = initialDelay * Math.pow(2, attempt);
|
||||
console.log(
|
||||
` Retrying in ${delay}ms... (attempt ${attempt + 1}/${maxRetries})`
|
||||
);
|
||||
console.log(` Retrying in ${delay}ms... (attempt ${attempt + 1}/${maxRetries})`);
|
||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
||||
}
|
||||
}
|
||||
@@ -207,12 +191,7 @@ async function downloadFromGitHub(url, outputPath) {
|
||||
const statusCode = response.statusCode;
|
||||
|
||||
// Follow redirects (all redirect types)
|
||||
if (
|
||||
statusCode === 301 ||
|
||||
statusCode === 302 ||
|
||||
statusCode === 307 ||
|
||||
statusCode === 308
|
||||
) {
|
||||
if (statusCode === 301 || statusCode === 302 || statusCode === 307 || statusCode === 308) {
|
||||
const redirectUrl = response.headers.location;
|
||||
response.destroy();
|
||||
if (!redirectUrl) {
|
||||
@@ -220,39 +199,33 @@ async function downloadFromGitHub(url, outputPath) {
|
||||
return;
|
||||
}
|
||||
// Resolve relative redirects
|
||||
const finalRedirectUrl = redirectUrl.startsWith("http")
|
||||
const finalRedirectUrl = redirectUrl.startsWith('http')
|
||||
? redirectUrl
|
||||
: new URL(redirectUrl, url).href;
|
||||
console.log(` Following redirect: ${finalRedirectUrl}`);
|
||||
return downloadFromGitHub(finalRedirectUrl, outputPath)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
return downloadFromGitHub(finalRedirectUrl, outputPath).then(resolve).catch(reject);
|
||||
}
|
||||
|
||||
if (statusCode !== 200) {
|
||||
response.destroy();
|
||||
reject(
|
||||
new Error(
|
||||
`Failed to download ${url}: ${statusCode} ${response.statusMessage}`
|
||||
)
|
||||
);
|
||||
reject(new Error(`Failed to download ${url}: ${statusCode} ${response.statusMessage}`));
|
||||
return;
|
||||
}
|
||||
|
||||
const fileStream = fs.createWriteStream(outputPath);
|
||||
response.pipe(fileStream);
|
||||
fileStream.on("finish", () => {
|
||||
fileStream.on('finish', () => {
|
||||
fileStream.close();
|
||||
resolve();
|
||||
});
|
||||
fileStream.on("error", (error) => {
|
||||
fileStream.on('error', (error) => {
|
||||
response.destroy();
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
|
||||
request.on("error", reject);
|
||||
request.on("timeout", () => {
|
||||
request.on('error', reject);
|
||||
request.on('timeout', () => {
|
||||
request.destroy();
|
||||
reject(new Error(`Request timeout for ${url}`));
|
||||
});
|
||||
@@ -260,8 +233,8 @@ async function downloadFromGitHub(url, outputPath) {
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const artifactsDir = "artifacts";
|
||||
const tempDir = path.join(artifactsDir, "temp");
|
||||
const artifactsDir = 'artifacts';
|
||||
const tempDir = path.join(artifactsDir, 'temp');
|
||||
|
||||
// Create temp directory for downloaded GitHub archives
|
||||
if (!fs.existsSync(tempDir)) {
|
||||
@@ -292,40 +265,30 @@ async function main() {
|
||||
|
||||
// Find all artifacts
|
||||
const artifacts = {
|
||||
windows: findArtifacts(path.join(artifactsDir, "windows-builds"), /\.exe$/),
|
||||
macos: findArtifacts(path.join(artifactsDir, "macos-builds"), /-x64\.dmg$/),
|
||||
macosArm: findArtifacts(
|
||||
path.join(artifactsDir, "macos-builds"),
|
||||
/-arm64\.dmg$/
|
||||
),
|
||||
linux: findArtifacts(
|
||||
path.join(artifactsDir, "linux-builds"),
|
||||
/\.AppImage$/
|
||||
),
|
||||
windows: findArtifacts(path.join(artifactsDir, 'windows-builds'), /\.exe$/),
|
||||
macos: findArtifacts(path.join(artifactsDir, 'macos-builds'), /-x64\.dmg$/),
|
||||
macosArm: findArtifacts(path.join(artifactsDir, 'macos-builds'), /-arm64\.dmg$/),
|
||||
linux: findArtifacts(path.join(artifactsDir, 'linux-builds'), /\.AppImage$/),
|
||||
sourceZip: [sourceZipPath],
|
||||
sourceTarGz: [sourceTarGzPath],
|
||||
};
|
||||
|
||||
console.log("Found artifacts:");
|
||||
console.log('Found artifacts:');
|
||||
for (const [platform, files] of Object.entries(artifacts)) {
|
||||
console.log(
|
||||
` ${platform}: ${
|
||||
files.length > 0
|
||||
? files.map((f) => path.basename(f)).join(", ")
|
||||
: "none"
|
||||
}`
|
||||
` ${platform}: ${files.length > 0 ? files.map((f) => path.basename(f)).join(', ') : 'none'}`
|
||||
);
|
||||
}
|
||||
|
||||
// Upload each artifact to R2
|
||||
const assets = {};
|
||||
const contentTypes = {
|
||||
windows: "application/x-msdownload",
|
||||
macos: "application/x-apple-diskimage",
|
||||
macosArm: "application/x-apple-diskimage",
|
||||
linux: "application/x-executable",
|
||||
sourceZip: "application/zip",
|
||||
sourceTarGz: "application/gzip",
|
||||
windows: 'application/x-msdownload',
|
||||
macos: 'application/x-apple-diskimage',
|
||||
macosArm: 'application/x-apple-diskimage',
|
||||
linux: 'application/x-executable',
|
||||
sourceZip: 'application/zip',
|
||||
sourceTarGz: 'application/gzip',
|
||||
};
|
||||
|
||||
for (const [platform, files] of Object.entries(artifacts)) {
|
||||
@@ -345,11 +308,11 @@ async function main() {
|
||||
filename,
|
||||
size,
|
||||
arch:
|
||||
platform === "macosArm"
|
||||
? "arm64"
|
||||
: platform === "sourceZip" || platform === "sourceTarGz"
|
||||
? "source"
|
||||
: "x64",
|
||||
platform === 'macosArm'
|
||||
? 'arm64'
|
||||
: platform === 'sourceZip' || platform === 'sourceTarGz'
|
||||
? 'source'
|
||||
: 'x64',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -364,9 +327,7 @@ async function main() {
|
||||
};
|
||||
|
||||
// Remove existing entry for this version if re-running
|
||||
releasesData.releases = releasesData.releases.filter(
|
||||
(r) => r.version !== VERSION
|
||||
);
|
||||
releasesData.releases = releasesData.releases.filter((r) => r.version !== VERSION);
|
||||
|
||||
// Prepend new release
|
||||
releasesData.releases.unshift(newRelease);
|
||||
@@ -376,19 +337,19 @@ async function main() {
|
||||
await s3Client.send(
|
||||
new PutObjectCommand({
|
||||
Bucket: BUCKET,
|
||||
Key: "releases.json",
|
||||
Key: 'releases.json',
|
||||
Body: JSON.stringify(releasesData, null, 2),
|
||||
ContentType: "application/json",
|
||||
CacheControl: "public, max-age=60",
|
||||
ContentType: 'application/json',
|
||||
CacheControl: 'public, max-age=60',
|
||||
})
|
||||
);
|
||||
|
||||
console.log("Successfully updated releases.json");
|
||||
console.log('Successfully updated releases.json');
|
||||
console.log(`Latest version: ${VERSION}`);
|
||||
console.log(`Total releases: ${releasesData.releases.length}`);
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error("Failed to upload to R2:", err);
|
||||
console.error('Failed to upload to R2:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
.github/workflows/e2e-tests.yml (vendored), 8 changed lines
@@ -3,7 +3,7 @@ name: E2E Tests
on:
pull_request:
branches:
- "*"
- '*'
push:
branches:
- main
@@ -21,8 +21,8 @@ jobs:
- name: Setup project
uses: ./.github/actions/setup-project
with:
check-lockfile: "true"
rebuild-node-pty-path: "apps/server"
check-lockfile: 'true'
rebuild-node-pty-path: 'apps/server'

- name: Install Playwright browsers
run: npx playwright install --with-deps chromium
@@ -58,7 +58,7 @@ jobs:
env:
CI: true
VITE_SERVER_URL: http://localhost:3008
VITE_SKIP_SETUP: "true"
VITE_SKIP_SETUP: 'true'

- name: Upload Playwright report
uses: actions/upload-artifact@v4
.github/workflows/pr-check.yml (vendored), 4 changed lines
@@ -3,7 +3,7 @@ name: PR Build Check
on:
pull_request:
branches:
- "*"
- '*'
push:
branches:
- main
@@ -20,7 +20,7 @@ jobs:
- name: Setup project
uses: ./.github/actions/setup-project
with:
check-lockfile: "true"
check-lockfile: 'true'

- name: Run build:electron (dir only - faster CI)
run: npm run build:electron:dir
apps/app/next-env.d.ts (vendored), 2 changed lines
@@ -1,6 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
import "./.next/dev/types/routes.d.ts";
import './.next/dev/types/routes.d.ts';

// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
@@ -4,7 +4,7 @@
* Supports API key authentication via header or environment variable.
*/

import type { Request, Response, NextFunction } from "express";
import type { Request, Response, NextFunction } from 'express';

// API key from environment (optional - if not set, auth is disabled)
const API_KEY = process.env.AUTOMAKER_API_KEY;
@@ -23,12 +23,12 @@ export function authMiddleware(req: Request, res: Response, next: NextFunction):
}

// Check for API key in header
const providedKey = req.headers["x-api-key"] as string | undefined;
const providedKey = req.headers['x-api-key'] as string | undefined;

if (!providedKey) {
res.status(401).json({
success: false,
error: "Authentication required. Provide X-API-Key header.",
error: 'Authentication required. Provide X-API-Key header.',
});
return;
}
@@ -36,7 +36,7 @@ export function authMiddleware(req: Request, res: Response, next: NextFunction):
if (providedKey !== API_KEY) {
res.status(403).json({
success: false,
error: "Invalid API key.",
error: 'Invalid API key.',
});
return;
}
@@ -57,6 +57,6 @@ export function isAuthEnabled(): boolean {
export function getAuthStatus(): { enabled: boolean; method: string } {
return {
enabled: !!API_KEY,
method: API_KEY ? "api_key" : "none",
method: API_KEY ? 'api_key' : 'none',
};
}
@@ -9,7 +9,7 @@ import type {
|
||||
InstallationStatus,
|
||||
ValidationResult,
|
||||
ModelDefinition,
|
||||
} from "./types.js";
|
||||
} from './types.js';
|
||||
|
||||
/**
|
||||
* Base provider class that all provider implementations must extend
|
||||
@@ -33,9 +33,7 @@ export abstract class BaseProvider {
|
||||
* @param options Execution options
|
||||
* @returns AsyncGenerator yielding provider messages
|
||||
*/
|
||||
abstract executeQuery(
|
||||
options: ExecuteOptions
|
||||
): AsyncGenerator<ProviderMessage>;
|
||||
abstract executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage>;
|
||||
|
||||
/**
|
||||
* Detect if the provider is installed and configured
|
||||
@@ -59,7 +57,7 @@ export abstract class BaseProvider {
|
||||
|
||||
// Base validation (can be overridden)
|
||||
if (!this.config) {
|
||||
errors.push("Provider config is missing");
|
||||
errors.push('Provider config is missing');
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -76,7 +74,7 @@ export abstract class BaseProvider {
|
||||
*/
|
||||
supportsFeature(feature: string): boolean {
|
||||
// Default implementation - override in subclasses
|
||||
const commonFeatures = ["tools", "text"];
|
||||
const commonFeatures = ['tools', 'text'];
|
||||
return commonFeatures.includes(feature);
|
||||
}
|
||||
|
||||
|
||||
@@ -5,26 +5,24 @@
|
||||
* with the provider architecture.
|
||||
*/
|
||||
|
||||
import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
|
||||
import { BaseProvider } from "./base-provider.js";
|
||||
import { query, type Options } from '@anthropic-ai/claude-agent-sdk';
|
||||
import { BaseProvider } from './base-provider.js';
|
||||
import type {
|
||||
ExecuteOptions,
|
||||
ProviderMessage,
|
||||
InstallationStatus,
|
||||
ModelDefinition,
|
||||
} from "./types.js";
|
||||
} from './types.js';
|
||||
|
||||
export class ClaudeProvider extends BaseProvider {
|
||||
getName(): string {
|
||||
return "claude";
|
||||
return 'claude';
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a query using Claude Agent SDK
|
||||
*/
|
||||
async *executeQuery(
|
||||
options: ExecuteOptions
|
||||
): AsyncGenerator<ProviderMessage> {
|
||||
async *executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage> {
|
||||
const {
|
||||
prompt,
|
||||
model,
|
||||
@@ -38,16 +36,7 @@ export class ClaudeProvider extends BaseProvider {
|
||||
} = options;
|
||||
|
||||
// Build Claude SDK options
|
||||
const defaultTools = [
|
||||
"Read",
|
||||
"Write",
|
||||
"Edit",
|
||||
"Glob",
|
||||
"Grep",
|
||||
"Bash",
|
||||
"WebSearch",
|
||||
"WebFetch",
|
||||
];
|
||||
const defaultTools = ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'Bash', 'WebSearch', 'WebFetch'];
|
||||
const toolsToUse = allowedTools || defaultTools;
|
||||
|
||||
const sdkOptions: Options = {
|
||||
@@ -56,7 +45,7 @@ export class ClaudeProvider extends BaseProvider {
|
||||
maxTurns,
|
||||
cwd,
|
||||
allowedTools: toolsToUse,
|
||||
permissionMode: "acceptEdits",
|
||||
permissionMode: 'acceptEdits',
|
||||
sandbox: {
|
||||
enabled: true,
|
||||
autoAllowBashIfSandboxed: true,
|
||||
@@ -75,10 +64,10 @@ export class ClaudeProvider extends BaseProvider {
|
||||
// Multi-part prompt (with images)
|
||||
promptPayload = (async function* () {
|
||||
const multiPartPrompt = {
|
||||
type: "user" as const,
|
||||
session_id: "",
|
||||
type: 'user' as const,
|
||||
session_id: '',
|
||||
message: {
|
||||
role: "user" as const,
|
||||
role: 'user' as const,
|
||||
content: prompt,
|
||||
},
|
||||
parent_tool_use_id: null,
|
||||
@@ -99,10 +88,7 @@ export class ClaudeProvider extends BaseProvider {
|
||||
yield msg as ProviderMessage;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
"[ClaudeProvider] executeQuery() error during execution:",
|
||||
error
|
||||
);
|
||||
console.error('[ClaudeProvider] executeQuery() error during execution:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -116,7 +102,7 @@ export class ClaudeProvider extends BaseProvider {
|
||||
|
||||
const status: InstallationStatus = {
|
||||
installed: true,
|
||||
method: "sdk",
|
||||
method: 'sdk',
|
||||
hasApiKey,
|
||||
authenticated: hasApiKey,
|
||||
};
|
||||
@@ -130,53 +116,53 @@ export class ClaudeProvider extends BaseProvider {
|
||||
getAvailableModels(): ModelDefinition[] {
|
||||
const models = [
|
||||
{
|
||||
id: "claude-opus-4-5-20251101",
|
||||
name: "Claude Opus 4.5",
|
||||
modelString: "claude-opus-4-5-20251101",
|
||||
provider: "anthropic",
|
||||
description: "Most capable Claude model",
|
||||
id: 'claude-opus-4-5-20251101',
|
||||
name: 'Claude Opus 4.5',
|
||||
modelString: 'claude-opus-4-5-20251101',
|
||||
provider: 'anthropic',
|
||||
description: 'Most capable Claude model',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 16000,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: "premium" as const,
|
||||
tier: 'premium' as const,
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
id: "claude-sonnet-4-20250514",
|
||||
name: "Claude Sonnet 4",
|
||||
modelString: "claude-sonnet-4-20250514",
|
||||
provider: "anthropic",
|
||||
description: "Balanced performance and cost",
|
||||
id: 'claude-sonnet-4-20250514',
|
||||
name: 'Claude Sonnet 4',
|
||||
modelString: 'claude-sonnet-4-20250514',
|
||||
provider: 'anthropic',
|
||||
description: 'Balanced performance and cost',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 16000,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: "standard" as const,
|
||||
tier: 'standard' as const,
|
||||
},
|
||||
{
|
||||
id: "claude-3-5-sonnet-20241022",
|
||||
name: "Claude 3.5 Sonnet",
|
||||
modelString: "claude-3-5-sonnet-20241022",
|
||||
provider: "anthropic",
|
||||
description: "Fast and capable",
|
||||
id: 'claude-3-5-sonnet-20241022',
|
||||
name: 'Claude 3.5 Sonnet',
|
||||
modelString: 'claude-3-5-sonnet-20241022',
|
||||
provider: 'anthropic',
|
||||
description: 'Fast and capable',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 8000,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: "standard" as const,
|
||||
tier: 'standard' as const,
|
||||
},
|
||||
{
|
||||
id: "claude-3-5-haiku-20241022",
|
||||
name: "Claude 3.5 Haiku",
|
||||
modelString: "claude-3-5-haiku-20241022",
|
||||
provider: "anthropic",
|
||||
description: "Fastest Claude model",
|
||||
id: 'claude-3-5-haiku-20241022',
|
||||
name: 'Claude 3.5 Haiku',
|
||||
modelString: 'claude-3-5-haiku-20241022',
|
||||
provider: 'anthropic',
|
||||
description: 'Fastest Claude model',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 8000,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
tier: "basic" as const,
|
||||
tier: 'basic' as const,
|
||||
},
|
||||
] satisfies ModelDefinition[];
|
||||
return models;
|
||||
@@ -186,7 +172,7 @@ export class ClaudeProvider extends BaseProvider {
|
||||
* Check if the provider supports a specific feature
|
||||
*/
|
||||
supportsFeature(feature: string): boolean {
|
||||
const supportedFeatures = ["tools", "text", "vision", "thinking"];
|
||||
const supportedFeatures = ['tools', 'text', 'vision', 'thinking'];
|
||||
return supportedFeatures.includes(feature);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,9 +6,9 @@
|
||||
* new providers (Cursor, OpenCode, etc.) trivial - just add one line.
|
||||
*/
|
||||
|
||||
import { BaseProvider } from "./base-provider.js";
|
||||
import { ClaudeProvider } from "./claude-provider.js";
|
||||
import type { InstallationStatus } from "./types.js";
|
||||
import { BaseProvider } from './base-provider.js';
|
||||
import { ClaudeProvider } from './claude-provider.js';
|
||||
import type { InstallationStatus } from './types.js';
|
||||
|
||||
export class ProviderFactory {
|
||||
/**
|
||||
@@ -21,10 +21,7 @@ export class ProviderFactory {
|
||||
const lowerModel = modelId.toLowerCase();
|
||||
|
||||
// Claude models (claude-*, opus, sonnet, haiku)
|
||||
if (
|
||||
lowerModel.startsWith("claude-") ||
|
||||
["haiku", "sonnet", "opus"].includes(lowerModel)
|
||||
) {
|
||||
if (lowerModel.startsWith('claude-') || ['haiku', 'sonnet', 'opus'].includes(lowerModel)) {
|
||||
return new ClaudeProvider();
|
||||
}
|
||||
|
||||
@@ -37,9 +34,7 @@ export class ProviderFactory {
|
||||
// }
|
||||
|
||||
// Default to Claude for unknown models
|
||||
console.warn(
|
||||
`[ProviderFactory] Unknown model prefix for "${modelId}", defaulting to Claude`
|
||||
);
|
||||
console.warn(`[ProviderFactory] Unknown model prefix for "${modelId}", defaulting to Claude`);
|
||||
return new ClaudeProvider();
|
||||
}
|
||||
|
||||
@@ -58,9 +53,7 @@ export class ProviderFactory {
|
||||
*
|
||||
* @returns Map of provider name to installation status
|
||||
*/
|
||||
static async checkAllProviders(): Promise<
|
||||
Record<string, InstallationStatus>
|
||||
> {
|
||||
static async checkAllProviders(): Promise<Record<string, InstallationStatus>> {
|
||||
const providers = this.getAllProviders();
|
||||
const statuses: Record<string, InstallationStatus> = {};
|
||||
|
||||
@@ -83,8 +76,8 @@ export class ProviderFactory {
|
||||
const lowerName = name.toLowerCase();
|
||||
|
||||
switch (lowerName) {
|
||||
case "claude":
|
||||
case "anthropic":
|
||||
case 'claude':
|
||||
case 'anthropic':
|
||||
return new ClaudeProvider();
|
||||
|
||||
// Future providers:
|
||||
|
||||
@@ -15,7 +15,7 @@ export interface ProviderConfig {
|
||||
* Message in conversation history
|
||||
*/
|
||||
export interface ConversationMessage {
|
||||
role: "user" | "assistant";
|
||||
role: 'user' | 'assistant';
|
||||
content: string | Array<{ type: string; text?: string; source?: object }>;
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ export interface ExecuteOptions {
|
||||
* Content block in a provider message (matches Claude SDK format)
|
||||
*/
|
||||
export interface ContentBlock {
|
||||
type: "text" | "tool_use" | "thinking" | "tool_result";
|
||||
type: 'text' | 'tool_use' | 'thinking' | 'tool_result';
|
||||
text?: string;
|
||||
thinking?: string;
|
||||
name?: string;
|
||||
@@ -52,11 +52,11 @@ export interface ContentBlock {
|
||||
* Message returned by a provider (matches Claude SDK streaming format)
|
||||
*/
|
||||
export interface ProviderMessage {
|
||||
type: "assistant" | "user" | "error" | "result";
|
||||
subtype?: "success" | "error";
|
||||
type: 'assistant' | 'user' | 'error' | 'result';
|
||||
subtype?: 'success' | 'error';
|
||||
session_id?: string;
|
||||
message?: {
|
||||
role: "user" | "assistant";
|
||||
role: 'user' | 'assistant';
|
||||
content: ContentBlock[];
|
||||
};
|
||||
result?: string;
|
||||
@@ -71,7 +71,7 @@ export interface InstallationStatus {
|
||||
installed: boolean;
|
||||
path?: string;
|
||||
version?: string;
|
||||
method?: "cli" | "npm" | "brew" | "sdk";
|
||||
method?: 'cli' | 'npm' | 'brew' | 'sdk';
|
||||
hasApiKey?: boolean;
|
||||
authenticated?: boolean;
|
||||
error?: string;
|
||||
@@ -99,6 +99,6 @@ export interface ModelDefinition {
|
||||
maxOutputTokens?: number;
|
||||
supportsVision?: boolean;
|
||||
supportsTools?: boolean;
|
||||
tier?: "basic" | "standard" | "premium";
|
||||
tier?: 'basic' | 'standard' | 'premium';
|
||||
default?: boolean;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /clear endpoint - Clear conversation
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createClearHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -12,16 +12,14 @@ export function createClearHandler(agentService: AgentService) {
|
||||
const { sessionId } = req.body as { sessionId: string };
|
||||
|
||||
if (!sessionId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "sessionId is required" });
|
||||
res.status(400).json({ success: false, error: 'sessionId is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await agentService.clearSession(sessionId);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
logError(error, "Clear session failed");
|
||||
logError(error, 'Clear session failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /history endpoint - Get conversation history
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createHistoryHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -12,16 +12,14 @@ export function createHistoryHandler(agentService: AgentService) {
|
||||
const { sessionId } = req.body as { sessionId: string };
|
||||
|
||||
if (!sessionId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "sessionId is required" });
|
||||
res.status(400).json({ success: false, error: 'sessionId is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = agentService.getHistory(sessionId);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
logError(error, "Get history failed");
|
||||
logError(error, 'Get history failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /model endpoint - Set session model
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createModelHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,16 +15,14 @@ export function createModelHandler(agentService: AgentService) {
|
||||
};
|
||||
|
||||
if (!sessionId || !model) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "sessionId and model are required" });
|
||||
res.status(400).json({ success: false, error: 'sessionId and model are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await agentService.setSessionModel(sessionId, model);
|
||||
res.json({ success: result });
|
||||
} catch (error) {
|
||||
logError(error, "Set session model failed");
|
||||
logError(error, 'Set session model failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /stop endpoint - Stop execution
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStopHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -12,16 +12,14 @@ export function createStopHandler(agentService: AgentService) {
|
||||
const { sessionId } = req.body as { sessionId: string };
|
||||
|
||||
if (!sessionId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "sessionId is required" });
|
||||
res.status(400).json({ success: false, error: 'sessionId is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await agentService.stopExecution(sessionId);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
logError(error, "Stop execution failed");
|
||||
logError(error, 'Stop execution failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,25 +2,22 @@
|
||||
* Spec Regeneration routes - HTTP API for AI-powered spec generation
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import type { EventEmitter } from "../../lib/events.js";
|
||||
import { createCreateHandler } from "./routes/create.js";
|
||||
import { createGenerateHandler } from "./routes/generate.js";
|
||||
import { createGenerateFeaturesHandler } from "./routes/generate-features.js";
|
||||
import { createStopHandler } from "./routes/stop.js";
|
||||
import { createStatusHandler } from "./routes/status.js";
|
||||
import { Router } from 'express';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createCreateHandler } from './routes/create.js';
|
||||
import { createGenerateHandler } from './routes/generate.js';
|
||||
import { createGenerateFeaturesHandler } from './routes/generate-features.js';
|
||||
import { createStopHandler } from './routes/stop.js';
|
||||
import { createStatusHandler } from './routes/status.js';
|
||||
|
||||
export function createSpecRegenerationRoutes(events: EventEmitter): Router {
|
||||
const router = Router();
|
||||
|
||||
router.post("/create", createCreateHandler(events));
|
||||
router.post("/generate", createGenerateHandler(events));
|
||||
router.post("/generate-features", createGenerateFeaturesHandler(events));
|
||||
router.post("/stop", createStopHandler());
|
||||
router.get("/status", createStatusHandler());
|
||||
router.post('/create', createCreateHandler(events));
|
||||
router.post('/generate', createGenerateHandler(events));
|
||||
router.post('/generate-features', createGenerateFeaturesHandler(events));
|
||||
router.post('/stop', createStopHandler());
|
||||
router.get('/status', createStatusHandler());
|
||||
|
||||
return router;
|
||||
}
@@ -2,8 +2,8 @@
|
||||
* GET /status endpoint - Get generation status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getSpecRegenerationStatus, getErrorMessage } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getSpecRegenerationStatus, getErrorMessage } from '../common.js';
|
||||
|
||||
export function createStatusHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
|
||||
@@ -2,12 +2,8 @@
|
||||
* POST /stop endpoint - Stop generation
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import {
|
||||
getSpecRegenerationStatus,
|
||||
setRunningState,
|
||||
getErrorMessage,
|
||||
} from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getSpecRegenerationStatus, setRunningState, getErrorMessage } from '../common.js';
|
||||
|
||||
export function createStopHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /commit-feature endpoint - Commit feature changes
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { AutoModeService } from "../../../services/auto-mode-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createCommitFeatureHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -16,23 +16,17 @@ export function createCommitFeatureHandler(autoModeService: AutoModeService) {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId are required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const commitHash = await autoModeService.commitFeature(
|
||||
projectPath,
|
||||
featureId,
|
||||
worktreePath
|
||||
);
|
||||
const commitHash = await autoModeService.commitFeature(projectPath, featureId, worktreePath);
|
||||
res.json({ success: true, commitHash });
|
||||
} catch (error) {
|
||||
logError(error, "Commit feature failed");
|
||||
logError(error, 'Commit feature failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /context-exists endpoint - Check if context exists for a feature
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { AutoModeService } from "../../../services/auto-mode-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createContextExistsHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,22 +15,17 @@ export function createContextExistsHandler(autoModeService: AutoModeService) {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId are required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const exists = await autoModeService.contextExists(
|
||||
projectPath,
|
||||
featureId
|
||||
);
|
||||
const exists = await autoModeService.contextExists(projectPath, featureId);
|
||||
res.json({ success: true, exists });
|
||||
} catch (error) {
|
||||
logError(error, "Check context exists failed");
|
||||
logError(error, 'Check context exists failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /status endpoint - Get auto mode status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { AutoModeService } from "../../../services/auto-mode-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStatusHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,7 +15,7 @@ export function createStatusHandler(autoModeService: AutoModeService) {
|
||||
...status,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get status failed");
|
||||
logError(error, 'Get status failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /stop-feature endpoint - Stop a specific feature
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { AutoModeService } from "../../../services/auto-mode-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStopFeatureHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -12,16 +12,14 @@ export function createStopFeatureHandler(autoModeService: AutoModeService) {
|
||||
const { featureId } = req.body as { featureId: string };
|
||||
|
||||
if (!featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "featureId is required" });
|
||||
res.status(400).json({ success: false, error: 'featureId is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const stopped = await autoModeService.stopFeature(featureId);
|
||||
res.json({ success: true, stopped });
|
||||
} catch (error) {
|
||||
logError(error, "Stop feature failed");
|
||||
logError(error, 'Stop feature failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /verify-feature endpoint - Verify a feature
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { AutoModeService } from "../../../services/auto-mode-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createVerifyFeatureHandler(autoModeService: AutoModeService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,22 +15,17 @@ export function createVerifyFeatureHandler(autoModeService: AutoModeService) {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId are required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const passes = await autoModeService.verifyFeature(
|
||||
projectPath,
|
||||
featureId
|
||||
);
|
||||
const passes = await autoModeService.verifyFeature(projectPath, featureId);
|
||||
res.json({ success: true, passes });
|
||||
} catch (error) {
|
||||
logError(error, "Verify feature failed");
|
||||
logError(error, 'Verify feature failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
* with different enhancement modes (improve, expand, simplify, etc.)
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { createEnhanceHandler } from "./routes/enhance.js";
|
||||
import { Router } from 'express';
|
||||
import { createEnhanceHandler } from './routes/enhance.js';
|
||||
|
||||
/**
|
||||
* Create the enhance-prompt router
|
||||
@@ -16,7 +16,7 @@ import { createEnhanceHandler } from "./routes/enhance.js";
|
||||
export function createEnhancePromptRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.post("/", createEnhanceHandler());
|
||||
router.post('/', createEnhanceHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /agent-output endpoint - Get agent output for a feature
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { FeatureLoader } from "../../../services/feature-loader.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createAgentOutputHandler(featureLoader: FeatureLoader) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,22 +15,17 @@ export function createAgentOutputHandler(featureLoader: FeatureLoader) {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId are required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const content = await featureLoader.getAgentOutput(
|
||||
projectPath,
|
||||
featureId
|
||||
);
|
||||
const content = await featureLoader.getAgentOutput(projectPath, featureId);
|
||||
res.json({ success: true, content });
|
||||
} catch (error) {
|
||||
logError(error, "Get agent output failed");
|
||||
logError(error, 'Get agent output failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /delete endpoint - Delete a feature
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { FeatureLoader } from "../../../services/feature-loader.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createDeleteHandler(featureLoader: FeatureLoader) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,19 +15,17 @@ export function createDeleteHandler(featureLoader: FeatureLoader) {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId are required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const success = await featureLoader.delete(projectPath, featureId);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
logError(error, "Delete feature failed");
|
||||
logError(error, 'Delete feature failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /get endpoint - Get a single feature
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { FeatureLoader } from "../../../services/feature-loader.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createGetHandler(featureLoader: FeatureLoader) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,24 +15,22 @@ export function createGetHandler(featureLoader: FeatureLoader) {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId are required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const feature = await featureLoader.get(projectPath, featureId);
|
||||
if (!feature) {
|
||||
res.status(404).json({ success: false, error: "Feature not found" });
|
||||
res.status(404).json({ success: false, error: 'Feature not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true, feature });
|
||||
} catch (error) {
|
||||
logError(error, "Get feature failed");
|
||||
logError(error, 'Get feature failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -3,40 +3,40 @@
|
||||
* Provides REST API equivalents for Electron IPC file operations
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import type { EventEmitter } from "../../lib/events.js";
|
||||
import { createReadHandler } from "./routes/read.js";
|
||||
import { createWriteHandler } from "./routes/write.js";
|
||||
import { createMkdirHandler } from "./routes/mkdir.js";
|
||||
import { createReaddirHandler } from "./routes/readdir.js";
|
||||
import { createExistsHandler } from "./routes/exists.js";
|
||||
import { createStatHandler } from "./routes/stat.js";
|
||||
import { createDeleteHandler } from "./routes/delete.js";
|
||||
import { createValidatePathHandler } from "./routes/validate-path.js";
|
||||
import { createResolveDirectoryHandler } from "./routes/resolve-directory.js";
|
||||
import { createSaveImageHandler } from "./routes/save-image.js";
|
||||
import { createBrowseHandler } from "./routes/browse.js";
|
||||
import { createImageHandler } from "./routes/image.js";
|
||||
import { createSaveBoardBackgroundHandler } from "./routes/save-board-background.js";
|
||||
import { createDeleteBoardBackgroundHandler } from "./routes/delete-board-background.js";
|
||||
import { Router } from 'express';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createReadHandler } from './routes/read.js';
|
||||
import { createWriteHandler } from './routes/write.js';
|
||||
import { createMkdirHandler } from './routes/mkdir.js';
|
||||
import { createReaddirHandler } from './routes/readdir.js';
|
||||
import { createExistsHandler } from './routes/exists.js';
|
||||
import { createStatHandler } from './routes/stat.js';
|
||||
import { createDeleteHandler } from './routes/delete.js';
|
||||
import { createValidatePathHandler } from './routes/validate-path.js';
|
||||
import { createResolveDirectoryHandler } from './routes/resolve-directory.js';
|
||||
import { createSaveImageHandler } from './routes/save-image.js';
|
||||
import { createBrowseHandler } from './routes/browse.js';
|
||||
import { createImageHandler } from './routes/image.js';
|
||||
import { createSaveBoardBackgroundHandler } from './routes/save-board-background.js';
|
||||
import { createDeleteBoardBackgroundHandler } from './routes/delete-board-background.js';
|
||||
|
||||
export function createFsRoutes(_events: EventEmitter): Router {
|
||||
const router = Router();
|
||||
|
||||
router.post("/read", createReadHandler());
|
||||
router.post("/write", createWriteHandler());
|
||||
router.post("/mkdir", createMkdirHandler());
|
||||
router.post("/readdir", createReaddirHandler());
|
||||
router.post("/exists", createExistsHandler());
|
||||
router.post("/stat", createStatHandler());
|
||||
router.post("/delete", createDeleteHandler());
|
||||
router.post("/validate-path", createValidatePathHandler());
|
||||
router.post("/resolve-directory", createResolveDirectoryHandler());
|
||||
router.post("/save-image", createSaveImageHandler());
|
||||
router.post("/browse", createBrowseHandler());
|
||||
router.get("/image", createImageHandler());
|
||||
router.post("/save-board-background", createSaveBoardBackgroundHandler());
|
||||
router.post("/delete-board-background", createDeleteBoardBackgroundHandler());
|
||||
router.post('/read', createReadHandler());
|
||||
router.post('/write', createWriteHandler());
|
||||
router.post('/mkdir', createMkdirHandler());
|
||||
router.post('/readdir', createReaddirHandler());
|
||||
router.post('/exists', createExistsHandler());
|
||||
router.post('/stat', createStatHandler());
|
||||
router.post('/delete', createDeleteHandler());
|
||||
router.post('/validate-path', createValidatePathHandler());
|
||||
router.post('/resolve-directory', createResolveDirectoryHandler());
|
||||
router.post('/save-image', createSaveImageHandler());
|
||||
router.post('/browse', createBrowseHandler());
|
||||
router.get('/image', createImageHandler());
|
||||
router.post('/save-board-background', createSaveBoardBackgroundHandler());
|
||||
router.post('/delete-board-background', createDeleteBoardBackgroundHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /diffs endpoint - Get diffs for the main project
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import { getGitRepositoryDiffs } from "../../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { getGitRepositoryDiffs } from '../../common.js';
|
||||
|
||||
export function createDiffsHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -12,7 +12,7 @@ export function createDiffsHandler() {
|
||||
const { projectPath } = req.body as { projectPath: string };
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: "projectPath required" });
|
||||
res.status(400).json({ success: false, error: 'projectPath required' });
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -25,11 +25,11 @@ export function createDiffsHandler() {
|
||||
hasChanges: result.hasChanges,
|
||||
});
|
||||
} catch (innerError) {
|
||||
logError(innerError, "Git diff failed");
|
||||
res.json({ success: true, diff: "", files: [], hasChanges: false });
|
||||
logError(innerError, 'Git diff failed');
|
||||
res.json({ success: true, diff: '', files: [], hasChanges: false });
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Get diffs failed");
|
||||
logError(error, 'Get diffs failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
* POST /file-diff endpoint - Get diff for a specific file
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import { generateSyntheticDiffForNewFile } from "../../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
import { generateSyntheticDiffForNewFile } from '../../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -19,20 +19,17 @@ export function createFileDiffHandler() {
|
||||
};
|
||||
|
||||
if (!projectPath || !filePath) {
|
||||
res
|
||||
.status(400)
|
||||
.json({ success: false, error: "projectPath and filePath required" });
|
||||
res.status(400).json({ success: false, error: 'projectPath and filePath required' });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// First check if the file is untracked
|
||||
const { stdout: status } = await execAsync(
|
||||
`git status --porcelain -- "${filePath}"`,
|
||||
{ cwd: projectPath }
|
||||
);
|
||||
const { stdout: status } = await execAsync(`git status --porcelain -- "${filePath}"`, {
|
||||
cwd: projectPath,
|
||||
});
|
||||
|
||||
const isUntracked = status.trim().startsWith("??");
|
||||
const isUntracked = status.trim().startsWith('??');
|
||||
|
||||
let diff: string;
|
||||
if (isUntracked) {
|
||||
@@ -40,23 +37,20 @@ export function createFileDiffHandler() {
|
||||
diff = await generateSyntheticDiffForNewFile(projectPath, filePath);
|
||||
} else {
|
||||
// Use regular git diff for tracked files
|
||||
const result = await execAsync(
|
||||
`git diff HEAD -- "${filePath}"`,
|
||||
{
|
||||
cwd: projectPath,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
}
|
||||
);
|
||||
const result = await execAsync(`git diff HEAD -- "${filePath}"`, {
|
||||
cwd: projectPath,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
});
|
||||
diff = result.stdout;
|
||||
}
|
||||
|
||||
res.json({ success: true, diff, filePath });
|
||||
} catch (innerError) {
|
||||
logError(innerError, "Git file diff failed");
|
||||
res.json({ success: true, diff: "", filePath });
|
||||
logError(innerError, 'Git file diff failed');
|
||||
res.json({ success: true, diff: '', filePath });
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Get file diff failed");
|
||||
logError(error, 'Get file diff failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,15 +2,15 @@
|
||||
* Health check routes
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { createIndexHandler } from "./routes/index.js";
|
||||
import { createDetailedHandler } from "./routes/detailed.js";
|
||||
import { Router } from 'express';
|
||||
import { createIndexHandler } from './routes/index.js';
|
||||
import { createDetailedHandler } from './routes/detailed.js';
|
||||
|
||||
export function createHealthRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/", createIndexHandler());
|
||||
router.get("/detailed", createDetailedHandler());
|
||||
router.get('/', createIndexHandler());
|
||||
router.get('/detailed', createDetailedHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,18 +2,18 @@
|
||||
* GET /detailed endpoint - Detailed health check
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getAuthStatus } from "../../../lib/auth.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getAuthStatus } from '../../../lib/auth.js';
|
||||
|
||||
export function createDetailedHandler() {
|
||||
return (_req: Request, res: Response): void => {
|
||||
res.json({
|
||||
status: "ok",
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
version: process.env.npm_package_version || "0.1.0",
|
||||
version: process.env.npm_package_version || '0.1.0',
|
||||
uptime: process.uptime(),
|
||||
memory: process.memoryUsage(),
|
||||
dataDir: process.env.DATA_DIR || "./data",
|
||||
dataDir: process.env.DATA_DIR || './data',
|
||||
auth: getAuthStatus(),
|
||||
env: {
|
||||
nodeVersion: process.version,
|
||||
|
||||
@@ -2,14 +2,14 @@
|
||||
* GET / endpoint - Basic health check
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { Request, Response } from 'express';
|
||||
|
||||
export function createIndexHandler() {
|
||||
return (_req: Request, res: Response): void => {
|
||||
res.json({
|
||||
status: "ok",
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
version: process.env.npm_package_version || "0.1.0",
|
||||
version: process.env.npm_package_version || '0.1.0',
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,15 +2,15 @@
|
||||
* Models routes - HTTP API for model providers and availability
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { createAvailableHandler } from "./routes/available.js";
|
||||
import { createProvidersHandler } from "./routes/providers.js";
|
||||
import { Router } from 'express';
|
||||
import { createAvailableHandler } from './routes/available.js';
|
||||
import { createProvidersHandler } from './routes/providers.js';
|
||||
|
||||
export function createModelsRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/available", createAvailableHandler());
|
||||
router.get("/providers", createProvidersHandler());
|
||||
router.get('/available', createAvailableHandler());
|
||||
router.get('/providers', createProvidersHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* GET /available endpoint - Get available models
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
interface ModelDefinition {
|
||||
id: string;
|
||||
@@ -20,36 +20,36 @@ export function createAvailableHandler() {
|
||||
try {
|
||||
const models: ModelDefinition[] = [
|
||||
{
|
||||
id: "claude-opus-4-5-20251101",
|
||||
name: "Claude Opus 4.5",
|
||||
provider: "anthropic",
|
||||
id: 'claude-opus-4-5-20251101',
|
||||
name: 'Claude Opus 4.5',
|
||||
provider: 'anthropic',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 16384,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "claude-sonnet-4-20250514",
|
||||
name: "Claude Sonnet 4",
|
||||
provider: "anthropic",
|
||||
id: 'claude-sonnet-4-20250514',
|
||||
name: 'Claude Sonnet 4',
|
||||
provider: 'anthropic',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 16384,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "claude-3-5-sonnet-20241022",
|
||||
name: "Claude 3.5 Sonnet",
|
||||
provider: "anthropic",
|
||||
id: 'claude-3-5-sonnet-20241022',
|
||||
name: 'Claude 3.5 Sonnet',
|
||||
provider: 'anthropic',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 8192,
|
||||
supportsVision: true,
|
||||
supportsTools: true,
|
||||
},
|
||||
{
|
||||
id: "claude-3-5-haiku-20241022",
|
||||
name: "Claude 3.5 Haiku",
|
||||
provider: "anthropic",
|
||||
id: 'claude-3-5-haiku-20241022',
|
||||
name: 'Claude 3.5 Haiku',
|
||||
provider: 'anthropic',
|
||||
contextWindow: 200000,
|
||||
maxOutputTokens: 8192,
|
||||
supportsVision: true,
|
||||
@@ -59,7 +59,7 @@ export function createAvailableHandler() {
|
||||
|
||||
res.json({ success: true, models });
|
||||
} catch (error) {
|
||||
logError(error, "Get available models failed");
|
||||
logError(error, 'Get available models failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,16 +2,14 @@
|
||||
* Running Agents routes - HTTP API for tracking active agent executions
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import type { AutoModeService } from "../../services/auto-mode-service.js";
|
||||
import { createIndexHandler } from "./routes/index.js";
|
||||
import { Router } from 'express';
|
||||
import type { AutoModeService } from '../../services/auto-mode-service.js';
|
||||
import { createIndexHandler } from './routes/index.js';
|
||||
|
||||
export function createRunningAgentsRoutes(
|
||||
autoModeService: AutoModeService
|
||||
): Router {
|
||||
export function createRunningAgentsRoutes(autoModeService: AutoModeService): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/", createIndexHandler(autoModeService));
|
||||
router.get('/', createIndexHandler(autoModeService));
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* GET / endpoint - Get all running agents
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { AutoModeService } from "../../../services/auto-mode-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import type { AutoModeService } from '../../../services/auto-mode-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createIndexHandler(autoModeService: AutoModeService) {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -18,7 +18,7 @@ export function createIndexHandler(autoModeService: AutoModeService) {
|
||||
totalCount: runningAgents.length,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get running agents failed");
|
||||
logError(error, 'Get running agents failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,24 +2,24 @@
|
||||
* Sessions routes - HTTP API for session management
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { AgentService } from "../../services/agent-service.js";
|
||||
import { createIndexHandler } from "./routes/index.js";
|
||||
import { createCreateHandler } from "./routes/create.js";
|
||||
import { createUpdateHandler } from "./routes/update.js";
|
||||
import { createArchiveHandler } from "./routes/archive.js";
|
||||
import { createUnarchiveHandler } from "./routes/unarchive.js";
|
||||
import { createDeleteHandler } from "./routes/delete.js";
|
||||
import { Router } from 'express';
|
||||
import { AgentService } from '../../services/agent-service.js';
|
||||
import { createIndexHandler } from './routes/index.js';
|
||||
import { createCreateHandler } from './routes/create.js';
|
||||
import { createUpdateHandler } from './routes/update.js';
|
||||
import { createArchiveHandler } from './routes/archive.js';
|
||||
import { createUnarchiveHandler } from './routes/unarchive.js';
|
||||
import { createDeleteHandler } from './routes/delete.js';
|
||||
|
||||
export function createSessionsRoutes(agentService: AgentService): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/", createIndexHandler(agentService));
|
||||
router.post("/", createCreateHandler(agentService));
|
||||
router.put("/:sessionId", createUpdateHandler(agentService));
|
||||
router.post("/:sessionId/archive", createArchiveHandler(agentService));
|
||||
router.post("/:sessionId/unarchive", createUnarchiveHandler(agentService));
|
||||
router.delete("/:sessionId", createDeleteHandler(agentService));
|
||||
router.get('/', createIndexHandler(agentService));
|
||||
router.post('/', createCreateHandler(agentService));
|
||||
router.put('/:sessionId', createUpdateHandler(agentService));
|
||||
router.post('/:sessionId/archive', createArchiveHandler(agentService));
|
||||
router.post('/:sessionId/unarchive', createUnarchiveHandler(agentService));
|
||||
router.delete('/:sessionId', createDeleteHandler(agentService));
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /:sessionId/archive endpoint - Archive a session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createArchiveHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -13,13 +13,13 @@ export function createArchiveHandler(agentService: AgentService) {
|
||||
const success = await agentService.archiveSession(sessionId);
|
||||
|
||||
if (!success) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
res.status(404).json({ success: false, error: 'Session not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, "Archive session failed");
|
||||
logError(error, 'Archive session failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST / endpoint - Create a new session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createCreateHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -17,19 +17,14 @@ export function createCreateHandler(agentService: AgentService) {
|
||||
};
|
||||
|
||||
if (!name) {
|
||||
res.status(400).json({ success: false, error: "name is required" });
|
||||
res.status(400).json({ success: false, error: 'name is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const session = await agentService.createSession(
|
||||
name,
|
||||
projectPath,
|
||||
workingDirectory,
|
||||
model
|
||||
);
|
||||
const session = await agentService.createSession(name, projectPath, workingDirectory, model);
|
||||
res.json({ success: true, session });
|
||||
} catch (error) {
|
||||
logError(error, "Create session failed");
|
||||
logError(error, 'Create session failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* DELETE /:sessionId endpoint - Delete a session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createDeleteHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -13,13 +13,13 @@ export function createDeleteHandler(agentService: AgentService) {
|
||||
const success = await agentService.deleteSession(sessionId);
|
||||
|
||||
if (!success) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
res.status(404).json({ success: false, error: 'Session not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, "Delete session failed");
|
||||
logError(error, 'Delete session failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,14 +2,14 @@
|
||||
* GET / endpoint - List all sessions
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createIndexHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const includeArchived = req.query.includeArchived === "true";
|
||||
const includeArchived = req.query.includeArchived === 'true';
|
||||
const sessionsRaw = await agentService.listSessions(includeArchived);
|
||||
|
||||
// Transform to match frontend SessionListItem interface
|
||||
@@ -17,7 +17,7 @@ export function createIndexHandler(agentService: AgentService) {
|
||||
sessionsRaw.map(async (s) => {
|
||||
const messages = await agentService.loadSession(s.id);
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
const preview = lastMessage?.content?.slice(0, 100) || "";
|
||||
const preview = lastMessage?.content?.slice(0, 100) || '';
|
||||
|
||||
return {
|
||||
id: s.id,
|
||||
@@ -36,7 +36,7 @@ export function createIndexHandler(agentService: AgentService) {
|
||||
|
||||
res.json({ success: true, sessions });
|
||||
} catch (error) {
|
||||
logError(error, "List sessions failed");
|
||||
logError(error, 'List sessions failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* POST /:sessionId/unarchive endpoint - Unarchive a session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createUnarchiveHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -13,13 +13,13 @@ export function createUnarchiveHandler(agentService: AgentService) {
|
||||
const success = await agentService.unarchiveSession(sessionId);
|
||||
|
||||
if (!success) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
res.status(404).json({ success: false, error: 'Session not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, "Unarchive session failed");
|
||||
logError(error, 'Unarchive session failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* PUT /:sessionId endpoint - Update a session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { AgentService } from "../../../services/agent-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { AgentService } from '../../../services/agent-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createUpdateHandler(agentService: AgentService) {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -22,13 +22,13 @@ export function createUpdateHandler(agentService: AgentService) {
|
||||
model,
|
||||
});
|
||||
if (!session) {
|
||||
res.status(404).json({ success: false, error: "Session not found" });
|
||||
res.status(404).json({ success: false, error: 'Session not found' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ success: true, session });
|
||||
} catch (error) {
|
||||
logError(error, "Update session failed");
|
||||
logError(error, 'Update session failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,36 +2,36 @@
|
||||
* Business logic for getting Claude CLI status
|
||||
*/
|
||||
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import { getApiKey } from "./common.js";
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import fs from 'fs/promises';
|
||||
import { getApiKey } from './common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
export async function getClaudeStatus() {
|
||||
let installed = false;
|
||||
let version = "";
|
||||
let cliPath = "";
|
||||
let method = "none";
|
||||
let version = '';
|
||||
let cliPath = '';
|
||||
let method = 'none';
|
||||
|
||||
const isWindows = process.platform === "win32";
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
// Try to find Claude CLI using platform-specific command
|
||||
try {
|
||||
// Use 'where' on Windows, 'which' on Unix-like systems
|
||||
const findCommand = isWindows ? "where claude" : "which claude";
|
||||
const findCommand = isWindows ? 'where claude' : 'which claude';
|
||||
const { stdout } = await execAsync(findCommand);
|
||||
// 'where' on Windows can return multiple paths - take the first one
|
||||
cliPath = stdout.trim().split(/\r?\n/)[0];
|
||||
installed = true;
|
||||
method = "path";
|
||||
method = 'path';
|
||||
|
||||
// Get version
|
||||
try {
|
||||
const { stdout: versionOut } = await execAsync("claude --version");
|
||||
const { stdout: versionOut } = await execAsync('claude --version');
|
||||
version = versionOut.trim();
|
||||
} catch {
|
||||
// Version command might not be available
|
||||
@@ -40,22 +40,22 @@ export async function getClaudeStatus() {
|
||||
// Not in PATH, try common locations based on platform
|
||||
const commonPaths = isWindows
|
||||
? (() => {
|
||||
const appData = process.env.APPDATA || path.join(os.homedir(), "AppData", "Roaming");
|
||||
const appData = process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming');
|
||||
return [
|
||||
// Windows-specific paths
|
||||
path.join(os.homedir(), ".local", "bin", "claude.exe"),
|
||||
path.join(appData, "npm", "claude.cmd"),
|
||||
path.join(appData, "npm", "claude"),
|
||||
path.join(appData, ".npm-global", "bin", "claude.cmd"),
|
||||
path.join(appData, ".npm-global", "bin", "claude"),
|
||||
path.join(os.homedir(), '.local', 'bin', 'claude.exe'),
|
||||
path.join(appData, 'npm', 'claude.cmd'),
|
||||
path.join(appData, 'npm', 'claude'),
|
||||
path.join(appData, '.npm-global', 'bin', 'claude.cmd'),
|
||||
path.join(appData, '.npm-global', 'bin', 'claude'),
|
||||
];
|
||||
})()
|
||||
: [
|
||||
// Unix (Linux/macOS) paths
|
||||
path.join(os.homedir(), ".local", "bin", "claude"),
|
||||
path.join(os.homedir(), ".claude", "local", "claude"),
|
||||
"/usr/local/bin/claude",
|
||||
path.join(os.homedir(), ".npm-global", "bin", "claude"),
|
||||
path.join(os.homedir(), '.local', 'bin', 'claude'),
|
||||
path.join(os.homedir(), '.claude', 'local', 'claude'),
|
||||
'/usr/local/bin/claude',
|
||||
path.join(os.homedir(), '.npm-global', 'bin', 'claude'),
|
||||
];
|
||||
|
||||
for (const p of commonPaths) {
|
||||
@@ -63,7 +63,7 @@ export async function getClaudeStatus() {
|
||||
await fs.access(p);
|
||||
cliPath = p;
|
||||
installed = true;
|
||||
method = "local";
|
||||
method = 'local';
|
||||
|
||||
// Get version from this path
|
||||
try {
|
||||
@@ -84,11 +84,11 @@ export async function getClaudeStatus() {
|
||||
// apiKeys.anthropic stores direct API keys for pay-per-use
|
||||
let auth = {
|
||||
authenticated: false,
|
||||
method: "none" as string,
|
||||
method: 'none' as string,
|
||||
hasCredentialsFile: false,
|
||||
hasToken: false,
|
||||
hasStoredOAuthToken: !!getApiKey("anthropic_oauth_token"),
|
||||
hasStoredApiKey: !!getApiKey("anthropic"),
|
||||
hasStoredOAuthToken: !!getApiKey('anthropic_oauth_token'),
|
||||
hasStoredApiKey: !!getApiKey('anthropic'),
|
||||
hasEnvApiKey: !!process.env.ANTHROPIC_API_KEY,
|
||||
// Additional fields for detailed status
|
||||
oauthTokenValid: false,
|
||||
@@ -97,13 +97,13 @@ export async function getClaudeStatus() {
|
||||
hasRecentActivity: false,
|
||||
};
|
||||
|
||||
const claudeDir = path.join(os.homedir(), ".claude");
|
||||
const claudeDir = path.join(os.homedir(), '.claude');
|
||||
|
||||
// Check for recent Claude CLI activity - indicates working authentication
|
||||
// The stats-cache.json file is only populated when the CLI is working properly
|
||||
const statsCachePath = path.join(claudeDir, "stats-cache.json");
|
||||
const statsCachePath = path.join(claudeDir, 'stats-cache.json');
|
||||
try {
|
||||
const statsContent = await fs.readFile(statsCachePath, "utf-8");
|
||||
const statsContent = await fs.readFile(statsCachePath, 'utf-8');
|
||||
const stats = JSON.parse(statsContent);
|
||||
|
||||
// Check if there's any activity (which means the CLI is authenticated and working)
|
||||
@@ -111,26 +111,26 @@ export async function getClaudeStatus() {
|
||||
auth.hasRecentActivity = true;
|
||||
auth.hasCliAuth = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "cli_authenticated";
|
||||
auth.method = 'cli_authenticated';
|
||||
}
|
||||
} catch {
|
||||
// Stats file doesn't exist or is invalid
|
||||
}
|
||||
|
||||
// Check for settings.json - indicates CLI has been set up
|
||||
const settingsPath = path.join(claudeDir, "settings.json");
|
||||
const settingsPath = path.join(claudeDir, 'settings.json');
|
||||
try {
|
||||
await fs.access(settingsPath);
|
||||
// If settings exist but no activity, CLI might be set up but not authenticated
|
||||
if (!auth.hasCliAuth) {
|
||||
// Try to check for other indicators of auth
|
||||
const sessionsDir = path.join(claudeDir, "projects");
|
||||
const sessionsDir = path.join(claudeDir, 'projects');
|
||||
try {
|
||||
const sessions = await fs.readdir(sessionsDir);
|
||||
if (sessions.length > 0) {
|
||||
auth.hasCliAuth = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "cli_authenticated";
|
||||
auth.method = 'cli_authenticated';
|
||||
}
|
||||
} catch {
|
||||
// Sessions directory doesn't exist
|
||||
@@ -143,13 +143,13 @@ export async function getClaudeStatus() {
|
||||
// Check for credentials file (OAuth tokens from claude login)
|
||||
// Note: Claude CLI may use ".credentials.json" (hidden) or "credentials.json" depending on version/platform
|
||||
const credentialsPaths = [
|
||||
path.join(claudeDir, ".credentials.json"),
|
||||
path.join(claudeDir, "credentials.json"),
|
||||
path.join(claudeDir, '.credentials.json'),
|
||||
path.join(claudeDir, 'credentials.json'),
|
||||
];
|
||||
|
||||
for (const credentialsPath of credentialsPaths) {
|
||||
try {
|
||||
const credentialsContent = await fs.readFile(credentialsPath, "utf-8");
|
||||
const credentialsContent = await fs.readFile(credentialsPath, 'utf-8');
|
||||
const credentials = JSON.parse(credentialsContent);
|
||||
auth.hasCredentialsFile = true;
|
||||
|
||||
@@ -158,11 +158,11 @@ export async function getClaudeStatus() {
|
||||
auth.hasStoredOAuthToken = true;
|
||||
auth.oauthTokenValid = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "oauth_token"; // Stored OAuth token from credentials file
|
||||
auth.method = 'oauth_token'; // Stored OAuth token from credentials file
|
||||
} else if (credentials.api_key) {
|
||||
auth.apiKeyValid = true;
|
||||
auth.authenticated = true;
|
||||
auth.method = "api_key"; // Stored API key in credentials file
|
||||
auth.method = 'api_key'; // Stored API key in credentials file
|
||||
}
|
||||
break; // Found and processed credentials file
|
||||
} catch {
|
||||
@@ -174,25 +174,25 @@ export async function getClaudeStatus() {
|
||||
if (auth.hasEnvApiKey) {
|
||||
auth.authenticated = true;
|
||||
auth.apiKeyValid = true;
|
||||
auth.method = "api_key_env"; // API key from ANTHROPIC_API_KEY env var
|
||||
auth.method = 'api_key_env'; // API key from ANTHROPIC_API_KEY env var
|
||||
}
|
||||
|
||||
// In-memory stored OAuth token (from setup wizard - subscription auth)
|
||||
if (!auth.authenticated && getApiKey("anthropic_oauth_token")) {
|
||||
if (!auth.authenticated && getApiKey('anthropic_oauth_token')) {
|
||||
auth.authenticated = true;
|
||||
auth.oauthTokenValid = true;
|
||||
auth.method = "oauth_token"; // Stored OAuth token from setup wizard
|
||||
auth.method = 'oauth_token'; // Stored OAuth token from setup wizard
|
||||
}
|
||||
|
||||
// In-memory stored API key (from settings UI - pay-per-use)
|
||||
if (!auth.authenticated && getApiKey("anthropic")) {
|
||||
if (!auth.authenticated && getApiKey('anthropic')) {
|
||||
auth.authenticated = true;
|
||||
auth.apiKeyValid = true;
|
||||
auth.method = "api_key"; // Manually stored API key
|
||||
auth.method = 'api_key'; // Manually stored API key
|
||||
}
|
||||
|
||||
return {
|
||||
status: installed ? "installed" : "not_installed",
|
||||
status: installed ? 'installed' : 'not_installed',
|
||||
installed,
|
||||
method,
|
||||
version,
|
||||
|
||||
@@ -2,29 +2,29 @@
|
||||
* Setup routes - HTTP API for CLI detection, API keys, and platform info
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { createClaudeStatusHandler } from "./routes/claude-status.js";
|
||||
import { createInstallClaudeHandler } from "./routes/install-claude.js";
|
||||
import { createAuthClaudeHandler } from "./routes/auth-claude.js";
|
||||
import { createStoreApiKeyHandler } from "./routes/store-api-key.js";
|
||||
import { createDeleteApiKeyHandler } from "./routes/delete-api-key.js";
|
||||
import { createApiKeysHandler } from "./routes/api-keys.js";
|
||||
import { createPlatformHandler } from "./routes/platform.js";
|
||||
import { createVerifyClaudeAuthHandler } from "./routes/verify-claude-auth.js";
|
||||
import { createGhStatusHandler } from "./routes/gh-status.js";
|
||||
import { Router } from 'express';
|
||||
import { createClaudeStatusHandler } from './routes/claude-status.js';
|
||||
import { createInstallClaudeHandler } from './routes/install-claude.js';
|
||||
import { createAuthClaudeHandler } from './routes/auth-claude.js';
|
||||
import { createStoreApiKeyHandler } from './routes/store-api-key.js';
|
||||
import { createDeleteApiKeyHandler } from './routes/delete-api-key.js';
|
||||
import { createApiKeysHandler } from './routes/api-keys.js';
|
||||
import { createPlatformHandler } from './routes/platform.js';
|
||||
import { createVerifyClaudeAuthHandler } from './routes/verify-claude-auth.js';
|
||||
import { createGhStatusHandler } from './routes/gh-status.js';
|
||||
|
||||
export function createSetupRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/claude-status", createClaudeStatusHandler());
|
||||
router.post("/install-claude", createInstallClaudeHandler());
|
||||
router.post("/auth-claude", createAuthClaudeHandler());
|
||||
router.post("/store-api-key", createStoreApiKeyHandler());
|
||||
router.post("/delete-api-key", createDeleteApiKeyHandler());
|
||||
router.get("/api-keys", createApiKeysHandler());
|
||||
router.get("/platform", createPlatformHandler());
|
||||
router.post("/verify-claude-auth", createVerifyClaudeAuthHandler());
|
||||
router.get("/gh-status", createGhStatusHandler());
|
||||
router.get('/claude-status', createClaudeStatusHandler());
|
||||
router.post('/install-claude', createInstallClaudeHandler());
|
||||
router.post('/auth-claude', createAuthClaudeHandler());
|
||||
router.post('/store-api-key', createStoreApiKeyHandler());
|
||||
router.post('/delete-api-key', createDeleteApiKeyHandler());
|
||||
router.get('/api-keys', createApiKeysHandler());
|
||||
router.get('/platform', createPlatformHandler());
|
||||
router.post('/verify-claude-auth', createVerifyClaudeAuthHandler());
|
||||
router.get('/gh-status', createGhStatusHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* POST /auth-claude endpoint - Auth Claude
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createAuthClaudeHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -11,11 +11,11 @@ export function createAuthClaudeHandler() {
|
||||
res.json({
|
||||
success: true,
|
||||
requiresManualAuth: true,
|
||||
command: "claude login",
|
||||
command: 'claude login',
|
||||
message: "Please run 'claude login' in your terminal to authenticate",
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Auth Claude failed");
|
||||
logError(error, 'Auth Claude failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* GET /claude-status endpoint - Get Claude CLI status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getClaudeStatus } from "../get-claude-status.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getClaudeStatus } from '../get-claude-status.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createClaudeStatusHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,7 +15,7 @@ export function createClaudeStatusHandler() {
|
||||
...status,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get Claude status failed");
|
||||
logError(error, 'Get Claude status failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,24 +2,26 @@
|
||||
* GET /gh-status endpoint - Get GitHub CLI status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import fs from "fs/promises";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import fs from 'fs/promises';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
// Extended PATH to include common tool installation locations
|
||||
const extendedPath = [
|
||||
process.env.PATH,
|
||||
"/opt/homebrew/bin",
|
||||
"/usr/local/bin",
|
||||
"/home/linuxbrew/.linuxbrew/bin",
|
||||
'/opt/homebrew/bin',
|
||||
'/usr/local/bin',
|
||||
'/home/linuxbrew/.linuxbrew/bin',
|
||||
`${process.env.HOME}/.local/bin`,
|
||||
].filter(Boolean).join(":");
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(':');
|
||||
|
||||
const execEnv = {
|
||||
...process.env,
|
||||
@@ -44,11 +46,11 @@ async function getGhStatus(): Promise<GhStatus> {
|
||||
user: null,
|
||||
};
|
||||
|
||||
const isWindows = process.platform === "win32";
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
// Check if gh CLI is installed
|
||||
try {
|
||||
const findCommand = isWindows ? "where gh" : "command -v gh";
|
||||
const findCommand = isWindows ? 'where gh' : 'command -v gh';
|
||||
const { stdout } = await execAsync(findCommand, { env: execEnv });
|
||||
status.path = stdout.trim().split(/\r?\n/)[0];
|
||||
status.installed = true;
|
||||
@@ -56,14 +58,14 @@ async function getGhStatus(): Promise<GhStatus> {
|
||||
// gh not in PATH, try common locations
|
||||
const commonPaths = isWindows
|
||||
? [
|
||||
path.join(process.env.LOCALAPPDATA || "", "Programs", "gh", "bin", "gh.exe"),
|
||||
path.join(process.env.ProgramFiles || "", "GitHub CLI", "gh.exe"),
|
||||
path.join(process.env.LOCALAPPDATA || '', 'Programs', 'gh', 'bin', 'gh.exe'),
|
||||
path.join(process.env.ProgramFiles || '', 'GitHub CLI', 'gh.exe'),
|
||||
]
|
||||
: [
|
||||
"/opt/homebrew/bin/gh",
|
||||
"/usr/local/bin/gh",
|
||||
path.join(os.homedir(), ".local", "bin", "gh"),
|
||||
"/home/linuxbrew/.linuxbrew/bin/gh",
|
||||
'/opt/homebrew/bin/gh',
|
||||
'/usr/local/bin/gh',
|
||||
path.join(os.homedir(), '.local', 'bin', 'gh'),
|
||||
'/home/linuxbrew/.linuxbrew/bin/gh',
|
||||
];
|
||||
|
||||
for (const p of commonPaths) {
|
||||
@@ -84,30 +86,31 @@ async function getGhStatus(): Promise<GhStatus> {
|
||||
|
||||
// Get version
|
||||
try {
|
||||
const { stdout } = await execAsync("gh --version", { env: execEnv });
|
||||
const { stdout } = await execAsync('gh --version', { env: execEnv });
|
||||
// Extract version from output like "gh version 2.40.1 (2024-01-09)"
|
||||
const versionMatch = stdout.match(/gh version ([\d.]+)/);
|
||||
status.version = versionMatch ? versionMatch[1] : stdout.trim().split("\n")[0];
|
||||
status.version = versionMatch ? versionMatch[1] : stdout.trim().split('\n')[0];
|
||||
} catch {
|
||||
// Version command failed
|
||||
}
|
||||
|
||||
// Check authentication status
|
||||
try {
|
||||
const { stdout } = await execAsync("gh auth status", { env: execEnv });
|
||||
const { stdout } = await execAsync('gh auth status', { env: execEnv });
|
||||
// If this succeeds without error, we're authenticated
|
||||
status.authenticated = true;
|
||||
|
||||
// Try to extract username from output
|
||||
const userMatch = stdout.match(/Logged in to [^\s]+ account ([^\s]+)/i) ||
|
||||
stdout.match(/Logged in to [^\s]+ as ([^\s]+)/i);
|
||||
const userMatch =
|
||||
stdout.match(/Logged in to [^\s]+ account ([^\s]+)/i) ||
|
||||
stdout.match(/Logged in to [^\s]+ as ([^\s]+)/i);
|
||||
if (userMatch) {
|
||||
status.user = userMatch[1];
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
// Auth status returns non-zero if not authenticated
|
||||
const err = error as { stderr?: string };
|
||||
if (err.stderr?.includes("not logged in")) {
|
||||
if (err.stderr?.includes('not logged in')) {
|
||||
status.authenticated = false;
|
||||
}
|
||||
}
|
||||
@@ -124,7 +127,7 @@ export function createGhStatusHandler() {
|
||||
...status,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get GitHub CLI status failed");
|
||||
logError(error, 'Get GitHub CLI status failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* POST /install-claude endpoint - Install Claude CLI
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createInstallClaudeHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -13,10 +13,10 @@ export function createInstallClaudeHandler() {
|
||||
res.json({
|
||||
success: false,
|
||||
error:
|
||||
"CLI installation requires terminal access. Please install manually using: npm install -g @anthropic-ai/claude-code",
|
||||
'CLI installation requires terminal access. Please install manually using: npm install -g @anthropic-ai/claude-code',
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Install Claude CLI failed");
|
||||
logError(error, 'Install Claude CLI failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* GET /platform endpoint - Get platform info
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import os from "os";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import os from 'os';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createPlatformHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -15,12 +15,12 @@ export function createPlatformHandler() {
|
||||
platform,
|
||||
arch: os.arch(),
|
||||
homeDir: os.homedir(),
|
||||
isWindows: platform === "win32",
|
||||
isMac: platform === "darwin",
|
||||
isLinux: platform === "linux",
|
||||
isWindows: platform === 'win32',
|
||||
isMac: platform === 'darwin',
|
||||
isLinux: platform === 'linux',
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get platform info failed");
|
||||
logError(error, 'Get platform info failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* GET /status endpoint - Get status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getSuggestionsStatus, getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getSuggestionsStatus, getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStatusHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -11,7 +11,7 @@ export function createStatusHandler() {
|
||||
const { isRunning } = getSuggestionsStatus();
|
||||
res.json({ success: true, isRunning });
|
||||
} catch (error) {
|
||||
logError(error, "Get status failed");
|
||||
logError(error, 'Get status failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,13 +2,8 @@
|
||||
* POST /stop endpoint - Stop suggestions generation
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import {
|
||||
getSuggestionsStatus,
|
||||
setRunningState,
|
||||
getErrorMessage,
|
||||
logError,
|
||||
} from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStopHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -20,7 +15,7 @@ export function createStopHandler() {
|
||||
setRunningState(false, null);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
logError(error, "Stop suggestions failed");
|
||||
logError(error, 'Stop suggestions failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -3,13 +3,13 @@
|
||||
* Provides API for cloning GitHub starter templates
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { createCloneHandler } from "./routes/clone.js";
|
||||
import { Router } from 'express';
|
||||
import { createCloneHandler } from './routes/clone.js';
|
||||
|
||||
export function createTemplatesRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.post("/clone", createCloneHandler());
|
||||
router.post('/clone', createCloneHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -5,26 +5,20 @@
|
||||
* WebSocket connections for real-time I/O are handled separately in index.ts.
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { Router } from 'express';
|
||||
import {
|
||||
terminalAuthMiddleware,
|
||||
validateTerminalToken,
|
||||
isTerminalEnabled,
|
||||
isTerminalPasswordRequired,
|
||||
} from "./common.js";
|
||||
import { createStatusHandler } from "./routes/status.js";
|
||||
import { createAuthHandler } from "./routes/auth.js";
|
||||
import { createLogoutHandler } from "./routes/logout.js";
|
||||
import {
|
||||
createSessionsListHandler,
|
||||
createSessionsCreateHandler,
|
||||
} from "./routes/sessions.js";
|
||||
import { createSessionDeleteHandler } from "./routes/session-delete.js";
|
||||
import { createSessionResizeHandler } from "./routes/session-resize.js";
|
||||
import {
|
||||
createSettingsGetHandler,
|
||||
createSettingsUpdateHandler,
|
||||
} from "./routes/settings.js";
|
||||
} from './common.js';
|
||||
import { createStatusHandler } from './routes/status.js';
|
||||
import { createAuthHandler } from './routes/auth.js';
|
||||
import { createLogoutHandler } from './routes/logout.js';
|
||||
import { createSessionsListHandler, createSessionsCreateHandler } from './routes/sessions.js';
|
||||
import { createSessionDeleteHandler } from './routes/session-delete.js';
|
||||
import { createSessionResizeHandler } from './routes/session-resize.js';
|
||||
import { createSettingsGetHandler, createSettingsUpdateHandler } from './routes/settings.js';
|
||||
|
||||
// Re-export for use in main index.ts
|
||||
export { validateTerminalToken, isTerminalEnabled, isTerminalPasswordRequired };
|
||||
@@ -32,19 +26,19 @@ export { validateTerminalToken, isTerminalEnabled, isTerminalPasswordRequired };
|
||||
export function createTerminalRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/status", createStatusHandler());
|
||||
router.post("/auth", createAuthHandler());
|
||||
router.post("/logout", createLogoutHandler());
|
||||
router.get('/status', createStatusHandler());
|
||||
router.post('/auth', createAuthHandler());
|
||||
router.post('/logout', createLogoutHandler());
|
||||
|
||||
// Apply terminal auth middleware to all routes below
|
||||
router.use(terminalAuthMiddleware);
|
||||
|
||||
router.get("/sessions", createSessionsListHandler());
|
||||
router.post("/sessions", createSessionsCreateHandler());
|
||||
router.delete("/sessions/:id", createSessionDeleteHandler());
|
||||
router.post("/sessions/:id/resize", createSessionResizeHandler());
|
||||
router.get("/settings", createSettingsGetHandler());
|
||||
router.put("/settings", createSettingsUpdateHandler());
|
||||
router.get('/sessions', createSessionsListHandler());
|
||||
router.post('/sessions', createSessionsCreateHandler());
|
||||
router.delete('/sessions/:id', createSessionDeleteHandler());
|
||||
router.post('/sessions/:id/resize', createSessionResizeHandler());
|
||||
router.get('/settings', createSettingsGetHandler());
|
||||
router.put('/settings', createSettingsUpdateHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
* POST /auth endpoint - Authenticate with password to get a session token
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { Request, Response } from 'express';
|
||||
import {
|
||||
getTerminalEnabledConfigValue,
|
||||
getTerminalPasswordConfig,
|
||||
@@ -10,14 +10,14 @@ import {
|
||||
addToken,
|
||||
getTokenExpiryMs,
|
||||
getErrorMessage,
|
||||
} from "../common.js";
|
||||
} from '../common.js';
|
||||
|
||||
export function createAuthHandler() {
|
||||
return (req: Request, res: Response): void => {
|
||||
if (!getTerminalEnabledConfigValue()) {
|
||||
res.status(403).json({
|
||||
success: false,
|
||||
error: "Terminal access is disabled",
|
||||
error: 'Terminal access is disabled',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -41,7 +41,7 @@ export function createAuthHandler() {
|
||||
if (!password || password !== terminalPassword) {
|
||||
res.status(401).json({
|
||||
success: false,
|
||||
error: "Invalid password",
|
||||
error: 'Invalid password',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
* POST /logout endpoint - Invalidate a session token
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { deleteToken } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { deleteToken } from '../common.js';
|
||||
|
||||
export function createLogoutHandler() {
|
||||
return (req: Request, res: Response): void => {
|
||||
const token = (req.headers["x-terminal-token"] as string) || req.body.token;
|
||||
const token = (req.headers['x-terminal-token'] as string) || req.body.token;
|
||||
|
||||
if (token) {
|
||||
deleteToken(token);
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* DELETE /sessions/:id endpoint - Kill a terminal session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getTerminalService } from "../../../services/terminal-service.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getTerminalService } from '../../../services/terminal-service.js';
|
||||
|
||||
export function createSessionDeleteHandler() {
|
||||
return (req: Request, res: Response): void => {
|
||||
@@ -14,7 +14,7 @@ export function createSessionDeleteHandler() {
|
||||
if (!killed) {
|
||||
res.status(404).json({
|
||||
success: false,
|
||||
error: "Session not found",
|
||||
error: 'Session not found',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* POST /sessions/:id/resize endpoint - Resize a terminal session
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getTerminalService } from "../../../services/terminal-service.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getTerminalService } from '../../../services/terminal-service.js';
|
||||
|
||||
export function createSessionResizeHandler() {
|
||||
return (req: Request, res: Response): void => {
|
||||
@@ -14,7 +14,7 @@ export function createSessionResizeHandler() {
|
||||
if (!cols || !rows) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "cols and rows are required",
|
||||
error: 'cols and rows are required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -24,7 +24,7 @@ export function createSessionResizeHandler() {
|
||||
if (!resized) {
|
||||
res.status(404).json({
|
||||
success: false,
|
||||
error: "Session not found",
|
||||
error: 'Session not found',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,13 @@
|
||||
* GET/PUT /settings endpoint - Get/Update terminal settings
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getTerminalService, MIN_MAX_SESSIONS, MAX_MAX_SESSIONS } from "../../../services/terminal-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import {
|
||||
getTerminalService,
|
||||
MIN_MAX_SESSIONS,
|
||||
MAX_MAX_SESSIONS,
|
||||
} from '../../../services/terminal-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createSettingsGetHandler() {
|
||||
return (_req: Request, res: Response): void => {
|
||||
@@ -18,10 +22,10 @@ export function createSettingsGetHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get terminal settings failed");
|
||||
logError(error, 'Get terminal settings failed');
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: "Failed to get terminal settings",
|
||||
error: 'Failed to get terminal settings',
|
||||
details: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
@@ -36,17 +40,17 @@ export function createSettingsUpdateHandler() {
|
||||
|
||||
// Validate maxSessions if provided
|
||||
if (maxSessions !== undefined) {
|
||||
if (typeof maxSessions !== "number") {
|
||||
if (typeof maxSessions !== 'number') {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "maxSessions must be a number",
|
||||
error: 'maxSessions must be a number',
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (!Number.isInteger(maxSessions)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "maxSessions must be an integer",
|
||||
error: 'maxSessions must be an integer',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -68,10 +72,10 @@ export function createSettingsUpdateHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Update terminal settings failed");
|
||||
logError(error, 'Update terminal settings failed');
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: "Failed to update terminal settings",
|
||||
error: 'Failed to update terminal settings',
|
||||
details: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,12 +2,9 @@
|
||||
* GET /status endpoint - Get terminal status
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getTerminalService } from "../../../services/terminal-service.js";
|
||||
import {
|
||||
getTerminalEnabledConfigValue,
|
||||
isTerminalPasswordRequired,
|
||||
} from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getTerminalService } from '../../../services/terminal-service.js';
|
||||
import { getTerminalEnabledConfigValue, isTerminalPasswordRequired } from '../common.js';
|
||||
|
||||
export function createStatusHandler() {
|
||||
return (_req: Request, res: Response): void => {
|
||||
|
||||
@@ -3,15 +3,15 @@
|
||||
* Provides API endpoints for workspace directory management
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import { createConfigHandler } from "./routes/config.js";
|
||||
import { createDirectoriesHandler } from "./routes/directories.js";
|
||||
import { Router } from 'express';
|
||||
import { createConfigHandler } from './routes/config.js';
|
||||
import { createDirectoriesHandler } from './routes/directories.js';
|
||||
|
||||
export function createWorkspaceRoutes(): Router {
|
||||
const router = Router();
|
||||
|
||||
router.get("/config", createConfigHandler());
|
||||
router.get("/directories", createDirectoriesHandler());
|
||||
router.get('/config', createConfigHandler());
|
||||
router.get('/directories', createDirectoriesHandler());
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
* POST /checkout-branch endpoint - Create and checkout a new branch
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -20,7 +20,7 @@ export function createCheckoutBranchHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath required",
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -28,7 +28,7 @@ export function createCheckoutBranchHandler() {
|
||||
if (!branchName) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "branchName required",
|
||||
error: 'branchName required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -38,16 +38,15 @@ export function createCheckoutBranchHandler() {
|
||||
if (invalidChars.test(branchName)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "Branch name contains invalid characters",
|
||||
error: 'Branch name contains invalid characters',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Get current branch for reference
|
||||
const { stdout: currentBranchOutput } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const currentBranch = currentBranchOutput.trim();
|
||||
|
||||
// Check if branch already exists
|
||||
@@ -79,7 +78,7 @@ export function createCheckoutBranchHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Checkout branch failed");
|
||||
logError(error, 'Checkout branch failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
* POST /commit endpoint - Commit changes in a worktree
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -20,13 +20,13 @@ export function createCommitHandler() {
|
||||
if (!worktreePath || !message) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath and message required",
|
||||
error: 'worktreePath and message required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for uncommitted changes
|
||||
const { stdout: status } = await execAsync("git status --porcelain", {
|
||||
const { stdout: status } = await execAsync('git status --porcelain', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
@@ -35,14 +35,14 @@ export function createCommitHandler() {
|
||||
success: true,
|
||||
result: {
|
||||
committed: false,
|
||||
message: "No changes to commit",
|
||||
message: 'No changes to commit',
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Stage all changes
|
||||
await execAsync("git add -A", { cwd: worktreePath });
|
||||
await execAsync('git add -A', { cwd: worktreePath });
|
||||
|
||||
// Create commit
|
||||
await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, {
|
||||
@@ -50,16 +50,15 @@ export function createCommitHandler() {
|
||||
});
|
||||
|
||||
// Get commit hash
|
||||
const { stdout: hashOutput } = await execAsync("git rev-parse HEAD", {
|
||||
const { stdout: hashOutput } = await execAsync('git rev-parse HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const commitHash = hashOutput.trim().substring(0, 8);
|
||||
|
||||
// Get branch name
|
||||
const { stdout: branchOutput } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
res.json({
|
||||
@@ -72,7 +71,7 @@ export function createCommitHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Commit worktree failed");
|
||||
logError(error, 'Commit worktree failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
* POST /create-pr endpoint - Commit changes and create a pull request from a worktree
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { Request, Response } from 'express';
|
||||
import {
|
||||
getErrorMessage,
|
||||
logError,
|
||||
@@ -10,26 +10,27 @@ import {
|
||||
execEnv,
|
||||
isValidBranchName,
|
||||
isGhCliAvailable,
|
||||
} from "../common.js";
|
||||
import { updateWorktreePRInfo } from "../../../lib/worktree-metadata.js";
|
||||
} from '../common.js';
|
||||
import { updateWorktreePRInfo } from '../../../lib/worktree-metadata.js';
|
||||
|
||||
export function createCreatePRHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } = req.body as {
|
||||
worktreePath: string;
|
||||
projectPath?: string;
|
||||
commitMessage?: string;
|
||||
prTitle?: string;
|
||||
prBody?: string;
|
||||
baseBranch?: string;
|
||||
draft?: boolean;
|
||||
};
|
||||
const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } =
|
||||
req.body as {
|
||||
worktreePath: string;
|
||||
projectPath?: string;
|
||||
commitMessage?: string;
|
||||
prTitle?: string;
|
||||
prBody?: string;
|
||||
baseBranch?: string;
|
||||
draft?: boolean;
|
||||
};
|
||||
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath required",
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -39,23 +40,23 @@ export function createCreatePRHandler() {
|
||||
const effectiveProjectPath = projectPath || worktreePath;
|
||||
|
||||
// Get current branch name
|
||||
const { stdout: branchOutput } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: worktreePath, env: execEnv }
|
||||
);
|
||||
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
// Validate branch name for security
|
||||
if (!isValidBranchName(branchName)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "Invalid branch name contains unsafe characters",
|
||||
error: 'Invalid branch name contains unsafe characters',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for uncommitted changes
|
||||
const { stdout: status } = await execAsync("git status --porcelain", {
|
||||
const { stdout: status } = await execAsync('git status --porcelain', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
@@ -67,7 +68,7 @@ export function createCreatePRHandler() {
|
||||
const message = commitMessage || `Changes from ${branchName}`;
|
||||
|
||||
// Stage all changes
|
||||
await execAsync("git add -A", { cwd: worktreePath, env: execEnv });
|
||||
await execAsync('git add -A', { cwd: worktreePath, env: execEnv });
|
||||
|
||||
// Create commit
|
||||
await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, {
|
||||
@@ -76,7 +77,7 @@ export function createCreatePRHandler() {
|
||||
});
|
||||
|
||||
// Get commit hash
|
||||
const { stdout: hashOutput } = await execAsync("git rev-parse HEAD", {
|
||||
const { stdout: hashOutput } = await execAsync('git rev-parse HEAD', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
@@ -100,8 +101,8 @@ export function createCreatePRHandler() {
|
||||
} catch (error2: unknown) {
|
||||
// Capture push error for reporting
|
||||
const err = error2 as { stderr?: string; message?: string };
|
||||
pushError = err.stderr || err.message || "Push failed";
|
||||
console.error("[CreatePR] Push failed:", pushError);
|
||||
pushError = err.stderr || err.message || 'Push failed';
|
||||
console.error('[CreatePR] Push failed:', pushError);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -115,10 +116,10 @@ export function createCreatePRHandler() {
|
||||
}
|
||||
|
||||
// Create PR using gh CLI or provide browser fallback
|
||||
const base = baseBranch || "main";
|
||||
const base = baseBranch || 'main';
|
||||
const title = prTitle || branchName;
|
||||
const body = prBody || `Changes from branch ${branchName}`;
|
||||
const draftFlag = draft ? "--draft" : "";
|
||||
const draftFlag = draft ? '--draft' : '';
|
||||
|
||||
let prUrl: string | null = null;
|
||||
let prError: string | null = null;
|
||||
@@ -131,7 +132,7 @@ export function createCreatePRHandler() {
|
||||
let upstreamRepo: string | null = null;
|
||||
let originOwner: string | null = null;
|
||||
try {
|
||||
const { stdout: remotes } = await execAsync("git remote -v", {
|
||||
const { stdout: remotes } = await execAsync('git remote -v', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
@@ -150,15 +151,17 @@ export function createCreatePRHandler() {
|
||||
}
|
||||
if (!match) {
|
||||
// Try HTTPS format: https://github.com/owner/repo.git
|
||||
match = line.match(/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/);
|
||||
match = line.match(
|
||||
/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
|
||||
);
|
||||
}
|
||||
|
||||
if (match) {
|
||||
const [, remoteName, owner, repo] = match;
|
||||
if (remoteName === "upstream") {
|
||||
if (remoteName === 'upstream') {
|
||||
upstreamRepo = `${owner}/${repo}`;
|
||||
repoUrl = `https://github.com/${owner}/${repo}`;
|
||||
} else if (remoteName === "origin") {
|
||||
} else if (remoteName === 'origin') {
|
||||
originOwner = owner;
|
||||
if (!repoUrl) {
|
||||
repoUrl = `https://github.com/${owner}/${repo}`;
|
||||
@@ -173,7 +176,7 @@ export function createCreatePRHandler() {
|
||||
// Fallback: Try to get repo URL from git config if remote parsing failed
|
||||
if (!repoUrl) {
|
||||
try {
|
||||
const { stdout: originUrl } = await execAsync("git config --get remote.origin.url", {
|
||||
const { stdout: originUrl } = await execAsync('git config --get remote.origin.url', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
@@ -217,9 +220,11 @@ export function createCreatePRHandler() {
|
||||
// This is more reliable than gh pr view as it explicitly searches by branch name
|
||||
// For forks, we need to use owner:branch format for the head parameter
|
||||
const headRef = upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
|
||||
const repoArg = upstreamRepo ? ` --repo "${upstreamRepo}"` : "";
|
||||
const repoArg = upstreamRepo ? ` --repo "${upstreamRepo}"` : '';
|
||||
|
||||
console.log(`[CreatePR] Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`);
|
||||
console.log(
|
||||
`[CreatePR] Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`
|
||||
);
|
||||
try {
|
||||
const listCmd = `gh pr list${repoArg} --head "${headRef}" --json number,title,url,state --limit 1`;
|
||||
console.log(`[CreatePR] Running: ${listCmd}`);
|
||||
@@ -234,7 +239,9 @@ export function createCreatePRHandler() {
|
||||
if (Array.isArray(existingPrs) && existingPrs.length > 0) {
|
||||
const existingPr = existingPrs[0];
|
||||
// PR already exists - use it and store metadata
|
||||
console.log(`[CreatePR] PR already exists for branch ${branchName}: PR #${existingPr.number}`);
|
||||
console.log(
|
||||
`[CreatePR] PR already exists for branch ${branchName}: PR #${existingPr.number}`
|
||||
);
|
||||
prUrl = existingPr.url;
|
||||
prNumber = existingPr.number;
|
||||
prAlreadyExisted = true;
|
||||
@@ -244,10 +251,12 @@ export function createCreatePRHandler() {
|
||||
number: existingPr.number,
|
||||
url: existingPr.url,
|
||||
title: existingPr.title || title,
|
||||
state: existingPr.state || "open",
|
||||
state: existingPr.state || 'open',
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
console.log(`[CreatePR] Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`);
|
||||
console.log(
|
||||
`[CreatePR] Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`
|
||||
);
|
||||
} else {
|
||||
console.log(`[CreatePR] No existing PR found for branch ${branchName}`);
|
||||
}
|
||||
@@ -293,23 +302,25 @@ export function createCreatePRHandler() {
|
||||
number: prNumber,
|
||||
url: prUrl,
|
||||
title,
|
||||
state: draft ? "draft" : "open",
|
||||
state: draft ? 'draft' : 'open',
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
console.log(`[CreatePR] Stored PR info for branch ${branchName}: PR #${prNumber}`);
|
||||
console.log(
|
||||
`[CreatePR] Stored PR info for branch ${branchName}: PR #${prNumber}`
|
||||
);
|
||||
} catch (metadataError) {
|
||||
console.error("[CreatePR] Failed to store PR metadata:", metadataError);
|
||||
console.error('[CreatePR] Failed to store PR metadata:', metadataError);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (ghError: unknown) {
|
||||
// gh CLI failed - check if it's "already exists" error and try to fetch the PR
|
||||
const err = ghError as { stderr?: string; message?: string };
|
||||
const errorMessage = err.stderr || err.message || "PR creation failed";
|
||||
const errorMessage = err.stderr || err.message || 'PR creation failed';
|
||||
console.log(`[CreatePR] gh pr create failed: ${errorMessage}`);
|
||||
|
||||
// If error indicates PR already exists, try to fetch it
|
||||
if (errorMessage.toLowerCase().includes("already exists")) {
|
||||
if (errorMessage.toLowerCase().includes('already exists')) {
|
||||
console.log(`[CreatePR] PR already exists error - trying to fetch existing PR`);
|
||||
try {
|
||||
const { stdout: viewOutput } = await execAsync(
|
||||
@@ -326,13 +337,13 @@ export function createCreatePRHandler() {
|
||||
number: existingPr.number,
|
||||
url: existingPr.url,
|
||||
title: existingPr.title || title,
|
||||
state: existingPr.state || "open",
|
||||
state: existingPr.state || 'open',
|
||||
createdAt: new Date().toISOString(),
|
||||
});
|
||||
console.log(`[CreatePR] Fetched and stored existing PR: #${existingPr.number}`);
|
||||
}
|
||||
} catch (viewError) {
|
||||
console.error("[CreatePR] Failed to fetch existing PR:", viewError);
|
||||
console.error('[CreatePR] Failed to fetch existing PR:', viewError);
|
||||
prError = errorMessage;
|
||||
}
|
||||
} else {
|
||||
@@ -341,7 +352,7 @@ export function createCreatePRHandler() {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
prError = "gh_cli_not_available";
|
||||
prError = 'gh_cli_not_available';
|
||||
}
|
||||
|
||||
// Return result with browser fallback URL
|
||||
@@ -362,7 +373,7 @@ export function createCreatePRHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Create PR failed");
|
||||
logError(error, 'Create PR failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
* POST /list-branches endpoint - List all local branches
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { getErrorMessage, logWorktreeError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logWorktreeError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -25,33 +25,31 @@ export function createListBranchesHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath required",
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Get current branch
|
||||
const { stdout: currentBranchOutput } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const currentBranch = currentBranchOutput.trim();
|
||||
|
||||
// List all local branches
|
||||
// Use double quotes around the format string for cross-platform compatibility
|
||||
// Single quotes are preserved literally on Windows; double quotes work on both
|
||||
const { stdout: branchesOutput } = await execAsync(
|
||||
'git branch --format="%(refname:short)"',
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: branchesOutput } = await execAsync('git branch --format="%(refname:short)"', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
const branches: BranchInfo[] = branchesOutput
|
||||
.trim()
|
||||
.split("\n")
|
||||
.split('\n')
|
||||
.filter((b) => b.trim())
|
||||
.map((name) => {
|
||||
// Remove any surrounding quotes (Windows git may preserve them)
|
||||
const cleanName = name.trim().replace(/^['"]|['"]$/g, "");
|
||||
const cleanName = name.trim().replace(/^['"]|['"]$/g, '');
|
||||
return {
|
||||
name: cleanName,
|
||||
isCurrent: cleanName === currentBranch,
|
||||
@@ -93,7 +91,7 @@ export function createListBranchesHandler() {
|
||||
});
|
||||
} catch (error) {
|
||||
const worktreePath = req.body?.worktreePath;
|
||||
logWorktreeError(error, "List branches failed", worktreePath);
|
||||
logWorktreeError(error, 'List branches failed', worktreePath);
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5,9 +5,9 @@
|
||||
* including their ports and URLs.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getDevServerService } from "../../../services/dev-server-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getDevServerService } from '../../../services/dev-server-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createListDevServersHandler() {
|
||||
return async (_req: Request, res: Response): Promise<void> => {
|
||||
@@ -22,7 +22,7 @@ export function createListDevServersHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "List dev servers failed");
|
||||
logError(error, 'List dev servers failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
* POST /merge endpoint - Merge feature (merge worktree branch into main)
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import path from "path";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import path from 'path';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -20,42 +20,34 @@ export function createMergeHandler() {
|
||||
};
|
||||
|
||||
if (!projectPath || !featureId) {
|
||||
res
|
||||
.status(400)
|
||||
.json({
|
||||
success: false,
|
||||
error: "projectPath and featureId required",
|
||||
});
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'projectPath and featureId required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const branchName = `feature/${featureId}`;
|
||||
// Git worktrees are stored in project directory
|
||||
const worktreePath = path.join(projectPath, ".worktrees", featureId);
|
||||
const worktreePath = path.join(projectPath, '.worktrees', featureId);
|
||||
|
||||
// Get current branch
|
||||
const { stdout: currentBranch } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: projectPath }
|
||||
);
|
||||
const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: projectPath,
|
||||
});
|
||||
|
||||
// Merge the feature branch
|
||||
const mergeCmd = options?.squash
|
||||
? `git merge --squash ${branchName}`
|
||||
: `git merge ${branchName} -m "${
|
||||
options?.message || `Merge ${branchName}`
|
||||
}"`;
|
||||
: `git merge ${branchName} -m "${options?.message || `Merge ${branchName}`}"`;
|
||||
|
||||
await execAsync(mergeCmd, { cwd: projectPath });
|
||||
|
||||
// If squash merge, need to commit
|
||||
if (options?.squash) {
|
||||
await execAsync(
|
||||
`git commit -m "${
|
||||
options?.message || `Merge ${branchName} (squash)`
|
||||
}"`,
|
||||
{ cwd: projectPath }
|
||||
);
|
||||
await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, {
|
||||
cwd: projectPath,
|
||||
});
|
||||
}
|
||||
|
||||
// Clean up worktree and branch
|
||||
@@ -70,7 +62,7 @@ export function createMergeHandler() {
|
||||
|
||||
res.json({ success: true, mergedBranch: branchName });
|
||||
} catch (error) {
|
||||
logError(error, "Merge worktree failed");
|
||||
logError(error, 'Merge worktree failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -3,10 +3,10 @@
* GET /default-editor endpoint - Get the name of the default code editor
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';

const execAsync = promisify(exec);

@@ -29,8 +29,8 @@ async function detectDefaultEditor(): Promise<EditorInfo> {

// Try Cursor first (if user has Cursor, they probably prefer it)
try {
await execAsync("which cursor || where cursor");
cachedEditor = { name: "Cursor", command: "cursor" };
await execAsync('which cursor || where cursor');
cachedEditor = { name: 'Cursor', command: 'cursor' };
return cachedEditor;
} catch {
// Cursor not found
@@ -38,8 +38,8 @@ async function detectDefaultEditor(): Promise<EditorInfo> {

// Try VS Code
try {
await execAsync("which code || where code");
cachedEditor = { name: "VS Code", command: "code" };
await execAsync('which code || where code');
cachedEditor = { name: 'VS Code', command: 'code' };
return cachedEditor;
} catch {
// VS Code not found
@@ -47,8 +47,8 @@ async function detectDefaultEditor(): Promise<EditorInfo> {

// Try Zed
try {
await execAsync("which zed || where zed");
cachedEditor = { name: "Zed", command: "zed" };
await execAsync('which zed || where zed');
cachedEditor = { name: 'Zed', command: 'zed' };
return cachedEditor;
} catch {
// Zed not found
@@ -56,8 +56,8 @@ async function detectDefaultEditor(): Promise<EditorInfo> {

// Try Sublime Text
try {
await execAsync("which subl || where subl");
cachedEditor = { name: "Sublime Text", command: "subl" };
await execAsync('which subl || where subl');
cachedEditor = { name: 'Sublime Text', command: 'subl' };
return cachedEditor;
} catch {
// Sublime not found
@@ -65,12 +65,12 @@ async function detectDefaultEditor(): Promise<EditorInfo> {

// Fallback to file manager
const platform = process.platform;
if (platform === "darwin") {
cachedEditor = { name: "Finder", command: "open" };
} else if (platform === "win32") {
cachedEditor = { name: "Explorer", command: "explorer" };
if (platform === 'darwin') {
cachedEditor = { name: 'Finder', command: 'open' };
} else if (platform === 'win32') {
cachedEditor = { name: 'Explorer', command: 'explorer' };
} else {
cachedEditor = { name: "File Manager", command: "xdg-open" };
cachedEditor = { name: 'File Manager', command: 'xdg-open' };
}
return cachedEditor;
}
@@ -87,7 +87,7 @@ export function createGetDefaultEditorHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Get default editor failed");
|
||||
logError(error, 'Get default editor failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
@@ -103,7 +103,7 @@ export function createOpenInEditorHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath required",
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -125,15 +125,15 @@ export function createOpenInEditorHandler() {
|
||||
let openCommand: string;
|
||||
let fallbackName: string;
|
||||
|
||||
if (platform === "darwin") {
|
||||
if (platform === 'darwin') {
|
||||
openCommand = `open "${worktreePath}"`;
|
||||
fallbackName = "Finder";
|
||||
} else if (platform === "win32") {
|
||||
fallbackName = 'Finder';
|
||||
} else if (platform === 'win32') {
|
||||
openCommand = `explorer "${worktreePath}"`;
|
||||
fallbackName = "Explorer";
|
||||
fallbackName = 'Explorer';
|
||||
} else {
|
||||
openCommand = `xdg-open "${worktreePath}"`;
|
||||
fallbackName = "File Manager";
|
||||
fallbackName = 'File Manager';
|
||||
}
|
||||
|
||||
await execAsync(openCommand);
|
||||
@@ -146,7 +146,7 @@ export function createOpenInEditorHandler() {
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Open in editor failed");
|
||||
logError(error, 'Open in editor failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
* POST /pr-info endpoint - Get PR info and comments for a branch
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import type { Request, Response } from 'express';
|
||||
import {
|
||||
getErrorMessage,
|
||||
logError,
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
execEnv,
|
||||
isValidBranchName,
|
||||
isGhCliAvailable,
|
||||
} from "../common.js";
|
||||
} from '../common.js';
|
||||
|
||||
export interface PRComment {
|
||||
id: number;
|
||||
@@ -44,7 +44,7 @@ export function createPRInfoHandler() {
|
||||
if (!worktreePath || !branchName) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath and branchName required",
|
||||
error: 'worktreePath and branchName required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -53,7 +53,7 @@ export function createPRInfoHandler() {
|
||||
if (!isValidBranchName(branchName)) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "Invalid branch name contains unsafe characters",
|
||||
error: 'Invalid branch name contains unsafe characters',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -67,7 +67,7 @@ export function createPRInfoHandler() {
|
||||
result: {
|
||||
hasPR: false,
|
||||
ghCliAvailable: false,
|
||||
error: "gh CLI not available",
|
||||
error: 'gh CLI not available',
|
||||
},
|
||||
});
|
||||
return;
|
||||
@@ -79,7 +79,7 @@ export function createPRInfoHandler() {
|
||||
let originRepo: string | null = null;
|
||||
|
||||
try {
|
||||
const { stdout: remotes } = await execAsync("git remote -v", {
|
||||
const { stdout: remotes } = await execAsync('git remote -v', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
@@ -87,21 +87,15 @@ export function createPRInfoHandler() {
|
||||
const lines = remotes.split(/\r?\n/);
|
||||
for (const line of lines) {
|
||||
let match =
|
||||
line.match(
|
||||
/^(\w+)\s+.*[:/]([^/]+)\/([^/\s]+?)(?:\.git)?\s+\(fetch\)/
|
||||
) ||
|
||||
line.match(
|
||||
/^(\w+)\s+git@[^:]+:([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
|
||||
) ||
|
||||
line.match(
|
||||
/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
|
||||
);
|
||||
line.match(/^(\w+)\s+.*[:/]([^/]+)\/([^/\s]+?)(?:\.git)?\s+\(fetch\)/) ||
|
||||
line.match(/^(\w+)\s+git@[^:]+:([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/) ||
|
||||
line.match(/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/);
|
||||
|
||||
if (match) {
|
||||
const [, remoteName, owner, repo] = match;
|
||||
if (remoteName === "upstream") {
|
||||
if (remoteName === 'upstream') {
|
||||
upstreamRepo = `${owner}/${repo}`;
|
||||
} else if (remoteName === "origin") {
|
||||
} else if (remoteName === 'origin') {
|
||||
originOwner = owner;
|
||||
originRepo = repo;
|
||||
}
|
||||
@@ -113,16 +107,11 @@ export function createPRInfoHandler() {
|
||||
|
||||
if (!originOwner || !originRepo) {
|
||||
try {
|
||||
const { stdout: originUrl } = await execAsync(
|
||||
"git config --get remote.origin.url",
|
||||
{
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
}
|
||||
);
|
||||
const match = originUrl
|
||||
.trim()
|
||||
.match(/[:/]([^/]+)\/([^/\s]+?)(?:\.git)?$/);
|
||||
const { stdout: originUrl } = await execAsync('git config --get remote.origin.url', {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
const match = originUrl.trim().match(/[:/]([^/]+)\/([^/\s]+?)(?:\.git)?$/);
|
||||
if (match) {
|
||||
if (!originOwner) {
|
||||
originOwner = match[1];
|
||||
@@ -137,21 +126,18 @@ export function createPRInfoHandler() {
|
||||
}
|
||||
|
||||
const targetRepo =
|
||||
upstreamRepo || (originOwner && originRepo
|
||||
? `${originOwner}/${originRepo}`
|
||||
: null);
|
||||
const repoFlag = targetRepo ? ` --repo "${targetRepo}"` : "";
|
||||
const headRef =
|
||||
upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
|
||||
upstreamRepo || (originOwner && originRepo ? `${originOwner}/${originRepo}` : null);
|
||||
const repoFlag = targetRepo ? ` --repo "${targetRepo}"` : '';
|
||||
const headRef = upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
|
||||
|
||||
// Get PR info for the branch using gh CLI
|
||||
try {
|
||||
// First, find the PR associated with this branch
|
||||
const listCmd = `gh pr list${repoFlag} --head "${headRef}" --json number,title,url,state,author,body --limit 1`;
|
||||
const { stdout: prListOutput } = await execAsync(
|
||||
listCmd,
|
||||
{ cwd: worktreePath, env: execEnv }
|
||||
);
|
||||
const { stdout: prListOutput } = await execAsync(listCmd, {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
|
||||
const prList = JSON.parse(prListOutput);
|
||||
|
||||
@@ -173,25 +159,22 @@ export function createPRInfoHandler() {
|
||||
let comments: PRComment[] = [];
|
||||
try {
|
||||
const viewCmd = `gh pr view ${prNumber}${repoFlag} --json comments`;
|
||||
const { stdout: commentsOutput } = await execAsync(
|
||||
viewCmd,
|
||||
{ cwd: worktreePath, env: execEnv }
|
||||
);
|
||||
const { stdout: commentsOutput } = await execAsync(viewCmd, {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
const commentsData = JSON.parse(commentsOutput);
|
||||
comments = (commentsData.comments || []).map((c: {
|
||||
id: number;
|
||||
author: { login: string };
|
||||
body: string;
|
||||
createdAt: string;
|
||||
}) => ({
|
||||
id: c.id,
|
||||
author: c.author?.login || "unknown",
|
||||
body: c.body,
|
||||
createdAt: c.createdAt,
|
||||
isReviewComment: false,
|
||||
}));
|
||||
comments = (commentsData.comments || []).map(
|
||||
(c: { id: number; author: { login: string }; body: string; createdAt: string }) => ({
|
||||
id: c.id,
|
||||
author: c.author?.login || 'unknown',
|
||||
body: c.body,
|
||||
createdAt: c.createdAt,
|
||||
isReviewComment: false,
|
||||
})
|
||||
);
|
||||
} catch (error) {
|
||||
console.warn("[PRInfo] Failed to fetch PR comments:", error);
|
||||
console.warn('[PRInfo] Failed to fetch PR comments:', error);
|
||||
}
|
||||
|
||||
// Get review comments (inline code comments)
|
||||
@@ -201,33 +184,35 @@ export function createPRInfoHandler() {
|
||||
try {
|
||||
const reviewsEndpoint = `repos/${targetRepo}/pulls/${prNumber}/comments`;
|
||||
const reviewsCmd = `gh api ${reviewsEndpoint}`;
|
||||
const { stdout: reviewsOutput } = await execAsync(
|
||||
reviewsCmd,
|
||||
{ cwd: worktreePath, env: execEnv }
|
||||
);
|
||||
const { stdout: reviewsOutput } = await execAsync(reviewsCmd, {
|
||||
cwd: worktreePath,
|
||||
env: execEnv,
|
||||
});
|
||||
const reviewsData = JSON.parse(reviewsOutput);
|
||||
reviewComments = reviewsData.map((c: {
|
||||
id: number;
|
||||
user: { login: string };
|
||||
body: string;
|
||||
path: string;
|
||||
line?: number;
|
||||
original_line?: number;
|
||||
created_at: string;
|
||||
}) => ({
|
||||
id: c.id,
|
||||
author: c.user?.login || "unknown",
|
||||
body: c.body,
|
||||
path: c.path,
|
||||
line: c.line || c.original_line,
|
||||
createdAt: c.created_at,
|
||||
isReviewComment: true,
|
||||
}));
|
||||
reviewComments = reviewsData.map(
|
||||
(c: {
|
||||
id: number;
|
||||
user: { login: string };
|
||||
body: string;
|
||||
path: string;
|
||||
line?: number;
|
||||
original_line?: number;
|
||||
created_at: string;
|
||||
}) => ({
|
||||
id: c.id,
|
||||
author: c.user?.login || 'unknown',
|
||||
body: c.body,
|
||||
path: c.path,
|
||||
line: c.line || c.original_line,
|
||||
createdAt: c.created_at,
|
||||
isReviewComment: true,
|
||||
})
|
||||
);
|
||||
} catch (error) {
|
||||
console.warn("[PRInfo] Failed to fetch review comments:", error);
|
||||
console.warn('[PRInfo] Failed to fetch review comments:', error);
|
||||
}
|
||||
} else {
|
||||
console.warn("[PRInfo] Cannot fetch review comments: repository info not available");
|
||||
console.warn('[PRInfo] Cannot fetch review comments: repository info not available');
|
||||
}
|
||||
|
||||
const prInfo: PRInfo = {
|
||||
@@ -235,8 +220,8 @@ export function createPRInfoHandler() {
|
||||
title: pr.title,
|
||||
url: pr.url,
|
||||
state: pr.state,
|
||||
author: pr.author?.login || "unknown",
|
||||
body: pr.body || "",
|
||||
author: pr.author?.login || 'unknown',
|
||||
body: pr.body || '',
|
||||
comments,
|
||||
reviewComments,
|
||||
};
|
||||
@@ -251,7 +236,7 @@ export function createPRInfoHandler() {
|
||||
});
|
||||
} catch (error) {
|
||||
// gh CLI failed - might not be authenticated or no remote
|
||||
logError(error, "Failed to get PR info");
|
||||
logError(error, 'Failed to get PR info');
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
@@ -262,7 +247,7 @@ export function createPRInfoHandler() {
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "PR info handler failed");
|
||||
logError(error, 'PR info handler failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
* POST /pull endpoint - Pull latest changes for a worktree/branch
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -19,23 +19,22 @@ export function createPullHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath required",
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Get current branch name
|
||||
const { stdout: branchOutput } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const branchName = branchOutput.trim();
|
||||
|
||||
// Fetch latest from remote
|
||||
await execAsync("git fetch origin", { cwd: worktreePath });
|
||||
await execAsync('git fetch origin', { cwd: worktreePath });
|
||||
|
||||
// Check if there are local changes that would be overwritten
|
||||
const { stdout: status } = await execAsync("git status --porcelain", {
|
||||
const { stdout: status } = await execAsync('git status --porcelain', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const hasLocalChanges = status.trim().length > 0;
|
||||
@@ -43,35 +42,34 @@ export function createPullHandler() {
|
||||
if (hasLocalChanges) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "You have local changes. Please commit them before pulling.",
|
||||
error: 'You have local changes. Please commit them before pulling.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Pull latest changes
|
||||
try {
|
||||
const { stdout: pullOutput } = await execAsync(
|
||||
`git pull origin ${branchName}`,
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: pullOutput } = await execAsync(`git pull origin ${branchName}`, {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
|
||||
// Check if we pulled any changes
|
||||
const alreadyUpToDate = pullOutput.includes("Already up to date");
|
||||
const alreadyUpToDate = pullOutput.includes('Already up to date');
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
result: {
|
||||
branch: branchName,
|
||||
pulled: !alreadyUpToDate,
|
||||
message: alreadyUpToDate ? "Already up to date" : "Pulled latest changes",
|
||||
message: alreadyUpToDate ? 'Already up to date' : 'Pulled latest changes',
|
||||
},
|
||||
});
|
||||
} catch (pullError: unknown) {
|
||||
const err = pullError as { stderr?: string; message?: string };
|
||||
const errorMsg = err.stderr || err.message || "Pull failed";
|
||||
const errorMsg = err.stderr || err.message || 'Pull failed';
|
||||
|
||||
// Check for common errors
|
||||
if (errorMsg.includes("no tracking information")) {
|
||||
if (errorMsg.includes('no tracking information')) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=origin/${branchName}`,
|
||||
@@ -85,7 +83,7 @@ export function createPullHandler() {
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Pull failed");
|
||||
logError(error, 'Pull failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2,10 +2,10 @@
* POST /push endpoint - Push a worktree branch to remote
*/

import type { Request, Response } from "express";
import { exec } from "child_process";
import { promisify } from "util";
import { getErrorMessage, logError } from "../common.js";
import type { Request, Response } from 'express';
import { exec } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError } from '../common.js';

const execAsync = promisify(exec);

@@ -20,20 +20,19 @@ export function createPushHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
error: "worktreePath required",
error: 'worktreePath required',
});
return;
}

// Get branch name
const { stdout: branchOutput } = await execAsync(
"git rev-parse --abbrev-ref HEAD",
{ cwd: worktreePath }
);
const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
cwd: worktreePath,
});
const branchName = branchOutput.trim();

// Push the branch
const forceFlag = force ? "--force" : "";
const forceFlag = force ? '--force' : '';
try {
await execAsync(`git push -u origin ${branchName} ${forceFlag}`, {
cwd: worktreePath,
@@ -54,7 +53,7 @@ export function createPushHandler() {
},
});
} catch (error) {
logError(error, "Push worktree failed");
logError(error, 'Push worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};

@@ -6,9 +6,9 @@
|
||||
* affecting the main dev server.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getDevServerService } from "../../../services/dev-server-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getDevServerService } from '../../../services/dev-server-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStartDevHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -21,7 +21,7 @@ export function createStartDevHandler() {
|
||||
if (!projectPath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "projectPath is required",
|
||||
error: 'projectPath is required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -29,7 +29,7 @@ export function createStartDevHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath is required",
|
||||
error: 'worktreePath is required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -50,11 +50,11 @@ export function createStartDevHandler() {
|
||||
} else {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: result.error || "Failed to start dev server",
|
||||
error: result.error || 'Failed to start dev server',
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Start dev server failed");
|
||||
logError(error, 'Start dev server failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5,9 +5,9 @@
|
||||
* freeing up the ports for reuse.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { getDevServerService } from "../../../services/dev-server-service.js";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { getDevServerService } from '../../../services/dev-server-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
export function createStopDevHandler() {
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
@@ -19,7 +19,7 @@ export function createStopDevHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath is required",
|
||||
error: 'worktreePath is required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -38,11 +38,11 @@ export function createStopDevHandler() {
|
||||
} else {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: result.error || "Failed to stop dev server",
|
||||
error: result.error || 'Failed to stop dev server',
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logError(error, "Stop dev server failed");
|
||||
logError(error, 'Stop dev server failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -6,10 +6,10 @@
|
||||
* the user should commit first.
|
||||
*/
|
||||
|
||||
import type { Request, Response } from "express";
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { getErrorMessage, logError } from "../common.js";
|
||||
import type { Request, Response } from 'express';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -19,13 +19,16 @@ const execAsync = promisify(exec);
|
||||
*/
|
||||
async function hasUncommittedChanges(cwd: string): Promise<boolean> {
|
||||
try {
|
||||
const { stdout } = await execAsync("git status --porcelain", { cwd });
|
||||
const lines = stdout.trim().split("\n").filter((line) => {
|
||||
if (!line.trim()) return false;
|
||||
// Exclude .worktrees/ directory (created by automaker)
|
||||
if (line.includes(".worktrees/") || line.endsWith(".worktrees")) return false;
|
||||
return true;
|
||||
});
|
||||
const { stdout } = await execAsync('git status --porcelain', { cwd });
|
||||
const lines = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => {
|
||||
if (!line.trim()) return false;
|
||||
// Exclude .worktrees/ directory (created by automaker)
|
||||
if (line.includes('.worktrees/') || line.endsWith('.worktrees')) return false;
|
||||
return true;
|
||||
});
|
||||
return lines.length > 0;
|
||||
} catch {
|
||||
return false;
|
||||
@@ -38,18 +41,21 @@ async function hasUncommittedChanges(cwd: string): Promise<boolean> {
|
||||
*/
|
||||
async function getChangesSummary(cwd: string): Promise<string> {
|
||||
try {
|
||||
const { stdout } = await execAsync("git status --short", { cwd });
|
||||
const lines = stdout.trim().split("\n").filter((line) => {
|
||||
if (!line.trim()) return false;
|
||||
// Exclude .worktrees/ directory
|
||||
if (line.includes(".worktrees/") || line.endsWith(".worktrees")) return false;
|
||||
return true;
|
||||
});
|
||||
if (lines.length === 0) return "";
|
||||
if (lines.length <= 5) return lines.join(", ");
|
||||
return `${lines.slice(0, 5).join(", ")} and ${lines.length - 5} more files`;
|
||||
const { stdout } = await execAsync('git status --short', { cwd });
|
||||
const lines = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => {
|
||||
if (!line.trim()) return false;
|
||||
// Exclude .worktrees/ directory
|
||||
if (line.includes('.worktrees/') || line.endsWith('.worktrees')) return false;
|
||||
return true;
|
||||
});
|
||||
if (lines.length === 0) return '';
|
||||
if (lines.length <= 5) return lines.join(', ');
|
||||
return `${lines.slice(0, 5).join(', ')} and ${lines.length - 5} more files`;
|
||||
} catch {
|
||||
return "unknown changes";
|
||||
return 'unknown changes';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -64,7 +70,7 @@ export function createSwitchBranchHandler() {
|
||||
if (!worktreePath) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "worktreePath required",
|
||||
error: 'worktreePath required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -72,16 +78,15 @@ export function createSwitchBranchHandler() {
|
||||
if (!branchName) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: "branchName required",
|
||||
error: 'branchName required',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Get current branch
|
||||
const { stdout: currentBranchOutput } = await execAsync(
|
||||
"git rev-parse --abbrev-ref HEAD",
|
||||
{ cwd: worktreePath }
|
||||
);
|
||||
const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
|
||||
cwd: worktreePath,
|
||||
});
|
||||
const previousBranch = currentBranchOutput.trim();
|
||||
|
||||
if (previousBranch === branchName) {
|
||||
@@ -115,7 +120,7 @@ export function createSwitchBranchHandler() {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: `Cannot switch branches: you have uncommitted changes (${summary}). Please commit your changes first.`,
|
||||
code: "UNCOMMITTED_CHANGES",
|
||||
code: 'UNCOMMITTED_CHANGES',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -132,7 +137,7 @@ export function createSwitchBranchHandler() {
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, "Switch branch failed");
|
||||
logError(error, 'Switch branch failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5,11 +5,11 @@
|
||||
* Supports cross-platform shell detection including WSL.
|
||||
*/
|
||||
|
||||
import * as pty from "node-pty";
|
||||
import { EventEmitter } from "events";
|
||||
import * as os from "os";
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as pty from 'node-pty';
|
||||
import { EventEmitter } from 'events';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
// Maximum scrollback buffer size (characters)
|
||||
const MAX_SCROLLBACK_SIZE = 50000; // ~50KB per terminal
|
||||
@@ -21,7 +21,7 @@ export const MAX_MAX_SESSIONS = 1000;
|
||||
// Maximum number of concurrent terminal sessions
|
||||
// Can be overridden via TERMINAL_MAX_SESSIONS environment variable
|
||||
// Default set to 1000 - effectively unlimited for most use cases
|
||||
let maxSessions = parseInt(process.env.TERMINAL_MAX_SESSIONS || "1000", 10);
|
||||
let maxSessions = parseInt(process.env.TERMINAL_MAX_SESSIONS || '1000', 10);
|
||||
|
||||
// Throttle output to prevent overwhelming WebSocket under heavy load
|
||||
// Using 4ms for responsive input feedback while still preventing flood
|
||||
@@ -65,20 +65,20 @@ export class TerminalService extends EventEmitter {
|
||||
const platform = os.platform();
|
||||
|
||||
// Check if running in WSL
|
||||
if (platform === "linux" && this.isWSL()) {
|
||||
if (platform === 'linux' && this.isWSL()) {
|
||||
// In WSL, prefer the user's configured shell or bash
|
||||
const userShell = process.env.SHELL || "/bin/bash";
|
||||
const userShell = process.env.SHELL || '/bin/bash';
|
||||
if (fs.existsSync(userShell)) {
|
||||
return { shell: userShell, args: ["--login"] };
|
||||
return { shell: userShell, args: ['--login'] };
|
||||
}
|
||||
return { shell: "/bin/bash", args: ["--login"] };
|
||||
return { shell: '/bin/bash', args: ['--login'] };
|
||||
}
|
||||
|
||||
switch (platform) {
|
||||
case "win32": {
|
||||
case 'win32': {
|
||||
// Windows: prefer PowerShell, fall back to cmd
|
||||
const pwsh = "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe";
|
||||
const pwshCore = "C:\\Program Files\\PowerShell\\7\\pwsh.exe";
|
||||
const pwsh = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe';
|
||||
const pwshCore = 'C:\\Program Files\\PowerShell\\7\\pwsh.exe';
|
||||
|
||||
if (fs.existsSync(pwshCore)) {
|
||||
return { shell: pwshCore, args: [] };
|
||||
@@ -86,32 +86,32 @@ export class TerminalService extends EventEmitter {
|
||||
if (fs.existsSync(pwsh)) {
|
||||
return { shell: pwsh, args: [] };
|
||||
}
|
||||
return { shell: "cmd.exe", args: [] };
|
||||
return { shell: 'cmd.exe', args: [] };
|
||||
}
|
||||
|
||||
case "darwin": {
|
||||
case 'darwin': {
|
||||
// macOS: prefer user's shell, then zsh, then bash
|
||||
const userShell = process.env.SHELL;
|
||||
if (userShell && fs.existsSync(userShell)) {
|
||||
return { shell: userShell, args: ["--login"] };
|
||||
return { shell: userShell, args: ['--login'] };
|
||||
}
|
||||
if (fs.existsSync("/bin/zsh")) {
|
||||
return { shell: "/bin/zsh", args: ["--login"] };
|
||||
if (fs.existsSync('/bin/zsh')) {
|
||||
return { shell: '/bin/zsh', args: ['--login'] };
|
||||
}
|
||||
return { shell: "/bin/bash", args: ["--login"] };
|
||||
return { shell: '/bin/bash', args: ['--login'] };
|
||||
}
|
||||
|
||||
case "linux":
|
||||
case 'linux':
|
||||
default: {
|
||||
// Linux: prefer user's shell, then bash, then sh
|
||||
const userShell = process.env.SHELL;
|
||||
if (userShell && fs.existsSync(userShell)) {
|
||||
return { shell: userShell, args: ["--login"] };
|
||||
return { shell: userShell, args: ['--login'] };
|
||||
}
|
||||
if (fs.existsSync("/bin/bash")) {
|
||||
return { shell: "/bin/bash", args: ["--login"] };
|
||||
if (fs.existsSync('/bin/bash')) {
|
||||
return { shell: '/bin/bash', args: ['--login'] };
|
||||
}
|
||||
return { shell: "/bin/sh", args: [] };
|
||||
return { shell: '/bin/sh', args: [] };
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -122,9 +122,9 @@ export class TerminalService extends EventEmitter {
|
||||
isWSL(): boolean {
|
||||
try {
|
||||
// Check /proc/version for Microsoft/WSL indicators
|
||||
if (fs.existsSync("/proc/version")) {
|
||||
const version = fs.readFileSync("/proc/version", "utf-8").toLowerCase();
|
||||
return version.includes("microsoft") || version.includes("wsl");
|
||||
if (fs.existsSync('/proc/version')) {
|
||||
const version = fs.readFileSync('/proc/version', 'utf-8').toLowerCase();
|
||||
return version.includes('microsoft') || version.includes('wsl');
|
||||
}
|
||||
// Check for WSL environment variable
|
||||
if (process.env.WSL_DISTRO_NAME || process.env.WSLENV) {
|
||||
@@ -170,19 +170,19 @@ export class TerminalService extends EventEmitter {
|
||||
let cwd = requestedCwd.trim();
|
||||
|
||||
// Reject paths with null bytes (could bypass path checks)
|
||||
if (cwd.includes("\0")) {
|
||||
console.warn(`[Terminal] Rejecting path with null byte: ${cwd.replace(/\0/g, "\\0")}`);
|
||||
if (cwd.includes('\0')) {
|
||||
console.warn(`[Terminal] Rejecting path with null byte: ${cwd.replace(/\0/g, '\\0')}`);
|
||||
return homeDir;
|
||||
}
|
||||
|
||||
// Fix double slashes at start (but not for Windows UNC paths)
|
||||
if (cwd.startsWith("//") && !cwd.startsWith("//wsl")) {
|
||||
if (cwd.startsWith('//') && !cwd.startsWith('//wsl')) {
|
||||
cwd = cwd.slice(1);
|
||||
}
|
||||
|
||||
// Normalize the path to resolve . and .. segments
|
||||
// Skip normalization for WSL UNC paths as path.resolve would break them
|
||||
if (!cwd.startsWith("//wsl")) {
|
||||
if (!cwd.startsWith('//wsl')) {
|
||||
cwd = path.resolve(cwd);
|
||||
}
|
||||
|
||||
@@ -247,19 +247,19 @@ export class TerminalService extends EventEmitter {
|
||||
// These settings ensure consistent terminal behavior across platforms
|
||||
const env: Record<string, string> = {
|
||||
...process.env,
|
||||
TERM: "xterm-256color",
|
||||
COLORTERM: "truecolor",
|
||||
TERM_PROGRAM: "automaker-terminal",
|
||||
TERM: 'xterm-256color',
|
||||
COLORTERM: 'truecolor',
|
||||
TERM_PROGRAM: 'automaker-terminal',
|
||||
// Ensure proper locale for character handling
|
||||
LANG: process.env.LANG || "en_US.UTF-8",
|
||||
LC_ALL: process.env.LC_ALL || process.env.LANG || "en_US.UTF-8",
|
||||
LANG: process.env.LANG || 'en_US.UTF-8',
|
||||
LC_ALL: process.env.LC_ALL || process.env.LANG || 'en_US.UTF-8',
|
||||
...options.env,
|
||||
};
|
||||
|
||||
console.log(`[Terminal] Creating session ${id} with shell: ${shell} in ${cwd}`);
|
||||
|
||||
const ptyProcess = pty.spawn(shell, shellArgs, {
|
||||
name: "xterm-256color",
|
||||
name: 'xterm-256color',
|
||||
cols: options.cols || 80,
|
||||
rows: options.rows || 24,
|
||||
cwd,
|
||||
@@ -272,8 +272,8 @@ export class TerminalService extends EventEmitter {
|
||||
cwd,
|
||||
createdAt: new Date(),
|
||||
shell,
|
||||
scrollbackBuffer: "",
|
||||
outputBuffer: "",
|
||||
scrollbackBuffer: '',
|
||||
outputBuffer: '',
|
||||
flushTimeout: null,
|
||||
resizeInProgress: false,
|
||||
resizeDebounceTimeout: null,
|
||||
@@ -293,12 +293,12 @@ export class TerminalService extends EventEmitter {
|
||||
// Schedule another flush for remaining data
|
||||
session.flushTimeout = setTimeout(flushOutput, OUTPUT_THROTTLE_MS);
|
||||
} else {
|
||||
session.outputBuffer = "";
|
||||
session.outputBuffer = '';
|
||||
session.flushTimeout = null;
|
||||
}
|
||||
|
||||
this.dataCallbacks.forEach((cb) => cb(id, dataToSend));
|
||||
this.emit("data", id, dataToSend);
|
||||
this.emit('data', id, dataToSend);
|
||||
};
|
||||
|
||||
// Forward data events with throttling
|
||||
@@ -331,7 +331,7 @@ export class TerminalService extends EventEmitter {
|
||||
console.log(`[Terminal] Session ${id} exited with code ${exitCode}`);
|
||||
this.sessions.delete(id);
|
||||
this.exitCallbacks.forEach((cb) => cb(id, exitCode));
|
||||
this.emit("exit", id, exitCode);
|
||||
this.emit('exit', id, exitCode);
|
||||
});
|
||||
|
||||
console.log(`[Terminal] Session ${id} created successfully`);
|
||||
@@ -414,7 +414,7 @@ export class TerminalService extends EventEmitter {
|
||||
|
||||
// First try graceful SIGTERM to allow process cleanup
|
||||
console.log(`[Terminal] Session ${sessionId} sending SIGTERM`);
|
||||
session.pty.kill("SIGTERM");
|
||||
session.pty.kill('SIGTERM');
|
||||
|
||||
// Schedule SIGKILL fallback if process doesn't exit gracefully
|
||||
// The onExit handler will remove session from map when it actually exits
|
||||
@@ -422,7 +422,7 @@ export class TerminalService extends EventEmitter {
|
||||
if (this.sessions.has(sessionId)) {
|
||||
console.log(`[Terminal] Session ${sessionId} still alive after SIGTERM, sending SIGKILL`);
|
||||
try {
|
||||
session.pty.kill("SIGKILL");
|
||||
session.pty.kill('SIGKILL');
|
||||
} catch {
|
||||
// Process may have already exited
|
||||
}
|
||||
@@ -467,7 +467,7 @@ export class TerminalService extends EventEmitter {
|
||||
|
||||
// Clear any pending output that hasn't been flushed yet
|
||||
// This data is already in scrollbackBuffer
|
||||
session.outputBuffer = "";
|
||||
session.outputBuffer = '';
|
||||
if (session.flushTimeout) {
|
||||
clearTimeout(session.flushTimeout);
|
||||
session.flushTimeout = null;
8
apps/server/tests/fixtures/images.ts
vendored
@@ -4,11 +4,11 @@

// 1x1 transparent PNG base64 data
export const pngBase64Fixture =
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==";
'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==';

export const imageDataFixture = {
base64: pngBase64Fixture,
mimeType: "image/png",
filename: "test.png",
originalPath: "/path/to/test.png",
mimeType: 'image/png',
filename: 'test.png',
originalPath: '/path/to/test.png',
};

@@ -1,11 +1,11 @@
|
||||
/**
|
||||
* Helper for creating test git repositories for integration tests
|
||||
*/
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import * as fs from "fs/promises";
|
||||
import * as path from "path";
|
||||
import * as os from "os";
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -18,36 +18,36 @@ export interface TestRepo {
* Create a temporary git repository for testing
*/
export async function createTestGitRepo(): Promise<TestRepo> {
const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "automaker-test-"));
const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'automaker-test-'));

// Initialize git repo
await execAsync("git init", { cwd: tmpDir });
await execAsync('git init', { cwd: tmpDir });
await execAsync('git config user.email "test@example.com"', { cwd: tmpDir });
await execAsync('git config user.name "Test User"', { cwd: tmpDir });

// Create initial commit
await fs.writeFile(path.join(tmpDir, "README.md"), "# Test Project\n");
await execAsync("git add .", { cwd: tmpDir });
await fs.writeFile(path.join(tmpDir, 'README.md'), '# Test Project\n');
await execAsync('git add .', { cwd: tmpDir });
await execAsync('git commit -m "Initial commit"', { cwd: tmpDir });

// Create main branch explicitly
await execAsync("git branch -M main", { cwd: tmpDir });
await execAsync('git branch -M main', { cwd: tmpDir });

return {
path: tmpDir,
cleanup: async () => {
try {
// Remove all worktrees first
const { stdout } = await execAsync("git worktree list --porcelain", {
const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: tmpDir,
}).catch(() => ({ stdout: "" }));
}).catch(() => ({ stdout: '' }));

const worktrees = stdout
.split("\n\n")
.split('\n\n')
.slice(1) // Skip main worktree
.map((block) => {
const pathLine = block.split("\n").find((line) => line.startsWith("worktree "));
return pathLine ? pathLine.replace("worktree ", "") : null;
const pathLine = block.split('\n').find((line) => line.startsWith('worktree '));
return pathLine ? pathLine.replace('worktree ', '') : null;
})
.filter(Boolean);

@@ -64,7 +64,7 @@ export async function createTestGitRepo(): Promise<TestRepo> {
// Remove the repository
await fs.rm(tmpDir, { recursive: true, force: true });
} catch (error) {
console.error("Failed to cleanup test repo:", error);
console.error('Failed to cleanup test repo:', error);
}
},
};
@@ -78,24 +78,21 @@ export async function createTestFeature(
featureId: string,
featureData: any
): Promise<void> {
const featuresDir = path.join(repoPath, ".automaker", "features");
const featuresDir = path.join(repoPath, '.automaker', 'features');
const featureDir = path.join(featuresDir, featureId);

await fs.mkdir(featureDir, { recursive: true });
await fs.writeFile(
path.join(featureDir, "feature.json"),
JSON.stringify(featureData, null, 2)
);
await fs.writeFile(path.join(featureDir, 'feature.json'), JSON.stringify(featureData, null, 2));
}

/**
* Get list of git branches
*/
export async function listBranches(repoPath: string): Promise<string[]> {
const { stdout } = await execAsync("git branch --list", { cwd: repoPath });
const { stdout } = await execAsync('git branch --list', { cwd: repoPath });
return stdout
.split("\n")
.map((line) => line.trim().replace(/^[*+]\s*/, ""))
.split('\n')
.map((line) => line.trim().replace(/^[*+]\s*/, ''))
.filter(Boolean);
}

@@ -104,16 +101,16 @@ export async function listBranches(repoPath: string): Promise<string[]> {
*/
export async function listWorktrees(repoPath: string): Promise<string[]> {
try {
const { stdout } = await execAsync("git worktree list --porcelain", {
const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: repoPath,
});

return stdout
.split("\n\n")
.split('\n\n')
.slice(1) // Skip main worktree
.map((block) => {
const pathLine = block.split("\n").find((line) => line.startsWith("worktree "));
return pathLine ? pathLine.replace("worktree ", "") : null;
const pathLine = block.split('\n').find((line) => line.startsWith('worktree '));
return pathLine ? pathLine.replace('worktree ', '') : null;
})
.filter(Boolean) as string[];
} catch {
@@ -124,10 +121,7 @@ export async function listWorktrees(repoPath: string): Promise<string[]> {
/**
* Check if a branch exists
*/
export async function branchExists(
repoPath: string,
branchName: string
): Promise<boolean> {
export async function branchExists(repoPath: string, branchName: string): Promise<boolean> {
const branches = await listBranches(repoPath);
return branches.includes(branchName);
}
@@ -135,10 +129,7 @@ export async function branchExists(
/**
* Check if a worktree exists
*/
export async function worktreeExists(
repoPath: string,
worktreePath: string
): Promise<boolean> {
export async function worktreeExists(repoPath: string, worktreePath: string): Promise<boolean> {
const worktrees = await listWorktrees(repoPath);
return worktrees.some((wt) => wt === worktreePath);
}

@@ -1,22 +1,20 @@
import { describe, it, expect, vi, afterEach } from "vitest";
import { createCreateHandler } from "@/routes/worktree/routes/create.js";
import { AUTOMAKER_INITIAL_COMMIT_MESSAGE } from "@/routes/worktree/common.js";
import { exec } from "child_process";
import { promisify } from "util";
import * as fs from "fs/promises";
import * as os from "os";
import * as path from "path";
import { describe, it, expect, vi, afterEach } from 'vitest';
import { createCreateHandler } from '@/routes/worktree/routes/create.js';
import { AUTOMAKER_INITIAL_COMMIT_MESSAGE } from '@/routes/worktree/common.js';
import { exec } from 'child_process';
import { promisify } from 'util';
import * as fs from 'fs/promises';
import * as os from 'os';
import * as path from 'path';

const execAsync = promisify(exec);

describe("worktree create route - repositories without commits", () => {
describe('worktree create route - repositories without commits', () => {
let repoPath: string | null = null;

async function initRepoWithoutCommit() {
repoPath = await fs.mkdtemp(
path.join(os.tmpdir(), "automaker-no-commit-")
);
await execAsync("git init", { cwd: repoPath });
repoPath = await fs.mkdtemp(path.join(os.tmpdir(), 'automaker-no-commit-'));
await execAsync('git init', { cwd: repoPath });
await execAsync('git config user.email "test@example.com"', {
cwd: repoPath,
});
@@ -32,14 +30,14 @@ describe("worktree create route - repositories without commits", () => {
repoPath = null;
});

it("creates an initial commit before adding a worktree when HEAD is missing", async () => {
it('creates an initial commit before adding a worktree when HEAD is missing', async () => {
await initRepoWithoutCommit();
const handler = createCreateHandler();

const json = vi.fn();
const status = vi.fn().mockReturnThis();
const req = {
body: { projectPath: repoPath, branchName: "feature/no-head" },
body: { projectPath: repoPath, branchName: 'feature/no-head' },
} as any;
const res = {
json,
@@ -53,17 +51,12 @@ describe("worktree create route - repositories without commits", () => {
const payload = json.mock.calls[0][0];
expect(payload.success).toBe(true);

const { stdout: commitCount } = await execAsync(
"git rev-list --count HEAD",
{ cwd: repoPath! }
);
const { stdout: commitCount } = await execAsync('git rev-list --count HEAD', {
cwd: repoPath!,
});
expect(Number(commitCount.trim())).toBeGreaterThan(0);

const { stdout: latestMessage } = await execAsync(
"git log -1 --pretty=%B",
{ cwd: repoPath! }
);
const { stdout: latestMessage } = await execAsync('git log -1 --pretty=%B', { cwd: repoPath! });
expect(latestMessage.trim()).toBe(AUTOMAKER_INITIAL_COMMIT_MESSAGE);
});
});


@@ -1,7 +1,7 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import { AutoModeService } from "@/services/auto-mode-service.js";
import { ProviderFactory } from "@/providers/provider-factory.js";
import { FeatureLoader } from "@/services/feature-loader.js";
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { AutoModeService } from '@/services/auto-mode-service.js';
import { ProviderFactory } from '@/providers/provider-factory.js';
import { FeatureLoader } from '@/services/feature-loader.js';
import {
createTestGitRepo,
createTestFeature,
@@ -10,17 +10,17 @@ import {
branchExists,
worktreeExists,
type TestRepo,
} from "../helpers/git-test-repo.js";
import * as fs from "fs/promises";
import * as path from "path";
import { exec } from "child_process";
import { promisify } from "util";
} from '../helpers/git-test-repo.js';
import * as fs from 'fs/promises';
import * as path from 'path';
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

vi.mock("@/providers/provider-factory.js");
vi.mock('@/providers/provider-factory.js');

describe("auto-mode-service.ts (integration)", () => {
describe('auto-mode-service.ts (integration)', () => {
let service: AutoModeService;
let testRepo: TestRepo;
let featureLoader: FeatureLoader;
@@ -46,22 +46,22 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
}
|
||||
});
|
||||
|
||||
describe("worktree operations", () => {
|
||||
it("should use existing git worktree for feature", async () => {
|
||||
const branchName = "feature/test-feature-1";
|
||||
|
||||
describe('worktree operations', () => {
|
||||
it('should use existing git worktree for feature', async () => {
|
||||
const branchName = 'feature/test-feature-1';
|
||||
|
||||
// Create a test feature with branchName set
|
||||
await createTestFeature(testRepo.path, "test-feature-1", {
|
||||
id: "test-feature-1",
|
||||
category: "test",
|
||||
description: "Test feature",
|
||||
status: "pending",
|
||||
await createTestFeature(testRepo.path, 'test-feature-1', {
|
||||
id: 'test-feature-1',
|
||||
category: 'test',
|
||||
description: 'Test feature',
|
||||
status: 'pending',
|
||||
branchName: branchName,
|
||||
});
|
||||
|
||||
// Create worktree before executing (worktrees are now created when features are added/edited)
|
||||
const worktreesDir = path.join(testRepo.path, ".worktrees");
|
||||
const worktreePath = path.join(worktreesDir, "test-feature-1");
|
||||
const worktreesDir = path.join(testRepo.path, '.worktrees');
|
||||
const worktreePath = path.join(worktreesDir, 'test-feature-1');
|
||||
await fs.mkdir(worktreesDir, { recursive: true });
|
||||
await execAsync(`git worktree add -b ${branchName} "${worktreePath}" HEAD`, {
|
||||
cwd: testRepo.path,
|
||||
@@ -69,30 +69,28 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
|
||||
// Mock provider to complete quickly
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "assistant",
|
||||
type: 'assistant',
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "Feature implemented" }],
|
||||
role: 'assistant',
|
||||
content: [{ type: 'text', text: 'Feature implemented' }],
|
||||
},
|
||||
};
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Execute feature with worktrees enabled
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"test-feature-1",
|
||||
'test-feature-1',
|
||||
true, // useWorktrees
|
||||
false // isAutoMode
|
||||
);
|
||||
@@ -107,8 +105,8 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
const worktrees = await listWorktrees(testRepo.path);
|
||||
expect(worktrees.length).toBeGreaterThan(0);
|
||||
// Verify that at least one worktree path contains our feature ID
|
||||
const worktreePathsMatch = worktrees.some(wt =>
|
||||
wt.includes("test-feature-1") || wt.includes(".worktrees")
|
||||
const worktreePathsMatch = worktrees.some(
|
||||
(wt) => wt.includes('test-feature-1') || wt.includes('.worktrees')
|
||||
);
|
||||
expect(worktreePathsMatch).toBe(true);
|
||||
|
||||
@@ -116,243 +114,200 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
// This is expected behavior - manual cleanup is required
|
||||
}, 30000);
|
||||
|
||||
it("should handle error gracefully", async () => {
|
||||
await createTestFeature(testRepo.path, "test-feature-error", {
|
||||
id: "test-feature-error",
|
||||
category: "test",
|
||||
description: "Test feature that errors",
|
||||
status: "pending",
|
||||
it('should handle error gracefully', async () => {
|
||||
await createTestFeature(testRepo.path, 'test-feature-error', {
|
||||
id: 'test-feature-error',
|
||||
category: 'test',
|
||||
description: 'Test feature that errors',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
// Mock provider that throws error
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
throw new Error("Provider error");
|
||||
throw new Error('Provider error');
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Execute feature (should handle error)
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"test-feature-error",
|
||||
true,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'test-feature-error', true, false);
|
||||
|
||||
// Verify feature status was updated to backlog (error status)
|
||||
const feature = await featureLoader.get(
|
||||
testRepo.path,
|
||||
"test-feature-error"
|
||||
);
|
||||
expect(feature?.status).toBe("backlog");
|
||||
const feature = await featureLoader.get(testRepo.path, 'test-feature-error');
|
||||
expect(feature?.status).toBe('backlog');
|
||||
}, 30000);
|
||||
|
||||
it("should work without worktrees", async () => {
|
||||
await createTestFeature(testRepo.path, "test-no-worktree", {
|
||||
id: "test-no-worktree",
|
||||
category: "test",
|
||||
description: "Test without worktree",
|
||||
status: "pending",
|
||||
it('should work without worktrees', async () => {
|
||||
await createTestFeature(testRepo.path, 'test-no-worktree', {
|
||||
id: 'test-no-worktree',
|
||||
category: 'test',
|
||||
description: 'Test without worktree',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Execute without worktrees
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"test-no-worktree",
|
||||
'test-no-worktree',
|
||||
false, // useWorktrees = false
|
||||
false
|
||||
);
|
||||
|
||||
// Feature should be updated successfully
|
||||
const feature = await featureLoader.get(
|
||||
testRepo.path,
|
||||
"test-no-worktree"
|
||||
);
|
||||
expect(feature?.status).toBe("waiting_approval");
|
||||
const feature = await featureLoader.get(testRepo.path, 'test-no-worktree');
|
||||
expect(feature?.status).toBe('waiting_approval');
|
||||
}, 30000);
|
||||
});
|
||||
|
||||
describe("feature execution", () => {
|
||||
it("should execute feature and update status", async () => {
|
||||
await createTestFeature(testRepo.path, "feature-exec-1", {
|
||||
id: "feature-exec-1",
|
||||
category: "ui",
|
||||
description: "Execute this feature",
|
||||
status: "pending",
|
||||
describe('feature execution', () => {
|
||||
it('should execute feature and update status', async () => {
|
||||
await createTestFeature(testRepo.path, 'feature-exec-1', {
|
||||
id: 'feature-exec-1',
|
||||
category: 'ui',
|
||||
description: 'Execute this feature',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "assistant",
|
||||
type: 'assistant',
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "Implemented the feature" }],
|
||||
role: 'assistant',
|
||||
content: [{ type: 'text', text: 'Implemented the feature' }],
|
||||
},
|
||||
};
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"feature-exec-1",
|
||||
'feature-exec-1',
|
||||
false, // Don't use worktrees so agent output is saved to main project
|
||||
false
|
||||
);
|
||||
|
||||
// Check feature status was updated
|
||||
const feature = await featureLoader.get(testRepo.path, "feature-exec-1");
|
||||
expect(feature?.status).toBe("waiting_approval");
|
||||
const feature = await featureLoader.get(testRepo.path, 'feature-exec-1');
|
||||
expect(feature?.status).toBe('waiting_approval');
|
||||
|
||||
// Check agent output was saved
|
||||
const agentOutput = await featureLoader.getAgentOutput(
|
||||
testRepo.path,
|
||||
"feature-exec-1"
|
||||
);
|
||||
const agentOutput = await featureLoader.getAgentOutput(testRepo.path, 'feature-exec-1');
|
||||
expect(agentOutput).toBeTruthy();
|
||||
expect(agentOutput).toContain("Implemented the feature");
|
||||
expect(agentOutput).toContain('Implemented the feature');
|
||||
}, 30000);
|
||||
|
||||
it("should handle feature not found", async () => {
|
||||
it('should handle feature not found', async () => {
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Try to execute non-existent feature
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"nonexistent-feature",
|
||||
true,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'nonexistent-feature', true, false);
|
||||
|
||||
// Should emit error event
|
||||
expect(mockEvents.emit).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
featureId: "nonexistent-feature",
|
||||
error: expect.stringContaining("not found"),
|
||||
featureId: 'nonexistent-feature',
|
||||
error: expect.stringContaining('not found'),
|
||||
})
|
||||
);
|
||||
}, 30000);
|
||||
|
||||
it("should prevent duplicate feature execution", async () => {
|
||||
await createTestFeature(testRepo.path, "feature-dup", {
|
||||
id: "feature-dup",
|
||||
category: "test",
|
||||
description: "Duplicate test",
|
||||
status: "pending",
|
||||
it('should prevent duplicate feature execution', async () => {
|
||||
await createTestFeature(testRepo.path, 'feature-dup', {
|
||||
id: 'feature-dup',
|
||||
category: 'test',
|
||||
description: 'Duplicate test',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
// Simulate slow execution
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Start first execution
|
||||
const promise1 = service.executeFeature(
|
||||
testRepo.path,
|
||||
"feature-dup",
|
||||
false,
|
||||
false
|
||||
);
|
||||
const promise1 = service.executeFeature(testRepo.path, 'feature-dup', false, false);
|
||||
|
||||
// Try to start second execution (should throw)
|
||||
await expect(
|
||||
service.executeFeature(testRepo.path, "feature-dup", false, false)
|
||||
).rejects.toThrow("already running");
|
||||
service.executeFeature(testRepo.path, 'feature-dup', false, false)
|
||||
).rejects.toThrow('already running');
|
||||
|
||||
await promise1;
|
||||
}, 30000);
|
||||
|
||||
it("should use feature-specific model", async () => {
|
||||
await createTestFeature(testRepo.path, "feature-model", {
|
||||
id: "feature-model",
|
||||
category: "test",
|
||||
description: "Model test",
|
||||
status: "pending",
|
||||
model: "claude-sonnet-4-20250514",
|
||||
it('should use feature-specific model', async () => {
|
||||
await createTestFeature(testRepo.path, 'feature-model', {
|
||||
id: 'feature-model',
|
||||
category: 'test',
|
||||
description: 'Model test',
|
||||
status: 'pending',
|
||||
model: 'claude-sonnet-4-20250514',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"feature-model",
|
||||
false,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'feature-model', false, false);
|
||||
|
||||
// Should have used claude-sonnet-4-20250514
|
||||
expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith(
|
||||
"claude-sonnet-4-20250514"
|
||||
);
|
||||
expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith('claude-sonnet-4-20250514');
|
||||
}, 30000);
|
||||
});
|
||||
|
||||
describe("auto loop", () => {
|
||||
it("should start and stop auto loop", async () => {
|
||||
describe('auto loop', () => {
|
||||
it('should start and stop auto loop', async () => {
|
||||
const startPromise = service.startAutoLoop(testRepo.path, 2);
|
||||
|
||||
// Give it time to start
|
||||
@@ -365,35 +320,33 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
await startPromise.catch(() => {}); // Cleanup
|
||||
}, 10000);
|
||||
|
||||
it("should process pending features in auto loop", async () => {
|
||||
it('should process pending features in auto loop', async () => {
|
||||
// Create multiple pending features
|
||||
await createTestFeature(testRepo.path, "auto-1", {
|
||||
id: "auto-1",
|
||||
category: "test",
|
||||
description: "Auto feature 1",
|
||||
status: "pending",
|
||||
await createTestFeature(testRepo.path, 'auto-1', {
|
||||
id: 'auto-1',
|
||||
category: 'test',
|
||||
description: 'Auto feature 1',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
await createTestFeature(testRepo.path, "auto-2", {
|
||||
id: "auto-2",
|
||||
category: "test",
|
||||
description: "Auto feature 2",
|
||||
status: "pending",
|
||||
await createTestFeature(testRepo.path, 'auto-2', {
|
||||
id: 'auto-2',
|
||||
category: 'test',
|
||||
description: 'Auto feature 2',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Start auto loop
|
||||
const startPromise = service.startAutoLoop(testRepo.path, 2);
|
||||
@@ -406,25 +359,25 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
await startPromise.catch(() => {});
|
||||
|
||||
// Check that features were updated
|
||||
const feature1 = await featureLoader.get(testRepo.path, "auto-1");
|
||||
const feature2 = await featureLoader.get(testRepo.path, "auto-2");
|
||||
const feature1 = await featureLoader.get(testRepo.path, 'auto-1');
|
||||
const feature2 = await featureLoader.get(testRepo.path, 'auto-2');
|
||||
|
||||
// At least one should have been processed
|
||||
const processedCount = [feature1, feature2].filter(
|
||||
(f) => f?.status === "waiting_approval" || f?.status === "in_progress"
|
||||
(f) => f?.status === 'waiting_approval' || f?.status === 'in_progress'
|
||||
).length;
|
||||
|
||||
expect(processedCount).toBeGreaterThan(0);
|
||||
}, 15000);
|
||||
|
||||
it("should respect max concurrency", async () => {
|
||||
it('should respect max concurrency', async () => {
|
||||
// Create 5 features
|
||||
for (let i = 1; i <= 5; i++) {
|
||||
await createTestFeature(testRepo.path, `concurrent-${i}`, {
|
||||
id: `concurrent-${i}`,
|
||||
category: "test",
|
||||
category: 'test',
|
||||
description: `Concurrent feature ${i}`,
|
||||
status: "pending",
|
||||
status: 'pending',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -432,7 +385,7 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
let maxConcurrent = 0;
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
concurrentCount++;
|
||||
maxConcurrent = Math.max(maxConcurrent, concurrentCount);
|
||||
@@ -443,15 +396,13 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
concurrentCount--;
|
||||
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Start with max concurrency of 2
|
||||
const startPromise = service.startAutoLoop(testRepo.path, 2);
|
||||
@@ -466,7 +417,7 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
expect(maxConcurrent).toBeLessThanOrEqual(2);
|
||||
}, 15000);
|
||||
|
||||
it("should emit auto mode events", async () => {
|
||||
it('should emit auto mode events', async () => {
|
||||
const startPromise = service.startAutoLoop(testRepo.path, 1);
|
||||
|
||||
// Wait for start event
|
||||
@@ -474,7 +425,7 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
|
||||
// Check start event was emitted
|
||||
const startEvent = mockEvents.emit.mock.calls.find((call) =>
|
||||
call[1]?.message?.includes("Auto mode started")
|
||||
call[1]?.message?.includes('Auto mode started')
|
||||
);
|
||||
expect(startEvent).toBeTruthy();
|
||||
|
||||
@@ -484,74 +435,69 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
// Check stop event was emitted (emitted immediately by stopAutoLoop)
|
||||
const stopEvent = mockEvents.emit.mock.calls.find(
|
||||
(call) =>
|
||||
call[1]?.type === "auto_mode_stopped" ||
|
||||
call[1]?.message?.includes("Auto mode stopped")
|
||||
call[1]?.type === 'auto_mode_stopped' || call[1]?.message?.includes('Auto mode stopped')
|
||||
);
|
||||
expect(stopEvent).toBeTruthy();
|
||||
}, 10000);
|
||||
});
|
||||
|
||||
describe("error handling", () => {
|
||||
it("should handle provider errors gracefully", async () => {
|
||||
await createTestFeature(testRepo.path, "error-feature", {
|
||||
id: "error-feature",
|
||||
category: "test",
|
||||
description: "Error test",
|
||||
status: "pending",
|
||||
describe('error handling', () => {
|
||||
it('should handle provider errors gracefully', async () => {
|
||||
await createTestFeature(testRepo.path, 'error-feature', {
|
||||
id: 'error-feature',
|
||||
category: 'test',
|
||||
description: 'Error test',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
throw new Error("Provider execution failed");
|
||||
throw new Error('Provider execution failed');
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
// Should not throw
|
||||
await service.executeFeature(testRepo.path, "error-feature", true, false);
|
||||
await service.executeFeature(testRepo.path, 'error-feature', true, false);
|
||||
|
||||
// Feature should be marked as backlog (error status)
|
||||
const feature = await featureLoader.get(testRepo.path, "error-feature");
|
||||
expect(feature?.status).toBe("backlog");
|
||||
const feature = await featureLoader.get(testRepo.path, 'error-feature');
|
||||
expect(feature?.status).toBe('backlog');
|
||||
}, 30000);
|
||||
|
||||
it("should continue auto loop after feature error", async () => {
|
||||
await createTestFeature(testRepo.path, "fail-1", {
|
||||
id: "fail-1",
|
||||
category: "test",
|
||||
description: "Will fail",
|
||||
status: "pending",
|
||||
it('should continue auto loop after feature error', async () => {
|
||||
await createTestFeature(testRepo.path, 'fail-1', {
|
||||
id: 'fail-1',
|
||||
category: 'test',
|
||||
description: 'Will fail',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
await createTestFeature(testRepo.path, "success-1", {
|
||||
id: "success-1",
|
||||
category: "test",
|
||||
description: "Will succeed",
|
||||
status: "pending",
|
||||
await createTestFeature(testRepo.path, 'success-1', {
|
||||
id: 'success-1',
|
||||
category: 'test',
|
||||
description: 'Will succeed',
|
||||
status: 'pending',
|
||||
});
|
||||
|
||||
let callCount = 0;
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
callCount++;
|
||||
if (callCount === 1) {
|
||||
throw new Error("First feature fails");
|
||||
throw new Error('First feature fails');
|
||||
}
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
const startPromise = service.startAutoLoop(testRepo.path, 1);
|
||||
|
||||
@@ -566,200 +512,177 @@ describe("auto-mode-service.ts (integration)", () => {
|
||||
}, 15000);
|
||||
});
|
||||
|
||||
describe("planning mode", () => {
|
||||
it("should execute feature with skip planning mode", async () => {
|
||||
await createTestFeature(testRepo.path, "skip-plan-feature", {
|
||||
id: "skip-plan-feature",
|
||||
category: "test",
|
||||
description: "Feature with skip planning",
|
||||
status: "pending",
|
||||
planningMode: "skip",
|
||||
describe('planning mode', () => {
|
||||
it('should execute feature with skip planning mode', async () => {
|
||||
await createTestFeature(testRepo.path, 'skip-plan-feature', {
|
||||
id: 'skip-plan-feature',
|
||||
category: 'test',
|
||||
description: 'Feature with skip planning',
|
||||
status: 'pending',
|
||||
planningMode: 'skip',
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "assistant",
|
||||
type: 'assistant',
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "Feature implemented" }],
|
||||
role: 'assistant',
|
||||
content: [{ type: 'text', text: 'Feature implemented' }],
|
||||
},
|
||||
};
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"skip-plan-feature",
|
||||
false,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'skip-plan-feature', false, false);
|
||||
|
||||
const feature = await featureLoader.get(testRepo.path, "skip-plan-feature");
|
||||
expect(feature?.status).toBe("waiting_approval");
|
||||
const feature = await featureLoader.get(testRepo.path, 'skip-plan-feature');
|
||||
expect(feature?.status).toBe('waiting_approval');
|
||||
}, 30000);
|
||||
|
||||
it("should execute feature with lite planning mode without approval", async () => {
|
||||
await createTestFeature(testRepo.path, "lite-plan-feature", {
|
||||
id: "lite-plan-feature",
|
||||
category: "test",
|
||||
description: "Feature with lite planning",
|
||||
status: "pending",
|
||||
planningMode: "lite",
|
||||
it('should execute feature with lite planning mode without approval', async () => {
|
||||
await createTestFeature(testRepo.path, 'lite-plan-feature', {
|
||||
id: 'lite-plan-feature',
|
||||
category: 'test',
|
||||
description: 'Feature with lite planning',
|
||||
status: 'pending',
|
||||
planningMode: 'lite',
|
||||
requirePlanApproval: false,
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "assistant",
|
||||
type: 'assistant',
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "[PLAN_GENERATED] Planning outline complete.\n\nFeature implemented" }],
|
||||
role: 'assistant',
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: '[PLAN_GENERATED] Planning outline complete.\n\nFeature implemented',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"lite-plan-feature",
|
||||
false,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'lite-plan-feature', false, false);
|
||||
|
||||
const feature = await featureLoader.get(testRepo.path, "lite-plan-feature");
|
||||
expect(feature?.status).toBe("waiting_approval");
|
||||
const feature = await featureLoader.get(testRepo.path, 'lite-plan-feature');
|
||||
expect(feature?.status).toBe('waiting_approval');
|
||||
}, 30000);
|
||||
|
||||
it("should emit planning_started event for spec mode", async () => {
|
||||
await createTestFeature(testRepo.path, "spec-plan-feature", {
|
||||
id: "spec-plan-feature",
|
||||
category: "test",
|
||||
description: "Feature with spec planning",
|
||||
status: "pending",
|
||||
planningMode: "spec",
|
||||
it('should emit planning_started event for spec mode', async () => {
|
||||
await createTestFeature(testRepo.path, 'spec-plan-feature', {
|
||||
id: 'spec-plan-feature',
|
||||
category: 'test',
|
||||
description: 'Feature with spec planning',
|
||||
status: 'pending',
|
||||
planningMode: 'spec',
|
||||
requirePlanApproval: false,
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "assistant",
|
||||
type: 'assistant',
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "Spec generated\n\n[SPEC_GENERATED] Review the spec." }],
|
||||
role: 'assistant',
|
||||
content: [
|
||||
{ type: 'text', text: 'Spec generated\n\n[SPEC_GENERATED] Review the spec.' },
|
||||
],
|
||||
},
|
||||
};
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"spec-plan-feature",
|
||||
false,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'spec-plan-feature', false, false);
|
||||
|
||||
// Check planning_started event was emitted
|
||||
const planningEvent = mockEvents.emit.mock.calls.find(
|
||||
(call) => call[1]?.mode === "spec"
|
||||
);
|
||||
const planningEvent = mockEvents.emit.mock.calls.find((call) => call[1]?.mode === 'spec');
|
||||
expect(planningEvent).toBeTruthy();
|
||||
}, 30000);
|
||||
|
||||
it("should handle feature with full planning mode", async () => {
|
||||
await createTestFeature(testRepo.path, "full-plan-feature", {
|
||||
id: "full-plan-feature",
|
||||
category: "test",
|
||||
description: "Feature with full planning",
|
||||
status: "pending",
|
||||
planningMode: "full",
|
||||
it('should handle feature with full planning mode', async () => {
|
||||
await createTestFeature(testRepo.path, 'full-plan-feature', {
|
||||
id: 'full-plan-feature',
|
||||
category: 'test',
|
||||
description: 'Feature with full planning',
|
||||
status: 'pending',
|
||||
planningMode: 'full',
|
||||
requirePlanApproval: false,
|
||||
});
|
||||
|
||||
const mockProvider = {
|
||||
getName: () => "claude",
|
||||
getName: () => 'claude',
|
||||
executeQuery: async function* () {
|
||||
yield {
|
||||
type: "assistant",
|
||||
type: 'assistant',
|
||||
message: {
|
||||
role: "assistant",
|
||||
content: [{ type: "text", text: "Full spec with phases\n\n[SPEC_GENERATED] Review." }],
|
||||
role: 'assistant',
|
||||
content: [
|
||||
{ type: 'text', text: 'Full spec with phases\n\n[SPEC_GENERATED] Review.' },
|
||||
],
|
||||
},
|
||||
};
|
||||
yield {
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
type: 'result',
|
||||
subtype: 'success',
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
|
||||
mockProvider as any
|
||||
);
|
||||
vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
|
||||
|
||||
await service.executeFeature(
|
||||
testRepo.path,
|
||||
"full-plan-feature",
|
||||
false,
|
||||
false
|
||||
);
|
||||
await service.executeFeature(testRepo.path, 'full-plan-feature', false, false);
|
||||
|
||||
// Check planning_started event was emitted with full mode
|
||||
const planningEvent = mockEvents.emit.mock.calls.find(
|
||||
(call) => call[1]?.mode === "full"
|
||||
);
|
||||
const planningEvent = mockEvents.emit.mock.calls.find((call) => call[1]?.mode === 'full');
|
||||
expect(planningEvent).toBeTruthy();
|
||||
}, 30000);
|
||||
|
||||
it("should track pending approval correctly", async () => {
|
||||
it('should track pending approval correctly', async () => {
|
||||
// Initially no pending approvals
|
||||
expect(service.hasPendingApproval("non-existent")).toBe(false);
|
||||
expect(service.hasPendingApproval('non-existent')).toBe(false);
|
||||
});
|
||||
|
||||
it("should cancel pending approval gracefully", () => {
|
||||
it('should cancel pending approval gracefully', () => {
|
||||
// Should not throw when cancelling non-existent approval
|
||||
expect(() => service.cancelPlanApproval("non-existent")).not.toThrow();
|
||||
expect(() => service.cancelPlanApproval('non-existent')).not.toThrow();
|
||||
});
|
||||
|
||||
it("should resolve approval with error for non-existent feature", async () => {
|
||||
it('should resolve approval with error for non-existent feature', async () => {
|
||||
const result = await service.resolvePlanApproval(
|
||||
"non-existent",
|
||||
'non-existent',
|
||||
true,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain("No pending approval");
|
||||
expect(result.error).toContain('No pending approval');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,143 +1,137 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
specToXml,
|
||||
getStructuredSpecPromptInstruction,
|
||||
getAppSpecFormatInstruction,
|
||||
APP_SPEC_XML_FORMAT,
|
||||
type SpecOutput,
|
||||
} from "@/lib/app-spec-format.js";
|
||||
} from '@/lib/app-spec-format.js';
|
||||
|
||||
describe("app-spec-format.ts", () => {
|
||||
describe("specToXml", () => {
|
||||
it("should convert minimal spec to XML", () => {
|
||||
describe('app-spec-format.ts', () => {
|
||||
describe('specToXml', () => {
|
||||
it('should convert minimal spec to XML', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test Project",
|
||||
overview: "A test project",
|
||||
technology_stack: ["TypeScript", "Node.js"],
|
||||
core_capabilities: ["Testing", "Development"],
|
||||
implemented_features: [
|
||||
{ name: "Feature 1", description: "First feature" },
|
||||
],
|
||||
project_name: 'Test Project',
|
||||
overview: 'A test project',
|
||||
technology_stack: ['TypeScript', 'Node.js'],
|
||||
core_capabilities: ['Testing', 'Development'],
|
||||
implemented_features: [{ name: 'Feature 1', description: 'First feature' }],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).toContain('<?xml version="1.0" encoding="UTF-8"?>');
|
||||
expect(xml).toContain("<project_specification>");
|
||||
expect(xml).toContain("</project_specification>");
|
||||
expect(xml).toContain("<project_name>Test Project</project_name>");
|
||||
expect(xml).toContain("<technology>TypeScript</technology>");
|
||||
expect(xml).toContain("<capability>Testing</capability>");
|
||||
expect(xml).toContain('<project_specification>');
|
||||
expect(xml).toContain('</project_specification>');
|
||||
expect(xml).toContain('<project_name>Test Project</project_name>');
|
||||
expect(xml).toContain('<technology>TypeScript</technology>');
|
||||
expect(xml).toContain('<capability>Testing</capability>');
|
||||
});
|
||||
|
||||
it("should escape XML special characters", () => {
|
||||
it('should escape XML special characters', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test & Project",
|
||||
overview: "Description with <tags>",
|
||||
technology_stack: ["TypeScript"],
|
||||
core_capabilities: ["Cap"],
|
||||
project_name: 'Test & Project',
|
||||
overview: 'Description with <tags>',
|
||||
technology_stack: ['TypeScript'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).toContain("Test & Project");
|
||||
expect(xml).toContain("<tags>");
|
||||
expect(xml).toContain('Test & Project');
|
||||
expect(xml).toContain('<tags>');
|
||||
});
|
||||
|
||||
it("should include file_locations when provided", () => {
|
||||
it('should include file_locations when provided', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test",
|
||||
overview: "Test",
|
||||
technology_stack: ["TS"],
|
||||
core_capabilities: ["Cap"],
|
||||
project_name: 'Test',
|
||||
overview: 'Test',
|
||||
technology_stack: ['TS'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [
|
||||
{
|
||||
name: "Feature",
|
||||
description: "Desc",
|
||||
file_locations: ["src/index.ts"],
|
||||
name: 'Feature',
|
||||
description: 'Desc',
|
||||
file_locations: ['src/index.ts'],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).toContain("<file_locations>");
|
||||
expect(xml).toContain("<location>src/index.ts</location>");
|
||||
expect(xml).toContain('<file_locations>');
|
||||
expect(xml).toContain('<location>src/index.ts</location>');
|
||||
});
|
||||
|
||||
it("should not include file_locations when empty", () => {
|
||||
it('should not include file_locations when empty', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test",
|
||||
overview: "Test",
|
||||
technology_stack: ["TS"],
|
||||
core_capabilities: ["Cap"],
|
||||
implemented_features: [
|
||||
{ name: "Feature", description: "Desc", file_locations: [] },
|
||||
],
|
||||
project_name: 'Test',
|
||||
overview: 'Test',
|
||||
technology_stack: ['TS'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [{ name: 'Feature', description: 'Desc', file_locations: [] }],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).not.toContain("<file_locations>");
|
||||
expect(xml).not.toContain('<file_locations>');
|
||||
});
|
||||
|
||||
it("should include additional_requirements when provided", () => {
|
||||
it('should include additional_requirements when provided', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test",
|
||||
overview: "Test",
|
||||
technology_stack: ["TS"],
|
||||
core_capabilities: ["Cap"],
|
||||
project_name: 'Test',
|
||||
overview: 'Test',
|
||||
technology_stack: ['TS'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [],
|
||||
additional_requirements: ["Node.js 18+"],
|
||||
additional_requirements: ['Node.js 18+'],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).toContain("<additional_requirements>");
|
||||
expect(xml).toContain("<requirement>Node.js 18+</requirement>");
|
||||
expect(xml).toContain('<additional_requirements>');
|
||||
expect(xml).toContain('<requirement>Node.js 18+</requirement>');
|
||||
});
|
||||
|
||||
it("should include development_guidelines when provided", () => {
|
||||
it('should include development_guidelines when provided', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test",
|
||||
overview: "Test",
|
||||
technology_stack: ["TS"],
|
||||
core_capabilities: ["Cap"],
|
||||
project_name: 'Test',
|
||||
overview: 'Test',
|
||||
technology_stack: ['TS'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [],
|
||||
development_guidelines: ["Use ESLint"],
|
||||
development_guidelines: ['Use ESLint'],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).toContain("<development_guidelines>");
|
||||
expect(xml).toContain("<guideline>Use ESLint</guideline>");
|
||||
expect(xml).toContain('<development_guidelines>');
|
||||
expect(xml).toContain('<guideline>Use ESLint</guideline>');
|
||||
});
|
||||
|
||||
it("should include implementation_roadmap when provided", () => {
|
||||
it('should include implementation_roadmap when provided', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test",
|
||||
overview: "Test",
|
||||
technology_stack: ["TS"],
|
||||
core_capabilities: ["Cap"],
|
||||
project_name: 'Test',
|
||||
overview: 'Test',
|
||||
technology_stack: ['TS'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [],
|
||||
implementation_roadmap: [
|
||||
{ phase: "Phase 1", status: "completed", description: "Setup" },
|
||||
],
|
||||
implementation_roadmap: [{ phase: 'Phase 1', status: 'completed', description: 'Setup' }],
|
||||
};
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).toContain("<implementation_roadmap>");
|
||||
expect(xml).toContain("<status>completed</status>");
|
||||
expect(xml).toContain('<implementation_roadmap>');
|
||||
expect(xml).toContain('<status>completed</status>');
|
||||
});
|
||||
|
||||
it("should not include optional sections when empty", () => {
|
||||
it('should not include optional sections when empty', () => {
|
||||
const spec: SpecOutput = {
|
||||
project_name: "Test",
|
||||
overview: "Test",
|
||||
technology_stack: ["TS"],
|
||||
core_capabilities: ["Cap"],
|
||||
project_name: 'Test',
|
||||
overview: 'Test',
|
||||
technology_stack: ['TS'],
|
||||
core_capabilities: ['Cap'],
|
||||
implemented_features: [],
|
||||
additional_requirements: [],
|
||||
development_guidelines: [],
|
||||
@@ -146,44 +140,44 @@ describe("app-spec-format.ts", () => {
|
||||
|
||||
const xml = specToXml(spec);
|
||||
|
||||
expect(xml).not.toContain("<additional_requirements>");
|
||||
expect(xml).not.toContain("<development_guidelines>");
|
||||
expect(xml).not.toContain("<implementation_roadmap>");
|
||||
expect(xml).not.toContain('<additional_requirements>');
|
||||
expect(xml).not.toContain('<development_guidelines>');
|
||||
expect(xml).not.toContain('<implementation_roadmap>');
|
||||
});
|
||||
});
|
||||
|
||||
describe("getStructuredSpecPromptInstruction", () => {
|
||||
it("should return non-empty prompt instruction", () => {
|
||||
describe('getStructuredSpecPromptInstruction', () => {
|
||||
it('should return non-empty prompt instruction', () => {
|
||||
const instruction = getStructuredSpecPromptInstruction();
|
||||
expect(instruction).toBeTruthy();
|
||||
expect(instruction.length).toBeGreaterThan(100);
|
||||
});
|
||||
|
||||
it("should mention required fields", () => {
|
||||
it('should mention required fields', () => {
|
||||
const instruction = getStructuredSpecPromptInstruction();
|
||||
expect(instruction).toContain("project_name");
|
||||
expect(instruction).toContain("overview");
|
||||
expect(instruction).toContain("technology_stack");
|
||||
expect(instruction).toContain('project_name');
|
||||
expect(instruction).toContain('overview');
|
||||
expect(instruction).toContain('technology_stack');
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAppSpecFormatInstruction", () => {
|
||||
it("should return non-empty format instruction", () => {
|
||||
describe('getAppSpecFormatInstruction', () => {
|
||||
it('should return non-empty format instruction', () => {
|
||||
const instruction = getAppSpecFormatInstruction();
|
||||
expect(instruction).toBeTruthy();
|
||||
expect(instruction.length).toBeGreaterThan(100);
|
||||
});
|
||||
|
||||
it("should include critical formatting requirements", () => {
|
||||
it('should include critical formatting requirements', () => {
|
||||
const instruction = getAppSpecFormatInstruction();
|
||||
expect(instruction).toContain("CRITICAL FORMATTING REQUIREMENTS");
|
||||
expect(instruction).toContain('CRITICAL FORMATTING REQUIREMENTS');
|
||||
});
|
||||
});
|
||||
|
||||
describe("APP_SPEC_XML_FORMAT", () => {
|
||||
it("should contain valid XML template structure", () => {
|
||||
expect(APP_SPEC_XML_FORMAT).toContain("<project_specification>");
|
||||
expect(APP_SPEC_XML_FORMAT).toContain("</project_specification>");
|
||||
describe('APP_SPEC_XML_FORMAT', () => {
|
||||
it('should contain valid XML template structure', () => {
|
||||
expect(APP_SPEC_XML_FORMAT).toContain('<project_specification>');
|
||||
expect(APP_SPEC_XML_FORMAT).toContain('</project_specification>');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { createMockExpressContext } from "../../utils/mocks.js";
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import { createMockExpressContext } from '../../utils/mocks.js';
|
||||
|
||||
/**
|
||||
* Note: auth.ts reads AUTOMAKER_API_KEY at module load time.
|
||||
* We need to reset modules and reimport for each test to get fresh state.
|
||||
*/
|
||||
describe("auth.ts", () => {
|
||||
describe('auth.ts', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
describe("authMiddleware - no API key", () => {
|
||||
it("should call next() when no API key is set", async () => {
|
||||
describe('authMiddleware - no API key', () => {
|
||||
it('should call next() when no API key is set', async () => {
|
||||
delete process.env.AUTOMAKER_API_KEY;
|
||||
|
||||
const { authMiddleware } = await import("@/lib/auth.js");
|
||||
const { authMiddleware } = await import('@/lib/auth.js');
|
||||
const { req, res, next } = createMockExpressContext();
|
||||
|
||||
authMiddleware(req, res, next);
|
||||
@@ -24,11 +24,11 @@ describe("auth.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("authMiddleware - with API key", () => {
|
||||
it("should reject request without API key header", async () => {
|
||||
process.env.AUTOMAKER_API_KEY = "test-secret-key";
|
||||
describe('authMiddleware - with API key', () => {
|
||||
it('should reject request without API key header', async () => {
|
||||
process.env.AUTOMAKER_API_KEY = 'test-secret-key';
|
||||
|
||||
const { authMiddleware } = await import("@/lib/auth.js");
|
||||
const { authMiddleware } = await import('@/lib/auth.js');
|
||||
const { req, res, next } = createMockExpressContext();
|
||||
|
||||
authMiddleware(req, res, next);
|
||||
@@ -36,34 +36,34 @@ describe("auth.ts", () => {
|
||||
expect(res.status).toHaveBeenCalledWith(401);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
error: "Authentication required. Provide X-API-Key header.",
|
||||
error: 'Authentication required. Provide X-API-Key header.',
|
||||
});
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should reject request with invalid API key", async () => {
|
||||
process.env.AUTOMAKER_API_KEY = "test-secret-key";
|
||||
it('should reject request with invalid API key', async () => {
|
||||
process.env.AUTOMAKER_API_KEY = 'test-secret-key';
|
||||
|
||||
const { authMiddleware } = await import("@/lib/auth.js");
|
||||
const { authMiddleware } = await import('@/lib/auth.js');
|
||||
const { req, res, next } = createMockExpressContext();
|
||||
req.headers["x-api-key"] = "wrong-key";
|
||||
req.headers['x-api-key'] = 'wrong-key';
|
||||
|
||||
authMiddleware(req, res, next);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
error: "Invalid API key.",
|
||||
error: 'Invalid API key.',
|
||||
});
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should call next() with valid API key", async () => {
|
||||
process.env.AUTOMAKER_API_KEY = "test-secret-key";
|
||||
it('should call next() with valid API key', async () => {
|
||||
process.env.AUTOMAKER_API_KEY = 'test-secret-key';
|
||||
|
||||
const { authMiddleware } = await import("@/lib/auth.js");
|
||||
const { req, res, next} = createMockExpressContext();
|
||||
req.headers["x-api-key"] = "test-secret-key";
|
||||
const { authMiddleware } = await import('@/lib/auth.js');
|
||||
const { req, res, next } = createMockExpressContext();
|
||||
req.headers['x-api-key'] = 'test-secret-key';
|
||||
|
||||
authMiddleware(req, res, next);
|
||||
|
||||
@@ -72,44 +72,44 @@ describe("auth.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("isAuthEnabled", () => {
|
||||
it("should return false when no API key is set", async () => {
|
||||
describe('isAuthEnabled', () => {
|
||||
it('should return false when no API key is set', async () => {
|
||||
delete process.env.AUTOMAKER_API_KEY;
|
||||
|
||||
const { isAuthEnabled } = await import("@/lib/auth.js");
|
||||
const { isAuthEnabled } = await import('@/lib/auth.js');
|
||||
expect(isAuthEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("should return true when API key is set", async () => {
|
||||
process.env.AUTOMAKER_API_KEY = "test-key";
|
||||
it('should return true when API key is set', async () => {
|
||||
process.env.AUTOMAKER_API_KEY = 'test-key';
|
||||
|
||||
const { isAuthEnabled } = await import("@/lib/auth.js");
|
||||
const { isAuthEnabled } = await import('@/lib/auth.js');
|
||||
expect(isAuthEnabled()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAuthStatus", () => {
|
||||
it("should return disabled status when no API key", async () => {
|
||||
describe('getAuthStatus', () => {
|
||||
it('should return disabled status when no API key', async () => {
|
||||
delete process.env.AUTOMAKER_API_KEY;
|
||||
|
||||
const { getAuthStatus } = await import("@/lib/auth.js");
|
||||
const { getAuthStatus } = await import('@/lib/auth.js');
|
||||
const status = getAuthStatus();
|
||||
|
||||
expect(status).toEqual({
|
||||
enabled: false,
|
||||
method: "none",
|
||||
method: 'none',
|
||||
});
|
||||
});
|
||||
|
||||
it("should return enabled status when API key is set", async () => {
|
||||
process.env.AUTOMAKER_API_KEY = "test-key";
|
||||
it('should return enabled status when API key is set', async () => {
|
||||
process.env.AUTOMAKER_API_KEY = 'test-key';
|
||||
|
||||
const { getAuthStatus } = await import("@/lib/auth.js");
|
||||
const { getAuthStatus } = await import('@/lib/auth.js');
|
||||
const status = getAuthStatus();
|
||||
|
||||
expect(status).toEqual({
|
||||
enabled: true,
|
||||
method: "api_key",
|
||||
method: 'api_key',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
getEnhancementPrompt,
|
||||
getSystemPrompt,
|
||||
@@ -15,38 +15,38 @@ import {
|
||||
SIMPLIFY_EXAMPLES,
|
||||
ACCEPTANCE_EXAMPLES,
|
||||
type EnhancementMode,
|
||||
} from "@/lib/enhancement-prompts.js";
|
||||
} from '@/lib/enhancement-prompts.js';
|
||||
|
||||
describe("enhancement-prompts.ts", () => {
|
||||
describe("System Prompt Constants", () => {
|
||||
it("should have non-empty improve system prompt", () => {
|
||||
describe('enhancement-prompts.ts', () => {
|
||||
describe('System Prompt Constants', () => {
|
||||
it('should have non-empty improve system prompt', () => {
|
||||
expect(IMPROVE_SYSTEM_PROMPT).toBeDefined();
|
||||
expect(IMPROVE_SYSTEM_PROMPT.length).toBeGreaterThan(100);
|
||||
expect(IMPROVE_SYSTEM_PROMPT).toContain("ANALYZE");
|
||||
expect(IMPROVE_SYSTEM_PROMPT).toContain("CLARIFY");
|
||||
expect(IMPROVE_SYSTEM_PROMPT).toContain('ANALYZE');
|
||||
expect(IMPROVE_SYSTEM_PROMPT).toContain('CLARIFY');
|
||||
});
|
||||
|
||||
it("should have non-empty technical system prompt", () => {
|
||||
it('should have non-empty technical system prompt', () => {
|
||||
expect(TECHNICAL_SYSTEM_PROMPT).toBeDefined();
|
||||
expect(TECHNICAL_SYSTEM_PROMPT.length).toBeGreaterThan(100);
|
||||
expect(TECHNICAL_SYSTEM_PROMPT).toContain("technical");
|
||||
expect(TECHNICAL_SYSTEM_PROMPT).toContain('technical');
|
||||
});
|
||||
|
||||
it("should have non-empty simplify system prompt", () => {
|
||||
it('should have non-empty simplify system prompt', () => {
|
||||
expect(SIMPLIFY_SYSTEM_PROMPT).toBeDefined();
|
||||
expect(SIMPLIFY_SYSTEM_PROMPT.length).toBeGreaterThan(100);
|
||||
expect(SIMPLIFY_SYSTEM_PROMPT).toContain("simplify");
|
||||
expect(SIMPLIFY_SYSTEM_PROMPT).toContain('simplify');
|
||||
});
|
||||
|
||||
it("should have non-empty acceptance system prompt", () => {
|
||||
it('should have non-empty acceptance system prompt', () => {
|
||||
expect(ACCEPTANCE_SYSTEM_PROMPT).toBeDefined();
|
||||
expect(ACCEPTANCE_SYSTEM_PROMPT.length).toBeGreaterThan(100);
|
||||
expect(ACCEPTANCE_SYSTEM_PROMPT).toContain("acceptance criteria");
|
||||
expect(ACCEPTANCE_SYSTEM_PROMPT).toContain('acceptance criteria');
|
||||
});
|
||||
});
|
||||
|
||||
describe("Example Constants", () => {
|
||||
it("should have improve examples with input and output", () => {
|
||||
describe('Example Constants', () => {
|
||||
it('should have improve examples with input and output', () => {
|
||||
expect(IMPROVE_EXAMPLES).toBeDefined();
|
||||
expect(IMPROVE_EXAMPLES.length).toBeGreaterThan(0);
|
||||
IMPROVE_EXAMPLES.forEach((example) => {
|
||||
@@ -57,7 +57,7 @@ describe("enhancement-prompts.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("should have technical examples with input and output", () => {
|
||||
it('should have technical examples with input and output', () => {
|
||||
expect(TECHNICAL_EXAMPLES).toBeDefined();
|
||||
expect(TECHNICAL_EXAMPLES.length).toBeGreaterThan(0);
|
||||
TECHNICAL_EXAMPLES.forEach((example) => {
|
||||
@@ -66,7 +66,7 @@ describe("enhancement-prompts.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("should have simplify examples with input and output", () => {
|
||||
it('should have simplify examples with input and output', () => {
|
||||
expect(SIMPLIFY_EXAMPLES).toBeDefined();
|
||||
expect(SIMPLIFY_EXAMPLES.length).toBeGreaterThan(0);
|
||||
SIMPLIFY_EXAMPLES.forEach((example) => {
|
||||
@@ -75,7 +75,7 @@ describe("enhancement-prompts.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("should have acceptance examples with input and output", () => {
|
||||
it('should have acceptance examples with input and output', () => {
|
||||
expect(ACCEPTANCE_EXAMPLES).toBeDefined();
|
||||
expect(ACCEPTANCE_EXAMPLES.length).toBeGreaterThan(0);
|
||||
ACCEPTANCE_EXAMPLES.forEach((example) => {
|
||||
@@ -85,66 +85,66 @@ describe("enhancement-prompts.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("getEnhancementPrompt", () => {
|
||||
it("should return config for improve mode", () => {
|
||||
const config = getEnhancementPrompt("improve");
|
||||
describe('getEnhancementPrompt', () => {
|
||||
it('should return config for improve mode', () => {
|
||||
const config = getEnhancementPrompt('improve');
|
||||
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
|
||||
expect(config.description).toContain("clear");
|
||||
expect(config.description).toContain('clear');
|
||||
});
|
||||
|
||||
it("should return config for technical mode", () => {
|
||||
const config = getEnhancementPrompt("technical");
|
||||
it('should return config for technical mode', () => {
|
||||
const config = getEnhancementPrompt('technical');
|
||||
expect(config.systemPrompt).toBe(TECHNICAL_SYSTEM_PROMPT);
|
||||
expect(config.description).toContain("technical");
|
||||
expect(config.description).toContain('technical');
|
||||
});
|
||||
|
||||
it("should return config for simplify mode", () => {
|
||||
const config = getEnhancementPrompt("simplify");
|
||||
it('should return config for simplify mode', () => {
|
||||
const config = getEnhancementPrompt('simplify');
|
||||
expect(config.systemPrompt).toBe(SIMPLIFY_SYSTEM_PROMPT);
|
||||
expect(config.description).toContain("concise");
|
||||
expect(config.description).toContain('concise');
|
||||
});
|
||||
|
||||
it("should return config for acceptance mode", () => {
|
||||
const config = getEnhancementPrompt("acceptance");
|
||||
it('should return config for acceptance mode', () => {
|
||||
const config = getEnhancementPrompt('acceptance');
|
||||
expect(config.systemPrompt).toBe(ACCEPTANCE_SYSTEM_PROMPT);
|
||||
expect(config.description).toContain("acceptance");
|
||||
expect(config.description).toContain('acceptance');
|
||||
});
|
||||
|
||||
it("should handle case-insensitive mode", () => {
|
||||
const config = getEnhancementPrompt("IMPROVE");
|
||||
it('should handle case-insensitive mode', () => {
|
||||
const config = getEnhancementPrompt('IMPROVE');
|
||||
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
|
||||
});
|
||||
|
||||
it("should fall back to improve for invalid mode", () => {
|
||||
const config = getEnhancementPrompt("invalid-mode");
|
||||
it('should fall back to improve for invalid mode', () => {
|
||||
const config = getEnhancementPrompt('invalid-mode');
|
||||
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
|
||||
});
|
||||
|
||||
it("should fall back to improve for empty string", () => {
|
||||
const config = getEnhancementPrompt("");
|
||||
it('should fall back to improve for empty string', () => {
|
||||
const config = getEnhancementPrompt('');
|
||||
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getSystemPrompt", () => {
|
||||
it("should return correct system prompt for each mode", () => {
|
||||
expect(getSystemPrompt("improve")).toBe(IMPROVE_SYSTEM_PROMPT);
|
||||
expect(getSystemPrompt("technical")).toBe(TECHNICAL_SYSTEM_PROMPT);
|
||||
expect(getSystemPrompt("simplify")).toBe(SIMPLIFY_SYSTEM_PROMPT);
|
||||
expect(getSystemPrompt("acceptance")).toBe(ACCEPTANCE_SYSTEM_PROMPT);
|
||||
describe('getSystemPrompt', () => {
|
||||
it('should return correct system prompt for each mode', () => {
|
||||
expect(getSystemPrompt('improve')).toBe(IMPROVE_SYSTEM_PROMPT);
|
||||
expect(getSystemPrompt('technical')).toBe(TECHNICAL_SYSTEM_PROMPT);
|
||||
expect(getSystemPrompt('simplify')).toBe(SIMPLIFY_SYSTEM_PROMPT);
|
||||
expect(getSystemPrompt('acceptance')).toBe(ACCEPTANCE_SYSTEM_PROMPT);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getExamples", () => {
|
||||
it("should return correct examples for each mode", () => {
|
||||
expect(getExamples("improve")).toBe(IMPROVE_EXAMPLES);
|
||||
expect(getExamples("technical")).toBe(TECHNICAL_EXAMPLES);
|
||||
expect(getExamples("simplify")).toBe(SIMPLIFY_EXAMPLES);
|
||||
expect(getExamples("acceptance")).toBe(ACCEPTANCE_EXAMPLES);
|
||||
describe('getExamples', () => {
|
||||
it('should return correct examples for each mode', () => {
|
||||
expect(getExamples('improve')).toBe(IMPROVE_EXAMPLES);
|
||||
expect(getExamples('technical')).toBe(TECHNICAL_EXAMPLES);
|
||||
expect(getExamples('simplify')).toBe(SIMPLIFY_EXAMPLES);
|
||||
expect(getExamples('acceptance')).toBe(ACCEPTANCE_EXAMPLES);
|
||||
});
|
||||
|
||||
it("should return arrays with example objects", () => {
|
||||
const modes: EnhancementMode[] = ["improve", "technical", "simplify", "acceptance"];
|
||||
it('should return arrays with example objects', () => {
|
||||
const modes: EnhancementMode[] = ['improve', 'technical', 'simplify', 'acceptance'];
|
||||
modes.forEach((mode) => {
|
||||
const examples = getExamples(mode);
|
||||
expect(Array.isArray(examples)).toBe(true);
|
||||
@@ -153,38 +153,38 @@ describe("enhancement-prompts.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildUserPrompt", () => {
|
||||
const testText = "Add a logout button";
|
||||
describe('buildUserPrompt', () => {
|
||||
const testText = 'Add a logout button';
|
||||
|
||||
it("should build prompt with examples by default", () => {
|
||||
const prompt = buildUserPrompt("improve", testText);
|
||||
expect(prompt).toContain("Example 1:");
|
||||
it('should build prompt with examples by default', () => {
|
||||
const prompt = buildUserPrompt('improve', testText);
|
||||
expect(prompt).toContain('Example 1:');
|
||||
expect(prompt).toContain(testText);
|
||||
expect(prompt).toContain("Now, please enhance the following task description:");
|
||||
expect(prompt).toContain('Now, please enhance the following task description:');
|
||||
});
|
||||
|
||||
it("should build prompt without examples when includeExamples is false", () => {
|
||||
const prompt = buildUserPrompt("improve", testText, false);
|
||||
expect(prompt).not.toContain("Example 1:");
|
||||
it('should build prompt without examples when includeExamples is false', () => {
|
||||
const prompt = buildUserPrompt('improve', testText, false);
|
||||
expect(prompt).not.toContain('Example 1:');
|
||||
expect(prompt).toContain(testText);
|
||||
expect(prompt).toContain("Please enhance the following task description:");
|
||||
expect(prompt).toContain('Please enhance the following task description:');
|
||||
});
|
||||
|
||||
it("should include all examples for improve mode", () => {
|
||||
const prompt = buildUserPrompt("improve", testText);
|
||||
it('should include all examples for improve mode', () => {
|
||||
const prompt = buildUserPrompt('improve', testText);
|
||||
IMPROVE_EXAMPLES.forEach((example, index) => {
|
||||
expect(prompt).toContain(`Example ${index + 1}:`);
|
||||
expect(prompt).toContain(example.input);
|
||||
});
|
||||
});
|
||||
|
||||
it("should include separator between examples", () => {
|
||||
const prompt = buildUserPrompt("improve", testText);
|
||||
expect(prompt).toContain("---");
|
||||
it('should include separator between examples', () => {
|
||||
const prompt = buildUserPrompt('improve', testText);
|
||||
expect(prompt).toContain('---');
|
||||
});
|
||||
|
||||
it("should work with all enhancement modes", () => {
|
||||
const modes: EnhancementMode[] = ["improve", "technical", "simplify", "acceptance"];
|
||||
it('should work with all enhancement modes', () => {
|
||||
const modes: EnhancementMode[] = ['improve', 'technical', 'simplify', 'acceptance'];
|
||||
modes.forEach((mode) => {
|
||||
const prompt = buildUserPrompt(mode, testText);
|
||||
expect(prompt).toContain(testText);
|
||||
@@ -192,40 +192,40 @@ describe("enhancement-prompts.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("should preserve the original text exactly", () => {
|
||||
const specialText = "Add feature with special chars: <>&\"'";
|
||||
const prompt = buildUserPrompt("improve", specialText);
|
||||
it('should preserve the original text exactly', () => {
|
||||
const specialText = 'Add feature with special chars: <>&"\'';
|
||||
const prompt = buildUserPrompt('improve', specialText);
|
||||
expect(prompt).toContain(specialText);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isValidEnhancementMode", () => {
|
||||
it("should return true for valid modes", () => {
|
||||
expect(isValidEnhancementMode("improve")).toBe(true);
|
||||
expect(isValidEnhancementMode("technical")).toBe(true);
|
||||
expect(isValidEnhancementMode("simplify")).toBe(true);
|
||||
expect(isValidEnhancementMode("acceptance")).toBe(true);
|
||||
describe('isValidEnhancementMode', () => {
|
||||
it('should return true for valid modes', () => {
|
||||
expect(isValidEnhancementMode('improve')).toBe(true);
|
||||
expect(isValidEnhancementMode('technical')).toBe(true);
|
||||
expect(isValidEnhancementMode('simplify')).toBe(true);
|
||||
expect(isValidEnhancementMode('acceptance')).toBe(true);
|
||||
});
|
||||
|
||||
it("should return false for invalid modes", () => {
|
||||
expect(isValidEnhancementMode("invalid")).toBe(false);
|
||||
expect(isValidEnhancementMode("IMPROVE")).toBe(false); // case-sensitive
|
||||
expect(isValidEnhancementMode("")).toBe(false);
|
||||
expect(isValidEnhancementMode("random")).toBe(false);
|
||||
it('should return false for invalid modes', () => {
|
||||
expect(isValidEnhancementMode('invalid')).toBe(false);
|
||||
expect(isValidEnhancementMode('IMPROVE')).toBe(false); // case-sensitive
|
||||
expect(isValidEnhancementMode('')).toBe(false);
|
||||
expect(isValidEnhancementMode('random')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAvailableEnhancementModes", () => {
|
||||
it("should return all four enhancement modes", () => {
|
||||
describe('getAvailableEnhancementModes', () => {
|
||||
it('should return all four enhancement modes', () => {
|
||||
const modes = getAvailableEnhancementModes();
|
||||
expect(modes).toHaveLength(4);
|
||||
expect(modes).toContain("improve");
|
||||
expect(modes).toContain("technical");
|
||||
expect(modes).toContain("simplify");
|
||||
expect(modes).toContain("acceptance");
|
||||
expect(modes).toContain('improve');
|
||||
expect(modes).toContain('technical');
|
||||
expect(modes).toContain('simplify');
|
||||
expect(modes).toContain('acceptance');
|
||||
});
|
||||
|
||||
it("should return an array", () => {
|
||||
it('should return an array', () => {
|
||||
const modes = getAvailableEnhancementModes();
|
||||
expect(Array.isArray(modes)).toBe(true);
|
||||
});
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
import { createEventEmitter, type EventType } from "@/lib/events.js";
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { createEventEmitter, type EventType } from '@/lib/events.js';
|
||||
|
||||
describe("events.ts", () => {
|
||||
describe("createEventEmitter", () => {
|
||||
it("should emit events to single subscriber", () => {
|
||||
describe('events.ts', () => {
|
||||
describe('createEventEmitter', () => {
|
||||
it('should emit events to single subscriber', () => {
|
||||
const emitter = createEventEmitter();
|
||||
const callback = vi.fn();
|
||||
|
||||
emitter.subscribe(callback);
|
||||
emitter.emit("agent:stream", { message: "test" });
|
||||
emitter.emit('agent:stream', { message: 'test' });
|
||||
|
||||
expect(callback).toHaveBeenCalledOnce();
|
||||
expect(callback).toHaveBeenCalledWith("agent:stream", { message: "test" });
|
||||
expect(callback).toHaveBeenCalledWith('agent:stream', { message: 'test' });
|
||||
});
|
||||
|
||||
it("should emit events to multiple subscribers", () => {
|
||||
it('should emit events to multiple subscribers', () => {
|
||||
const emitter = createEventEmitter();
|
||||
const callback1 = vi.fn();
|
||||
const callback2 = vi.fn();
|
||||
@@ -23,42 +23,42 @@ describe("events.ts", () => {
|
||||
emitter.subscribe(callback1);
|
||||
emitter.subscribe(callback2);
|
||||
emitter.subscribe(callback3);
|
||||
emitter.emit("feature:started", { id: "123" });
|
||||
emitter.emit('feature:started', { id: '123' });
|
||||
|
||||
expect(callback1).toHaveBeenCalledOnce();
|
||||
expect(callback2).toHaveBeenCalledOnce();
|
||||
expect(callback3).toHaveBeenCalledOnce();
|
||||
expect(callback1).toHaveBeenCalledWith("feature:started", { id: "123" });
|
||||
expect(callback1).toHaveBeenCalledWith('feature:started', { id: '123' });
|
||||
});
|
||||
|
||||
it("should support unsubscribe functionality", () => {
|
||||
it('should support unsubscribe functionality', () => {
|
||||
const emitter = createEventEmitter();
|
||||
const callback = vi.fn();
|
||||
|
||||
const unsubscribe = emitter.subscribe(callback);
|
||||
emitter.emit("agent:stream", { test: 1 });
|
||||
emitter.emit('agent:stream', { test: 1 });
|
||||
|
||||
expect(callback).toHaveBeenCalledOnce();
|
||||
|
||||
unsubscribe();
|
||||
emitter.emit("agent:stream", { test: 2 });
|
||||
emitter.emit('agent:stream', { test: 2 });
|
||||
|
||||
expect(callback).toHaveBeenCalledOnce(); // Still called only once
|
||||
});
|
||||
|
||||
it("should handle errors in subscribers without crashing", () => {
|
||||
it('should handle errors in subscribers without crashing', () => {
|
||||
const emitter = createEventEmitter();
|
||||
const errorCallback = vi.fn(() => {
|
||||
throw new Error("Subscriber error");
|
||||
throw new Error('Subscriber error');
|
||||
});
|
||||
const normalCallback = vi.fn();
|
||||
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
|
||||
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
|
||||
emitter.subscribe(errorCallback);
|
||||
emitter.subscribe(normalCallback);
|
||||
|
||||
expect(() => {
|
||||
emitter.emit("feature:error", { error: "test" });
|
||||
emitter.emit('feature:error', { error: 'test' });
|
||||
}).not.toThrow();
|
||||
|
||||
expect(errorCallback).toHaveBeenCalledOnce();
|
||||
@@ -68,17 +68,17 @@ describe("events.ts", () => {
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("should emit different event types", () => {
|
||||
it('should emit different event types', () => {
|
||||
const emitter = createEventEmitter();
|
||||
const callback = vi.fn();
|
||||
|
||||
emitter.subscribe(callback);
|
||||
|
||||
const eventTypes: EventType[] = [
|
||||
"agent:stream",
|
||||
"auto-mode:started",
|
||||
"feature:completed",
|
||||
"project:analysis-progress",
|
||||
'agent:stream',
|
||||
'auto-mode:started',
|
||||
'feature:completed',
|
||||
'project:analysis-progress',
|
||||
];
|
||||
|
||||
eventTypes.forEach((type) => {
|
||||
@@ -88,15 +88,15 @@ describe("events.ts", () => {
|
||||
expect(callback).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
|
||||
it("should handle emitting without subscribers", () => {
|
||||
it('should handle emitting without subscribers', () => {
|
||||
const emitter = createEventEmitter();
|
||||
|
||||
expect(() => {
|
||||
emitter.emit("agent:stream", { test: true });
|
||||
emitter.emit('agent:stream', { test: true });
|
||||
}).not.toThrow();
|
||||
});
|
||||
|
||||
it("should allow multiple subscriptions and unsubscriptions", () => {
|
||||
it('should allow multiple subscriptions and unsubscriptions', () => {
|
||||
const emitter = createEventEmitter();
|
||||
const callback1 = vi.fn();
|
||||
const callback2 = vi.fn();
|
||||
@@ -106,14 +106,14 @@ describe("events.ts", () => {
|
||||
const unsub2 = emitter.subscribe(callback2);
|
||||
const unsub3 = emitter.subscribe(callback3);
|
||||
|
||||
emitter.emit("feature:started", { test: 1 });
|
||||
emitter.emit('feature:started', { test: 1 });
|
||||
expect(callback1).toHaveBeenCalledOnce();
|
||||
expect(callback2).toHaveBeenCalledOnce();
|
||||
expect(callback3).toHaveBeenCalledOnce();
|
||||
|
||||
unsub2();
|
||||
|
||||
emitter.emit("feature:started", { test: 2 });
|
||||
emitter.emit('feature:started', { test: 2 });
|
||||
expect(callback1).toHaveBeenCalledTimes(2);
|
||||
expect(callback2).toHaveBeenCalledOnce(); // Still just once
|
||||
expect(callback3).toHaveBeenCalledTimes(2);
|
||||
@@ -121,7 +121,7 @@ describe("events.ts", () => {
|
||||
unsub1();
|
||||
unsub3();
|
||||
|
||||
emitter.emit("feature:started", { test: 3 });
|
||||
emitter.emit('feature:started', { test: 3 });
|
||||
expect(callback1).toHaveBeenCalledTimes(2);
|
||||
expect(callback2).toHaveBeenCalledOnce();
|
||||
expect(callback3).toHaveBeenCalledTimes(2);
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
import { ProviderFactory } from "@/providers/provider-factory.js";
|
||||
import { ClaudeProvider } from "@/providers/claude-provider.js";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { ProviderFactory } from '@/providers/provider-factory.js';
|
||||
import { ClaudeProvider } from '@/providers/claude-provider.js';
|
||||
|
||||
describe("provider-factory.ts", () => {
|
||||
describe('provider-factory.ts', () => {
|
||||
let consoleSpy: any;
|
||||
|
||||
beforeEach(() => {
|
||||
consoleSpy = {
|
||||
warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
|
||||
warn: vi.spyOn(console, 'warn').mockImplementation(() => {}),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -15,55 +15,49 @@ describe("provider-factory.ts", () => {
|
||||
consoleSpy.warn.mockRestore();
|
||||
});
|
||||
|
||||
describe("getProviderForModel", () => {
|
||||
describe("Claude models (claude-* prefix)", () => {
|
||||
it("should return ClaudeProvider for claude-opus-4-5-20251101", () => {
|
||||
const provider = ProviderFactory.getProviderForModel(
|
||||
"claude-opus-4-5-20251101"
|
||||
);
|
||||
describe('getProviderForModel', () => {
|
||||
describe('Claude models (claude-* prefix)', () => {
|
||||
it('should return ClaudeProvider for claude-opus-4-5-20251101', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('claude-opus-4-5-20251101');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should return ClaudeProvider for claude-sonnet-4-20250514", () => {
|
||||
const provider = ProviderFactory.getProviderForModel(
|
||||
"claude-sonnet-4-20250514"
|
||||
);
|
||||
it('should return ClaudeProvider for claude-sonnet-4-20250514', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('claude-sonnet-4-20250514');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should return ClaudeProvider for claude-haiku-4-5", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("claude-haiku-4-5");
|
||||
it('should return ClaudeProvider for claude-haiku-4-5', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('claude-haiku-4-5');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should be case-insensitive for claude models", () => {
|
||||
const provider = ProviderFactory.getProviderForModel(
|
||||
"CLAUDE-OPUS-4-5-20251101"
|
||||
);
|
||||
it('should be case-insensitive for claude models', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('CLAUDE-OPUS-4-5-20251101');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Claude aliases", () => {
|
||||
describe('Claude aliases', () => {
|
||||
it("should return ClaudeProvider for 'haiku'", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("haiku");
|
||||
const provider = ProviderFactory.getProviderForModel('haiku');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should return ClaudeProvider for 'sonnet'", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("sonnet");
|
||||
const provider = ProviderFactory.getProviderForModel('sonnet');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should return ClaudeProvider for 'opus'", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("opus");
|
||||
const provider = ProviderFactory.getProviderForModel('opus');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should be case-insensitive for aliases", () => {
|
||||
const provider1 = ProviderFactory.getProviderForModel("HAIKU");
|
||||
const provider2 = ProviderFactory.getProviderForModel("Sonnet");
|
||||
const provider3 = ProviderFactory.getProviderForModel("Opus");
|
||||
it('should be case-insensitive for aliases', () => {
|
||||
const provider1 = ProviderFactory.getProviderForModel('HAIKU');
|
||||
const provider2 = ProviderFactory.getProviderForModel('Sonnet');
|
||||
const provider3 = ProviderFactory.getProviderForModel('Opus');
|
||||
|
||||
expect(provider1).toBeInstanceOf(ClaudeProvider);
|
||||
expect(provider2).toBeInstanceOf(ClaudeProvider);
|
||||
@@ -71,65 +65,61 @@ describe("provider-factory.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("Unknown models", () => {
|
||||
it("should default to ClaudeProvider for unknown model", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("unknown-model-123");
|
||||
describe('Unknown models', () => {
|
||||
it('should default to ClaudeProvider for unknown model', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('unknown-model-123');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should warn when defaulting to Claude", () => {
|
||||
ProviderFactory.getProviderForModel("random-model");
|
||||
it('should warn when defaulting to Claude', () => {
|
||||
ProviderFactory.getProviderForModel('random-model');
|
||||
expect(consoleSpy.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining("Unknown model prefix")
|
||||
expect.stringContaining('Unknown model prefix')
|
||||
);
|
||||
expect(consoleSpy.warn).toHaveBeenCalledWith(expect.stringContaining('random-model'));
|
||||
expect(consoleSpy.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining("random-model")
|
||||
);
|
||||
expect(consoleSpy.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining("defaulting to Claude")
|
||||
expect.stringContaining('defaulting to Claude')
|
||||
);
|
||||
});
|
||||
|
||||
it("should handle empty string", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("");
|
||||
it('should handle empty string', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
expect(consoleSpy.warn).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should default to ClaudeProvider for gpt models (not supported)", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("gpt-5.2");
|
||||
it('should default to ClaudeProvider for gpt models (not supported)', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('gpt-5.2');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
expect(consoleSpy.warn).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should default to ClaudeProvider for o-series models (not supported)", () => {
|
||||
const provider = ProviderFactory.getProviderForModel("o1");
|
||||
it('should default to ClaudeProvider for o-series models (not supported)', () => {
|
||||
const provider = ProviderFactory.getProviderForModel('o1');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
expect(consoleSpy.warn).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAllProviders", () => {
|
||||
it("should return array of all providers", () => {
|
||||
describe('getAllProviders', () => {
|
||||
it('should return array of all providers', () => {
|
||||
const providers = ProviderFactory.getAllProviders();
|
||||
expect(Array.isArray(providers)).toBe(true);
|
||||
});
|
||||
|
||||
it("should include ClaudeProvider", () => {
|
||||
it('should include ClaudeProvider', () => {
|
||||
const providers = ProviderFactory.getAllProviders();
|
||||
const hasClaudeProvider = providers.some(
|
||||
(p) => p instanceof ClaudeProvider
|
||||
);
|
||||
const hasClaudeProvider = providers.some((p) => p instanceof ClaudeProvider);
|
||||
expect(hasClaudeProvider).toBe(true);
|
||||
});
|
||||
|
||||
it("should return exactly 1 provider", () => {
|
||||
it('should return exactly 1 provider', () => {
|
||||
const providers = ProviderFactory.getAllProviders();
|
||||
expect(providers).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("should create new instances each time", () => {
|
||||
it('should create new instances each time', () => {
|
||||
const providers1 = ProviderFactory.getAllProviders();
|
||||
const providers2 = ProviderFactory.getAllProviders();
|
||||
|
||||
@@ -137,60 +127,60 @@ describe("provider-factory.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("checkAllProviders", () => {
|
||||
it("should return installation status for all providers", async () => {
|
||||
describe('checkAllProviders', () => {
|
||||
it('should return installation status for all providers', async () => {
|
||||
const statuses = await ProviderFactory.checkAllProviders();
|
||||
|
||||
expect(statuses).toHaveProperty("claude");
|
||||
expect(statuses).toHaveProperty('claude');
|
||||
});
|
||||
|
||||
it("should call detectInstallation on each provider", async () => {
|
||||
it('should call detectInstallation on each provider', async () => {
|
||||
const statuses = await ProviderFactory.checkAllProviders();
|
||||
|
||||
expect(statuses.claude).toHaveProperty("installed");
|
||||
expect(statuses.claude).toHaveProperty('installed');
|
||||
});
|
||||
|
||||
it("should return correct provider names as keys", async () => {
|
||||
it('should return correct provider names as keys', async () => {
|
||||
const statuses = await ProviderFactory.checkAllProviders();
|
||||
const keys = Object.keys(statuses);
|
||||
|
||||
expect(keys).toContain("claude");
|
||||
expect(keys).toContain('claude');
|
||||
expect(keys).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getProviderByName", () => {
|
||||
describe('getProviderByName', () => {
|
||||
it("should return ClaudeProvider for 'claude'", () => {
|
||||
const provider = ProviderFactory.getProviderByName("claude");
|
||||
const provider = ProviderFactory.getProviderByName('claude');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should return ClaudeProvider for 'anthropic'", () => {
|
||||
const provider = ProviderFactory.getProviderByName("anthropic");
|
||||
const provider = ProviderFactory.getProviderByName('anthropic');
|
||||
expect(provider).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should be case-insensitive", () => {
|
||||
const provider1 = ProviderFactory.getProviderByName("CLAUDE");
|
||||
const provider2 = ProviderFactory.getProviderByName("ANTHROPIC");
|
||||
it('should be case-insensitive', () => {
|
||||
const provider1 = ProviderFactory.getProviderByName('CLAUDE');
|
||||
const provider2 = ProviderFactory.getProviderByName('ANTHROPIC');
|
||||
|
||||
expect(provider1).toBeInstanceOf(ClaudeProvider);
|
||||
expect(provider2).toBeInstanceOf(ClaudeProvider);
|
||||
});
|
||||
|
||||
it("should return null for unknown provider", () => {
|
||||
const provider = ProviderFactory.getProviderByName("unknown");
|
||||
it('should return null for unknown provider', () => {
|
||||
const provider = ProviderFactory.getProviderByName('unknown');
|
||||
expect(provider).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for empty string", () => {
|
||||
const provider = ProviderFactory.getProviderByName("");
|
||||
it('should return null for empty string', () => {
|
||||
const provider = ProviderFactory.getProviderByName('');
|
||||
expect(provider).toBeNull();
|
||||
});
|
||||
|
||||
it("should create new instance each time", () => {
|
||||
const provider1 = ProviderFactory.getProviderByName("claude");
|
||||
const provider2 = ProviderFactory.getProviderByName("claude");
|
||||
it('should create new instance each time', () => {
|
||||
const provider1 = ProviderFactory.getProviderByName('claude');
|
||||
const provider2 = ProviderFactory.getProviderByName('claude');
|
||||
|
||||
expect(provider1).not.toBe(provider2);
|
||||
expect(provider1).toBeInstanceOf(ClaudeProvider);
|
||||
@@ -198,35 +188,33 @@ describe("provider-factory.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAllAvailableModels", () => {
|
||||
it("should return array of models", () => {
|
||||
describe('getAllAvailableModels', () => {
|
||||
it('should return array of models', () => {
|
||||
const models = ProviderFactory.getAllAvailableModels();
|
||||
expect(Array.isArray(models)).toBe(true);
|
||||
});
|
||||
|
||||
it("should include models from all providers", () => {
|
||||
it('should include models from all providers', () => {
|
||||
const models = ProviderFactory.getAllAvailableModels();
|
||||
expect(models.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("should return models with required fields", () => {
|
||||
it('should return models with required fields', () => {
|
||||
const models = ProviderFactory.getAllAvailableModels();
|
||||
|
||||
models.forEach((model) => {
|
||||
expect(model).toHaveProperty("id");
|
||||
expect(model).toHaveProperty("name");
|
||||
expect(typeof model.id).toBe("string");
|
||||
expect(typeof model.name).toBe("string");
|
||||
expect(model).toHaveProperty('id');
|
||||
expect(model).toHaveProperty('name');
|
||||
expect(typeof model.id).toBe('string');
|
||||
expect(typeof model.name).toBe('string');
|
||||
});
|
||||
});
|
||||
|
||||
it("should include Claude models", () => {
|
||||
it('should include Claude models', () => {
|
||||
const models = ProviderFactory.getAllAvailableModels();
|
||||
|
||||
// Claude models should include claude-* in their IDs
|
||||
const hasClaudeModels = models.some((m) =>
|
||||
m.id.toLowerCase().includes("claude")
|
||||
);
|
||||
const hasClaudeModels = models.some((m) => m.id.toLowerCase().includes('claude'));
|
||||
|
||||
expect(hasClaudeModels).toBe(true);
|
||||
});
|
||||
|
||||
@@ -1,65 +1,59 @@
|
||||
import { describe, it, expect, beforeEach } from "vitest";
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import {
|
||||
setRunningState,
|
||||
getErrorMessage,
|
||||
getSpecRegenerationStatus,
|
||||
} from "@/routes/app-spec/common.js";
|
||||
} from '@/routes/app-spec/common.js';
|
||||
|
||||
describe("app-spec/common.ts", () => {
|
||||
describe('app-spec/common.ts', () => {
|
||||
beforeEach(() => {
|
||||
// Reset state before each test
|
||||
setRunningState(false, null);
|
||||
});
|
||||
|
||||
describe("setRunningState", () => {
|
||||
it("should set isRunning to true when running is true", () => {
|
||||
describe('setRunningState', () => {
|
||||
it('should set isRunning to true when running is true', () => {
|
||||
setRunningState(true);
|
||||
expect(getSpecRegenerationStatus().isRunning).toBe(true);
|
||||
});
|
||||
|
||||
it("should set isRunning to false when running is false", () => {
|
||||
it('should set isRunning to false when running is false', () => {
|
||||
setRunningState(true);
|
||||
setRunningState(false);
|
||||
expect(getSpecRegenerationStatus().isRunning).toBe(false);
|
||||
});
|
||||
|
||||
it("should set currentAbortController when provided", () => {
|
||||
it('should set currentAbortController when provided', () => {
|
||||
const controller = new AbortController();
|
||||
setRunningState(true, controller);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(
|
||||
controller
|
||||
);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(controller);
|
||||
});
|
||||
|
||||
it("should set currentAbortController to null when not provided", () => {
|
||||
it('should set currentAbortController to null when not provided', () => {
|
||||
const controller = new AbortController();
|
||||
setRunningState(true, controller);
|
||||
setRunningState(false);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(null);
|
||||
});
|
||||
|
||||
it("should set currentAbortController to null when explicitly passed null", () => {
|
||||
it('should set currentAbortController to null when explicitly passed null', () => {
|
||||
const controller = new AbortController();
|
||||
setRunningState(true, controller);
|
||||
setRunningState(true, null);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(null);
|
||||
});
|
||||
|
||||
it("should update state multiple times correctly", () => {
|
||||
it('should update state multiple times correctly', () => {
|
||||
const controller1 = new AbortController();
|
||||
const controller2 = new AbortController();
|
||||
|
||||
setRunningState(true, controller1);
|
||||
expect(getSpecRegenerationStatus().isRunning).toBe(true);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(
|
||||
controller1
|
||||
);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(controller1);
|
||||
|
||||
setRunningState(true, controller2);
|
||||
expect(getSpecRegenerationStatus().isRunning).toBe(true);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(
|
||||
controller2
|
||||
);
|
||||
expect(getSpecRegenerationStatus().currentAbortController).toBe(controller2);
|
||||
|
||||
setRunningState(false, null);
|
||||
expect(getSpecRegenerationStatus().isRunning).toBe(false);
|
||||
@@ -67,42 +61,42 @@ describe("app-spec/common.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("getErrorMessage", () => {
|
||||
it("should return message from Error instance", () => {
|
||||
const error = new Error("Test error message");
|
||||
expect(getErrorMessage(error)).toBe("Test error message");
|
||||
describe('getErrorMessage', () => {
|
||||
it('should return message from Error instance', () => {
|
||||
const error = new Error('Test error message');
|
||||
expect(getErrorMessage(error)).toBe('Test error message');
|
||||
});
|
||||
|
||||
it("should return 'Unknown error' for non-Error objects", () => {
|
||||
expect(getErrorMessage("string error")).toBe("Unknown error");
|
||||
expect(getErrorMessage(123)).toBe("Unknown error");
|
||||
expect(getErrorMessage(null)).toBe("Unknown error");
|
||||
expect(getErrorMessage(undefined)).toBe("Unknown error");
|
||||
expect(getErrorMessage({})).toBe("Unknown error");
|
||||
expect(getErrorMessage([])).toBe("Unknown error");
|
||||
expect(getErrorMessage('string error')).toBe('Unknown error');
|
||||
expect(getErrorMessage(123)).toBe('Unknown error');
|
||||
expect(getErrorMessage(null)).toBe('Unknown error');
|
||||
expect(getErrorMessage(undefined)).toBe('Unknown error');
|
||||
expect(getErrorMessage({})).toBe('Unknown error');
|
||||
expect(getErrorMessage([])).toBe('Unknown error');
|
||||
});
|
||||
|
||||
it("should return message from Error with empty message", () => {
|
||||
const error = new Error("");
|
||||
expect(getErrorMessage(error)).toBe("");
|
||||
it('should return message from Error with empty message', () => {
|
||||
const error = new Error('');
|
||||
expect(getErrorMessage(error)).toBe('');
|
||||
});
|
||||
|
||||
it("should handle Error objects with custom properties", () => {
|
||||
const error = new Error("Base message");
|
||||
(error as any).customProp = "custom value";
|
||||
expect(getErrorMessage(error)).toBe("Base message");
|
||||
it('should handle Error objects with custom properties', () => {
|
||||
const error = new Error('Base message');
|
||||
(error as any).customProp = 'custom value';
|
||||
expect(getErrorMessage(error)).toBe('Base message');
|
||||
});
|
||||
|
||||
it("should handle Error objects created with different constructors", () => {
|
||||
it('should handle Error objects created with different constructors', () => {
|
||||
class CustomError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "CustomError";
|
||||
this.name = 'CustomError';
|
||||
}
|
||||
}
|
||||
|
||||
const customError = new CustomError("Custom error message");
|
||||
expect(getErrorMessage(customError)).toBe("Custom error message");
|
||||
const customError = new CustomError('Custom error message');
|
||||
expect(getErrorMessage(customError)).toBe('Custom error message');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { describe, it, expect } from 'vitest';
|
||||
|
||||
describe("app-spec/parse-and-create-features.ts - JSON extraction", () => {
|
||||
describe('app-spec/parse-and-create-features.ts - JSON extraction', () => {
|
||||
// Test the JSON extraction regex pattern used in parseAndCreateFeatures
|
||||
const jsonExtractionPattern = /\{[\s\S]*"features"[\s\S]*\}/;
|
||||
|
||||
describe("JSON extraction regex", () => {
|
||||
it("should extract JSON with features array", () => {
|
||||
describe('JSON extraction regex', () => {
|
||||
it('should extract JSON with features array', () => {
|
||||
const content = `Here is the response:
|
||||
{
|
||||
"features": [
|
||||
@@ -26,7 +26,7 @@ describe("app-spec/parse-and-create-features.ts - JSON extraction", () => {
|
||||
expect(match![0]).toContain('"id": "feature-1"');
|
||||
});
|
||||
|
||||
it("should extract JSON with multiple features", () => {
|
||||
it('should extract JSON with multiple features', () => {
|
||||
const content = `Some text before
|
||||
{
|
||||
"features": [
|
||||
@@ -49,7 +49,7 @@ Some text after`;
|
||||
expect(match![0]).toContain('"feature-2"');
|
||||
});
|
||||
|
||||
it("should extract JSON with nested objects and arrays", () => {
|
||||
it('should extract JSON with nested objects and arrays', () => {
|
||||
const content = `Response:
|
||||
{
|
||||
"features": [
|
||||
@@ -69,7 +69,7 @@ Some text after`;
|
||||
expect(match![0]).toContain('"dep-1"');
|
||||
});
|
||||
|
||||
it("should handle JSON with whitespace and newlines", () => {
|
||||
it('should handle JSON with whitespace and newlines', () => {
|
||||
const content = `Text before
|
||||
{
|
||||
"features": [
|
||||
@@ -87,7 +87,7 @@ Text after`;
|
||||
expect(match![0]).toContain('"features"');
|
||||
});
|
||||
|
||||
it("should extract JSON when features array is empty", () => {
|
||||
it('should extract JSON when features array is empty', () => {
|
||||
const content = `Response:
|
||||
{
|
||||
"features": []
|
||||
@@ -96,10 +96,10 @@ Text after`;
|
||||
const match = content.match(jsonExtractionPattern);
|
||||
expect(match).not.toBeNull();
|
||||
expect(match![0]).toContain('"features"');
|
||||
expect(match![0]).toContain("[]");
|
||||
expect(match![0]).toContain('[]');
|
||||
});
|
||||
|
||||
it("should not match content without features key", () => {
|
||||
it('should not match content without features key', () => {
|
||||
const content = `{
|
||||
"otherKey": "value"
|
||||
}`;
|
||||
@@ -108,13 +108,13 @@ Text after`;
|
||||
expect(match).toBeNull();
|
||||
});
|
||||
|
||||
it("should not match content without JSON structure", () => {
|
||||
const content = "Just plain text with features mentioned";
|
||||
it('should not match content without JSON structure', () => {
|
||||
const content = 'Just plain text with features mentioned';
|
||||
const match = content.match(jsonExtractionPattern);
|
||||
expect(match).toBeNull();
|
||||
});
|
||||
|
||||
it("should extract JSON when features key appears multiple times", () => {
|
||||
it('should extract JSON when features key appears multiple times', () => {
|
||||
const content = `Before:
|
||||
{
|
||||
"features": [
|
||||
@@ -132,7 +132,7 @@ After: The word "features" appears again`;
|
||||
expect(match![0]).toContain('"features"');
|
||||
});
|
||||
|
||||
it("should handle JSON with escaped quotes", () => {
|
||||
it('should handle JSON with escaped quotes', () => {
|
||||
const content = `{
|
||||
"features": [
|
||||
{
|
||||
@@ -147,7 +147,7 @@ After: The word "features" appears again`;
|
||||
expect(match![0]).toContain('"features"');
|
||||
});
|
||||
|
||||
it("should extract JSON with complex nested structure", () => {
|
||||
it('should extract JSON with complex nested structure', () => {
|
||||
const content = `Response:
|
||||
{
|
||||
"features": [
|
||||
@@ -177,8 +177,8 @@ After: The word "features" appears again`;
|
||||
});
|
||||
});
|
||||
|
||||
describe("JSON parsing validation", () => {
|
||||
it("should parse valid feature JSON structure", () => {
|
||||
describe('JSON parsing validation', () => {
|
||||
it('should parse valid feature JSON structure', () => {
|
||||
const validJson = `{
|
||||
"features": [
|
||||
{
|
||||
@@ -196,11 +196,11 @@ After: The word "features" appears again`;
|
||||
expect(parsed.features).toBeDefined();
|
||||
expect(Array.isArray(parsed.features)).toBe(true);
|
||||
expect(parsed.features.length).toBe(1);
|
||||
expect(parsed.features[0].id).toBe("feature-1");
|
||||
expect(parsed.features[0].title).toBe("Test Feature");
|
||||
expect(parsed.features[0].id).toBe('feature-1');
|
||||
expect(parsed.features[0].title).toBe('Test Feature');
|
||||
});
|
||||
|
||||
it("should handle features with optional fields", () => {
|
||||
it('should handle features with optional fields', () => {
|
||||
const jsonWithOptionalFields = `{
|
||||
"features": [
|
||||
{
|
||||
@@ -213,14 +213,14 @@ After: The word "features" appears again`;
|
||||
}`;
|
||||
|
||||
const parsed = JSON.parse(jsonWithOptionalFields);
|
||||
expect(parsed.features[0].id).toBe("feature-1");
|
||||
expect(parsed.features[0].id).toBe('feature-1');
|
||||
expect(parsed.features[0].priority).toBe(2);
|
||||
// description and dependencies are optional
|
||||
expect(parsed.features[0].description).toBeUndefined();
|
||||
expect(parsed.features[0].dependencies).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should handle features with dependencies", () => {
|
||||
it('should handle features with dependencies', () => {
|
||||
const jsonWithDeps = `{
|
||||
"features": [
|
||||
{
|
||||
@@ -238,7 +238,7 @@ After: The word "features" appears again`;
|
||||
|
||||
const parsed = JSON.parse(jsonWithDeps);
|
||||
expect(parsed.features[0].dependencies).toEqual([]);
|
||||
expect(parsed.features[1].dependencies).toEqual(["feature-1"]);
|
||||
expect(parsed.features[1].dependencies).toEqual(['feature-1']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
import { AutoModeService } from "@/services/auto-mode-service.js";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { AutoModeService } from '@/services/auto-mode-service.js';
|
||||
|
||||
describe("auto-mode-service.ts - Planning Mode", () => {
|
||||
describe('auto-mode-service.ts - Planning Mode', () => {
|
||||
let service: AutoModeService;
|
||||
const mockEvents = {
|
||||
subscribe: vi.fn(),
|
||||
@@ -18,98 +18,98 @@ describe("auto-mode-service.ts - Planning Mode", () => {
|
||||
await service.stopAutoLoop().catch(() => {});
|
||||
});
|
||||
|
||||
describe("getPlanningPromptPrefix", () => {
|
||||
describe('getPlanningPromptPrefix', () => {
|
||||
// Access private method through any cast for testing
|
||||
const getPlanningPromptPrefix = (svc: any, feature: any) => {
|
||||
return svc.getPlanningPromptPrefix(feature);
|
||||
};
|
||||
|
||||
it("should return empty string for skip mode", () => {
|
||||
const feature = { id: "test", planningMode: "skip" as const };
|
||||
it('should return empty string for skip mode', () => {
|
||||
const feature = { id: 'test', planningMode: 'skip' as const };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toBe("");
|
||||
expect(result).toBe('');
|
||||
});
|
||||
|
||||
it("should return empty string when planningMode is undefined", () => {
|
||||
const feature = { id: "test" };
|
||||
it('should return empty string when planningMode is undefined', () => {
|
||||
const feature = { id: 'test' };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toBe("");
|
||||
expect(result).toBe('');
|
||||
});
|
||||
|
||||
it("should return lite prompt for lite mode without approval", () => {
|
||||
it('should return lite prompt for lite mode without approval', () => {
|
||||
const feature = {
|
||||
id: "test",
|
||||
planningMode: "lite" as const,
|
||||
requirePlanApproval: false
|
||||
id: 'test',
|
||||
planningMode: 'lite' as const,
|
||||
requirePlanApproval: false,
|
||||
};
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Planning Phase (Lite Mode)");
|
||||
expect(result).toContain("[PLAN_GENERATED]");
|
||||
expect(result).toContain("Feature Request");
|
||||
expect(result).toContain('Planning Phase (Lite Mode)');
|
||||
expect(result).toContain('[PLAN_GENERATED]');
|
||||
expect(result).toContain('Feature Request');
|
||||
});
|
||||
|
||||
it("should return lite_with_approval prompt for lite mode with approval", () => {
|
||||
it('should return lite_with_approval prompt for lite mode with approval', () => {
|
||||
const feature = {
|
||||
id: "test",
|
||||
planningMode: "lite" as const,
|
||||
requirePlanApproval: true
|
||||
id: 'test',
|
||||
planningMode: 'lite' as const,
|
||||
requirePlanApproval: true,
|
||||
};
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Planning Phase (Lite Mode)");
|
||||
expect(result).toContain("[SPEC_GENERATED]");
|
||||
expect(result).toContain("DO NOT proceed with implementation");
|
||||
expect(result).toContain('Planning Phase (Lite Mode)');
|
||||
expect(result).toContain('[SPEC_GENERATED]');
|
||||
expect(result).toContain('DO NOT proceed with implementation');
|
||||
});
|
||||
|
||||
it("should return spec prompt for spec mode", () => {
|
||||
it('should return spec prompt for spec mode', () => {
|
||||
const feature = {
|
||||
id: "test",
|
||||
planningMode: "spec" as const
|
||||
id: 'test',
|
||||
planningMode: 'spec' as const,
|
||||
};
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Specification Phase (Spec Mode)");
|
||||
expect(result).toContain("```tasks");
|
||||
expect(result).toContain("T001");
|
||||
expect(result).toContain("[TASK_START]");
|
||||
expect(result).toContain("[TASK_COMPLETE]");
|
||||
expect(result).toContain('Specification Phase (Spec Mode)');
|
||||
expect(result).toContain('```tasks');
|
||||
expect(result).toContain('T001');
|
||||
expect(result).toContain('[TASK_START]');
|
||||
expect(result).toContain('[TASK_COMPLETE]');
|
||||
});
|
||||
|
||||
it("should return full prompt for full mode", () => {
|
||||
it('should return full prompt for full mode', () => {
|
||||
const feature = {
|
||||
id: "test",
|
||||
planningMode: "full" as const
|
||||
id: 'test',
|
||||
planningMode: 'full' as const,
|
||||
};
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Full Specification Phase (Full SDD Mode)");
|
||||
expect(result).toContain("Phase 1: Foundation");
|
||||
expect(result).toContain("Phase 2: Core Implementation");
|
||||
expect(result).toContain("Phase 3: Integration & Testing");
|
||||
expect(result).toContain('Full Specification Phase (Full SDD Mode)');
|
||||
expect(result).toContain('Phase 1: Foundation');
|
||||
expect(result).toContain('Phase 2: Core Implementation');
|
||||
expect(result).toContain('Phase 3: Integration & Testing');
|
||||
});
|
||||
|
||||
it("should include the separator and Feature Request header", () => {
|
||||
it('should include the separator and Feature Request header', () => {
|
||||
const feature = {
|
||||
id: "test",
|
||||
planningMode: "spec" as const
|
||||
id: 'test',
|
||||
planningMode: 'spec' as const,
|
||||
};
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("---");
|
||||
expect(result).toContain("## Feature Request");
|
||||
expect(result).toContain('---');
|
||||
expect(result).toContain('## Feature Request');
|
||||
});
|
||||
|
||||
it("should instruct agent to NOT output exploration text", () => {
|
||||
const modes = ["lite", "spec", "full"] as const;
|
||||
it('should instruct agent to NOT output exploration text', () => {
|
||||
const modes = ['lite', 'spec', 'full'] as const;
|
||||
for (const mode of modes) {
|
||||
const feature = { id: "test", planningMode: mode };
|
||||
const feature = { id: 'test', planningMode: mode };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Do NOT output exploration text");
|
||||
expect(result).toContain("Start DIRECTLY");
|
||||
expect(result).toContain('Do NOT output exploration text');
|
||||
expect(result).toContain('Start DIRECTLY');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseTasksFromSpec (via module)", () => {
|
||||
describe('parseTasksFromSpec (via module)', () => {
|
||||
// We need to test the module-level function
|
||||
// Import it directly for testing
|
||||
it("should parse tasks from a valid tasks block", async () => {
|
||||
it('should parse tasks from a valid tasks block', async () => {
|
||||
// This tests the internal logic through integration
|
||||
// The function is module-level, so we verify behavior through the service
|
||||
const specContent = `
|
||||
@@ -123,12 +123,12 @@ describe("auto-mode-service.ts - Planning Mode", () => {
|
||||
`;
|
||||
// Since parseTasksFromSpec is a module-level function,
|
||||
// we verify its behavior indirectly through plan parsing
|
||||
expect(specContent).toContain("T001");
|
||||
expect(specContent).toContain("T002");
|
||||
expect(specContent).toContain("T003");
|
||||
expect(specContent).toContain('T001');
|
||||
expect(specContent).toContain('T002');
|
||||
expect(specContent).toContain('T003');
|
||||
});
|
||||
|
||||
it("should handle tasks block with phases", () => {
|
||||
it('should handle tasks block with phases', () => {
|
||||
const specContent = `
|
||||
\`\`\`tasks
|
||||
## Phase 1: Setup
|
||||
@@ -139,190 +139,191 @@ describe("auto-mode-service.ts - Planning Mode", () => {
|
||||
- [ ] T003: Create main module | File: src/index.ts
|
||||
\`\`\`
|
||||
`;
|
||||
expect(specContent).toContain("Phase 1");
|
||||
expect(specContent).toContain("Phase 2");
|
||||
expect(specContent).toContain("T001");
|
||||
expect(specContent).toContain("T003");
|
||||
expect(specContent).toContain('Phase 1');
|
||||
expect(specContent).toContain('Phase 2');
|
||||
expect(specContent).toContain('T001');
|
||||
expect(specContent).toContain('T003');
|
||||
});
|
||||
});
|
||||
|
||||
describe("plan approval flow", () => {
|
||||
it("should track pending approvals correctly", () => {
|
||||
expect(service.hasPendingApproval("test-feature")).toBe(false);
|
||||
describe('plan approval flow', () => {
|
||||
it('should track pending approvals correctly', () => {
|
||||
expect(service.hasPendingApproval('test-feature')).toBe(false);
|
||||
});
|
||||
|
||||
it("should allow cancelling non-existent approval without error", () => {
|
||||
expect(() => service.cancelPlanApproval("non-existent")).not.toThrow();
|
||||
it('should allow cancelling non-existent approval without error', () => {
|
||||
expect(() => service.cancelPlanApproval('non-existent')).not.toThrow();
|
||||
});
|
||||
|
||||
it("should return running features count after stop", async () => {
|
||||
it('should return running features count after stop', async () => {
|
||||
const count = await service.stopAutoLoop();
|
||||
expect(count).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolvePlanApproval", () => {
|
||||
it("should return error when no pending approval exists", async () => {
|
||||
describe('resolvePlanApproval', () => {
|
||||
it('should return error when no pending approval exists', async () => {
|
||||
const result = await service.resolvePlanApproval(
|
||||
"non-existent-feature",
|
||||
'non-existent-feature',
|
||||
true,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain("No pending approval");
|
||||
expect(result.error).toContain('No pending approval');
|
||||
});
|
||||
|
||||
it("should handle approval with edited plan", async () => {
|
||||
it('should handle approval with edited plan', async () => {
|
||||
// Without a pending approval, this should fail gracefully
|
||||
const result = await service.resolvePlanApproval(
|
||||
"test-feature",
|
||||
'test-feature',
|
||||
true,
|
||||
"Edited plan content",
|
||||
'Edited plan content',
|
||||
undefined,
|
||||
undefined
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle rejection with feedback", async () => {
|
||||
it('should handle rejection with feedback', async () => {
|
||||
const result = await service.resolvePlanApproval(
|
||||
"test-feature",
|
||||
'test-feature',
|
||||
false,
|
||||
undefined,
|
||||
"Please add more details",
|
||||
'Please add more details',
|
||||
undefined
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildFeaturePrompt", () => {
|
||||
describe('buildFeaturePrompt', () => {
|
||||
const buildFeaturePrompt = (svc: any, feature: any) => {
|
||||
return svc.buildFeaturePrompt(feature);
|
||||
};
|
||||
|
||||
it("should include feature ID and description", () => {
|
||||
it('should include feature ID and description', () => {
|
||||
const feature = {
|
||||
id: "feat-123",
|
||||
description: "Add user authentication",
|
||||
id: 'feat-123',
|
||||
description: 'Add user authentication',
|
||||
};
|
||||
const result = buildFeaturePrompt(service, feature);
|
||||
expect(result).toContain("feat-123");
|
||||
expect(result).toContain("Add user authentication");
|
||||
expect(result).toContain('feat-123');
|
||||
expect(result).toContain('Add user authentication');
|
||||
});
|
||||
|
||||
it("should include specification when present", () => {
|
||||
it('should include specification when present', () => {
|
||||
const feature = {
|
||||
id: "feat-123",
|
||||
description: "Test feature",
|
||||
spec: "Detailed specification here",
|
||||
id: 'feat-123',
|
||||
description: 'Test feature',
|
||||
spec: 'Detailed specification here',
|
||||
};
|
||||
const result = buildFeaturePrompt(service, feature);
|
||||
expect(result).toContain("Specification:");
|
||||
expect(result).toContain("Detailed specification here");
|
||||
expect(result).toContain('Specification:');
|
||||
expect(result).toContain('Detailed specification here');
|
||||
});
|
||||
|
||||
it("should include image paths when present", () => {
|
||||
it('should include image paths when present', () => {
|
||||
const feature = {
|
||||
id: "feat-123",
|
||||
description: "Test feature",
|
||||
id: 'feat-123',
|
||||
description: 'Test feature',
|
||||
imagePaths: [
|
||||
{ path: "/tmp/image1.png", filename: "image1.png", mimeType: "image/png" },
|
||||
"/tmp/image2.jpg",
|
||||
{ path: '/tmp/image1.png', filename: 'image1.png', mimeType: 'image/png' },
|
||||
'/tmp/image2.jpg',
|
||||
],
|
||||
};
|
||||
const result = buildFeaturePrompt(service, feature);
|
||||
expect(result).toContain("Context Images Attached");
|
||||
expect(result).toContain("image1.png");
|
||||
expect(result).toContain("/tmp/image2.jpg");
|
||||
expect(result).toContain('Context Images Attached');
|
||||
expect(result).toContain('image1.png');
|
||||
expect(result).toContain('/tmp/image2.jpg');
|
||||
});
|
||||
|
||||
it("should include summary tags instruction", () => {
|
||||
it('should include summary tags instruction', () => {
|
||||
const feature = {
|
||||
id: "feat-123",
|
||||
description: "Test feature",
|
||||
id: 'feat-123',
|
||||
description: 'Test feature',
|
||||
};
|
||||
const result = buildFeaturePrompt(service, feature);
|
||||
expect(result).toContain("<summary>");
|
||||
expect(result).toContain("</summary>");
|
||||
expect(result).toContain('<summary>');
|
||||
expect(result).toContain('</summary>');
|
||||
});
|
||||
});
|
||||
|
||||
describe("extractTitleFromDescription", () => {
|
||||
describe('extractTitleFromDescription', () => {
|
||||
const extractTitle = (svc: any, description: string) => {
|
||||
return svc.extractTitleFromDescription(description);
|
||||
};
|
||||
|
||||
it("should return 'Untitled Feature' for empty description", () => {
|
||||
expect(extractTitle(service, "")).toBe("Untitled Feature");
|
||||
expect(extractTitle(service, " ")).toBe("Untitled Feature");
|
||||
expect(extractTitle(service, '')).toBe('Untitled Feature');
|
||||
expect(extractTitle(service, ' ')).toBe('Untitled Feature');
|
||||
});
|
||||
|
||||
it("should return first line if under 60 characters", () => {
|
||||
const description = "Add user login\nWith email validation";
|
||||
expect(extractTitle(service, description)).toBe("Add user login");
|
||||
it('should return first line if under 60 characters', () => {
|
||||
const description = 'Add user login\nWith email validation';
|
||||
expect(extractTitle(service, description)).toBe('Add user login');
|
||||
});
|
||||
|
||||
it("should truncate long first lines to 60 characters", () => {
|
||||
const description = "This is a very long feature description that exceeds the sixty character limit significantly";
|
||||
it('should truncate long first lines to 60 characters', () => {
|
||||
const description =
|
||||
'This is a very long feature description that exceeds the sixty character limit significantly';
|
||||
const result = extractTitle(service, description);
|
||||
expect(result.length).toBe(60);
|
||||
expect(result).toContain("...");
|
||||
expect(result).toContain('...');
|
||||
});
|
||||
});
|
||||
|
||||
describe("PLANNING_PROMPTS structure", () => {
|
||||
describe('PLANNING_PROMPTS structure', () => {
|
||||
const getPlanningPromptPrefix = (svc: any, feature: any) => {
|
||||
return svc.getPlanningPromptPrefix(feature);
|
||||
};
|
||||
|
||||
it("should have all required planning modes", () => {
|
||||
const modes = ["lite", "spec", "full"] as const;
|
||||
it('should have all required planning modes', () => {
|
||||
const modes = ['lite', 'spec', 'full'] as const;
|
||||
for (const mode of modes) {
|
||||
const feature = { id: "test", planningMode: mode };
|
||||
const feature = { id: 'test', planningMode: mode };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result.length).toBeGreaterThan(100);
|
||||
}
|
||||
});
|
||||
|
||||
it("lite prompt should include correct structure", () => {
|
||||
const feature = { id: "test", planningMode: "lite" as const };
|
||||
it('lite prompt should include correct structure', () => {
|
||||
const feature = { id: 'test', planningMode: 'lite' as const };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Goal");
|
||||
expect(result).toContain("Approach");
|
||||
expect(result).toContain("Files to Touch");
|
||||
expect(result).toContain("Tasks");
|
||||
expect(result).toContain("Risks");
|
||||
expect(result).toContain('Goal');
|
||||
expect(result).toContain('Approach');
|
||||
expect(result).toContain('Files to Touch');
|
||||
expect(result).toContain('Tasks');
|
||||
expect(result).toContain('Risks');
|
||||
});
|
||||
|
||||
it("spec prompt should include task format instructions", () => {
|
||||
const feature = { id: "test", planningMode: "spec" as const };
|
||||
it('spec prompt should include task format instructions', () => {
|
||||
const feature = { id: 'test', planningMode: 'spec' as const };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Problem");
|
||||
expect(result).toContain("Solution");
|
||||
expect(result).toContain("Acceptance Criteria");
|
||||
expect(result).toContain("GIVEN-WHEN-THEN");
|
||||
expect(result).toContain("Implementation Tasks");
|
||||
expect(result).toContain("Verification");
|
||||
expect(result).toContain('Problem');
|
||||
expect(result).toContain('Solution');
|
||||
expect(result).toContain('Acceptance Criteria');
|
||||
expect(result).toContain('GIVEN-WHEN-THEN');
|
||||
expect(result).toContain('Implementation Tasks');
|
||||
expect(result).toContain('Verification');
|
||||
});
|
||||
|
||||
it("full prompt should include phases", () => {
|
||||
const feature = { id: "test", planningMode: "full" as const };
|
||||
it('full prompt should include phases', () => {
|
||||
const feature = { id: 'test', planningMode: 'full' as const };
|
||||
const result = getPlanningPromptPrefix(service, feature);
|
||||
expect(result).toContain("Problem Statement");
|
||||
expect(result).toContain("User Story");
|
||||
expect(result).toContain("Technical Context");
|
||||
expect(result).toContain("Non-Goals");
|
||||
expect(result).toContain("Phase 1");
|
||||
expect(result).toContain("Phase 2");
|
||||
expect(result).toContain("Phase 3");
|
||||
expect(result).toContain('Problem Statement');
|
||||
expect(result).toContain('User Story');
|
||||
expect(result).toContain('Technical Context');
|
||||
expect(result).toContain('Non-Goals');
|
||||
expect(result).toContain('Phase 1');
|
||||
expect(result).toContain('Phase 2');
|
||||
expect(result).toContain('Phase 3');
|
||||
});
|
||||
});
|
||||
|
||||
describe("status management", () => {
|
||||
it("should report correct status", () => {
|
||||
describe('status management', () => {
|
||||
it('should report correct status', () => {
|
||||
const status = service.getStatus();
|
||||
expect(status.runningFeatures).toEqual([]);
|
||||
expect(status.isRunning).toBe(false);
|
||||
|
||||
@@ -1,7 +1,7 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { AutoModeService } from "@/services/auto-mode-service.js";
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { AutoModeService } from '@/services/auto-mode-service.js';

describe("auto-mode-service.ts", () => {
describe('auto-mode-service.ts', () => {
let service: AutoModeService;
const mockEvents = {
subscribe: vi.fn(),
@@ -13,29 +13,27 @@ describe("auto-mode-service.ts", () => {
service = new AutoModeService(mockEvents as any);
});

describe("constructor", () => {
it("should initialize with event emitter", () => {
describe('constructor', () => {
it('should initialize with event emitter', () => {
expect(service).toBeDefined();
});
});

describe("startAutoLoop", () => {
it("should throw if auto mode is already running", async () => {
describe('startAutoLoop', () => {
it('should throw if auto mode is already running', async () => {
// Start first loop
const promise1 = service.startAutoLoop("/test/project", 3);
const promise1 = service.startAutoLoop('/test/project', 3);

// Try to start second loop
await expect(
service.startAutoLoop("/test/project", 3)
).rejects.toThrow("already running");
await expect(service.startAutoLoop('/test/project', 3)).rejects.toThrow('already running');

// Cleanup
await service.stopAutoLoop();
await promise1.catch(() => {});
});

it("should emit auto mode start event", async () => {
const promise = service.startAutoLoop("/test/project", 3);
it('should emit auto mode start event', async () => {
const promise = service.startAutoLoop('/test/project', 3);

// Give it time to emit the event
await new Promise((resolve) => setTimeout(resolve, 10));
@@ -43,7 +41,7 @@ describe("auto-mode-service.ts", () => {
expect(mockEvents.emit).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
message: expect.stringContaining("Auto mode started"),
message: expect.stringContaining('Auto mode started'),
})
);

@@ -53,9 +51,9 @@ describe("auto-mode-service.ts", () => {
});
});

describe("stopAutoLoop", () => {
it("should stop the auto loop", async () => {
const promise = service.startAutoLoop("/test/project", 3);
describe('stopAutoLoop', () => {
it('should stop the auto loop', async () => {
const promise = service.startAutoLoop('/test/project', 3);

const runningCount = await service.stopAutoLoop();

@@ -63,7 +61,7 @@ describe("auto-mode-service.ts", () => {
await promise.catch(() => {});
});

it("should return 0 when not running", async () => {
it('should return 0 when not running', async () => {
const runningCount = await service.stopAutoLoop();
expect(runningCount).toBe(0);
});

@@ -1,4 +1,4 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { describe, it, expect } from 'vitest';
|
||||
|
||||
/**
|
||||
* Test the task parsing logic by reimplementing the parsing functions
|
||||
@@ -88,59 +88,59 @@ function parseTasksFromSpec(specContent: string): ParsedTask[] {
|
||||
return tasks;
|
||||
}
|
||||
|
||||
describe("Task Parsing", () => {
|
||||
describe("parseTaskLine", () => {
|
||||
it("should parse task with file path", () => {
|
||||
const line = "- [ ] T001: Create user model | File: src/models/user.ts";
|
||||
describe('Task Parsing', () => {
|
||||
describe('parseTaskLine', () => {
|
||||
it('should parse task with file path', () => {
|
||||
const line = '- [ ] T001: Create user model | File: src/models/user.ts';
|
||||
const result = parseTaskLine(line);
|
||||
expect(result).toEqual({
|
||||
id: "T001",
|
||||
description: "Create user model",
|
||||
filePath: "src/models/user.ts",
|
||||
id: 'T001',
|
||||
description: 'Create user model',
|
||||
filePath: 'src/models/user.ts',
|
||||
phase: undefined,
|
||||
status: "pending",
|
||||
status: 'pending',
|
||||
});
|
||||
});
|
||||
|
||||
it("should parse task without file path", () => {
|
||||
const line = "- [ ] T002: Setup database connection";
|
||||
it('should parse task without file path', () => {
|
||||
const line = '- [ ] T002: Setup database connection';
|
||||
const result = parseTaskLine(line);
|
||||
expect(result).toEqual({
|
||||
id: "T002",
|
||||
description: "Setup database connection",
|
||||
id: 'T002',
|
||||
description: 'Setup database connection',
|
||||
phase: undefined,
|
||||
status: "pending",
|
||||
status: 'pending',
|
||||
});
|
||||
});
|
||||
|
||||
it("should include phase when provided", () => {
|
||||
const line = "- [ ] T003: Write tests | File: tests/user.test.ts";
|
||||
const result = parseTaskLine(line, "Phase 1: Foundation");
|
||||
expect(result?.phase).toBe("Phase 1: Foundation");
|
||||
it('should include phase when provided', () => {
|
||||
const line = '- [ ] T003: Write tests | File: tests/user.test.ts';
|
||||
const result = parseTaskLine(line, 'Phase 1: Foundation');
|
||||
expect(result?.phase).toBe('Phase 1: Foundation');
|
||||
});
|
||||
|
||||
it("should return null for invalid line", () => {
|
||||
expect(parseTaskLine("- [ ] Invalid format")).toBeNull();
|
||||
expect(parseTaskLine("Not a task line")).toBeNull();
|
||||
expect(parseTaskLine("")).toBeNull();
|
||||
it('should return null for invalid line', () => {
|
||||
expect(parseTaskLine('- [ ] Invalid format')).toBeNull();
|
||||
expect(parseTaskLine('Not a task line')).toBeNull();
|
||||
expect(parseTaskLine('')).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle multi-word descriptions", () => {
|
||||
const line = "- [ ] T004: Implement user authentication with JWT tokens | File: src/auth.ts";
|
||||
it('should handle multi-word descriptions', () => {
|
||||
const line = '- [ ] T004: Implement user authentication with JWT tokens | File: src/auth.ts';
|
||||
const result = parseTaskLine(line);
|
||||
expect(result?.description).toBe("Implement user authentication with JWT tokens");
|
||||
expect(result?.description).toBe('Implement user authentication with JWT tokens');
|
||||
});
|
||||
|
||||
it("should trim whitespace from description and file path", () => {
|
||||
const line = "- [ ] T005: Create API endpoint | File: src/routes/api.ts ";
|
||||
it('should trim whitespace from description and file path', () => {
|
||||
const line = '- [ ] T005: Create API endpoint | File: src/routes/api.ts ';
|
||||
const result = parseTaskLine(line);
|
||||
expect(result?.description).toBe("Create API endpoint");
|
||||
expect(result?.filePath).toBe("src/routes/api.ts");
|
||||
expect(result?.description).toBe('Create API endpoint');
|
||||
expect(result?.filePath).toBe('src/routes/api.ts');
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseTasksFromSpec", () => {
|
||||
it("should parse tasks from a tasks code block", () => {
|
||||
describe('parseTasksFromSpec', () => {
|
||||
it('should parse tasks from a tasks code block', () => {
|
||||
const specContent = `
|
||||
## Specification
|
||||
|
||||
@@ -157,12 +157,12 @@ Some notes here.
|
||||
`;
|
||||
const tasks = parseTasksFromSpec(specContent);
|
||||
expect(tasks).toHaveLength(3);
|
||||
expect(tasks[0].id).toBe("T001");
|
||||
expect(tasks[1].id).toBe("T002");
|
||||
expect(tasks[2].id).toBe("T003");
|
||||
expect(tasks[0].id).toBe('T001');
|
||||
expect(tasks[1].id).toBe('T002');
|
||||
expect(tasks[2].id).toBe('T003');
|
||||
});
|
||||
|
||||
it("should parse tasks with phases", () => {
|
||||
it('should parse tasks with phases', () => {
|
||||
const specContent = `
|
||||
\`\`\`tasks
|
||||
## Phase 1: Foundation
|
||||
@@ -179,20 +179,20 @@ Some notes here.
|
||||
`;
|
||||
const tasks = parseTasksFromSpec(specContent);
|
||||
expect(tasks).toHaveLength(5);
|
||||
expect(tasks[0].phase).toBe("Phase 1: Foundation");
|
||||
expect(tasks[1].phase).toBe("Phase 1: Foundation");
|
||||
expect(tasks[2].phase).toBe("Phase 2: Implementation");
|
||||
expect(tasks[3].phase).toBe("Phase 2: Implementation");
|
||||
expect(tasks[4].phase).toBe("Phase 3: Testing");
|
||||
expect(tasks[0].phase).toBe('Phase 1: Foundation');
|
||||
expect(tasks[1].phase).toBe('Phase 1: Foundation');
|
||||
expect(tasks[2].phase).toBe('Phase 2: Implementation');
|
||||
expect(tasks[3].phase).toBe('Phase 2: Implementation');
|
||||
expect(tasks[4].phase).toBe('Phase 3: Testing');
|
||||
});
|
||||
|
||||
it("should return empty array for content without tasks", () => {
|
||||
const specContent = "Just some text without any tasks";
|
||||
it('should return empty array for content without tasks', () => {
|
||||
const specContent = 'Just some text without any tasks';
|
||||
const tasks = parseTasksFromSpec(specContent);
|
||||
expect(tasks).toEqual([]);
|
||||
});
|
||||
|
||||
it("should fallback to finding task lines outside code block", () => {
|
||||
it('should fallback to finding task lines outside code block', () => {
|
||||
const specContent = `
|
||||
## Implementation Plan
|
||||
|
||||
@@ -201,11 +201,11 @@ Some notes here.
|
||||
`;
|
||||
const tasks = parseTasksFromSpec(specContent);
|
||||
expect(tasks).toHaveLength(2);
|
||||
expect(tasks[0].id).toBe("T001");
|
||||
expect(tasks[1].id).toBe("T002");
|
||||
expect(tasks[0].id).toBe('T001');
|
||||
expect(tasks[1].id).toBe('T002');
|
||||
});
|
||||
|
||||
it("should handle empty tasks block", () => {
|
||||
it('should handle empty tasks block', () => {
|
||||
const specContent = `
|
||||
\`\`\`tasks
|
||||
\`\`\`
|
||||
@@ -214,7 +214,7 @@ Some notes here.
|
||||
expect(tasks).toEqual([]);
|
||||
});
|
||||
|
||||
it("should handle mixed valid and invalid lines", () => {
|
||||
it('should handle mixed valid and invalid lines', () => {
|
||||
const specContent = `
|
||||
\`\`\`tasks
|
||||
- [ ] T001: Valid task | File: src/valid.ts
|
||||
@@ -227,7 +227,7 @@ Some other text
|
||||
expect(tasks).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should preserve task order", () => {
|
||||
it('should preserve task order', () => {
|
||||
const specContent = `
|
||||
\`\`\`tasks
|
||||
- [ ] T003: Third
|
||||
@@ -236,12 +236,12 @@ Some other text
|
||||
\`\`\`
|
||||
`;
|
||||
const tasks = parseTasksFromSpec(specContent);
|
||||
expect(tasks[0].id).toBe("T003");
|
||||
expect(tasks[1].id).toBe("T001");
|
||||
expect(tasks[2].id).toBe("T002");
|
||||
expect(tasks[0].id).toBe('T003');
|
||||
expect(tasks[1].id).toBe('T001');
|
||||
expect(tasks[2].id).toBe('T002');
|
||||
});
|
||||
|
||||
it("should handle task IDs with different numbers", () => {
|
||||
it('should handle task IDs with different numbers', () => {
|
||||
const specContent = `
|
||||
\`\`\`tasks
|
||||
- [ ] T001: First
|
||||
@@ -251,14 +251,14 @@ Some other text
|
||||
`;
|
||||
const tasks = parseTasksFromSpec(specContent);
|
||||
expect(tasks).toHaveLength(3);
|
||||
expect(tasks[0].id).toBe("T001");
|
||||
expect(tasks[1].id).toBe("T010");
|
||||
expect(tasks[2].id).toBe("T100");
|
||||
expect(tasks[0].id).toBe('T001');
|
||||
expect(tasks[1].id).toBe('T010');
|
||||
expect(tasks[2].id).toBe('T100');
|
||||
});
|
||||
});
|
||||
|
||||
describe("spec content generation patterns", () => {
|
||||
it("should match the expected lite mode output format", () => {
|
||||
describe('spec content generation patterns', () => {
|
||||
it('should match the expected lite mode output format', () => {
|
||||
const liteModeOutput = `
|
||||
1. **Goal**: Implement user registration
|
||||
2. **Approach**: Create form component, add validation, connect to API
|
||||
@@ -271,12 +271,12 @@ Some other text
|
||||
|
||||
[PLAN_GENERATED] Planning outline complete.
|
||||
`;
|
||||
expect(liteModeOutput).toContain("[PLAN_GENERATED]");
|
||||
expect(liteModeOutput).toContain("Goal");
|
||||
expect(liteModeOutput).toContain("Approach");
|
||||
expect(liteModeOutput).toContain('[PLAN_GENERATED]');
|
||||
expect(liteModeOutput).toContain('Goal');
|
||||
expect(liteModeOutput).toContain('Approach');
|
||||
});
|
||||
|
||||
it("should match the expected spec mode output format", () => {
|
||||
it('should match the expected spec mode output format', () => {
|
||||
const specModeOutput = `
|
||||
1. **Problem**: Users cannot register for accounts
|
||||
|
||||
@@ -300,12 +300,12 @@ Some other text
|
||||
|
||||
[SPEC_GENERATED] Please review the specification above.
|
||||
`;
|
||||
expect(specModeOutput).toContain("[SPEC_GENERATED]");
|
||||
expect(specModeOutput).toContain("```tasks");
|
||||
expect(specModeOutput).toContain("T001");
|
||||
expect(specModeOutput).toContain('[SPEC_GENERATED]');
|
||||
expect(specModeOutput).toContain('```tasks');
|
||||
expect(specModeOutput).toContain('T001');
|
||||
});
|
||||
|
||||
it("should match the expected full mode output format", () => {
|
||||
it('should match the expected full mode output format', () => {
|
||||
const fullModeOutput = `
|
||||
1. **Problem Statement**: Users need ability to create accounts
|
||||
|
||||
@@ -336,10 +336,10 @@ Some other text
|
||||
|
||||
[SPEC_GENERATED] Please review the comprehensive specification above.
|
||||
`;
|
||||
expect(fullModeOutput).toContain("Phase 1");
|
||||
expect(fullModeOutput).toContain("Phase 2");
|
||||
expect(fullModeOutput).toContain("Phase 3");
|
||||
expect(fullModeOutput).toContain("[SPEC_GENERATED]");
|
||||
expect(fullModeOutput).toContain('Phase 1');
|
||||
expect(fullModeOutput).toContain('Phase 2');
|
||||
expect(fullModeOutput).toContain('Phase 3');
|
||||
expect(fullModeOutput).toContain('[SPEC_GENERATED]');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
import { TerminalService, getTerminalService } from "@/services/terminal-service.js";
|
||||
import * as pty from "node-pty";
|
||||
import * as os from "os";
|
||||
import * as fs from "fs";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { TerminalService, getTerminalService } from '@/services/terminal-service.js';
|
||||
import * as pty from 'node-pty';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
|
||||
vi.mock("node-pty");
|
||||
vi.mock("fs");
|
||||
vi.mock("os");
|
||||
vi.mock('node-pty');
|
||||
vi.mock('fs');
|
||||
vi.mock('os');
|
||||
|
||||
describe("terminal-service.ts", () => {
|
||||
describe('terminal-service.ts', () => {
|
||||
let service: TerminalService;
|
||||
let mockPtyProcess: any;
|
||||
|
||||
@@ -26,225 +26,225 @@ describe("terminal-service.ts", () => {
|
||||
};
|
||||
|
||||
vi.mocked(pty.spawn).mockReturnValue(mockPtyProcess);
|
||||
vi.mocked(os.homedir).mockReturnValue("/home/user");
|
||||
vi.mocked(os.platform).mockReturnValue("linux");
|
||||
vi.mocked(os.arch).mockReturnValue("x64");
|
||||
vi.mocked(os.homedir).mockReturnValue('/home/user');
|
||||
vi.mocked(os.platform).mockReturnValue('linux');
|
||||
vi.mocked(os.arch).mockReturnValue('x64');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
service.cleanup();
|
||||
});
|
||||
|
||||
describe("detectShell", () => {
|
||||
it("should detect PowerShell Core on Windows when available", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("win32");
|
||||
describe('detectShell', () => {
|
||||
it('should detect PowerShell Core on Windows when available', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('win32');
|
||||
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
|
||||
return path === "C:\\Program Files\\PowerShell\\7\\pwsh.exe";
|
||||
return path === 'C:\\Program Files\\PowerShell\\7\\pwsh.exe';
|
||||
});
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("C:\\Program Files\\PowerShell\\7\\pwsh.exe");
|
||||
expect(result.shell).toBe('C:\\Program Files\\PowerShell\\7\\pwsh.exe');
|
||||
expect(result.args).toEqual([]);
|
||||
});
|
||||
|
||||
it("should fall back to PowerShell on Windows if Core not available", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("win32");
|
||||
it('should fall back to PowerShell on Windows if Core not available', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('win32');
|
||||
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
|
||||
return path === "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe";
|
||||
return path === 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe';
|
||||
});
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe");
|
||||
expect(result.shell).toBe('C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe');
|
||||
expect(result.args).toEqual([]);
|
||||
});
|
||||
|
||||
it("should fall back to cmd.exe on Windows if no PowerShell", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("win32");
|
||||
it('should fall back to cmd.exe on Windows if no PowerShell', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('win32');
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("cmd.exe");
|
||||
expect(result.shell).toBe('cmd.exe');
|
||||
expect(result.args).toEqual([]);
|
||||
});
|
||||
|
||||
it("should detect user shell on macOS", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("darwin");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/zsh" });
|
||||
it('should detect user shell on macOS', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('darwin');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/zsh' });
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/zsh");
|
||||
expect(result.args).toEqual(["--login"]);
|
||||
expect(result.shell).toBe('/bin/zsh');
|
||||
expect(result.args).toEqual(['--login']);
|
||||
});
|
||||
|
||||
it("should fall back to zsh on macOS if user shell not available", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("darwin");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({});
|
||||
it('should fall back to zsh on macOS if user shell not available', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('darwin');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({});
|
||||
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
|
||||
return path === "/bin/zsh";
|
||||
return path === '/bin/zsh';
|
||||
});
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/zsh");
|
||||
expect(result.args).toEqual(["--login"]);
|
||||
expect(result.shell).toBe('/bin/zsh');
|
||||
expect(result.args).toEqual(['--login']);
|
||||
});
|
||||
|
||||
it("should fall back to bash on macOS if zsh not available", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("darwin");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({});
|
||||
it('should fall back to bash on macOS if zsh not available', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('darwin');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({});
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/bash");
|
||||
expect(result.args).toEqual(["--login"]);
|
||||
expect(result.shell).toBe('/bin/bash');
|
||||
expect(result.args).toEqual(['--login']);
|
||||
});
|
||||
|
||||
it("should detect user shell on Linux", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("linux");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
it('should detect user shell on Linux', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('linux');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/bash");
|
||||
expect(result.args).toEqual(["--login"]);
|
||||
expect(result.shell).toBe('/bin/bash');
|
||||
expect(result.args).toEqual(['--login']);
|
||||
});
|
||||
|
||||
it("should fall back to bash on Linux if user shell not available", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("linux");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({});
|
||||
it('should fall back to bash on Linux if user shell not available', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('linux');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({});
|
||||
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
|
||||
return path === "/bin/bash";
|
||||
return path === '/bin/bash';
|
||||
});
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/bash");
|
||||
expect(result.args).toEqual(["--login"]);
|
||||
expect(result.shell).toBe('/bin/bash');
|
||||
expect(result.args).toEqual(['--login']);
|
||||
});
|
||||
|
||||
it("should fall back to sh on Linux if bash not available", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("linux");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({});
|
||||
it('should fall back to sh on Linux if bash not available', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('linux');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({});
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/sh");
|
||||
expect(result.shell).toBe('/bin/sh');
|
||||
expect(result.args).toEqual([]);
|
||||
});
|
||||
|
||||
it("should detect WSL and use appropriate shell", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("linux");
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
it('should detect WSL and use appropriate shell', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('linux');
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue("Linux version 5.10.0-microsoft-standard-WSL2");
|
||||
vi.mocked(fs.readFileSync).mockReturnValue('Linux version 5.10.0-microsoft-standard-WSL2');
|
||||
|
||||
const result = service.detectShell();
|
||||
|
||||
expect(result.shell).toBe("/bin/bash");
|
||||
expect(result.args).toEqual(["--login"]);
|
||||
expect(result.shell).toBe('/bin/bash');
|
||||
expect(result.args).toEqual(['--login']);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isWSL", () => {
|
||||
it("should return true if /proc/version contains microsoft", () => {
|
||||
describe('isWSL', () => {
|
||||
it('should return true if /proc/version contains microsoft', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue("Linux version 5.10.0-microsoft-standard-WSL2");
|
||||
vi.mocked(fs.readFileSync).mockReturnValue('Linux version 5.10.0-microsoft-standard-WSL2');
|
||||
|
||||
expect(service.isWSL()).toBe(true);
|
||||
});
|
||||
|
||||
it("should return true if /proc/version contains wsl", () => {
|
||||
it('should return true if /proc/version contains wsl', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue("Linux version 5.10.0-wsl2");
|
||||
vi.mocked(fs.readFileSync).mockReturnValue('Linux version 5.10.0-wsl2');
|
||||
|
||||
expect(service.isWSL()).toBe(true);
|
||||
});
|
||||
|
||||
it("should return true if WSL_DISTRO_NAME is set", () => {
|
||||
it('should return true if WSL_DISTRO_NAME is set', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ WSL_DISTRO_NAME: "Ubuntu" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ WSL_DISTRO_NAME: 'Ubuntu' });
|
||||
|
||||
expect(service.isWSL()).toBe(true);
|
||||
});
|
||||
|
||||
it("should return true if WSLENV is set", () => {
|
||||
it('should return true if WSLENV is set', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ WSLENV: "PATH/l" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ WSLENV: 'PATH/l' });
|
||||
|
||||
expect(service.isWSL()).toBe(true);
|
||||
});
|
||||
|
||||
it("should return false if not in WSL", () => {
|
||||
it('should return false if not in WSL', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({});
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({});
|
||||
|
||||
expect(service.isWSL()).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false if error reading /proc/version", () => {
|
||||
it('should return false if error reading /proc/version', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockImplementation(() => {
|
||||
throw new Error("Permission denied");
|
||||
throw new Error('Permission denied');
|
||||
});
|
||||
|
||||
expect(service.isWSL()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPlatformInfo", () => {
|
||||
it("should return platform information", () => {
|
||||
vi.mocked(os.platform).mockReturnValue("linux");
|
||||
vi.mocked(os.arch).mockReturnValue("x64");
|
||||
describe('getPlatformInfo', () => {
|
||||
it('should return platform information', () => {
|
||||
vi.mocked(os.platform).mockReturnValue('linux');
|
||||
vi.mocked(os.arch).mockReturnValue('x64');
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const info = service.getPlatformInfo();
|
||||
|
||||
expect(info.platform).toBe("linux");
|
||||
expect(info.arch).toBe("x64");
|
||||
expect(info.defaultShell).toBe("/bin/bash");
|
||||
expect(typeof info.isWSL).toBe("boolean");
|
||||
expect(info.platform).toBe('linux');
|
||||
expect(info.arch).toBe('x64');
|
||||
expect(info.defaultShell).toBe('/bin/bash');
|
||||
expect(typeof info.isWSL).toBe('boolean');
|
||||
});
|
||||
});
|
||||
|
||||
describe("createSession", () => {
|
||||
it("should create a new terminal session", () => {
|
||||
describe('createSession', () => {
|
||||
it('should create a new terminal session', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession({
|
||||
cwd: "/test/dir",
|
||||
cwd: '/test/dir',
|
||||
cols: 100,
|
||||
rows: 30,
|
||||
});
|
||||
|
||||
expect(session.id).toMatch(/^term-/);
|
||||
expect(session.cwd).toBe("/test/dir");
|
||||
expect(session.shell).toBe("/bin/bash");
|
||||
expect(session.cwd).toBe('/test/dir');
|
||||
expect(session.shell).toBe('/bin/bash');
|
||||
expect(pty.spawn).toHaveBeenCalledWith(
|
||||
"/bin/bash",
|
||||
["--login"],
|
||||
'/bin/bash',
|
||||
['--login'],
|
||||
expect.objectContaining({
|
||||
cwd: "/test/dir",
|
||||
cwd: '/test/dir',
|
||||
cols: 100,
|
||||
rows: 30,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should use default cols and rows if not provided", () => {
|
||||
it('should use default cols and rows if not provided', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
service.createSession();
|
||||
|
||||
@@ -258,61 +258,61 @@ describe("terminal-service.ts", () => {
|
||||
);
|
||||
});
|
||||
|
||||
it("should fall back to home directory if cwd does not exist", () => {
|
||||
it('should fall back to home directory if cwd does not exist', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockImplementation(() => {
|
||||
throw new Error("ENOENT");
|
||||
throw new Error('ENOENT');
|
||||
});
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession({
|
||||
cwd: "/nonexistent",
|
||||
cwd: '/nonexistent',
|
||||
});
|
||||
|
||||
expect(session.cwd).toBe("/home/user");
|
||||
expect(session.cwd).toBe('/home/user');
|
||||
});
|
||||
|
||||
it("should fall back to home directory if cwd is not a directory", () => {
|
||||
it('should fall back to home directory if cwd is not a directory', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => false } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession({
|
||||
cwd: "/file.txt",
|
||||
cwd: '/file.txt',
|
||||
});
|
||||
|
||||
expect(session.cwd).toBe("/home/user");
|
||||
expect(session.cwd).toBe('/home/user');
|
||||
});
|
||||
|
||||
it("should fix double slashes in path", () => {
|
||||
it('should fix double slashes in path', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession({
|
||||
cwd: "//test/dir",
|
||||
cwd: '//test/dir',
|
||||
});
|
||||
|
||||
expect(session.cwd).toBe("/test/dir");
|
||||
expect(session.cwd).toBe('/test/dir');
|
||||
});
|
||||
|
||||
it("should preserve WSL UNC paths", () => {
|
||||
it('should preserve WSL UNC paths', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession({
|
||||
cwd: "//wsl$/Ubuntu/home",
|
||||
cwd: '//wsl$/Ubuntu/home',
|
||||
});
|
||||
|
||||
expect(session.cwd).toBe("//wsl$/Ubuntu/home");
|
||||
expect(session.cwd).toBe('//wsl$/Ubuntu/home');
|
||||
});
|
||||
|
||||
it("should handle data events from PTY", () => {
|
||||
it('should handle data events from PTY', () => {
|
||||
vi.useFakeTimers();
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const dataCallback = vi.fn();
|
||||
service.onData(dataCallback);
|
||||
@@ -321,7 +321,7 @@ describe("terminal-service.ts", () => {
|
||||
|
||||
// Simulate data event
|
||||
const onDataHandler = mockPtyProcess.onData.mock.calls[0][0];
|
||||
onDataHandler("test data");
|
||||
onDataHandler('test data');
|
||||
|
||||
// Wait for throttled output
|
||||
vi.advanceTimersByTime(20);
|
||||
@@ -331,10 +331,10 @@ describe("terminal-service.ts", () => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it("should handle exit events from PTY", () => {
|
||||
it('should handle exit events from PTY', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const exitCallback = vi.fn();
|
||||
service.onExit(exitCallback);
|
||||
@@ -350,32 +350,32 @@ describe("terminal-service.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("write", () => {
|
||||
it("should write data to existing session", () => {
|
||||
describe('write', () => {
|
||||
it('should write data to existing session', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession();
|
||||
const result = service.write(session.id, "ls\n");
|
||||
const result = service.write(session.id, 'ls\n');
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockPtyProcess.write).toHaveBeenCalledWith("ls\n");
|
||||
expect(mockPtyProcess.write).toHaveBeenCalledWith('ls\n');
|
||||
});
|
||||
|
||||
it("should return false for non-existent session", () => {
|
||||
const result = service.write("nonexistent", "data");
|
||||
it('should return false for non-existent session', () => {
|
||||
const result = service.write('nonexistent', 'data');
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(mockPtyProcess.write).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("resize", () => {
|
||||
it("should resize existing session", () => {
|
||||
describe('resize', () => {
|
||||
it('should resize existing session', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession();
|
||||
const result = service.resize(session.id, 120, 40);
|
||||
@@ -384,19 +384,19 @@ describe("terminal-service.ts", () => {
|
||||
expect(mockPtyProcess.resize).toHaveBeenCalledWith(120, 40);
|
||||
});
|
||||
|
||||
it("should return false for non-existent session", () => {
|
||||
const result = service.resize("nonexistent", 120, 40);
|
||||
it('should return false for non-existent session', () => {
|
||||
const result = service.resize('nonexistent', 120, 40);
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(mockPtyProcess.resize).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should handle resize errors", () => {
|
||||
it('should handle resize errors', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
mockPtyProcess.resize.mockImplementation(() => {
|
||||
throw new Error("Resize failed");
|
||||
throw new Error('Resize failed');
|
||||
});
|
||||
|
||||
const session = service.createSession();
|
||||
@@ -406,40 +406,40 @@ describe("terminal-service.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("killSession", () => {
|
||||
it("should kill existing session", () => {
|
||||
describe('killSession', () => {
|
||||
it('should kill existing session', () => {
|
||||
vi.useFakeTimers();
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession();
|
||||
const result = service.killSession(session.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockPtyProcess.kill).toHaveBeenCalledWith("SIGTERM");
|
||||
expect(mockPtyProcess.kill).toHaveBeenCalledWith('SIGTERM');
|
||||
|
||||
// Session is removed after SIGKILL timeout (1 second)
|
||||
vi.advanceTimersByTime(1000);
|
||||
|
||||
expect(mockPtyProcess.kill).toHaveBeenCalledWith("SIGKILL");
|
||||
expect(mockPtyProcess.kill).toHaveBeenCalledWith('SIGKILL');
|
||||
expect(service.getSession(session.id)).toBeUndefined();
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it("should return false for non-existent session", () => {
|
||||
const result = service.killSession("nonexistent");
|
||||
it('should return false for non-existent session', () => {
|
||||
const result = service.killSession('nonexistent');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle kill errors", () => {
|
||||
it('should handle kill errors', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
mockPtyProcess.kill.mockImplementation(() => {
|
||||
throw new Error("Kill failed");
|
||||
throw new Error('Kill failed');
|
||||
});
|
||||
|
||||
const session = service.createSession();
|
||||
@@ -449,11 +449,11 @@ describe("terminal-service.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("getSession", () => {
|
||||
it("should return existing session", () => {
|
||||
describe('getSession', () => {
|
||||
it('should return existing session', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession();
|
||||
const retrieved = service.getSession(session.id);
|
||||
@@ -461,84 +461,84 @@ describe("terminal-service.ts", () => {
|
||||
expect(retrieved).toBe(session);
|
||||
});
|
||||
|
||||
it("should return undefined for non-existent session", () => {
|
||||
const retrieved = service.getSession("nonexistent");
|
||||
it('should return undefined for non-existent session', () => {
|
||||
const retrieved = service.getSession('nonexistent');
|
||||
|
||||
expect(retrieved).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("getScrollback", () => {
|
||||
it("should return scrollback buffer for existing session", () => {
|
||||
describe('getScrollback', () => {
|
||||
it('should return scrollback buffer for existing session', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session = service.createSession();
|
||||
session.scrollbackBuffer = "test scrollback";
|
||||
session.scrollbackBuffer = 'test scrollback';
|
||||
|
||||
const scrollback = service.getScrollback(session.id);
|
||||
|
||||
expect(scrollback).toBe("test scrollback");
|
||||
expect(scrollback).toBe('test scrollback');
|
||||
});
|
||||
|
||||
it("should return null for non-existent session", () => {
|
||||
const scrollback = service.getScrollback("nonexistent");
|
||||
it('should return null for non-existent session', () => {
|
||||
const scrollback = service.getScrollback('nonexistent');
|
||||
|
||||
expect(scrollback).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAllSessions", () => {
|
||||
it("should return all active sessions", () => {
|
||||
describe('getAllSessions', () => {
|
||||
it('should return all active sessions', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session1 = service.createSession({ cwd: "/dir1" });
|
||||
const session2 = service.createSession({ cwd: "/dir2" });
|
||||
const session1 = service.createSession({ cwd: '/dir1' });
|
||||
const session2 = service.createSession({ cwd: '/dir2' });
|
||||
|
||||
const sessions = service.getAllSessions();
|
||||
|
||||
expect(sessions).toHaveLength(2);
|
||||
expect(sessions[0].id).toBe(session1.id);
|
||||
expect(sessions[1].id).toBe(session2.id);
|
||||
expect(sessions[0].cwd).toBe("/dir1");
|
||||
expect(sessions[1].cwd).toBe("/dir2");
|
||||
expect(sessions[0].cwd).toBe('/dir1');
|
||||
expect(sessions[1].cwd).toBe('/dir2');
|
||||
});
|
||||
|
||||
it("should return empty array if no sessions", () => {
|
||||
it('should return empty array if no sessions', () => {
|
||||
const sessions = service.getAllSessions();
|
||||
|
||||
expect(sessions).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("onData and onExit", () => {
|
||||
it("should allow subscribing and unsubscribing from data events", () => {
|
||||
describe('onData and onExit', () => {
|
||||
it('should allow subscribing and unsubscribing from data events', () => {
|
||||
const callback = vi.fn();
|
||||
const unsubscribe = service.onData(callback);
|
||||
|
||||
expect(typeof unsubscribe).toBe("function");
|
||||
expect(typeof unsubscribe).toBe('function');
|
||||
|
||||
unsubscribe();
|
||||
});
|
||||
|
||||
it("should allow subscribing and unsubscribing from exit events", () => {
|
||||
it('should allow subscribing and unsubscribing from exit events', () => {
|
||||
const callback = vi.fn();
|
||||
const unsubscribe = service.onExit(callback);
|
||||
|
||||
expect(typeof unsubscribe).toBe("function");
|
||||
expect(typeof unsubscribe).toBe('function');
|
||||
|
||||
unsubscribe();
|
||||
});
|
||||
});
|
||||
|
||||
describe("cleanup", () => {
|
||||
it("should clean up all sessions", () => {
|
||||
describe('cleanup', () => {
|
||||
it('should clean up all sessions', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
|
||||
const session1 = service.createSession();
|
||||
const session2 = service.createSession();
|
||||
@@ -550,12 +550,12 @@ describe("terminal-service.ts", () => {
|
||||
expect(service.getAllSessions()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should handle cleanup errors gracefully", () => {
|
||||
it('should handle cleanup errors gracefully', () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
|
||||
vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
|
||||
vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
|
||||
mockPtyProcess.kill.mockImplementation(() => {
|
||||
throw new Error("Kill failed");
|
||||
throw new Error('Kill failed');
|
||||
});
|
||||
|
||||
service.createSession();
|
||||
@@ -564,8 +564,8 @@ describe("terminal-service.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("getTerminalService", () => {
|
||||
it("should return singleton instance", () => {
|
||||
describe('getTerminalService', () => {
|
||||
it('should return singleton instance', () => {
|
||||
const instance1 = getTerminalService();
|
||||
const instance2 = getTerminalService();
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ export async function waitFor(
const start = Date.now();
while (!condition()) {
if (Date.now() - start > timeout) {
throw new Error("Timeout waiting for condition");
throw new Error('Timeout waiting for condition');
}
await new Promise((resolve) => setTimeout(resolve, interval));
}

@@ -3,10 +3,10 @@
* Provides reusable mocks for common dependencies
*/

import { vi } from "vitest";
import type { ChildProcess } from "child_process";
import { EventEmitter } from "events";
import type { Readable } from "stream";
import { vi } from 'vitest';
import type { ChildProcess } from 'child_process';
import { EventEmitter } from 'events';
import type { Readable } from 'stream';

/**
* Mock child_process.spawn for subprocess tests
@@ -31,19 +31,19 @@ export function createMockChildProcess(options: {
process.nextTick(() => {
// Emit stdout lines
for (const line of stdout) {
mockProcess.stdout.emit("data", Buffer.from(line + "\n"));
mockProcess.stdout.emit('data', Buffer.from(line + '\n'));
}

// Emit stderr lines
for (const line of stderr) {
mockProcess.stderr.emit("data", Buffer.from(line + "\n"));
mockProcess.stderr.emit('data', Buffer.from(line + '\n'));
}

// Emit exit or error
if (shouldError) {
mockProcess.emit("error", new Error("Process error"));
mockProcess.emit('error', new Error('Process error'));
} else {
mockProcess.emit("exit", exitCode);
mockProcess.emit('exit', exitCode);
}
});

@@ -90,9 +90,9 @@ const {
clearHistory, // Clear conversation
error, // Error state
} = useElectronAgent({
sessionId: "project_xyz",
workingDirectory: "/path/to/project",
onToolUse: (tool) => console.log("Using:", tool),
sessionId: 'project_xyz',
workingDirectory: '/path/to/project',
onToolUse: (tool) => console.log('Using:', tool),
});
```

@@ -160,7 +160,7 @@ Each session file contains:
Session IDs are generated from project paths:

```typescript
const sessionId = `project_${projectPath.replace(/[^a-zA-Z0-9]/g, "_")}`;
const sessionId = `project_${projectPath.replace(/[^a-zA-Z0-9]/g, '_')}`;
```
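For illustration, this is what the replacement rule above produces for a hypothetical project path (the path is invented; only the expression comes from the snippet above):

```typescript
// Hypothetical input, shown only to illustrate the ID scheme above.
const projectPath = '/Users/alice/my-app';
const sessionId = `project_${projectPath.replace(/[^a-zA-Z0-9]/g, '_')}`;
// => "project__Users_alice_my_app" (every non-alphanumeric character becomes "_")
```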

This ensures:

@@ -7,24 +7,28 @@ The Automaker Agent Chat now supports multiple concurrent sessions, allowing you
|
||||
## Features
|
||||
|
||||
### ✨ Multiple Sessions
|
||||
|
||||
- Create unlimited agent sessions per project
|
||||
- Each session has its own conversation history
|
||||
- Switch between sessions instantly
|
||||
- Sessions persist across app restarts
|
||||
|
||||
### 📋 Session Organization
|
||||
|
||||
- Custom names for easy identification
|
||||
- Last message preview
|
||||
- Message count tracking
|
||||
- Sort by most recently updated
|
||||
|
||||
### 🗄️ Archive & Delete
|
||||
|
||||
- Archive old sessions to declutter
|
||||
- Unarchive when needed
|
||||
- Permanently delete sessions
|
||||
- Confirm before destructive actions
|
||||
|
||||
### 💾 Automatic Persistence
|
||||
|
||||
- All sessions auto-save to disk
|
||||
- Survive Next.js restarts
|
||||
- Survive Electron app restarts
|
||||
@@ -67,6 +71,7 @@ Click the panel icon in the header to show/hide the session manager.
|
||||
4. The new session is immediately active
|
||||
|
||||
**Example session names:**
|
||||
|
||||
- "Feature: Dark Mode"
|
||||
- "Bug: Login redirect"
|
||||
- "Refactor: API layer"
|
||||
@@ -93,6 +98,7 @@ Click the **"Clear"** button in the chat header to delete all messages from the
|
||||
3. Toggle **"Show Archived"** to view archived sessions
|
||||
|
||||
**When to archive:**
|
||||
|
||||
- Completed features
|
||||
- Resolved bugs
|
||||
- Old experiments
|
||||
@@ -117,16 +123,19 @@ Click the **"Clear"** button in the chat header to delete all messages from the
|
||||
Sessions are stored in your user data directory:
|
||||
|
||||
**macOS:**
|
||||
|
||||
```
|
||||
~/Library/Application Support/automaker/agent-sessions/
|
||||
```
|
||||
|
||||
**Windows:**
|
||||
|
||||
```
|
||||
%APPDATA%/automaker/agent-sessions/
|
||||
```
|
||||
|
||||
**Linux:**
|
||||
|
||||
```
|
||||
~/.config/automaker/agent-sessions/
|
||||
```
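These per-OS paths line up with Electron's standard `userData` directory. A session store would typically resolve the folder along these lines (a minimal sketch; the helper name is assumed and the actual implementation is not part of this diff):

```typescript
// Sketch only: resolving the agent-sessions directory from Electron's userData path.
// app.getPath('userData') maps to the per-OS locations listed above.
import { app } from 'electron';
import * as path from 'path';

function getSessionsDir(): string {
  return path.join(app.getPath('userData'), 'agent-sessions');
}
```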
@@ -215,12 +224,14 @@ Use prefixes to organize sessions by type:
### When to Create Multiple Sessions

**Do create separate sessions for:**

- ✅ Different features
- ✅ Unrelated bugs
- ✅ Experimental work
- ✅ Different contexts or approaches

**Don't create separate sessions for:**

- ❌ Same feature, different iterations
- ❌ Related bug fixes
- ❌ Continuation of previous work
@@ -272,7 +283,7 @@ Use prefixes to organize sessions by type:

## Keyboard Shortcuts

*(Coming soon)*
_(Coming soon)_

- `Cmd/Ctrl + K` - Create new session
- `Cmd/Ctrl + [` - Previous session
@@ -284,11 +295,13 @@ Use prefixes to organize sessions by type:
### Session Not Saving

**Check:**

- Electron has write permissions
- Disk space available
- Check Electron console for errors

**Solution:**

```bash
# macOS - Check permissions
ls -la ~/Library/Application\ Support/automaker/
@@ -300,11 +313,13 @@ chmod -R u+w ~/Library/Application\ Support/automaker/
### Can't Switch Sessions

**Check:**

- Session is not archived
- No errors in console
- Agent is not currently processing

**Solution:**

- Wait for current message to complete
- Check for error messages
- Try clearing and reloading
@@ -312,11 +327,13 @@ chmod -R u+w ~/Library/Application\ Support/automaker/
### Session Disappeared

**Check:**

- Not filtered by archive status
- Not accidentally deleted
- Check backup files

**Recovery:**

- Toggle "Show Archived"
- Check filesystem for `.json` files
- Restore from backup if available
@@ -326,15 +343,17 @@ chmod -R u+w ~/Library/Application\ Support/automaker/
For developers integrating session management:

### Create Session

```typescript
const result = await window.electronAPI.sessions.create(
"Session Name",
"/project/path",
"/working/directory"
'Session Name',
'/project/path',
'/working/directory'
);
```

### List Sessions

```typescript
const { sessions } = await window.electronAPI.sessions.list(
false // includeArchived
@@ -342,21 +361,20 @@ const { sessions } = await window.electronAPI.sessions.list(
```

### Update Session

```typescript
await window.electronAPI.sessions.update(
sessionId,
"New Name",
["tag1", "tag2"]
);
await window.electronAPI.sessions.update(sessionId, 'New Name', ['tag1', 'tag2']);
```

### Archive/Unarchive

```typescript
await window.electronAPI.sessions.archive(sessionId);
await window.electronAPI.sessions.unarchive(sessionId);
```

### Delete Session

```typescript
await window.electronAPI.sessions.delete(sessionId);
```
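Taken together, the calls above cover the whole session lifecycle. A renderer-side flow might chain them like this (a sketch using only the calls shown above; the `sessionId` is assumed to come from the created session, since the exact return shape of `create()` is not shown here):

```typescript
// Sketch: chaining the session operations documented above (error handling omitted).
async function demoSessionLifecycle(sessionId: string) {
  // Rename and tag the session
  await window.electronAPI.sessions.update(sessionId, 'Feature: Dark Mode', ['feature']);

  // Archive when the work is done, or bring it back later
  await window.electronAPI.sessions.archive(sessionId);
  await window.electronAPI.sessions.unarchive(sessionId);

  // List sessions, excluding archived ones
  const { sessions } = await window.electronAPI.sessions.list(false);
  console.log('Active sessions:', sessions.length);

  // Permanently remove the session
  await window.electronAPI.sessions.delete(sessionId);
}
```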

@@ -1,111 +1,111 @@
import { defineConfig, globalIgnores } from "eslint/config";
import js from "@eslint/js";
import ts from "@typescript-eslint/eslint-plugin";
import tsParser from "@typescript-eslint/parser";
import { defineConfig, globalIgnores } from 'eslint/config';
import js from '@eslint/js';
import ts from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';

const eslintConfig = defineConfig([
js.configs.recommended,
{
files: ["**/*.mjs", "**/*.cjs"],
files: ['**/*.mjs', '**/*.cjs'],
languageOptions: {
globals: {
console: "readonly",
process: "readonly",
require: "readonly",
__dirname: "readonly",
__filename: "readonly",
console: 'readonly',
process: 'readonly',
require: 'readonly',
__dirname: 'readonly',
__filename: 'readonly',
},
},
},
{
files: ["**/*.ts", "**/*.tsx"],
files: ['**/*.ts', '**/*.tsx'],
languageOptions: {
parser: tsParser,
parserOptions: {
ecmaVersion: "latest",
sourceType: "module",
ecmaVersion: 'latest',
sourceType: 'module',
},
globals: {
// Browser/DOM APIs
window: "readonly",
document: "readonly",
navigator: "readonly",
Navigator: "readonly",
localStorage: "readonly",
sessionStorage: "readonly",
fetch: "readonly",
WebSocket: "readonly",
File: "readonly",
FileList: "readonly",
FileReader: "readonly",
Blob: "readonly",
atob: "readonly",
crypto: "readonly",
prompt: "readonly",
confirm: "readonly",
getComputedStyle: "readonly",
requestAnimationFrame: "readonly",
window: 'readonly',
document: 'readonly',
navigator: 'readonly',
Navigator: 'readonly',
localStorage: 'readonly',
sessionStorage: 'readonly',
fetch: 'readonly',
WebSocket: 'readonly',
File: 'readonly',
FileList: 'readonly',
FileReader: 'readonly',
Blob: 'readonly',
atob: 'readonly',
crypto: 'readonly',
prompt: 'readonly',
confirm: 'readonly',
getComputedStyle: 'readonly',
requestAnimationFrame: 'readonly',
// DOM Element Types
HTMLElement: "readonly",
HTMLInputElement: "readonly",
HTMLDivElement: "readonly",
HTMLButtonElement: "readonly",
HTMLSpanElement: "readonly",
HTMLTextAreaElement: "readonly",
HTMLHeadingElement: "readonly",
HTMLParagraphElement: "readonly",
HTMLImageElement: "readonly",
Element: "readonly",
HTMLElement: 'readonly',
HTMLInputElement: 'readonly',
HTMLDivElement: 'readonly',
HTMLButtonElement: 'readonly',
HTMLSpanElement: 'readonly',
HTMLTextAreaElement: 'readonly',
HTMLHeadingElement: 'readonly',
HTMLParagraphElement: 'readonly',
HTMLImageElement: 'readonly',
Element: 'readonly',
// Event Types
Event: "readonly",
KeyboardEvent: "readonly",
DragEvent: "readonly",
PointerEvent: "readonly",
CustomEvent: "readonly",
ClipboardEvent: "readonly",
WheelEvent: "readonly",
DataTransfer: "readonly",
Event: 'readonly',
KeyboardEvent: 'readonly',
DragEvent: 'readonly',
PointerEvent: 'readonly',
CustomEvent: 'readonly',
ClipboardEvent: 'readonly',
WheelEvent: 'readonly',
DataTransfer: 'readonly',
// Web APIs
ResizeObserver: "readonly",
AbortSignal: "readonly",
Audio: "readonly",
ScrollBehavior: "readonly",
ResizeObserver: 'readonly',
AbortSignal: 'readonly',
Audio: 'readonly',
ScrollBehavior: 'readonly',
// Timers
setTimeout: "readonly",
setInterval: "readonly",
clearTimeout: "readonly",
clearInterval: "readonly",
setTimeout: 'readonly',
setInterval: 'readonly',
clearTimeout: 'readonly',
clearInterval: 'readonly',
// Node.js (for scripts and Electron)
process: "readonly",
require: "readonly",
__dirname: "readonly",
__filename: "readonly",
NodeJS: "readonly",
process: 'readonly',
require: 'readonly',
__dirname: 'readonly',
__filename: 'readonly',
NodeJS: 'readonly',
// React
React: "readonly",
JSX: "readonly",
React: 'readonly',
JSX: 'readonly',
// Electron
Electron: "readonly",
Electron: 'readonly',
// Console
console: "readonly",
console: 'readonly',
},
},
plugins: {
"@typescript-eslint": ts,
'@typescript-eslint': ts,
},
rules: {
...ts.configs.recommended.rules,
"@typescript-eslint/no-unused-vars": ["warn", { argsIgnorePattern: "^_" }],
"@typescript-eslint/no-explicit-any": "warn",
'@typescript-eslint/no-unused-vars': ['warn', { argsIgnorePattern: '^_' }],
'@typescript-eslint/no-explicit-any': 'warn',
},
},
globalIgnores([
"dist/**",
"dist-electron/**",
"node_modules/**",
"server-bundle/**",
"release/**",
"src/routeTree.gen.ts",
'dist/**',
'dist-electron/**',
'node_modules/**',
'server-bundle/**',
'release/**',
'src/routeTree.gen.ts',
]),
]);

@@ -8,7 +8,7 @@
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
<script>
// Prevent theme flash - apply stored theme before React hydrates
(function() {
(function () {
try {
const stored = localStorage.getItem('automaker-storage');
if (stored) {
@@ -17,7 +17,10 @@
if (theme && theme !== 'system' && theme !== 'light') {
// Apply the actual theme class (dark, retro, dracula, nord, etc.)
document.documentElement.classList.add(theme);
} else if (theme === 'system' && window.matchMedia('(prefers-color-scheme: dark)').matches) {
} else if (
theme === 'system' &&
window.matchMedia('(prefers-color-scheme: dark)').matches
) {
document.documentElement.classList.add('dark');
}
}

@@ -27,7 +27,7 @@ const LOCAL_PACKAGES = [
'@automaker/platform',
'@automaker/model-resolver',
'@automaker/dependency-resolver',
'@automaker/git-utils'
'@automaker/git-utils',
];

console.log('🔧 Preparing server for Electron bundling...\n');
@@ -95,13 +95,10 @@ const bundlePkg = {
version: serverPkg.version,
type: 'module',
main: 'dist/index.js',
dependencies
dependencies,
};

writeFileSync(
join(BUNDLE_DIR, 'package.json'),
JSON.stringify(bundlePkg, null, 2)
);
writeFileSync(join(BUNDLE_DIR, 'package.json'), JSON.stringify(bundlePkg, null, 2));

// Step 6: Install production dependencies
console.log('📥 Installing server production dependencies...');
@@ -111,8 +108,8 @@ execSync('npm install --omit=dev', {
env: {
...process.env,
// Prevent npm from using workspace resolution
npm_config_workspace: ''
}
npm_config_workspace: '',
},
});

// Step 7: Rebuild native modules for current architecture
@@ -121,11 +118,13 @@ console.log('🔨 Rebuilding native modules for current architecture...');
try {
execSync('npm rebuild', {
cwd: BUNDLE_DIR,
stdio: 'inherit'
stdio: 'inherit',
});
console.log('✅ Native modules rebuilt successfully');
} catch (error) {
console.warn('⚠️ Warning: Failed to rebuild native modules. Terminal functionality may not work.');
console.warn(
'⚠️ Warning: Failed to rebuild native modules. Terminal functionality may not work.'
);
console.warn(' Error:', error.message);
}

@@ -11,7 +11,7 @@ const path = require('path');

const execAsync = promisify(exec);

exports.default = async function(context) {
exports.default = async function (context) {
const { appOutDir, electronPlatformName, arch, packager } = context;
const electronVersion = packager.config.electronVersion;

@@ -33,19 +33,9 @@ exports.default = async function(context) {
'node_modules'
);
} else if (electronPlatformName === 'win32') {
serverNodeModulesPath = path.join(
appOutDir,
'resources',
'server',
'node_modules'
);
serverNodeModulesPath = path.join(appOutDir, 'resources', 'server', 'node_modules');
} else {
serverNodeModulesPath = path.join(
appOutDir,
'resources',
'server',
'node_modules'
);
serverNodeModulesPath = path.join(appOutDir, 'resources', 'server', 'node_modules');
}

try {

@@ -5,17 +5,17 @@
* Creates the necessary test fixture directories and files before running Playwright tests
*/

import * as fs from "fs";
import * as path from "path";
import { fileURLToPath } from "url";
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Resolve workspace root (apps/ui/scripts -> workspace root)
const WORKSPACE_ROOT = path.resolve(__dirname, "../../..");
const FIXTURE_PATH = path.join(WORKSPACE_ROOT, "test/fixtures/projectA");
const SPEC_FILE_PATH = path.join(FIXTURE_PATH, ".automaker/app_spec.txt");
const WORKSPACE_ROOT = path.resolve(__dirname, '../../..');
const FIXTURE_PATH = path.join(WORKSPACE_ROOT, 'test/fixtures/projectA');
const SPEC_FILE_PATH = path.join(FIXTURE_PATH, '.automaker/app_spec.txt');

const SPEC_CONTENT = `<app_spec>
<name>Test Project A</name>
@@ -28,7 +28,7 @@ const SPEC_CONTENT = `<app_spec>
`;

function setupFixtures() {
console.log("Setting up E2E test fixtures...");
console.log('Setting up E2E test fixtures...');
console.log(`Workspace root: ${WORKSPACE_ROOT}`);
console.log(`Fixture path: ${FIXTURE_PATH}`);

@@ -43,7 +43,7 @@ function setupFixtures() {
fs.writeFileSync(SPEC_FILE_PATH, SPEC_CONTENT);
console.log(`Created fixture file: ${SPEC_FILE_PATH}`);

console.log("E2E test fixtures setup complete!");
console.log('E2E test fixtures setup complete!');
}

setupFixtures();

@@ -1,9 +1,8 @@
import * as React from 'react';
import { Check, ChevronsUpDown, LucideIcon } from 'lucide-react';

import * as React from "react";
import { Check, ChevronsUpDown, LucideIcon } from "lucide-react";

import { cn } from "@/lib/utils";
import { Button } from "@/components/ui/button";
import { cn } from '@/lib/utils';
import { Button } from '@/components/ui/button';
import {
Command,
CommandEmpty,
@@ -11,12 +10,8 @@ import {
CommandInput,
CommandItem,
CommandList,
} from "@/components/ui/command";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
} from '@/components/ui/command';
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover';

export interface AutocompleteOption {
value: string;
@@ -38,12 +33,12 @@ interface AutocompleteProps {
icon?: LucideIcon;
allowCreate?: boolean;
createLabel?: (value: string) => string;
"data-testid"?: string;
'data-testid'?: string;
itemTestIdPrefix?: string;
}

function normalizeOption(opt: string | AutocompleteOption): AutocompleteOption {
if (typeof opt === "string") {
if (typeof opt === 'string') {
return { value: opt, label: opt };
}
return { ...opt, label: opt.label ?? opt.value };
@@ -53,27 +48,24 @@ export function Autocomplete({
value,
onChange,
options,
placeholder = "Select an option...",
searchPlaceholder = "Search...",
emptyMessage = "No results found.",
placeholder = 'Select an option...',
searchPlaceholder = 'Search...',
emptyMessage = 'No results found.',
className,
disabled = false,
error = false,
icon: Icon,
allowCreate = false,
createLabel = (v) => `Create "${v}"`,
"data-testid": testId,
itemTestIdPrefix = "option",
'data-testid': testId,
itemTestIdPrefix = 'option',
}: AutocompleteProps) {
const [open, setOpen] = React.useState(false);
const [inputValue, setInputValue] = React.useState("");
const [inputValue, setInputValue] = React.useState('');
const [triggerWidth, setTriggerWidth] = React.useState<number>(0);
const triggerRef = React.useRef<HTMLButtonElement>(null);

const normalizedOptions = React.useMemo(
() => options.map(normalizeOption),
[options]
);
const normalizedOptions = React.useMemo(() => options.map(normalizeOption), [options]);

// Update trigger width when component mounts or value changes
React.useEffect(() => {
@@ -98,9 +90,7 @@ export function Autocomplete({
if (!inputValue) return normalizedOptions;
const lower = inputValue.toLowerCase();
return normalizedOptions.filter(
(opt) =>
opt.value.toLowerCase().includes(lower) ||
opt.label?.toLowerCase().includes(lower)
(opt) => opt.value.toLowerCase().includes(lower) || opt.label?.toLowerCase().includes(lower)
);
}, [normalizedOptions, inputValue]);

@@ -108,9 +98,7 @@ export function Autocomplete({
const isNewValue =
allowCreate &&
inputValue.trim() &&
!normalizedOptions.some(
(opt) => opt.value.toLowerCase() === inputValue.toLowerCase()
);
!normalizedOptions.some((opt) => opt.value.toLowerCase() === inputValue.toLowerCase());

// Get display value
const displayValue = React.useMemo(() => {
@@ -129,17 +117,15 @@ export function Autocomplete({
aria-expanded={open}
disabled={disabled}
className={cn(
"w-full justify-between",
Icon && "font-mono text-sm",
error && "border-destructive focus-visible:ring-destructive",
'w-full justify-between',
Icon && 'font-mono text-sm',
error && 'border-destructive focus-visible:ring-destructive',
className
)}
data-testid={testId}
>
<span className="flex items-center gap-2 truncate">
{Icon && (
<Icon className="w-4 h-4 shrink-0 text-muted-foreground" />
)}
{Icon && <Icon className="w-4 h-4 shrink-0 text-muted-foreground" />}
{displayValue || placeholder}
</span>
<ChevronsUpDown className="opacity-50 shrink-0" />
@@ -163,8 +149,7 @@ export function Autocomplete({
<CommandEmpty>
{isNewValue ? (
<div className="py-2 px-3 text-sm">
Press enter to create{" "}
<code className="bg-muted px-1 rounded">{inputValue}</code>
Press enter to create <code className="bg-muted px-1 rounded">{inputValue}</code>
</div>
) : (
emptyMessage
@@ -177,7 +162,7 @@ export function Autocomplete({
value={inputValue}
onSelect={() => {
onChange(inputValue);
setInputValue("");
setInputValue('');
setOpen(false);
}}
className="text-[var(--status-success)]"
@@ -185,9 +170,7 @@ export function Autocomplete({
>
{Icon && <Icon className="w-4 h-4 mr-2" />}
{createLabel(inputValue)}
<span className="ml-auto text-xs text-muted-foreground">
(new)
</span>
<span className="ml-auto text-xs text-muted-foreground">(new)</span>
</CommandItem>
)}
{filteredOptions.map((option) => (
@@ -195,24 +178,19 @@ export function Autocomplete({
key={option.value}
value={option.value}
onSelect={(currentValue) => {
onChange(currentValue === value ? "" : currentValue);
setInputValue("");
onChange(currentValue === value ? '' : currentValue);
setInputValue('');
setOpen(false);
}}
data-testid={`${itemTestIdPrefix}-${option.value.toLowerCase().replace(/[\s/\\]+/g, "-")}`}
data-testid={`${itemTestIdPrefix}-${option.value.toLowerCase().replace(/[\s/\\]+/g, '-')}`}
>
{Icon && <Icon className="w-4 h-4 mr-2" />}
{option.label}
<Check
className={cn(
"ml-auto",
value === option.value ? "opacity-100" : "opacity-0"
)}
className={cn('ml-auto', value === option.value ? 'opacity-100' : 'opacity-0')}
/>
{option.badge && (
<span className="ml-2 text-xs text-muted-foreground">
({option.badge})
</span>
<span className="ml-2 text-xs text-muted-foreground">({option.badge})</span>
)}
</CommandItem>
))}

Some files were not shown because too many files have changed in this diff