Compare commits

15 commits: `prettier-e…` … `prettier-e…`

| Author | SHA1 | Date |
| --- | --- | --- |
|  | c48ddf462a |  |
|  | 0c10ccd149 |  |
|  | a0a0b1ba6c |  |
|  | 3c72d01f97 |  |
|  | e2b72c0618 |  |
|  | 1e5dcd043a |  |
|  | 312540327f |  |
|  | 74c78d2274 |  |
|  | e1176f337e |  |
|  | 424cea6d8f |  |
|  | 3092c9c9c2 |  |
|  | 3c7f922564 |  |
|  | 12aaaa537b |  |
|  | faff4e06a1 |  |
|  | 5e5c7ed98f |  |
**.github/workflows/promote-to-stable.yaml** (vendored, 66 changes)
```diff
@@ -37,6 +37,16 @@ jobs:
         run: |
           git config --global user.name "github-actions[bot]"
           git config --global user.email "github-actions[bot]@users.noreply.github.com"
+          git config --global url."https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/".insteadOf "https://github.com/"
+
+      - name: Switch to stable branch
+        run: |
+          git checkout stable
+          git pull origin stable
+
+      - name: Merge main into stable
+        run: |
+          git merge origin/main --no-edit

       - name: Install dependencies
         run: npm ci
```
```diff
@@ -73,27 +83,6 @@ jobs:
               ;;
           esac

-          # Check if calculated version already exists (either as NPM package or git tag)
-          while npm view bmad-method@$NEW_VERSION version >/dev/null 2>&1 || git ls-remote --tags origin | grep -q "refs/tags/v$NEW_VERSION"; do
-            echo "Version $NEW_VERSION already exists, incrementing..."
-            IFS='.' read -ra NEW_VERSION_PARTS <<< "$NEW_VERSION"
-            NEW_MAJOR=${NEW_VERSION_PARTS[0]}
-            NEW_MINOR=${NEW_VERSION_PARTS[1]}
-            NEW_PATCH=${NEW_VERSION_PARTS[2]}
-
-            case "${{ github.event.inputs.version_bump }}" in
-              "major")
-                NEW_VERSION="$((NEW_MAJOR + 1)).0.0"
-                ;;
-              "minor")
-                NEW_VERSION="$NEW_MAJOR.$((NEW_MINOR + 1)).0"
-                ;;
-              "patch")
-                NEW_VERSION="$NEW_MAJOR.$NEW_MINOR.$((NEW_PATCH + 1))"
-                ;;
-            esac
-          done
-
           echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT
           echo "Promoting from $CURRENT_VERSION to $NEW_VERSION"
```
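The deleted shell loop implemented "bump until the version is unused": recompute the semver bump and retry while either an NPM release or a git tag already claims the candidate. A minimal JavaScript sketch of that logic (the `versionExists` probe is a hypothetical stand-in for the removed `npm view` / `git ls-remote --tags` checks):

```js
// Sketch of the removed bump-until-free logic, assuming a hypothetical
// versionExists(v) probe in place of the npm/git shell checks.
function nextFreeVersion(current, bump, versionExists) {
  let [major, minor, patch] = current.split('.').map(Number);
  const next = () => {
    if (bump === 'major') { major += 1; minor = 0; patch = 0; }
    else if (bump === 'minor') { minor += 1; patch = 0; }
    else { patch += 1; }
    return `${major}.${minor}.${patch}`;
  };
  let candidate = next();
  while (versionExists(candidate)) candidate = next();
  return candidate;
}

// With 1.2.3 published and v1.2.4 already tagged:
// nextFreeVersion('1.2.3', 'patch', (v) => ['1.2.3', '1.2.4'].includes(v)) → '1.2.5'
```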
```diff
@@ -111,38 +100,23 @@ jobs:
       - name: Commit stable release
         run: |
           git add .
-          git commit -m "release: promote to stable ${{ steps.version.outputs.new_version }}"
-
-      - name: Create and push stable tag
-        run: |
-          # Create new tag (version check already ensures it doesn't exist)
-          git tag -a "v${{ steps.version.outputs.new_version }}" -m "Stable release v${{ steps.version.outputs.new_version }}"
-
-          # Push the new tag
-          git push origin "v${{ steps.version.outputs.new_version }}"
-
-      - name: Push changes to main
-        run: |
-          git push origin HEAD:main
-
-      - name: Publish to NPM with stable tag
-        env:
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-        run: |
-          # Publish with the stable (latest) tag
-          npm publish --tag latest
-
-          # Also tag the previous beta version as stable if it exists
-          if npm view bmad-method@${{ steps.version.outputs.current_version }} version >/dev/null 2>&1; then
-            npm dist-tag add bmad-method@${{ steps.version.outputs.new_version }} stable || true
-          fi
+          git commit -m "release: promote to stable ${{ steps.version.outputs.new_version }}
+
+          - Promote beta features to stable release
+          - Update version from ${{ steps.version.outputs.current_version }} to ${{ steps.version.outputs.new_version }}
+          - Automated promotion via GitHub Actions"
+
+      - name: Push stable release
+        run: |
+          git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git
+          git push origin stable
+
+      - name: Switch back to main
+        run: git checkout main

       - name: Summary
         run: |
           echo "🎉 Successfully promoted to stable!"
           echo "📦 Version: ${{ steps.version.outputs.new_version }}"
-          echo "🏷️ Git tag: v${{ steps.version.outputs.new_version }}"
-          echo "✅ Published to NPM with 'latest' tag"
-          echo "✅ Users running 'npx bmad-method install' will now get version ${{ steps.version.outputs.new_version }}"
           echo "🚀 The stable release will be automatically published to NPM via semantic-release"
           echo "✅ Users running 'npx bmad-method install' will now get version ${{ steps.version.outputs.new_version }}"
```
**.github/workflows/release.yaml** (vendored, 1 change)
```diff
@@ -3,6 +3,7 @@ name: Release
   push:
     branches:
       - main
+      - stable
   workflow_dispatch:
     inputs:
       version_type:
```
**.gitignore** (vendored, 2 changes)
```diff
@@ -43,4 +43,4 @@ CLAUDE.md
 test-project-install/*
 sample-project/*
 flattened-codebase.xml
-*.stats.md
```
The next hunks (file name not captured in this view) extend the semantic-release configuration:

```diff
@@ -4,6 +4,10 @@
       "name": "main",
       "prerelease": "beta",
       "channel": "beta"
+    },
+    {
+      "name": "stable",
+      "channel": "latest"
     }
   ],
   "plugins": [
@@ -12,7 +16,8 @@
     [
       "@semantic-release/changelog",
       {
-        "changelogFile": "CHANGELOG.md"
+        "changelogFile": "CHANGELOG.md",
+        "changelogTitle": ""
       }
     ],
     "@semantic-release/npm",
```
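Together with the `- stable` trigger added to release.yaml above, this gives semantic-release two release branches: pushes to `main` produce `X.Y.Z-beta.N` prereleases on the `beta` dist-tag, while pushes to `stable` produce plain `X.Y.Z` releases on `latest`. A sketch of the resulting branch-to-channel mapping (the exact config file name is an assumption; only the hunks above are from the compare):

```js
// Sketch: the effective semantic-release branch → npm dist-tag mapping.
const branches = [
  { name: 'main', prerelease: 'beta', channel: 'beta' }, // e.g. 4.2.0-beta.3 → npm tag "beta"
  { name: 'stable', channel: 'latest' },                 // e.g. 4.2.0        → npm tag "latest"
];

// Consumers then pick a channel via the dist-tag:
//   npm install bmad-method        → latest (stable branch)
//   npm install bmad-method@beta   → beta   (main branch)
module.exports = { branches };
```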
The remaining hunks in this group touch the flattener CLI (`program`):

```diff
@@ -125,6 +125,9 @@ program
       // Ensure output directory exists
       await fs.ensureDir(path.dirname(outputPath));

+      console.log(`Flattening codebase from: ${inputDir}`);
+      console.log(`Output file: ${outputPath}`);
+
       try {
         // Verify input directory exists
         if (!(await fs.pathExists(inputDir))) {
```
```diff
@@ -155,6 +158,10 @@ program
         if (aggregatedContent.errors.length > 0) {
           console.log(`Errors: ${aggregatedContent.errors.length}`);
         }
+        console.log(`Text files: ${aggregatedContent.textFiles.length}`);
+        if (aggregatedContent.binaryFiles.length > 0) {
+          console.log(`Binary files: ${aggregatedContent.binaryFiles.length}`);
+        }

         // Generate XML output using streaming
         const xmlSpinner = ora('🔧 Generating XML output...').start();
```
```diff
@@ -163,7 +170,7 @@ program

         // Calculate and display statistics
         const outputStats = await fs.stat(outputPath);
-        const stats = await calculateStatistics(aggregatedContent, outputStats.size, inputDir);
+        const stats = calculateStatistics(aggregatedContent, outputStats.size);

         // Display completion summary
         console.log('\n📊 Completion Summary:');
```
```diff
@@ -176,389 +183,8 @@ program
         console.log(`📝 Total lines of code: ${stats.totalLines.toLocaleString()}`);
         console.log(`🔢 Estimated tokens: ${stats.estimatedTokens}`);
         console.log(
-          `📊 File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors\n`,
+          `📊 File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors`,
         );
```

The remainder of the hunk deletes, in one block, the interactive detailed-stats output and the `.stats.md` report writer:

```js
// Ask user if they want detailed stats + markdown report
const generateDetailed = await promptYesNo(
  'Generate detailed stats (console + markdown) now?',
  true,
);

if (generateDetailed) {
  // Additional detailed stats
  console.log('\n📈 Size Percentiles:');
  console.log(
    `  Avg: ${Math.round(stats.avgFileSize).toLocaleString()} B, Median: ${Math.round(
      stats.medianFileSize,
    ).toLocaleString()} B, p90: ${stats.p90.toLocaleString()} B, p95: ${stats.p95.toLocaleString()} B, p99: ${stats.p99.toLocaleString()} B`,
  );

  if (Array.isArray(stats.histogram) && stats.histogram.length > 0) {
    console.log('\n🧮 Size Histogram:');
    for (const b of stats.histogram.slice(0, 2)) {
      console.log(`  ${b.label}: ${b.count} files, ${b.bytes.toLocaleString()} bytes`);
    }
    if (stats.histogram.length > 2) {
      console.log(`  … and ${stats.histogram.length - 2} more buckets`);
    }
  }

  if (Array.isArray(stats.byExtension) && stats.byExtension.length > 0) {
    const topExt = stats.byExtension.slice(0, 2);
    console.log('\n📦 Top Extensions:');
    for (const e of topExt) {
      const pct = stats.totalBytes ? (e.bytes / stats.totalBytes) * 100 : 0;
      console.log(
        `  ${e.ext}: ${e.count} files, ${e.bytes.toLocaleString()} bytes (${pct.toFixed(2)}%)`,
      );
    }
    if (stats.byExtension.length > 2) {
      console.log(`  … and ${stats.byExtension.length - 2} more extensions`);
    }
  }

  if (Array.isArray(stats.byDirectory) && stats.byDirectory.length > 0) {
    const topDir = stats.byDirectory.slice(0, 2);
    console.log('\n📂 Top Directories:');
    for (const d of topDir) {
      const pct = stats.totalBytes ? (d.bytes / stats.totalBytes) * 100 : 0;
      console.log(
        `  ${d.dir}: ${d.count} files, ${d.bytes.toLocaleString()} bytes (${pct.toFixed(2)}%)`,
      );
    }
    if (stats.byDirectory.length > 2) {
      console.log(`  … and ${stats.byDirectory.length - 2} more directories`);
    }
  }

  if (Array.isArray(stats.depthDistribution) && stats.depthDistribution.length > 0) {
    console.log('\n🌳 Depth Distribution:');
    const dd = stats.depthDistribution.slice(0, 2);
    let line = '  ' + dd.map((d) => `${d.depth}:${d.count}`).join('  ');
    if (stats.depthDistribution.length > 2) {
      line += `  … +${stats.depthDistribution.length - 2} more`;
    }
    console.log(line);
  }

  if (Array.isArray(stats.longestPaths) && stats.longestPaths.length > 0) {
    console.log('\n🧵 Longest Paths:');
    for (const p of stats.longestPaths.slice(0, 2)) {
      console.log(`  ${p.path} (${p.length} chars, ${p.size.toLocaleString()} bytes)`);
    }
    if (stats.longestPaths.length > 2) {
      console.log(`  … and ${stats.longestPaths.length - 2} more paths`);
    }
  }

  if (stats.temporal) {
    console.log('\n⏱️ Temporal:');
    if (stats.temporal.oldest) {
      console.log(`  Oldest: ${stats.temporal.oldest.path} (${stats.temporal.oldest.mtime})`);
    }
    if (stats.temporal.newest) {
      console.log(`  Newest: ${stats.temporal.newest.path} (${stats.temporal.newest.mtime})`);
    }
    if (Array.isArray(stats.temporal.ageBuckets)) {
      console.log('  Age buckets:');
      for (const b of stats.temporal.ageBuckets.slice(0, 2)) {
        console.log(`    ${b.label}: ${b.count} files, ${b.bytes.toLocaleString()} bytes`);
      }
      if (stats.temporal.ageBuckets.length > 2) {
        console.log(`    … and ${stats.temporal.ageBuckets.length - 2} more buckets`);
      }
    }
  }

  if (stats.quality) {
    console.log('\n✅ Quality Signals:');
    console.log(`  Zero-byte files: ${stats.quality.zeroByteFiles}`);
    console.log(`  Empty text files: ${stats.quality.emptyTextFiles}`);
    console.log(`  Hidden files: ${stats.quality.hiddenFiles}`);
    console.log(`  Symlinks: ${stats.quality.symlinks}`);
    console.log(
      `  Large files (>= ${(stats.quality.largeThreshold / (1024 * 1024)).toFixed(0)} MB): ${stats.quality.largeFilesCount}`,
    );
    console.log(
      `  Suspiciously large files (>= 100 MB): ${stats.quality.suspiciousLargeFilesCount}`,
    );
  }

  if (Array.isArray(stats.duplicateCandidates) && stats.duplicateCandidates.length > 0) {
    console.log('\n🧬 Duplicate Candidates:');
    for (const d of stats.duplicateCandidates.slice(0, 2)) {
      console.log(`  ${d.reason}: ${d.count} files @ ${d.size.toLocaleString()} bytes`);
    }
    if (stats.duplicateCandidates.length > 2) {
      console.log(`  … and ${stats.duplicateCandidates.length - 2} more groups`);
    }
  }

  if (typeof stats.compressibilityRatio === 'number') {
    console.log(
      `\n🗜️ Compressibility ratio (sampled): ${(stats.compressibilityRatio * 100).toFixed(2)}%`,
    );
  }

  if (stats.git && stats.git.isRepo) {
    console.log('\n🔧 Git:');
    console.log(
      `  Tracked: ${stats.git.trackedCount} files, ${stats.git.trackedBytes.toLocaleString()} bytes`,
    );
    console.log(
      `  Untracked: ${stats.git.untrackedCount} files, ${stats.git.untrackedBytes.toLocaleString()} bytes`,
    );
    if (Array.isArray(stats.git.lfsCandidates) && stats.git.lfsCandidates.length > 0) {
      console.log('  LFS candidates (top 2):');
      for (const f of stats.git.lfsCandidates.slice(0, 2)) {
        console.log(`    ${f.path} (${f.size.toLocaleString()} bytes)`);
      }
      if (stats.git.lfsCandidates.length > 2) {
        console.log(`    … and ${stats.git.lfsCandidates.length - 2} more`);
      }
    }
  }

  if (Array.isArray(stats.largestFiles) && stats.largestFiles.length > 0) {
    console.log('\n📚 Largest Files (top 2):');
    for (const f of stats.largestFiles.slice(0, 2)) {
      // Show LOC for text files when available; omit ext and mtime
      let locStr = '';
      if (!f.isBinary && Array.isArray(aggregatedContent?.textFiles)) {
        const tf = aggregatedContent.textFiles.find((t) => t.path === f.path);
        if (tf && typeof tf.lines === 'number') {
          locStr = `, LOC: ${tf.lines.toLocaleString()}`;
        }
      }
      console.log(`  ${f.path} – ${f.sizeFormatted} (${f.percentOfTotal.toFixed(2)}%)${locStr}`);
    }
    if (stats.largestFiles.length > 2) {
      console.log(`  … and ${stats.largestFiles.length - 2} more files`);
    }
  }

  // Write a comprehensive markdown report next to the XML
  {
    const mdPath = outputPath.endsWith('.xml')
      ? outputPath.replace(/\.xml$/i, '.stats.md')
      : outputPath + '.stats.md';
    try {
      const pct = (num, den) => (den ? (num / den) * 100 : 0);
      const md = [];
      md.push(
        `# 🧾 Flatten Stats for ${path.basename(outputPath)}`,
        '',
        '## 📊 Summary',
        `- Total source size: ${stats.totalSize}`,
        `- Generated XML size: ${stats.xmlSize}`,
        `- Total lines of code: ${stats.totalLines.toLocaleString()}`,
        `- Estimated tokens: ${stats.estimatedTokens}`,
        `- File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors`,
        '',
        '## 📈 Size Percentiles',
        `Avg: ${Math.round(stats.avgFileSize).toLocaleString()} B, Median: ${Math.round(
          stats.medianFileSize,
        ).toLocaleString()} B, p90: ${stats.p90.toLocaleString()} B, p95: ${stats.p95.toLocaleString()} B, p99: ${stats.p99.toLocaleString()} B`,
        '',
      );

      // Histogram
      if (Array.isArray(stats.histogram) && stats.histogram.length > 0) {
        md.push('## 🧮 Size Histogram', '| Bucket | Files | Bytes |', '| --- | ---: | ---: |');
        for (const b of stats.histogram) {
          md.push(`| ${b.label} | ${b.count} | ${b.bytes.toLocaleString()} |`);
        }
        md.push('');
      }

      // Top Extensions
      if (Array.isArray(stats.byExtension) && stats.byExtension.length > 0) {
        md.push(
          '## 📦 Top Extensions by Bytes (Top 20)',
          '| Ext | Files | Bytes | % of total |',
          '| --- | ---: | ---: | ---: |',
        );
        for (const e of stats.byExtension.slice(0, 20)) {
          const p = pct(e.bytes, stats.totalBytes);
          md.push(`| ${e.ext} | ${e.count} | ${e.bytes.toLocaleString()} | ${p.toFixed(2)}% |`);
        }
        md.push('');
      }

      // Top Directories
      if (Array.isArray(stats.byDirectory) && stats.byDirectory.length > 0) {
        md.push(
          '## 📂 Top Directories by Bytes (Top 20)',
          '| Directory | Files | Bytes | % of total |',
          '| --- | ---: | ---: | ---: |',
        );
        for (const d of stats.byDirectory.slice(0, 20)) {
          const p = pct(d.bytes, stats.totalBytes);
          md.push(`| ${d.dir} | ${d.count} | ${d.bytes.toLocaleString()} | ${p.toFixed(2)}% |`);
        }
        md.push('');
      }

      // Depth distribution
      if (Array.isArray(stats.depthDistribution) && stats.depthDistribution.length > 0) {
        md.push('## 🌳 Depth Distribution', '| Depth | Count |', '| ---: | ---: |');
        for (const d of stats.depthDistribution) {
          md.push(`| ${d.depth} | ${d.count} |`);
        }
        md.push('');
      }

      // Longest paths
      if (Array.isArray(stats.longestPaths) && stats.longestPaths.length > 0) {
        md.push(
          '## 🧵 Longest Paths (Top 25)',
          '| Path | Length | Bytes |',
          '| --- | ---: | ---: |',
        );
        for (const pth of stats.longestPaths) {
          md.push(`| ${pth.path} | ${pth.length} | ${pth.size.toLocaleString()} |`);
        }
        md.push('');
      }

      // Temporal
      if (stats.temporal) {
        md.push('## ⏱️ Temporal');
        if (stats.temporal.oldest) {
          md.push(`- Oldest: ${stats.temporal.oldest.path} (${stats.temporal.oldest.mtime})`);
        }
        if (stats.temporal.newest) {
          md.push(`- Newest: ${stats.temporal.newest.path} (${stats.temporal.newest.mtime})`);
        }
        if (Array.isArray(stats.temporal.ageBuckets)) {
          md.push('', '| Age | Files | Bytes |', '| --- | ---: | ---: |');
          for (const b of stats.temporal.ageBuckets) {
            md.push(`| ${b.label} | ${b.count} | ${b.bytes.toLocaleString()} |`);
          }
        }
        md.push('');
      }

      // Quality signals
      if (stats.quality) {
        md.push(
          '## ✅ Quality Signals',
          `- Zero-byte files: ${stats.quality.zeroByteFiles}`,
          `- Empty text files: ${stats.quality.emptyTextFiles}`,
          `- Hidden files: ${stats.quality.hiddenFiles}`,
          `- Symlinks: ${stats.quality.symlinks}`,
          `- Large files (>= ${(stats.quality.largeThreshold / (1024 * 1024)).toFixed(0)} MB): ${stats.quality.largeFilesCount}`,
          `- Suspiciously large files (>= 100 MB): ${stats.quality.suspiciousLargeFilesCount}`,
          '',
        );
      }

      // Duplicates
      if (Array.isArray(stats.duplicateCandidates) && stats.duplicateCandidates.length > 0) {
        md.push(
          '## 🧬 Duplicate Candidates',
          '| Reason | Files | Size (bytes) |',
          '| --- | ---: | ---: |',
        );
        for (const d of stats.duplicateCandidates) {
          md.push(`| ${d.reason} | ${d.count} | ${d.size.toLocaleString()} |`);
        }
        md.push('', '### 🧬 Duplicate Groups Details');
        let dupIndex = 1;
        for (const d of stats.duplicateCandidates) {
          md.push(
            `#### Group ${dupIndex}: ${d.count} files @ ${d.size.toLocaleString()} bytes (${d.reason})`,
          );
          if (Array.isArray(d.files) && d.files.length > 0) {
            for (const fp of d.files) {
              md.push(`- ${fp}`);
            }
          } else {
            md.push('- (file list unavailable)');
          }
          md.push('');
          dupIndex++;
        }
        md.push('');
      }

      // Compressibility
      if (typeof stats.compressibilityRatio === 'number') {
        md.push(
          '## 🗜️ Compressibility',
          `Sampled compressibility ratio: ${(stats.compressibilityRatio * 100).toFixed(2)}%`,
          '',
        );
      }

      // Git
      if (stats.git && stats.git.isRepo) {
        md.push(
          '## 🔧 Git',
          `- Tracked: ${stats.git.trackedCount} files, ${stats.git.trackedBytes.toLocaleString()} bytes`,
          `- Untracked: ${stats.git.untrackedCount} files, ${stats.git.untrackedBytes.toLocaleString()} bytes`,
        );
        if (Array.isArray(stats.git.lfsCandidates) && stats.git.lfsCandidates.length > 0) {
          md.push('', '### 📦 LFS Candidates (Top 20)', '| Path | Bytes |', '| --- | ---: |');
          for (const f of stats.git.lfsCandidates.slice(0, 20)) {
            md.push(`| ${f.path} | ${f.size.toLocaleString()} |`);
          }
        }
        md.push('');
      }

      // Largest Files
      if (Array.isArray(stats.largestFiles) && stats.largestFiles.length > 0) {
        md.push(
          '## 📚 Largest Files (Top 50)',
          '| Path | Size | % of total | LOC |',
          '| --- | ---: | ---: | ---: |',
        );
        for (const f of stats.largestFiles) {
          let loc = '';
          if (!f.isBinary && Array.isArray(aggregatedContent?.textFiles)) {
            const tf = aggregatedContent.textFiles.find((t) => t.path === f.path);
            if (tf && typeof tf.lines === 'number') {
              loc = tf.lines.toLocaleString();
            }
          }
          md.push(`| ${f.path} | ${f.sizeFormatted} | ${f.percentOfTotal.toFixed(2)}% | ${loc} |`);
        }
        md.push('');
      }

      await fs.writeFile(mdPath, md.join('\n'));
      console.log(`\n🧾 Detailed stats report written to: ${mdPath}`);
    } catch (error) {
      console.warn(`⚠️ Failed to write stats markdown: ${error.message}`);
    }
  }
}
```

The hunk closes with unchanged error-handling context:

```diff
       } catch (error) {
         console.error('❌ Critical error:', error.message);
         console.error('An unexpected error occurred.');
```
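One idiom recurs throughout the removed console output: print the first two entries of a list, then summarize the remainder as "… and N more". Isolated as a small sketch (the helper name and sample data are illustrative, not from the source):

```js
// Sketch of the "top 2 + remainder" preview idiom used by the removed output.
function printPreview(label, items, render, shown = 2) {
  console.log(`\n${label}:`);
  for (const item of items.slice(0, shown)) console.log(`  ${render(item)}`);
  if (items.length > shown) console.log(`  … and ${items.length - shown} more`);
}

printPreview(
  '🧮 Size Histogram',
  [
    { label: '0–1KB', count: 12, bytes: 6144 },
    { label: '1–10KB', count: 7, bytes: 35840 },
    { label: '10–100KB', count: 3, bytes: 92160 },
  ],
  (b) => `${b.label}: ${b.count} files, ${b.bytes.toLocaleString()} bytes`,
);
// 🧮 Size Histogram:
//   0–1KB: 12 files, 6,144 bytes
//   1–10KB: 7 files, 35,840 bytes
//   … and 1 more
```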
`projectRoot.js` is rewritten, replacing the weighted multi-marker scanner with a simple fixed marker list:

```diff
@@ -1,203 +1,40 @@
 const fs = require('fs-extra');
 const path = require('node:path');

-// Deno/Node compatibility: explicitly import process
-const process = require('node:process');
-const { execFile } = require('node:child_process');
-const { promisify } = require('node:util');
-const execFileAsync = promisify(execFile);
-
-// Simple memoization across calls (keyed by realpath of startDir)
-const _cache = new Map();
-
-async function _tryRun(cmd, args, cwd, timeoutMs = 500) {
-  try {
-    const { stdout } = await execFileAsync(cmd, args, {
-      cwd,
-      timeout: timeoutMs,
-      windowsHide: true,
-      maxBuffer: 1024 * 1024,
-    });
-    const out = String(stdout || '').trim();
-    return out || null;
-  } catch {
-    return null;
-  }
-}
-
-async function _detectVcsTopLevel(startDir) {
-  // Run common VCS root queries in parallel; ignore failures
-  const gitP = _tryRun('git', ['rev-parse', '--show-toplevel'], startDir);
-  const hgP = _tryRun('hg', ['root'], startDir);
-  const svnP = (async () => {
-    const show = await _tryRun('svn', ['info', '--show-item', 'wc-root'], startDir);
-    if (show) return show;
-    const info = await _tryRun('svn', ['info'], startDir);
-    if (info) {
-      const line = info
-        .split(/\r?\n/)
-        .find((l) => l.toLowerCase().startsWith('working copy root path:'));
-      if (line) return line.split(':').slice(1).join(':').trim();
-    }
-    return null;
-  })();
-  const [git, hg, svn] = await Promise.all([gitP, hgP, svnP]);
-  return git || hg || svn || null;
-}
-
 /**
- * Attempt to find the project root by walking up from startDir.
- * Uses a robust, prioritized set of ecosystem markers (VCS > workspaces/monorepo > lock/build > language config).
- * Also recognizes package.json with "workspaces" as a workspace root.
- * You can augment markers via env PROJECT_ROOT_MARKERS as a comma-separated list of file/dir names.
+ * Attempt to find the project root by walking up from startDir
+ * Looks for common project markers like .git, package.json, pyproject.toml, etc.
  * @param {string} startDir
  * @returns {Promise<string|null>} project root directory or null if not found
  */
 async function findProjectRoot(startDir) {
   try {
-    // Resolve symlinks for robustness (e.g., when invoked from a symlinked path)
     let dir = path.resolve(startDir);
-    try {
-      dir = await fs.realpath(dir);
-    } catch {
-      // ignore if realpath fails; continue with resolved path
-    }
-    const startKey = dir; // preserve starting point for caching
-    if (_cache.has(startKey)) return _cache.get(startKey);
-    const fsRoot = path.parse(dir).root;
-
-    // Helper to safely check for existence
-    const exists = (p) => fs.pathExists(p);
-
-    // Build checks: an array of { makePath: (dir) => string, weight }
-    const checks = [];
-
-    const add = (rel, weight) => {
-      const makePath = (d) => (Array.isArray(rel) ? path.join(d, ...rel) : path.join(d, rel));
-      checks.push({ makePath, weight });
-    };
-
-    // Highest priority: explicit sentinel markers
-    add('.project-root', 110);
-    add('.workspace-root', 110);
-    add('.repo-root', 110);
-
-    // Highest priority: VCS roots
-    add('.git', 100);
-    add('.hg', 95);
-    add('.svn', 95);
-
-    // Monorepo/workspace indicators
-    add('pnpm-workspace.yaml', 90);
-    add('lerna.json', 90);
-    add('turbo.json', 90);
-    add('nx.json', 90);
-    add('rush.json', 90);
-    add('go.work', 90);
-    add('WORKSPACE', 90);
-    add('WORKSPACE.bazel', 90);
-    add('MODULE.bazel', 90);
-    add('pants.toml', 90);
-
-    // Lockfiles and package-manager/top-level locks
-    add('yarn.lock', 85);
-    add('pnpm-lock.yaml', 85);
-    add('package-lock.json', 85);
-    add('bun.lockb', 85);
-    add('Cargo.lock', 85);
-    add('composer.lock', 85);
-    add('poetry.lock', 85);
-    add('Pipfile.lock', 85);
-    add('Gemfile.lock', 85);
-
-    // Build-system root indicators
-    add('settings.gradle', 80);
-    add('settings.gradle.kts', 80);
-    add('gradlew', 80);
-    add('pom.xml', 80);
-    add('build.sbt', 80);
-    add(['project', 'build.properties'], 80);
-
-    // Language/project config markers
-    add('deno.json', 75);
-    add('deno.jsonc', 75);
-    add('pyproject.toml', 75);
-    add('Pipfile', 75);
-    add('requirements.txt', 75);
-    add('go.mod', 75);
-    add('Cargo.toml', 75);
-    add('composer.json', 75);
-    add('mix.exs', 75);
-    add('Gemfile', 75);
-    add('CMakeLists.txt', 75);
-    add('stack.yaml', 75);
-    add('cabal.project', 75);
-    add('rebar.config', 75);
-    add('pubspec.yaml', 75);
-    add('flake.nix', 75);
-    add('shell.nix', 75);
-    add('default.nix', 75);
-    add('.tool-versions', 75);
-    add('package.json', 74); // generic Node project (lower than lockfiles/workspaces)
-
-    // Changesets
-    add(['.changeset', 'config.json'], 70);
-    add('.changeset', 70);
-
-    // Custom markers via env (comma-separated names)
-    if (process.env.PROJECT_ROOT_MARKERS) {
-      for (const name of process.env.PROJECT_ROOT_MARKERS.split(',')
-        .map((s) => s.trim())
-        .filter(Boolean)) {
-        add(name, 72);
-      }
-    }
-
-    /** Check for package.json with "workspaces" */
-    const hasWorkspacePackageJson = async (d) => {
-      const pkgPath = path.join(d, 'package.json');
-      if (!(await exists(pkgPath))) return false;
-      try {
-        const raw = await fs.readFile(pkgPath, 'utf8');
-        const pkg = JSON.parse(raw);
-        return Boolean(pkg && pkg.workspaces);
-      } catch {
-        return false;
-      }
-    };
-
-    let best = null; // { dir, weight }
-
-    // Try to detect VCS toplevel once up-front; treat as authoritative slightly above .git marker
-    const vcsTop = await _detectVcsTopLevel(dir);
-    if (vcsTop) {
-      best = { dir: vcsTop, weight: 101 };
-    }
+    const root = path.parse(dir).root;
+    const markers = [
+      '.git',
+      'package.json',
+      'pnpm-workspace.yaml',
+      'yarn.lock',
+      'pnpm-lock.yaml',
+      'pyproject.toml',
+      'requirements.txt',
+      'go.mod',
+      'Cargo.toml',
+      'composer.json',
+      '.hg',
+      '.svn',
+    ];

     while (true) {
-      // Special check: package.json with "workspaces"
-      if ((await hasWorkspacePackageJson(dir)) && (!best || 90 >= best.weight))
-        best = { dir, weight: 90 };
-
-      // Evaluate all other checks in parallel
-      const results = await Promise.all(
-        checks.map(async (c) => ({ c, ok: await exists(c.makePath(dir)) })),
-      );
-
-      for (const { c, ok } of results) {
-        if (!ok) continue;
-        if (!best || c.weight >= best.weight) {
-          best = { dir, weight: c.weight };
-        }
-      }
-
-      if (dir === fsRoot) break;
+      const exists = await Promise.all(markers.map((m) => fs.pathExists(path.join(dir, m))));
+      if (exists.some(Boolean)) {
+        return dir;
+      }
+      if (dir === root) break;
       dir = path.dirname(dir);
     }
-
-    const out = best ? best.dir : null;
-    _cache.set(startKey, out);
-    return out;
+    return null;
   } catch {
     return null;
   }
 }
```
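Both the old and new `findProjectRoot` share one contract: walk upward from a start directory and resolve to a root path or `null`. A usage sketch (the relative require path is an assumption):

```js
const process = require('node:process');
const { findProjectRoot } = require('./projectRoot.js'); // path assumed

(async () => {
  // Walks from the current working directory toward the filesystem root.
  const root = await findProjectRoot(process.cwd());
  console.log(root ?? 'no project markers found up to the filesystem root');
})();
```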
`stats.helpers.js` (required by the old statistics module) is deleted outright (`@@ -1,395 +0,0 @@`); its former contents:

```js
'use strict';

const fs = require('node:fs/promises');
const path = require('node:path');
const zlib = require('node:zlib');
const { Buffer } = require('node:buffer');
const crypto = require('node:crypto');
const cp = require('node:child_process');

const KB = 1024;
const MB = 1024 * KB;

const formatSize = (bytes) => {
  if (bytes < 1024) return `${bytes} B`;
  if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
  if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
  return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
};

const percentile = (sorted, p) => {
  if (sorted.length === 0) return 0;
  const idx = Math.min(sorted.length - 1, Math.max(0, Math.ceil((p / 100) * sorted.length) - 1));
  return sorted[idx];
};

async function processWithLimit(items, fn, concurrency = 64) {
  for (let i = 0; i < items.length; i += concurrency) {
    await Promise.all(items.slice(i, i + concurrency).map(fn));
  }
}

async function enrichAllFiles(textFiles, binaryFiles) {
  /** @type {Array<{ path: string; absolutePath: string; size: number; lines?: number; isBinary: boolean; ext: string; dir: string; depth: number; hidden: boolean; mtimeMs: number; isSymlink: boolean; }>} */
  const allFiles = [];

  async function enrich(file, isBinary) {
    const ext = (path.extname(file.path) || '').toLowerCase();
    const dir = path.dirname(file.path) || '.';
    const depth = file.path.split(path.sep).filter(Boolean).length;
    const hidden = file.path.split(path.sep).some((seg) => seg.startsWith('.'));
    let mtimeMs = 0;
    let isSymlink = false;
    try {
      const lst = await fs.lstat(file.absolutePath);
      mtimeMs = lst.mtimeMs;
      isSymlink = lst.isSymbolicLink();
    } catch {
      /* ignore lstat errors during enrichment */
    }
    allFiles.push({
      path: file.path,
      absolutePath: file.absolutePath,
      size: file.size || 0,
      lines: file.lines,
      isBinary,
      ext,
      dir,
      depth,
      hidden,
      mtimeMs,
      isSymlink,
    });
  }

  await processWithLimit(textFiles, (f) => enrich(f, false));
  await processWithLimit(binaryFiles, (f) => enrich(f, true));
  return allFiles;
}

function buildHistogram(allFiles) {
  const buckets = [
    [1 * KB, '0–1KB'],
    [10 * KB, '1–10KB'],
    [100 * KB, '10–100KB'],
    [1 * MB, '100KB–1MB'],
    [10 * MB, '1–10MB'],
    [100 * MB, '10–100MB'],
    [Infinity, '>=100MB'],
  ];
  const histogram = buckets.map(([_, label]) => ({ label, count: 0, bytes: 0 }));
  for (const f of allFiles) {
    for (const [i, bucket] of buckets.entries()) {
      if (f.size < bucket[0]) {
        histogram[i].count++;
        histogram[i].bytes += f.size;
        break;
      }
    }
  }
  return histogram;
}

function aggregateByExtension(allFiles) {
  const byExtension = new Map();
  for (const f of allFiles) {
    const key = f.ext || '<none>';
    const v = byExtension.get(key) || { ext: key, count: 0, bytes: 0 };
    v.count++;
    v.bytes += f.size;
    byExtension.set(key, v);
  }
  return [...byExtension.values()].sort((a, b) => b.bytes - a.bytes);
}

function aggregateByDirectory(allFiles) {
  const byDirectory = new Map();
  function addDirBytes(dir, bytes) {
    const v = byDirectory.get(dir) || { dir, count: 0, bytes: 0 };
    v.count++;
    v.bytes += bytes;
    byDirectory.set(dir, v);
  }
  for (const f of allFiles) {
    const parts = f.dir === '.' ? [] : f.dir.split(path.sep);
    let acc = '';
    for (let i = 0; i < parts.length; i++) {
      acc = i === 0 ? parts[0] : acc + path.sep + parts[i];
      addDirBytes(acc, f.size);
    }
    if (parts.length === 0) addDirBytes('.', f.size);
  }
  return [...byDirectory.values()].sort((a, b) => b.bytes - a.bytes);
}

function computeDepthAndLongest(allFiles) {
  const depthDistribution = new Map();
  for (const f of allFiles) {
    depthDistribution.set(f.depth, (depthDistribution.get(f.depth) || 0) + 1);
  }
  const longestPaths = [...allFiles]
    .sort((a, b) => b.path.length - a.path.length)
    .slice(0, 25)
    .map((f) => ({ path: f.path, length: f.path.length, size: f.size }));
  const depthDist = [...depthDistribution.entries()]
    .sort((a, b) => a[0] - b[0])
    .map(([depth, count]) => ({ depth, count }));
  return { depthDist, longestPaths };
}

function computeTemporal(allFiles, nowMs) {
  let oldest = null,
    newest = null;
  const ageBuckets = [
    { label: '> 1 year', minDays: 365, maxDays: Infinity, count: 0, bytes: 0 },
    { label: '6–12 months', minDays: 180, maxDays: 365, count: 0, bytes: 0 },
    { label: '1–6 months', minDays: 30, maxDays: 180, count: 0, bytes: 0 },
    { label: '7–30 days', minDays: 7, maxDays: 30, count: 0, bytes: 0 },
    { label: '1–7 days', minDays: 1, maxDays: 7, count: 0, bytes: 0 },
    { label: '< 1 day', minDays: 0, maxDays: 1, count: 0, bytes: 0 },
  ];
  for (const f of allFiles) {
    const ageDays = Math.max(0, (nowMs - (f.mtimeMs || nowMs)) / (24 * 60 * 60 * 1000));
    for (const b of ageBuckets) {
      if (ageDays >= b.minDays && ageDays < b.maxDays) {
        b.count++;
        b.bytes += f.size;
        break;
      }
    }
    if (!oldest || f.mtimeMs < oldest.mtimeMs) oldest = f;
    if (!newest || f.mtimeMs > newest.mtimeMs) newest = f;
  }
  return {
    oldest: oldest
      ? { path: oldest.path, mtime: oldest.mtimeMs ? new Date(oldest.mtimeMs).toISOString() : null }
      : null,
    newest: newest
      ? { path: newest.path, mtime: newest.mtimeMs ? new Date(newest.mtimeMs).toISOString() : null }
      : null,
    ageBuckets,
  };
}

function computeQuality(allFiles, textFiles) {
  const zeroByteFiles = allFiles.filter((f) => f.size === 0).length;
  const emptyTextFiles = textFiles.filter((f) => (f.size || 0) === 0 || (f.lines || 0) === 0).length;
  const hiddenFiles = allFiles.filter((f) => f.hidden).length;
  const symlinks = allFiles.filter((f) => f.isSymlink).length;
  const largeThreshold = 50 * MB;
  const suspiciousThreshold = 100 * MB;
  const largeFilesCount = allFiles.filter((f) => f.size >= largeThreshold).length;
  const suspiciousLargeFilesCount = allFiles.filter((f) => f.size >= suspiciousThreshold).length;
  return {
    zeroByteFiles,
    emptyTextFiles,
    hiddenFiles,
    symlinks,
    largeFilesCount,
    suspiciousLargeFilesCount,
    largeThreshold,
  };
}

function computeDuplicates(allFiles, textFiles) {
  const duplicatesBySize = new Map();
  for (const f of allFiles) {
    const key = String(f.size);
    const arr = duplicatesBySize.get(key) || [];
    arr.push(f);
    duplicatesBySize.set(key, arr);
  }
  const duplicateCandidates = [];
  for (const [sizeKey, arr] of duplicatesBySize.entries()) {
    if (arr.length < 2) continue;
    const textGroup = arr.filter((f) => !f.isBinary);
    const otherGroup = arr.filter((f) => f.isBinary);
    const contentHashGroups = new Map();
    for (const tf of textGroup) {
      try {
        const src = textFiles.find((x) => x.absolutePath === tf.absolutePath);
        const content = src ? src.content : '';
        const h = crypto.createHash('sha1').update(content).digest('hex');
        const g = contentHashGroups.get(h) || [];
        g.push(tf);
        contentHashGroups.set(h, g);
      } catch {
        /* ignore hashing errors for duplicate detection */
      }
    }
    for (const [_h, g] of contentHashGroups.entries()) {
      if (g.length > 1)
        duplicateCandidates.push({
          reason: 'same-size+text-hash',
          size: Number(sizeKey),
          count: g.length,
          files: g.map((f) => f.path),
        });
    }
    if (otherGroup.length > 1) {
      duplicateCandidates.push({
        reason: 'same-size',
        size: Number(sizeKey),
        count: otherGroup.length,
        files: otherGroup.map((f) => f.path),
      });
    }
  }
  return duplicateCandidates;
}

function estimateCompressibility(textFiles) {
  let compSampleBytes = 0;
  let compCompressedBytes = 0;
  for (const tf of textFiles) {
    try {
      const sampleLen = Math.min(256 * 1024, tf.size || 0);
      if (sampleLen <= 0) continue;
      const sample = tf.content.slice(0, sampleLen);
      const gz = zlib.gzipSync(Buffer.from(sample, 'utf8'));
      compSampleBytes += sampleLen;
      compCompressedBytes += gz.length;
    } catch {
      /* ignore compression errors during sampling */
    }
  }
  return compSampleBytes > 0 ? compCompressedBytes / compSampleBytes : null;
}

function computeGitInfo(allFiles, rootDir, largeThreshold) {
  const info = {
    isRepo: false,
    trackedCount: 0,
    trackedBytes: 0,
    untrackedCount: 0,
    untrackedBytes: 0,
    lfsCandidates: [],
  };
  try {
    if (!rootDir) return info;
    const top = cp
      .execFileSync('git', ['rev-parse', '--show-toplevel'], {
        cwd: rootDir,
        stdio: ['ignore', 'pipe', 'ignore'],
      })
      .toString()
      .trim();
    if (!top) return info;
    info.isRepo = true;
    const out = cp.execFileSync('git', ['ls-files', '-z'], {
      cwd: rootDir,
      stdio: ['ignore', 'pipe', 'ignore'],
    });
    const tracked = new Set(out.toString().split('\0').filter(Boolean));
    let trackedBytes = 0,
      trackedCount = 0,
      untrackedBytes = 0,
      untrackedCount = 0;
    const lfsCandidates = [];
    for (const f of allFiles) {
      const isTracked = tracked.has(f.path);
      if (isTracked) {
        trackedCount++;
        trackedBytes += f.size;
        if (f.size >= largeThreshold) lfsCandidates.push({ path: f.path, size: f.size });
      } else {
        untrackedCount++;
        untrackedBytes += f.size;
      }
    }
    info.trackedCount = trackedCount;
    info.trackedBytes = trackedBytes;
    info.untrackedCount = untrackedCount;
    info.untrackedBytes = untrackedBytes;
    info.lfsCandidates = lfsCandidates.sort((a, b) => b.size - a.size).slice(0, 50);
  } catch {
    /* git not available or not a repo, ignore */
  }
  return info;
}

function computeLargestFiles(allFiles, totalBytes) {
  const toPct = (num, den) => (den === 0 ? 0 : (num / den) * 100);
  return [...allFiles]
    .sort((a, b) => b.size - a.size)
    .slice(0, 50)
    .map((f) => ({
      path: f.path,
      size: f.size,
      sizeFormatted: formatSize(f.size),
      percentOfTotal: toPct(f.size, totalBytes),
      ext: f.ext || '',
      isBinary: f.isBinary,
      mtime: f.mtimeMs ? new Date(f.mtimeMs).toISOString() : null,
    }));
}

function mdTable(rows, headers) {
  const header = `| ${headers.join(' | ')} |`;
  const sep = `| ${headers.map(() => '---').join(' | ')} |`;
  const body = rows.map((r) => `| ${r.join(' | ')} |`).join('\n');
  return `${header}\n${sep}\n${body}`;
}

function buildMarkdownReport(largestFiles, byExtensionArr, byDirectoryArr, totalBytes) {
  const toPct = (num, den) => (den === 0 ? 0 : (num / den) * 100);
  const md = [];
  md.push(
    '\n### Top Largest Files (Top 50)\n',
    mdTable(
      largestFiles.map((f) => [
        f.path,
        f.sizeFormatted,
        `${f.percentOfTotal.toFixed(2)}%`,
        f.ext || '',
        f.isBinary ? 'binary' : 'text',
      ]),
      ['Path', 'Size', '% of total', 'Ext', 'Type'],
    ),
    '\n\n### Top Extensions by Bytes (Top 20)\n',
  );
  const topExtRows = byExtensionArr
    .slice(0, 20)
    .map((e) => [e.ext, String(e.count), formatSize(e.bytes), `${toPct(e.bytes, totalBytes).toFixed(2)}%`]);
  md.push(
    mdTable(topExtRows, ['Ext', 'Count', 'Bytes', '% of total']),
    '\n\n### Top Directories by Bytes (Top 20)\n',
  );
  const topDirRows = byDirectoryArr
    .slice(0, 20)
    .map((d) => [d.dir, String(d.count), formatSize(d.bytes), `${toPct(d.bytes, totalBytes).toFixed(2)}%`]);
  md.push(mdTable(topDirRows, ['Directory', 'Files', 'Bytes', '% of total']));
  return md.join('\n');
}

module.exports = {
  KB,
  MB,
  formatSize,
  percentile,
  processWithLimit,
  enrichAllFiles,
  buildHistogram,
  aggregateByExtension,
  aggregateByDirectory,
  computeDepthAndLongest,
  computeTemporal,
  computeQuality,
  computeDuplicates,
  estimateCompressibility,
  computeGitInfo,
  computeLargestFiles,
  buildMarkdownReport,
};
```
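The deleted `percentile` helper is the nearest-rank method: index `ceil(p/100 · n) − 1` into the already-sorted sizes, clamped to the array bounds. A worked example of what the removed stats were reporting:

```js
// Nearest-rank percentile, exactly as in the deleted helper.
const percentile = (sorted, p) => {
  if (sorted.length === 0) return 0;
  const idx = Math.min(sorted.length - 1, Math.max(0, Math.ceil((p / 100) * sorted.length) - 1));
  return sorted[idx];
};

const sizes = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]; // bytes, pre-sorted
console.log(percentile(sizes, 50)); // 500  (ceil(0.50 * 10) = 5 → index 4)
console.log(percentile(sizes, 90)); // 900  (ceil(0.90 * 10) = 9 → index 8)
console.log(percentile(sizes, 99)); // 1000 (ceil(0.99 * 10) = 10 → index 9)
```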
The statistics module drops its helper dependency and shrinks to a basic summary:

```diff
@@ -1,79 +1,29 @@
-const H = require('./stats.helpers.js');
-
-async function calculateStatistics(aggregatedContent, xmlFileSize, rootDir) {
+function calculateStatistics(aggregatedContent, xmlFileSize) {
   const { textFiles, binaryFiles, errors } = aggregatedContent;
+  const totalTextSize = textFiles.reduce((sum, file) => sum + file.size, 0);
+  const totalBinarySize = binaryFiles.reduce((sum, file) => sum + file.size, 0);
+  const totalSize = totalTextSize + totalBinarySize;

-  const totalLines = textFiles.reduce((sum, f) => sum + (f.lines || 0), 0);
+  const totalLines = textFiles.reduce((sum, file) => sum + file.lines, 0);
+
   const estimatedTokens = Math.ceil(xmlFileSize / 4);

-  // Build enriched file list
-  const allFiles = await H.enrichAllFiles(textFiles, binaryFiles);
-  const totalBytes = allFiles.reduce((s, f) => s + f.size, 0);
-  const sizes = allFiles.map((f) => f.size).sort((a, b) => a - b);
-  const avgSize = sizes.length > 0 ? totalBytes / sizes.length : 0;
-  const medianSize = sizes.length > 0 ? H.percentile(sizes, 50) : 0;
-  const p90 = H.percentile(sizes, 90);
-  const p95 = H.percentile(sizes, 95);
-  const p99 = H.percentile(sizes, 99);
-
-  const histogram = H.buildHistogram(allFiles);
-  const byExtensionArr = H.aggregateByExtension(allFiles);
-  const byDirectoryArr = H.aggregateByDirectory(allFiles);
-  const { depthDist, longestPaths } = H.computeDepthAndLongest(allFiles);
-  const temporal = H.computeTemporal(allFiles, Date.now());
-  const quality = H.computeQuality(allFiles, textFiles);
-  const duplicateCandidates = H.computeDuplicates(allFiles, textFiles);
-  const compressibilityRatio = H.estimateCompressibility(textFiles);
-  const git = H.computeGitInfo(allFiles, rootDir, quality.largeThreshold);
-  const largestFiles = H.computeLargestFiles(allFiles, totalBytes);
-  const markdownReport = H.buildMarkdownReport(
-    largestFiles,
-    byExtensionArr,
-    byDirectoryArr,
-    totalBytes,
-  );
+  const formatSize = (bytes) => {
+    if (bytes < 1024) return `${bytes} B`;
+    if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
+    return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
+  };

   return {
-    // Back-compat summary
     totalFiles: textFiles.length + binaryFiles.length,
     textFiles: textFiles.length,
     binaryFiles: binaryFiles.length,
     errorFiles: errors.length,
-    totalSize: H.formatSize(totalBytes),
-    totalBytes,
-    xmlSize: H.formatSize(xmlFileSize),
+    totalSize: formatSize(totalSize),
+    xmlSize: formatSize(xmlFileSize),
     totalLines,
     estimatedTokens: estimatedTokens.toLocaleString(),
-
-    // Distributions and percentiles
-    avgFileSize: avgSize,
-    medianFileSize: medianSize,
-    p90,
-    p95,
-    p99,
-    histogram,
-
-    // Extensions and directories
-    byExtension: byExtensionArr,
-    byDirectory: byDirectoryArr,
-    depthDistribution: depthDist,
-    longestPaths,
-
-    // Temporal
-    temporal,
-
-    // Quality signals
-    quality,
-
-    // Duplicates and compressibility
-    duplicateCandidates,
-    compressibilityRatio,
-
-    // Git-aware
-    git,
-
-    largestFiles,
-    markdownReport,
   };
 }
```
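After the rewrite, `calculateStatistics` is synchronous and dependency-free, which is why the CLI hunk above drops both the `await` and the `inputDir` argument at the call site. A quick sketch with made-up inputs:

```js
// Sketch: calling the simplified, synchronous calculateStatistics.
const aggregatedContent = {
  textFiles: [
    { path: 'src/index.js', size: 2048, lines: 80 },
    { path: 'README.md', size: 1024, lines: 40 },
  ],
  binaryFiles: [{ path: 'logo.png', size: 4096 }],
  errors: [],
};

const stats = calculateStatistics(aggregatedContent, 10240 /* XML size in bytes */);
console.log(stats.totalFiles);      // 3
console.log(stats.totalSize);       // "7.0 KB" (2048 + 1024 + 4096 bytes)
console.log(stats.totalLines);      // 120
console.log(stats.estimatedTokens); // "2,560" (ceil(10240 / 4), ~4 bytes per token)
```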
@@ -1,413 +0,0 @@
|
|||||||
/* deno-lint-ignore-file */
|
|
||||||
/*
|
|
||||||
Automatic test matrix for project root detection.
|
|
||||||
Creates temporary fixtures for various ecosystems and validates findProjectRoot().
|
|
||||||
No external options or flags required. Safe to run multiple times.
|
|
||||||
*/
|
|
||||||
|
|
||||||
const os = require('node:os');
|
|
||||||
const path = require('node:path');
|
|
||||||
const fs = require('fs-extra');
|
|
||||||
const { promisify } = require('node:util');
|
|
||||||
const { execFile } = require('node:child_process');
|
|
||||||
const process = require('node:process');
|
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
|
|
||||||
const { findProjectRoot } = require('./projectRoot.js');
|
|
||||||
|
|
||||||
async function cmdAvailable(cmd) {
|
|
||||||
try {
|
|
||||||
await execFileAsync(cmd, ['--version'], { timeout: 500, windowsHide: true });
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function testSvnMarker() {
|
|
||||||
const root = await mkTmpDir('svn');
|
|
||||||
const nested = path.join(root, 'proj', 'code');
|
|
||||||
await fs.ensureDir(nested);
|
|
||||||
await fs.ensureDir(path.join(root, '.svn'));
|
|
||||||
const found = await findProjectRoot(nested);
|
|
||||||
assertEqual(found, root, '.svn marker should be detected');
|
|
||||||
return { name: 'svn-marker', ok: true };
|
|
||||||
}
|
|
||||||
|
|
||||||
async function testSymlinkStart() {
|
|
||||||
const root = await mkTmpDir('symlink-start');
|
|
||||||
const nested = path.join(root, 'a', 'b');
|
|
||||||
await fs.ensureDir(nested);
|
|
||||||
await fs.writeFile(path.join(root, '.project-root'), '\n');
|
|
||||||
const tmp = await mkTmpDir('symlink-tmp');
|
|
||||||
const link = path.join(tmp, 'link-to-b');
|
|
||||||
try {
|
|
||||||
await fs.symlink(nested, link);
|
|
||||||
} catch {
|
|
||||||
// symlink may not be permitted on some systems; skip
|
|
||||||
return { name: 'symlink-start', ok: true, skipped: true };
|
|
||||||
}
|
|
||||||
const found = await findProjectRoot(link);
|
|
||||||
assertEqual(found, root, 'should resolve symlinked start to real root');
|
|
||||||
return { name: 'symlink-start', ok: true };
|
|
||||||
}
|
|
||||||
|
|
||||||
async function testSubmoduleLikeInnerGitFile() {
|
|
||||||
const root = await mkTmpDir('submodule-like');
|
|
||||||
const mid = path.join(root, 'mid');
|
|
||||||
const leaf = path.join(mid, 'leaf');
|
|
||||||
await fs.ensureDir(leaf);
|
|
||||||
// outer repo
|
|
||||||
await fs.ensureDir(path.join(root, '.git'));
|
|
||||||
// inner submodule-like .git file
|
|
||||||
await fs.writeFile(path.join(mid, '.git'), 'gitdir: ../.git/modules/mid\n');
|
|
||||||
const found = await findProjectRoot(leaf);
|
|
||||||
assertEqual(found, root, 'outermost .git should win on tie weight');
|
|
||||||
return { name: 'submodule-like-gitfile', ok: true };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function mkTmpDir(name) {
|
|
||||||
const base = await fs.realpath(os.tmpdir());
|
|
||||||
const dir = await fs.mkdtemp(path.join(base, `flattener-${name}-`));
|
|
||||||
return dir;
|
|
||||||
}
|
|
||||||
|
|
||||||
function assertEqual(actual, expected, msg) {
|
|
||||||
if (actual !== expected) {
|
|
||||||
throw new Error(`${msg}: expected="${expected}" actual="${actual}"`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||

async function testSentinel() {
  const root = await mkTmpDir('sentinel');
  const nested = path.join(root, 'a', 'b', 'c');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, '.project-root'), '\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'sentinel .project-root should win');
  return { name: 'sentinel', ok: true };
}

async function testOtherSentinels() {
  const root = await mkTmpDir('other-sentinels');
  const nested = path.join(root, 'x', 'y');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, '.workspace-root'), '\n');
  const found1 = await findProjectRoot(nested);
  assertEqual(found1, root, 'sentinel .workspace-root should win');

  await fs.remove(path.join(root, '.workspace-root'));
  await fs.writeFile(path.join(root, '.repo-root'), '\n');
  const found2 = await findProjectRoot(nested);
  assertEqual(found2, root, 'sentinel .repo-root should win');
  return { name: 'other-sentinels', ok: true };
}

async function testGitCliAndMarker() {
  const hasGit = await cmdAvailable('git');
  if (!hasGit) return { name: 'git-cli', ok: true, skipped: true };

  const root = await mkTmpDir('git');
  const nested = path.join(root, 'pkg', 'src');
  await fs.ensureDir(nested);
  await execFileAsync('git', ['init'], { cwd: root, timeout: 2000 });
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'git toplevel should be detected');
  return { name: 'git-cli', ok: true };
}

async function testHgMarkerOrCli() {
  // Prefer simple marker test to avoid requiring Mercurial install
  const root = await mkTmpDir('hg');
  const nested = path.join(root, 'lib');
  await fs.ensureDir(nested);
  await fs.ensureDir(path.join(root, '.hg'));
  const found = await findProjectRoot(nested);
  assertEqual(found, root, '.hg marker should be detected');
  return { name: 'hg-marker', ok: true };
}

async function testWorkspacePnpm() {
  const root = await mkTmpDir('pnpm-workspace');
  const pkgA = path.join(root, 'packages', 'a');
  await fs.ensureDir(pkgA);
  await fs.writeFile(path.join(root, 'pnpm-workspace.yaml'), 'packages:\n - packages/*\n');
  const found = await findProjectRoot(pkgA);
  assertEqual(found, root, 'pnpm-workspace.yaml should be detected');
  return { name: 'pnpm-workspace', ok: true };
}

async function testPackageJsonWorkspaces() {
  const root = await mkTmpDir('package-workspaces');
  const pkgA = path.join(root, 'packages', 'a');
  await fs.ensureDir(pkgA);
  await fs.writeJson(
    path.join(root, 'package.json'),
    { private: true, workspaces: ['packages/*'] },
    { spaces: 2 },
  );
  const found = await findProjectRoot(pkgA);
  assertEqual(found, root, 'package.json workspaces should be detected');
  return { name: 'package.json-workspaces', ok: true };
}

async function testLockfiles() {
  const root = await mkTmpDir('lockfiles');
  const nested = path.join(root, 'src');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'yarn.lock'), '\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'yarn.lock should be detected');
  return { name: 'lockfiles', ok: true };
}

async function testLanguageConfigs() {
  const root = await mkTmpDir('lang-configs');
  const nested = path.join(root, 'x', 'y');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'pyproject.toml'), "[tool.poetry]\nname='tmp'\n");
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'pyproject.toml should be detected');
  return { name: 'language-configs', ok: true };
}

async function testPreferOuterOnTie() {
  const root = await mkTmpDir('tie');
  const mid = path.join(root, 'mid');
  const leaf = path.join(mid, 'leaf');
  await fs.ensureDir(leaf);
  // same weight marker at two levels
  await fs.writeFile(path.join(root, 'requirements.txt'), '\n');
  await fs.writeFile(path.join(mid, 'requirements.txt'), '\n');
  const found = await findProjectRoot(leaf);
  assertEqual(found, root, 'outermost directory should win on equal weight');
  return { name: 'prefer-outermost-tie', ok: true };
}
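
// The tie-break encoded above (and in testSubmoduleLikeInnerGitFile) assumes
// a resolution rule of the form "score each ancestor by marker weight; on a
// tie, the outermost directory wins". A hedged sketch of that rule — the
// function name and weight table are hypothetical, not the projectRoot.js code:
async function sketchResolveRoot(startDir, markerWeights) {
  let best = null;
  let dir = await fs.realpath(startDir);
  for (;;) {
    for (const [marker, weight] of Object.entries(markerWeights)) {
      if (await fs.pathExists(path.join(dir, marker))) {
        // '>=' (not '>') lets an equally weighted outer directory displace
        // an inner one, which is exactly what the tie tests assert.
        if (!best || weight >= best.weight) best = { dir, weight };
      }
    }
    const parent = path.dirname(dir);
    if (parent === dir) break; // reached the filesystem root
    dir = parent;
  }
  return best ? best.dir : startDir;
}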

// Additional coverage: Bazel, Nx/Turbo/Rush, Go workspaces, Deno, Java/Scala, PHP, Rust, Nix, Changesets, env markers,
// and priority interaction between package.json and lockfiles.

async function testBazelWorkspace() {
  const root = await mkTmpDir('bazel');
  const nested = path.join(root, 'apps', 'svc');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'WORKSPACE'), 'workspace(name="tmp")\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'Bazel WORKSPACE should be detected');
  return { name: 'bazel-workspace', ok: true };
}

async function testNx() {
  const root = await mkTmpDir('nx');
  const nested = path.join(root, 'apps', 'web');
  await fs.ensureDir(nested);
  await fs.writeJson(path.join(root, 'nx.json'), { npmScope: 'tmp' }, { spaces: 2 });
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'nx.json should be detected');
  return { name: 'nx', ok: true };
}

async function testTurbo() {
  const root = await mkTmpDir('turbo');
  const nested = path.join(root, 'packages', 'x');
  await fs.ensureDir(nested);
  await fs.writeJson(path.join(root, 'turbo.json'), { pipeline: {} }, { spaces: 2 });
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'turbo.json should be detected');
  return { name: 'turbo', ok: true };
}

async function testRush() {
  const root = await mkTmpDir('rush');
  const nested = path.join(root, 'apps', 'a');
  await fs.ensureDir(nested);
  await fs.writeJson(path.join(root, 'rush.json'), { projectFolderMinDepth: 1 }, { spaces: 2 });
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'rush.json should be detected');
  return { name: 'rush', ok: true };
}

async function testGoWorkAndMod() {
  const root = await mkTmpDir('gowork');
  const mod = path.join(root, 'modA');
  const nested = path.join(mod, 'pkg');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'go.work'), 'go 1.22\nuse ./modA\n');
  await fs.writeFile(path.join(mod, 'go.mod'), 'module example.com/a\ngo 1.22\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'go.work should define the workspace root');
  return { name: 'go-work', ok: true };
}

async function testDenoJson() {
  const root = await mkTmpDir('deno');
  const nested = path.join(root, 'src');
  await fs.ensureDir(nested);
  await fs.writeJson(path.join(root, 'deno.json'), { tasks: {} }, { spaces: 2 });
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'deno.json should be detected');
  return { name: 'deno-json', ok: true };
}

async function testGradleSettings() {
  const root = await mkTmpDir('gradle');
  const nested = path.join(root, 'app');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'settings.gradle'), "rootProject.name='tmp'\n");
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'settings.gradle should be detected');
  return { name: 'gradle-settings', ok: true };
}

async function testMavenPom() {
  const root = await mkTmpDir('maven');
  const nested = path.join(root, 'module');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'pom.xml'), '<project></project>\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'pom.xml should be detected');
  return { name: 'maven-pom', ok: true };
}

async function testSbtBuild() {
  const root = await mkTmpDir('sbt');
  const nested = path.join(root, 'sub');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'build.sbt'), 'name := "tmp"\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'build.sbt should be detected');
  return { name: 'sbt-build', ok: true };
}

async function testComposer() {
  const root = await mkTmpDir('composer');
  const nested = path.join(root, 'src');
  await fs.ensureDir(nested);
  await fs.writeJson(path.join(root, 'composer.json'), { name: 'tmp/pkg' }, { spaces: 2 });
  await fs.writeFile(path.join(root, 'composer.lock'), '{}\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'composer.{json,lock} should be detected');
  return { name: 'composer', ok: true };
}

async function testCargo() {
  const root = await mkTmpDir('cargo');
  const nested = path.join(root, 'src');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'Cargo.toml'), "[package]\nname='tmp'\nversion='0.0.0'\n");
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'Cargo.toml should be detected');
  return { name: 'cargo', ok: true };
}

async function testNixFlake() {
  const root = await mkTmpDir('nix');
  const nested = path.join(root, 'work');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'flake.nix'), '{ }\n');
  const found = await findProjectRoot(nested);
  assertEqual(found, root, 'flake.nix should be detected');
  return { name: 'nix-flake', ok: true };
}

async function testChangesetConfig() {
  const root = await mkTmpDir('changeset');
  const nested = path.join(root, 'pkg');
  await fs.ensureDir(nested);
  await fs.ensureDir(path.join(root, '.changeset'));
  await fs.writeJson(
    path.join(root, '.changeset', 'config.json'),
    { $schema: 'https://unpkg.com/@changesets/config@2.3.1/schema.json' },
    { spaces: 2 },
  );
  const found = await findProjectRoot(nested);
  assertEqual(found, root, '.changeset/config.json should be detected');
  return { name: 'changesets', ok: true };
}

async function testEnvCustomMarker() {
  const root = await mkTmpDir('env-marker');
  const nested = path.join(root, 'dir');
  await fs.ensureDir(nested);
  await fs.writeFile(path.join(root, 'MY_ROOT'), '\n');
  const prev = process.env.PROJECT_ROOT_MARKERS;
  process.env.PROJECT_ROOT_MARKERS = 'MY_ROOT';
  try {
    const found = await findProjectRoot(nested);
    assertEqual(found, root, 'custom env marker should be honored');
  } finally {
    if (prev === undefined) delete process.env.PROJECT_ROOT_MARKERS;
    else process.env.PROJECT_ROOT_MARKERS = prev;
  }
  return { name: 'env-custom-marker', ok: true };
}
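
// Assumption: PROJECT_ROOT_MARKERS is consumed as a comma-separated list of
// extra sentinel names ranked at (or near) top weight, conceptually:
//
//   const extra = (process.env.PROJECT_ROOT_MARKERS || '')
//     .split(',')
//     .map((s) => s.trim())
//     .filter(Boolean);
//
// The exact precedence lives in projectRoot.js; the test only pins the
// observable behavior (a MY_ROOT file at the top wins).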

async function testPackageLowPriorityVsLock() {
  const root = await mkTmpDir('pkg-vs-lock');
  const nested = path.join(root, 'nested');
  await fs.ensureDir(path.join(nested, 'deep'));
  await fs.writeJson(path.join(nested, 'package.json'), { name: 'nested' }, { spaces: 2 });
  await fs.writeFile(path.join(root, 'yarn.lock'), '\n');
  const found = await findProjectRoot(path.join(nested, 'deep'));
  assertEqual(found, root, 'lockfile at root should outrank nested package.json');
  return { name: 'package-vs-lock-priority', ok: true };
}

async function run() {
  const tests = [
    testSentinel,
    testOtherSentinels,
    testGitCliAndMarker,
    testHgMarkerOrCli,
    testWorkspacePnpm,
    testPackageJsonWorkspaces,
    testLockfiles,
    testLanguageConfigs,
    testPreferOuterOnTie,
    testBazelWorkspace,
    testNx,
    testTurbo,
    testRush,
    testGoWorkAndMod,
    testDenoJson,
    testGradleSettings,
    testMavenPom,
    testSbtBuild,
    testComposer,
    testCargo,
    testNixFlake,
    testChangesetConfig,
    testEnvCustomMarker,
    testPackageLowPriorityVsLock,
    testSvnMarker,
    testSymlinkStart,
    testSubmoduleLikeInnerGitFile,
  ];

  const results = [];
  for (const t of tests) {
    try {
      const r = await t();
      results.push({ ...r, ok: true });
      console.log(`✔ ${r.name}${r.skipped ? ' (skipped)' : ''}`);
    } catch (error) {
      console.error(`✖ ${t.name}:`, error && error.message ? error.message : error);
      results.push({ name: t.name, ok: false, error: String(error) });
    }
  }

  const failed = results.filter((r) => !r.ok);
  console.log('\nSummary:');
  for (const r of results) {
    console.log(`- ${r.name}: ${r.ok ? 'ok' : 'FAIL'}${r.skipped ? ' (skipped)' : ''}`);
  }

  if (failed.length > 0) {
    process.exitCode = 1;
  }
}

run().catch((error) => {
  console.error('Fatal error:', error);
  process.exit(1);
});
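
// Typical invocation (the path is illustrative — run the file from wherever
// it lives in the repo):
//
//   node tools/flattener/projectRoot.test.js
//
// The non-zero exit code on failure lets this script gate CI directly.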

@@ -403,7 +403,6 @@ async function promptInstallation() {
         { name: 'Cline', value: 'cline' },
         { name: 'Gemini CLI', value: 'gemini' },
         { name: 'Qwen Code', value: 'qwen-code' },
-        { name: 'Crush', value: 'crush' },
         { name: 'Github Copilot', value: 'github-copilot' },
       ],
     },
@@ -28,16 +28,6 @@ ide-configurations:
       # To use BMad agents in Claude Code:
       # 1. Type /agent-name (e.g., "/dev", "/pm", "/architect")
       # 2. Claude will switch to that agent's persona
-  crush:
-    name: Crush
-    rule-dir: .crush/commands/BMad/
-    format: multi-file
-    command-suffix: .md
-    instructions: |
-      # To use BMad agents in Crush:
-      # 1. Press CTRL + P and press TAB
-      # 2. Select agent or task
-      # 3. Crush will switch to that agent's persona / task
   windsurf:
     name: Windsurf
     rule-dir: .windsurf/workflows/
@@ -47,9 +47,6 @@ class IdeSetup extends BaseIdeSetup {
       case 'claude-code': {
         return this.setupClaudeCode(installDir, selectedAgent);
       }
-      case 'crush': {
-        return this.setupCrush(installDir, selectedAgent);
-      }
       case 'windsurf': {
         return this.setupWindsurf(installDir, selectedAgent);
       }
@@ -102,44 +99,6 @@ class IdeSetup extends BaseIdeSetup {
     return true;
   }

-  async setupCrush(installDir, selectedAgent) {
-    // Setup bmad-core commands
-    const coreSlashPrefix = await this.getCoreSlashPrefix(installDir);
-    const coreAgents = selectedAgent ? [selectedAgent] : await this.getCoreAgentIds(installDir);
-    const coreTasks = await this.getCoreTaskIds(installDir);
-    await this.setupCrushForPackage(
-      installDir,
-      'core',
-      coreSlashPrefix,
-      coreAgents,
-      coreTasks,
-      '.bmad-core',
-    );
-
-    // Setup expansion pack commands
-    const expansionPacks = await this.getInstalledExpansionPacks(installDir);
-    for (const packInfo of expansionPacks) {
-      const packSlashPrefix = await this.getExpansionPackSlashPrefix(packInfo.path);
-      const packAgents = await this.getExpansionPackAgents(packInfo.path);
-      const packTasks = await this.getExpansionPackTasks(packInfo.path);
-
-      if (packAgents.length > 0 || packTasks.length > 0) {
-        // Use the actual directory name where the expansion pack is installed
-        const rootPath = path.relative(installDir, packInfo.path);
-        await this.setupCrushForPackage(
-          installDir,
-          packInfo.name,
-          packSlashPrefix,
-          packAgents,
-          packTasks,
-          rootPath,
-        );
-      }
-    }
-
-    return true;
-  }
-
   async setupClaudeCode(installDir, selectedAgent) {
     // Setup bmad-core commands
     const coreSlashPrefix = await this.getCoreSlashPrefix(installDir);
@@ -275,94 +234,6 @@ class IdeSetup extends BaseIdeSetup {
     console.log(chalk.dim(` - Tasks in: ${tasksDir}`));
   }

-  async setupCrushForPackage(installDir, packageName, slashPrefix, agentIds, taskIds, rootPath) {
-    const commandsBaseDir = path.join(installDir, '.crush', 'commands', slashPrefix);
-    const agentsDir = path.join(commandsBaseDir, 'agents');
-    const tasksDir = path.join(commandsBaseDir, 'tasks');
-
-    // Ensure directories exist
-    await fileManager.ensureDirectory(agentsDir);
-    await fileManager.ensureDirectory(tasksDir);
-
-    // Setup agents
-    for (const agentId of agentIds) {
-      // Find the agent file - for expansion packs, prefer the expansion pack version
-      let agentPath;
-      if (packageName === 'core') {
-        // For core, use the normal search
-        agentPath = await this.findAgentPath(agentId, installDir);
-      } else {
-        // For expansion packs, first try to find the agent in the expansion pack directory
-        const expansionPackPath = path.join(installDir, rootPath, 'agents', `${agentId}.md`);
-        if (await fileManager.pathExists(expansionPackPath)) {
-          agentPath = expansionPackPath;
-        } else {
-          // Fall back to core if not found in expansion pack
-          agentPath = await this.findAgentPath(agentId, installDir);
-        }
-      }
-
-      const commandPath = path.join(agentsDir, `${agentId}.md`);
-
-      if (agentPath) {
-        // Create command file with agent content
-        let agentContent = await fileManager.readFile(agentPath);
-
-        // Replace {root} placeholder with the appropriate root path for this context
-        agentContent = agentContent.replaceAll('{root}', rootPath);
-
-        // Add command header
-        let commandContent = `# /${agentId} Command\n\n`;
-        commandContent += `When this command is used, adopt the following agent persona:\n\n`;
-        commandContent += agentContent;
-
-        await fileManager.writeFile(commandPath, commandContent);
-        console.log(chalk.green(`✓ Created agent command: /${agentId}`));
-      }
-    }
-
-    // Setup tasks
-    for (const taskId of taskIds) {
-      // Find the task file - for expansion packs, prefer the expansion pack version
-      let taskPath;
-      if (packageName === 'core') {
-        // For core, use the normal search
-        taskPath = await this.findTaskPath(taskId, installDir);
-      } else {
-        // For expansion packs, first try to find the task in the expansion pack directory
-        const expansionPackPath = path.join(installDir, rootPath, 'tasks', `${taskId}.md`);
-        if (await fileManager.pathExists(expansionPackPath)) {
-          taskPath = expansionPackPath;
-        } else {
-          // Fall back to core if not found in expansion pack
-          taskPath = await this.findTaskPath(taskId, installDir);
-        }
-      }
-
-      const commandPath = path.join(tasksDir, `${taskId}.md`);
-
-      if (taskPath) {
-        // Create command file with task content
-        let taskContent = await fileManager.readFile(taskPath);
-
-        // Replace {root} placeholder with the appropriate root path for this context
-        taskContent = taskContent.replaceAll('{root}', rootPath);
-
-        // Add command header
-        let commandContent = `# /${taskId} Task\n\n`;
-        commandContent += `When this command is used, execute the following task:\n\n`;
-        commandContent += taskContent;
-
-        await fileManager.writeFile(commandPath, commandContent);
-        console.log(chalk.green(`✓ Created task command: /${taskId}`));
-      }
-    }
-
-    console.log(chalk.green(`\n✓ Created Crush commands for ${packageName} in ${commandsBaseDir}`));
-    console.log(chalk.dim(` - Agents in: ${agentsDir}`));
-    console.log(chalk.dim(` - Tasks in: ${tasksDir}`));
-  }
-
   async setupWindsurf(installDir, selectedAgent) {
     const windsurfWorkflowDir = path.join(installDir, '.windsurf', 'workflows');
     const agents = selectedAgent ? [selectedAgent] : await this.getAllAgentIds(installDir);