Compare commits

...

8 Commits

Author SHA1 Message Date
Murat Ozcan
62111c7647 chore: configure changelog file path in semantic-release config 2025-08-16 16:12:53 -05:00
manjaroblack
fab9d5e1f5 feat(flattener): prompt for detailed stats; polish .stats.md with emojis (#422)
* feat: add detailed statistics and markdown report generation to flattener tool

* fix: remove redundant error handling for project root detection
2025-08-16 08:03:28 -05:00
Brian Madison
93426c2d2f feat: publish stable release 5.0.0
BREAKING CHANGE: Promote beta features to stable release for v5.0.0

This commit ensures the stable release gets properly published to NPM and GitHub releases.
2025-08-15 23:06:28 -05:00
github-actions[bot]
f56d37a60a release: promote to stable 5.0.0
- Promote beta features to stable release
- Update version from 4.38.0 to 5.0.0
- Automated promotion via GitHub Actions
2025-08-15 23:06:28 -05:00
github-actions[bot]
224cfc05dc release: promote to stable 4.38.0
- Promote beta features to stable release
- Update version from 4.37.0 to 4.38.0
- Automated promotion via GitHub Actions
2025-08-15 23:06:27 -05:00
Brian Madison
6cb2fa68b3 fix: update package-lock.json for semver dependency 2025-08-15 23:06:27 -05:00
Brian Madison
d21ac491a0 release: create stable 4.37.0 release
Promote beta features to stable release with dual publishing support
2025-08-15 23:06:27 -05:00
Thiago Freitas
848e33fdd9 Feature: Installer commands for Crush CLI (#429)
* feat: add support for Crush IDE configuration and commands

* fix: update Crush IDE instructions for clarity on persona/task switching

---------

Co-authored-by: Brian <bmadcode@gmail.com>
2025-08-15 22:38:44 -05:00
15 changed files with 1758 additions and 282 deletions

View File

@@ -83,6 +83,27 @@ jobs:
;;
esac
# Check if calculated version already exists on NPM and increment if necessary
while npm view bmad-method@$NEW_VERSION version >/dev/null 2>&1; do
echo "Version $NEW_VERSION already exists, incrementing..."
IFS='.' read -ra NEW_VERSION_PARTS <<< "$NEW_VERSION"
NEW_MAJOR=${NEW_VERSION_PARTS[0]}
NEW_MINOR=${NEW_VERSION_PARTS[1]}
NEW_PATCH=${NEW_VERSION_PARTS[2]}
case "${{ github.event.inputs.version_bump }}" in
"major")
NEW_VERSION="$((NEW_MAJOR + 1)).0.0"
;;
"minor")
NEW_VERSION="$NEW_MAJOR.$((NEW_MINOR + 1)).0"
;;
"patch")
NEW_VERSION="$NEW_MAJOR.$NEW_MINOR.$((NEW_PATCH + 1))"
;;
esac
done
echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT
echo "Promoting from $CURRENT_VERSION to $NEW_VERSION"
@@ -100,9 +121,10 @@ jobs:
- name: Commit stable release
run: |
git add .
git commit -m "release: promote to stable ${{ steps.version.outputs.new_version }}
git commit -m "feat: promote to stable ${{ steps.version.outputs.new_version }}
BREAKING CHANGE: Promote beta features to stable release
- Promote beta features to stable release
- Update version from ${{ steps.version.outputs.current_version }} to ${{ steps.version.outputs.new_version }}
- Automated promotion via GitHub Actions"

.gitignore vendored (2 changes)
View File

@@ -44,4 +44,4 @@ CLAUDE.md
test-project-install/*
sample-project/*
flattened-codebase.xml
*.stats.md

View File

@@ -13,7 +13,12 @@
"plugins": [
"@semantic-release/commit-analyzer",
"@semantic-release/release-notes-generator",
"@semantic-release/changelog",
[
"@semantic-release/changelog",
{
"changelogFile": "CHANGELOG.md"
}
],
"@semantic-release/npm",
"./tools/semantic-release-sync-installer.js",
"@semantic-release/github"

View File

@@ -686,3 +686,4 @@ Co-Authored-By: Claude <noreply@anthropic.com>
### Features
- add versioning and release automation ([0ea5e50](https://github.com/bmadcode/BMAD-METHOD/commit/0ea5e50aa7ace5946d0100c180dd4c0da3e2fd8c))
# Promote to stable release 5.0.0

package-lock.json generated (141 changes)
View File

@@ -1,12 +1,12 @@
{
"name": "bmad-method",
"version": "4.36.2",
"version": "5.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "bmad-method",
"version": "4.36.2",
"version": "5.0.0",
"license": "MIT",
"dependencies": {
"@kayvan/markdown-tree-parser": "^1.5.0",
@@ -18,7 +18,8 @@
"ignore": "^7.0.5",
"inquirer": "^8.2.6",
"js-yaml": "^4.1.0",
"ora": "^5.4.1"
"ora": "^5.4.1",
"semver": "^7.6.3"
},
"bin": {
"bmad": "tools/bmad-npx-wrapper.js",
@@ -108,6 +109,16 @@
"url": "https://opencollective.com/babel"
}
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/generator": {
"version": "7.28.0",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.0.tgz",
@@ -142,6 +153,16 @@
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/helper-globals": {
"version": "7.28.0",
"resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
@@ -1959,19 +1980,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@semantic-release/npm/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@semantic-release/npm/node_modules/signal-exit": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
@@ -3341,19 +3349,6 @@
"node": ">=16"
}
},
"node_modules/conventional-changelog-writer/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/conventional-commits-filter": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-4.0.0.tgz",
@@ -4907,19 +4902,6 @@
"node": ">=10"
}
},
"node_modules/istanbul-lib-instrument/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/istanbul-lib-report": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz",
@@ -5621,19 +5603,6 @@
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
}
},
"node_modules/jest-snapshot/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/jest-util": {
"version": "30.0.5",
"resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.0.5.tgz",
@@ -6403,19 +6372,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/make-dir/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/makeerror": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz",
@@ -7308,19 +7264,6 @@
"node": "^16.14.0 || >=18.0.0"
}
},
"node_modules/normalize-package-data/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/normalize-path": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
@@ -11177,19 +11120,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/semantic-release/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/semantic-release/node_modules/signal-exit": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
@@ -11217,13 +11147,15 @@
}
},
"node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/semver-diff": {
@@ -11242,19 +11174,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/semver-diff/node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
"dev": true,
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/semver-regex": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-4.0.5.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "bmad-method",
"version": "4.37.0-beta.6",
"version": "5.0.0",
"description": "Breakthrough Method of Agile AI-driven Development",
"main": "tools/cli.js",
"bin": {

View File

@@ -127,19 +127,11 @@ program
path.join(inputDir, "flattened-codebase.xml"),
);
}
} else {
console.error(
"Could not auto-detect a project root and no arguments were provided. Please specify -i/--input and -o/--output.",
);
process.exit(1);
}
// Ensure output directory exists
await fs.ensureDir(path.dirname(outputPath));
console.log(`Flattening codebase from: ${inputDir}`);
console.log(`Output file: ${outputPath}`);
try {
// Verify input directory exists
if (!await fs.pathExists(inputDir)) {
@@ -159,7 +151,6 @@ program
);
// Process files with progress tracking
console.log("Reading file contents");
const processingSpinner = ora("📄 Processing files...").start();
const aggregatedContent = await aggregateFileContents(
filteredFiles,
@@ -172,10 +163,6 @@ program
if (aggregatedContent.errors.length > 0) {
console.log(`Errors: ${aggregatedContent.errors.length}`);
}
console.log(`Text files: ${aggregatedContent.textFiles.length}`);
if (aggregatedContent.binaryFiles.length > 0) {
console.log(`Binary files: ${aggregatedContent.binaryFiles.length}`);
}
// Generate XML output using streaming
const xmlSpinner = ora("🔧 Generating XML output...").start();
@@ -184,7 +171,11 @@ program
// Calculate and display statistics
const outputStats = await fs.stat(outputPath);
const stats = calculateStatistics(aggregatedContent, outputStats.size);
const stats = await calculateStatistics(
aggregatedContent,
outputStats.size,
inputDir,
);
// Display completion summary
console.log("\n📊 Completion Summary:");
@@ -201,8 +192,476 @@ program
);
console.log(`🔢 Estimated tokens: ${stats.estimatedTokens}`);
console.log(
`📊 File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors`,
`📊 File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors\n`,
);
// Ask user if they want detailed stats + markdown report
const generateDetailed = await promptYesNo(
"Generate detailed stats (console + markdown) now?",
true,
);
if (generateDetailed) {
// Additional detailed stats
console.log("\n📈 Size Percentiles:");
console.log(
` Avg: ${
Math.round(stats.avgFileSize).toLocaleString()
} B, Median: ${
Math.round(stats.medianFileSize).toLocaleString()
} B, p90: ${stats.p90.toLocaleString()} B, p95: ${stats.p95.toLocaleString()} B, p99: ${stats.p99.toLocaleString()} B`,
);
if (Array.isArray(stats.histogram) && stats.histogram.length) {
console.log("\n🧮 Size Histogram:");
for (const b of stats.histogram.slice(0, 2)) {
console.log(
` ${b.label}: ${b.count} files, ${b.bytes.toLocaleString()} bytes`,
);
}
if (stats.histogram.length > 2) {
console.log(` … and ${stats.histogram.length - 2} more buckets`);
}
}
if (Array.isArray(stats.byExtension) && stats.byExtension.length) {
const topExt = stats.byExtension.slice(0, 2);
console.log("\n📦 Top Extensions:");
for (const e of topExt) {
const pct = stats.totalBytes
? ((e.bytes / stats.totalBytes) * 100)
: 0;
console.log(
` ${e.ext}: ${e.count} files, ${e.bytes.toLocaleString()} bytes (${
pct.toFixed(2)
}%)`,
);
}
if (stats.byExtension.length > 2) {
console.log(
` … and ${stats.byExtension.length - 2} more extensions`,
);
}
}
if (Array.isArray(stats.byDirectory) && stats.byDirectory.length) {
const topDir = stats.byDirectory.slice(0, 2);
console.log("\n📂 Top Directories:");
for (const d of topDir) {
const pct = stats.totalBytes
? ((d.bytes / stats.totalBytes) * 100)
: 0;
console.log(
` ${d.dir}: ${d.count} files, ${d.bytes.toLocaleString()} bytes (${
pct.toFixed(2)
}%)`,
);
}
if (stats.byDirectory.length > 2) {
console.log(
` … and ${stats.byDirectory.length - 2} more directories`,
);
}
}
if (
Array.isArray(stats.depthDistribution) &&
stats.depthDistribution.length
) {
console.log("\n🌳 Depth Distribution:");
const dd = stats.depthDistribution.slice(0, 2);
let line = " " + dd.map((d) => `${d.depth}:${d.count}`).join(" ");
if (stats.depthDistribution.length > 2) {
line += ` … +${stats.depthDistribution.length - 2} more`;
}
console.log(line);
}
if (Array.isArray(stats.longestPaths) && stats.longestPaths.length) {
console.log("\n🧵 Longest Paths:");
for (const p of stats.longestPaths.slice(0, 2)) {
console.log(
` ${p.path} (${p.length} chars, ${p.size.toLocaleString()} bytes)`,
);
}
if (stats.longestPaths.length > 2) {
console.log(` … and ${stats.longestPaths.length - 2} more paths`);
}
}
if (stats.temporal) {
console.log("\n⏱ Temporal:");
if (stats.temporal.oldest) {
console.log(
` Oldest: ${stats.temporal.oldest.path} (${stats.temporal.oldest.mtime})`,
);
}
if (stats.temporal.newest) {
console.log(
` Newest: ${stats.temporal.newest.path} (${stats.temporal.newest.mtime})`,
);
}
if (Array.isArray(stats.temporal.ageBuckets)) {
console.log(" Age buckets:");
for (const b of stats.temporal.ageBuckets.slice(0, 2)) {
console.log(
` ${b.label}: ${b.count} files, ${b.bytes.toLocaleString()} bytes`,
);
}
if (stats.temporal.ageBuckets.length > 2) {
console.log(
` … and ${
stats.temporal.ageBuckets.length - 2
} more buckets`,
);
}
}
}
if (stats.quality) {
console.log("\n✅ Quality Signals:");
console.log(` Zero-byte files: ${stats.quality.zeroByteFiles}`);
console.log(` Empty text files: ${stats.quality.emptyTextFiles}`);
console.log(` Hidden files: ${stats.quality.hiddenFiles}`);
console.log(` Symlinks: ${stats.quality.symlinks}`);
console.log(
` Large files (>= ${
(stats.quality.largeThreshold / (1024 * 1024)).toFixed(0)
} MB): ${stats.quality.largeFilesCount}`,
);
console.log(
` Suspiciously large files (>= 100 MB): ${stats.quality.suspiciousLargeFilesCount}`,
);
}
if (
Array.isArray(stats.duplicateCandidates) &&
stats.duplicateCandidates.length
) {
console.log("\n🧬 Duplicate Candidates:");
for (const d of stats.duplicateCandidates.slice(0, 2)) {
console.log(
` ${d.reason}: ${d.count} files @ ${d.size.toLocaleString()} bytes`,
);
}
if (stats.duplicateCandidates.length > 2) {
console.log(
` … and ${stats.duplicateCandidates.length - 2} more groups`,
);
}
}
if (typeof stats.compressibilityRatio === "number") {
console.log(
`\n🗜️ Compressibility ratio (sampled): ${
(stats.compressibilityRatio * 100).toFixed(2)
}%`,
);
}
if (stats.git && stats.git.isRepo) {
console.log("\n🔧 Git:");
console.log(
` Tracked: ${stats.git.trackedCount} files, ${stats.git.trackedBytes.toLocaleString()} bytes`,
);
console.log(
` Untracked: ${stats.git.untrackedCount} files, ${stats.git.untrackedBytes.toLocaleString()} bytes`,
);
if (
Array.isArray(stats.git.lfsCandidates) &&
stats.git.lfsCandidates.length
) {
console.log(" LFS candidates (top 2):");
for (const f of stats.git.lfsCandidates.slice(0, 2)) {
console.log(` ${f.path} (${f.size.toLocaleString()} bytes)`);
}
if (stats.git.lfsCandidates.length > 2) {
console.log(
` … and ${stats.git.lfsCandidates.length - 2} more`,
);
}
}
}
if (Array.isArray(stats.largestFiles) && stats.largestFiles.length) {
console.log("\n📚 Largest Files (top 2):");
for (const f of stats.largestFiles.slice(0, 2)) {
// Show LOC for text files when available; omit ext and mtime
let locStr = "";
if (!f.isBinary && Array.isArray(aggregatedContent?.textFiles)) {
const tf = aggregatedContent.textFiles.find((t) =>
t.path === f.path
);
if (tf && typeof tf.lines === "number") {
locStr = `, LOC: ${tf.lines.toLocaleString()}`;
}
}
console.log(
` ${f.path} ${f.sizeFormatted} (${
f.percentOfTotal.toFixed(2)
}%)${locStr}`,
);
}
if (stats.largestFiles.length > 2) {
console.log(` … and ${stats.largestFiles.length - 2} more files`);
}
}
// Write a comprehensive markdown report next to the XML
{
const mdPath = outputPath.endsWith(".xml")
? outputPath.replace(/\.xml$/i, ".stats.md")
: outputPath + ".stats.md";
try {
const pct = (num, den) => (den ? ((num / den) * 100) : 0);
const md = [];
md.push(`# 🧾 Flatten Stats for ${path.basename(outputPath)}`);
md.push("");
md.push("## 📊 Summary");
md.push(`- Total source size: ${stats.totalSize}`);
md.push(`- Generated XML size: ${stats.xmlSize}`);
md.push(
`- Total lines of code: ${stats.totalLines.toLocaleString()}`,
);
md.push(`- Estimated tokens: ${stats.estimatedTokens}`);
md.push(
`- File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors`,
);
md.push("");
// Percentiles
md.push("## 📈 Size Percentiles");
md.push(
`Avg: ${
Math.round(stats.avgFileSize).toLocaleString()
} B, Median: ${
Math.round(stats.medianFileSize).toLocaleString()
} B, p90: ${stats.p90.toLocaleString()} B, p95: ${stats.p95.toLocaleString()} B, p99: ${stats.p99.toLocaleString()} B`,
);
md.push("");
// Histogram
if (Array.isArray(stats.histogram) && stats.histogram.length) {
md.push("## 🧮 Size Histogram");
md.push("| Bucket | Files | Bytes |");
md.push("| --- | ---: | ---: |");
for (const b of stats.histogram) {
md.push(
`| ${b.label} | ${b.count} | ${b.bytes.toLocaleString()} |`,
);
}
md.push("");
}
// Top Extensions
if (Array.isArray(stats.byExtension) && stats.byExtension.length) {
md.push("## 📦 Top Extensions by Bytes (Top 20)");
md.push("| Ext | Files | Bytes | % of total |");
md.push("| --- | ---: | ---: | ---: |");
for (const e of stats.byExtension.slice(0, 20)) {
const p = pct(e.bytes, stats.totalBytes);
md.push(
`| ${e.ext} | ${e.count} | ${e.bytes.toLocaleString()} | ${
p.toFixed(2)
}% |`,
);
}
md.push("");
}
// Top Directories
if (Array.isArray(stats.byDirectory) && stats.byDirectory.length) {
md.push("## 📂 Top Directories by Bytes (Top 20)");
md.push("| Directory | Files | Bytes | % of total |");
md.push("| --- | ---: | ---: | ---: |");
for (const d of stats.byDirectory.slice(0, 20)) {
const p = pct(d.bytes, stats.totalBytes);
md.push(
`| ${d.dir} | ${d.count} | ${d.bytes.toLocaleString()} | ${
p.toFixed(2)
}% |`,
);
}
md.push("");
}
// Depth distribution
if (
Array.isArray(stats.depthDistribution) &&
stats.depthDistribution.length
) {
md.push("## 🌳 Depth Distribution");
md.push("| Depth | Count |");
md.push("| ---: | ---: |");
for (const d of stats.depthDistribution) {
md.push(`| ${d.depth} | ${d.count} |`);
}
md.push("");
}
// Longest paths
if (
Array.isArray(stats.longestPaths) && stats.longestPaths.length
) {
md.push("## 🧵 Longest Paths (Top 25)");
md.push("| Path | Length | Bytes |");
md.push("| --- | ---: | ---: |");
for (const pth of stats.longestPaths) {
md.push(
`| ${pth.path} | ${pth.length} | ${pth.size.toLocaleString()} |`,
);
}
md.push("");
}
// Temporal
if (stats.temporal) {
md.push("## ⏱️ Temporal");
if (stats.temporal.oldest) {
md.push(
`- Oldest: ${stats.temporal.oldest.path} (${stats.temporal.oldest.mtime})`,
);
}
if (stats.temporal.newest) {
md.push(
`- Newest: ${stats.temporal.newest.path} (${stats.temporal.newest.mtime})`,
);
}
if (Array.isArray(stats.temporal.ageBuckets)) {
md.push("");
md.push("| Age | Files | Bytes |");
md.push("| --- | ---: | ---: |");
for (const b of stats.temporal.ageBuckets) {
md.push(
`| ${b.label} | ${b.count} | ${b.bytes.toLocaleString()} |`,
);
}
}
md.push("");
}
// Quality signals
if (stats.quality) {
md.push("## ✅ Quality Signals");
md.push(`- Zero-byte files: ${stats.quality.zeroByteFiles}`);
md.push(`- Empty text files: ${stats.quality.emptyTextFiles}`);
md.push(`- Hidden files: ${stats.quality.hiddenFiles}`);
md.push(`- Symlinks: ${stats.quality.symlinks}`);
md.push(
`- Large files (>= ${
(stats.quality.largeThreshold / (1024 * 1024)).toFixed(0)
} MB): ${stats.quality.largeFilesCount}`,
);
md.push(
`- Suspiciously large files (>= 100 MB): ${stats.quality.suspiciousLargeFilesCount}`,
);
md.push("");
}
// Duplicates
if (
Array.isArray(stats.duplicateCandidates) &&
stats.duplicateCandidates.length
) {
md.push("## 🧬 Duplicate Candidates");
md.push("| Reason | Files | Size (bytes) |");
md.push("| --- | ---: | ---: |");
for (const d of stats.duplicateCandidates) {
md.push(
`| ${d.reason} | ${d.count} | ${d.size.toLocaleString()} |`,
);
}
md.push("");
// Detailed listing of duplicate file names and locations
md.push("### 🧬 Duplicate Groups Details");
let dupIndex = 1;
for (const d of stats.duplicateCandidates) {
md.push(
`#### Group ${dupIndex}: ${d.count} files @ ${d.size.toLocaleString()} bytes (${d.reason})`,
);
if (Array.isArray(d.files) && d.files.length) {
for (const fp of d.files) {
md.push(`- ${fp}`);
}
} else {
md.push("- (file list unavailable)");
}
md.push("");
dupIndex++;
}
md.push("");
}
// Compressibility
if (typeof stats.compressibilityRatio === "number") {
md.push("## 🗜️ Compressibility");
md.push(
`Sampled compressibility ratio: ${
(stats.compressibilityRatio * 100).toFixed(2)
}%`,
);
md.push("");
}
// Git
if (stats.git && stats.git.isRepo) {
md.push("## 🔧 Git");
md.push(
`- Tracked: ${stats.git.trackedCount} files, ${stats.git.trackedBytes.toLocaleString()} bytes`,
);
md.push(
`- Untracked: ${stats.git.untrackedCount} files, ${stats.git.untrackedBytes.toLocaleString()} bytes`,
);
if (
Array.isArray(stats.git.lfsCandidates) &&
stats.git.lfsCandidates.length
) {
md.push("");
md.push("### 📦 LFS Candidates (Top 20)");
md.push("| Path | Bytes |");
md.push("| --- | ---: |");
for (const f of stats.git.lfsCandidates.slice(0, 20)) {
md.push(`| ${f.path} | ${f.size.toLocaleString()} |`);
}
}
md.push("");
}
// Largest Files
if (
Array.isArray(stats.largestFiles) && stats.largestFiles.length
) {
md.push("## 📚 Largest Files (Top 50)");
md.push("| Path | Size | % of total | LOC |");
md.push("| --- | ---: | ---: | ---: |");
for (const f of stats.largestFiles) {
let loc = "";
if (
!f.isBinary && Array.isArray(aggregatedContent?.textFiles)
) {
const tf = aggregatedContent.textFiles.find((t) =>
t.path === f.path
);
if (tf && typeof tf.lines === "number") {
loc = tf.lines.toLocaleString();
}
}
md.push(
`| ${f.path} | ${f.sizeFormatted} | ${
f.percentOfTotal.toFixed(2)
}% | ${loc} |`,
);
}
md.push("");
}
await fs.writeFile(mdPath, md.join("\n"));
console.log(`\n🧾 Detailed stats report written to: ${mdPath}`);
} catch (e) {
console.warn(`⚠️ Failed to write stats markdown: ${e.message}`);
}
}
}
} catch (error) {
console.error("❌ Critical error:", error.message);
console.error("An unexpected error occurred.");

View File

@@ -1,45 +1,204 @@
const fs = require("fs-extra");
const path = require("node:path");
// Deno/Node compatibility: explicitly import process
const process = require("node:process");
const { execFile } = require("node:child_process");
const { promisify } = require("node:util");
const execFileAsync = promisify(execFile);
// Simple memoization across calls (keyed by realpath of startDir)
const _cache = new Map();
async function _tryRun(cmd, args, cwd, timeoutMs = 500) {
try {
const { stdout } = await execFileAsync(cmd, args, {
cwd,
timeout: timeoutMs,
windowsHide: true,
maxBuffer: 1024 * 1024,
});
const out = String(stdout || "").trim();
return out || null;
} catch {
return null;
}
}
async function _detectVcsTopLevel(startDir) {
// Run common VCS root queries in parallel; ignore failures
const gitP = _tryRun("git", ["rev-parse", "--show-toplevel"], startDir);
const hgP = _tryRun("hg", ["root"], startDir);
const svnP = (async () => {
const show = await _tryRun("svn", ["info", "--show-item", "wc-root"], startDir);
if (show) return show;
const info = await _tryRun("svn", ["info"], startDir);
if (info) {
const line = info.split(/\r?\n/).find((l) => l.toLowerCase().startsWith("working copy root path:"));
if (line) return line.split(":").slice(1).join(":").trim();
}
return null;
})();
const [git, hg, svn] = await Promise.all([gitP, hgP, svnP]);
return git || hg || svn || null;
}
/**
* Attempt to find the project root by walking up from startDir
* Looks for common project markers like .git, package.json, pyproject.toml, etc.
* Attempt to find the project root by walking up from startDir.
* Uses a robust, prioritized set of ecosystem markers (VCS > workspaces/monorepo > lock/build > language config).
* Also recognizes package.json with "workspaces" as a workspace root.
* You can augment markers via env PROJECT_ROOT_MARKERS as a comma-separated list of file/dir names.
* @param {string} startDir
* @returns {Promise<string|null>} project root directory or null if not found
*/
async function findProjectRoot(startDir) {
try {
// Resolve symlinks for robustness (e.g., when invoked from a symlinked path)
let dir = path.resolve(startDir);
const root = path.parse(dir).root;
const markers = [
".git",
"package.json",
"pnpm-workspace.yaml",
"yarn.lock",
"pnpm-lock.yaml",
"pyproject.toml",
"requirements.txt",
"go.mod",
"Cargo.toml",
"composer.json",
".hg",
".svn",
];
try {
dir = await fs.realpath(dir);
} catch {
// ignore if realpath fails; continue with resolved path
}
const startKey = dir; // preserve starting point for caching
if (_cache.has(startKey)) return _cache.get(startKey);
const fsRoot = path.parse(dir).root;
// Helper to safely check for existence
const exists = (p) => fs.pathExists(p);
// Build checks: an array of { makePath: (dir) => string, weight }
const checks = [];
const add = (rel, weight) => {
const makePath = (d) => Array.isArray(rel) ? path.join(d, ...rel) : path.join(d, rel);
checks.push({ makePath, weight });
};
// Highest priority: explicit sentinel markers
add(".project-root", 110);
add(".workspace-root", 110);
add(".repo-root", 110);
// Next in priority: VCS roots
add(".git", 100);
add(".hg", 95);
add(".svn", 95);
// Monorepo/workspace indicators
add("pnpm-workspace.yaml", 90);
add("lerna.json", 90);
add("turbo.json", 90);
add("nx.json", 90);
add("rush.json", 90);
add("go.work", 90);
add("WORKSPACE", 90);
add("WORKSPACE.bazel", 90);
add("MODULE.bazel", 90);
add("pants.toml", 90);
// Lockfiles and package-manager/top-level locks
add("yarn.lock", 85);
add("pnpm-lock.yaml", 85);
add("package-lock.json", 85);
add("bun.lockb", 85);
add("Cargo.lock", 85);
add("composer.lock", 85);
add("poetry.lock", 85);
add("Pipfile.lock", 85);
add("Gemfile.lock", 85);
// Build-system root indicators
add("settings.gradle", 80);
add("settings.gradle.kts", 80);
add("gradlew", 80);
add("pom.xml", 80);
add("build.sbt", 80);
add(["project", "build.properties"], 80);
// Language/project config markers
add("deno.json", 75);
add("deno.jsonc", 75);
add("pyproject.toml", 75);
add("Pipfile", 75);
add("requirements.txt", 75);
add("go.mod", 75);
add("Cargo.toml", 75);
add("composer.json", 75);
add("mix.exs", 75);
add("Gemfile", 75);
add("CMakeLists.txt", 75);
add("stack.yaml", 75);
add("cabal.project", 75);
add("rebar.config", 75);
add("pubspec.yaml", 75);
add("flake.nix", 75);
add("shell.nix", 75);
add("default.nix", 75);
add(".tool-versions", 75);
add("package.json", 74); // generic Node project (lower than lockfiles/workspaces)
// Changesets
add([".changeset", "config.json"], 70);
add(".changeset", 70);
// Custom markers via env (comma-separated names)
if (process.env.PROJECT_ROOT_MARKERS) {
for (const name of process.env.PROJECT_ROOT_MARKERS.split(",").map((s) => s.trim()).filter(Boolean)) {
add(name, 72);
}
}
/** Check for package.json with "workspaces" */
const hasWorkspacePackageJson = async (d) => {
const pkgPath = path.join(d, "package.json");
if (!(await exists(pkgPath))) return false;
try {
const raw = await fs.readFile(pkgPath, "utf8");
const pkg = JSON.parse(raw);
return Boolean(pkg && pkg.workspaces);
} catch {
return false;
}
};
let best = null; // { dir, weight }
// Try to detect VCS toplevel once up-front; treat as authoritative slightly above .git marker
const vcsTop = await _detectVcsTopLevel(dir);
if (vcsTop) {
best = { dir: vcsTop, weight: 101 };
}
while (true) {
const exists = await Promise.all(
markers.map((m) => fs.pathExists(path.join(dir, m))),
);
if (exists.some(Boolean)) {
return dir;
// Special check: package.json with "workspaces"
if (await hasWorkspacePackageJson(dir)) {
if (!best || 90 >= best.weight) best = { dir, weight: 90 };
}
if (dir === root) break;
// Evaluate all other checks in parallel
const results = await Promise.all(
checks.map(async (c) => ({ c, ok: await exists(c.makePath(dir)) })),
);
for (const { c, ok } of results) {
if (!ok) continue;
if (!best || c.weight >= best.weight) {
best = { dir, weight: c.weight };
}
}
if (dir === fsRoot) break;
dir = path.dirname(dir);
}
return null;
const out = best ? best.dir : null;
_cache.set(startKey, out);
return out;
} catch {
return null;
}
}
module.exports = { findProjectRoot };
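Usage of the rewritten resolver is unchanged for callers, aside from the optional env hook. A short sketch (the marker names passed via PROJECT_ROOT_MARKERS are illustrative):

```js
const { findProjectRoot } = require("./projectRoot.js");

(async () => {
  // Optional: augment detection with custom marker file names (illustrative values).
  process.env.PROJECT_ROOT_MARKERS = ".my-root,REPO_ROOT";
  const root = await findProjectRoot(process.cwd());
  console.log(root ?? "no project root found"); // results are memoized per start dir
})();
```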

View File

@@ -0,0 +1,331 @@
"use strict";
const fs = require("node:fs/promises");
const path = require("node:path");
const zlib = require("node:zlib");
const { Buffer } = require("node:buffer");
const crypto = require("node:crypto");
const cp = require("node:child_process");
const KB = 1024;
const MB = 1024 * KB;
const formatSize = (bytes) => {
if (bytes < 1024) return `${bytes} B`;
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
};
const percentile = (sorted, p) => {
if (sorted.length === 0) return 0;
const idx = Math.min(sorted.length - 1, Math.max(0, Math.ceil((p / 100) * sorted.length) - 1));
return sorted[idx];
};
async function processWithLimit(items, fn, concurrency = 64) {
for (let i = 0; i < items.length; i += concurrency) {
await Promise.all(items.slice(i, i + concurrency).map(fn));
}
}
async function enrichAllFiles(textFiles, binaryFiles) {
/** @type {Array<{ path: string; absolutePath: string; size: number; lines?: number; isBinary: boolean; ext: string; dir: string; depth: number; hidden: boolean; mtimeMs: number; isSymlink: boolean; }>} */
const allFiles = [];
async function enrich(file, isBinary) {
const ext = (path.extname(file.path) || "").toLowerCase();
const dir = path.dirname(file.path) || ".";
const depth = file.path.split(path.sep).filter(Boolean).length;
const hidden = file.path.split(path.sep).some((seg) => seg.startsWith("."));
let mtimeMs = 0;
let isSymlink = false;
try {
const lst = await fs.lstat(file.absolutePath);
mtimeMs = lst.mtimeMs;
isSymlink = lst.isSymbolicLink();
} catch (_) { /* ignore lstat errors during enrichment */ }
allFiles.push({
path: file.path,
absolutePath: file.absolutePath,
size: file.size || 0,
lines: file.lines,
isBinary,
ext,
dir,
depth,
hidden,
mtimeMs,
isSymlink,
});
}
await processWithLimit(textFiles, (f) => enrich(f, false));
await processWithLimit(binaryFiles, (f) => enrich(f, true));
return allFiles;
}
function buildHistogram(allFiles) {
const buckets = [
[1 * KB, "01KB"],
[10 * KB, "110KB"],
[100 * KB, "10100KB"],
[1 * MB, "100KB1MB"],
[10 * MB, "110MB"],
[100 * MB, "10100MB"],
[Infinity, ">=100MB"],
];
const histogram = buckets.map(([_, label]) => ({ label, count: 0, bytes: 0 }));
for (const f of allFiles) {
for (let i = 0; i < buckets.length; i++) {
if (f.size < buckets[i][0]) {
histogram[i].count++;
histogram[i].bytes += f.size;
break;
}
}
}
return histogram;
}
function aggregateByExtension(allFiles) {
const byExtension = new Map();
for (const f of allFiles) {
const key = f.ext || "<none>";
const v = byExtension.get(key) || { ext: key, count: 0, bytes: 0 };
v.count++;
v.bytes += f.size;
byExtension.set(key, v);
}
return Array.from(byExtension.values()).sort((a, b) => b.bytes - a.bytes);
}
function aggregateByDirectory(allFiles) {
const byDirectory = new Map();
function addDirBytes(dir, bytes) {
const v = byDirectory.get(dir) || { dir, count: 0, bytes: 0 };
v.count++;
v.bytes += bytes;
byDirectory.set(dir, v);
}
for (const f of allFiles) {
const parts = f.dir === "." ? [] : f.dir.split(path.sep);
let acc = "";
for (let i = 0; i < parts.length; i++) {
acc = i === 0 ? parts[0] : acc + path.sep + parts[i];
addDirBytes(acc, f.size);
}
if (parts.length === 0) addDirBytes(".", f.size);
}
return Array.from(byDirectory.values()).sort((a, b) => b.bytes - a.bytes);
}
function computeDepthAndLongest(allFiles) {
const depthDistribution = new Map();
for (const f of allFiles) {
depthDistribution.set(f.depth, (depthDistribution.get(f.depth) || 0) + 1);
}
const longestPaths = [...allFiles]
.sort((a, b) => b.path.length - a.path.length)
.slice(0, 25)
.map((f) => ({ path: f.path, length: f.path.length, size: f.size }));
const depthDist = Array.from(depthDistribution.entries())
.sort((a, b) => a[0] - b[0])
.map(([depth, count]) => ({ depth, count }));
return { depthDist, longestPaths };
}
function computeTemporal(allFiles, nowMs) {
let oldest = null, newest = null;
const ageBuckets = [
{ label: "> 1 year", minDays: 365, maxDays: Infinity, count: 0, bytes: 0 },
{ label: "612 months", minDays: 180, maxDays: 365, count: 0, bytes: 0 },
{ label: "16 months", minDays: 30, maxDays: 180, count: 0, bytes: 0 },
{ label: "730 days", minDays: 7, maxDays: 30, count: 0, bytes: 0 },
{ label: "17 days", minDays: 1, maxDays: 7, count: 0, bytes: 0 },
{ label: "< 1 day", minDays: 0, maxDays: 1, count: 0, bytes: 0 },
];
for (const f of allFiles) {
const ageDays = Math.max(0, (nowMs - (f.mtimeMs || nowMs)) / (24 * 60 * 60 * 1000));
for (const b of ageBuckets) {
if (ageDays >= b.minDays && ageDays < b.maxDays) {
b.count++;
b.bytes += f.size;
break;
}
}
if (!oldest || f.mtimeMs < oldest.mtimeMs) oldest = f;
if (!newest || f.mtimeMs > newest.mtimeMs) newest = f;
}
return {
oldest: oldest ? { path: oldest.path, mtime: oldest.mtimeMs ? new Date(oldest.mtimeMs).toISOString() : null } : null,
newest: newest ? { path: newest.path, mtime: newest.mtimeMs ? new Date(newest.mtimeMs).toISOString() : null } : null,
ageBuckets,
};
}
function computeQuality(allFiles, textFiles) {
const zeroByteFiles = allFiles.filter((f) => f.size === 0).length;
const emptyTextFiles = textFiles.filter((f) => (f.size || 0) === 0 || (f.lines || 0) === 0).length;
const hiddenFiles = allFiles.filter((f) => f.hidden).length;
const symlinks = allFiles.filter((f) => f.isSymlink).length;
const largeThreshold = 50 * MB;
const suspiciousThreshold = 100 * MB;
const largeFilesCount = allFiles.filter((f) => f.size >= largeThreshold).length;
const suspiciousLargeFilesCount = allFiles.filter((f) => f.size >= suspiciousThreshold).length;
return {
zeroByteFiles,
emptyTextFiles,
hiddenFiles,
symlinks,
largeFilesCount,
suspiciousLargeFilesCount,
largeThreshold,
};
}
function computeDuplicates(allFiles, textFiles) {
const duplicatesBySize = new Map();
for (const f of allFiles) {
const key = String(f.size);
const arr = duplicatesBySize.get(key) || [];
arr.push(f);
duplicatesBySize.set(key, arr);
}
const duplicateCandidates = [];
for (const [sizeKey, arr] of duplicatesBySize.entries()) {
if (arr.length < 2) continue;
const textGroup = arr.filter((f) => !f.isBinary);
const otherGroup = arr.filter((f) => f.isBinary);
const contentHashGroups = new Map();
for (const tf of textGroup) {
try {
const src = textFiles.find((x) => x.absolutePath === tf.absolutePath);
const content = src ? src.content : "";
const h = crypto.createHash("sha1").update(content).digest("hex");
const g = contentHashGroups.get(h) || [];
g.push(tf);
contentHashGroups.set(h, g);
} catch (_) { /* ignore hashing errors for duplicate detection */ }
}
for (const [_h, g] of contentHashGroups.entries()) {
if (g.length > 1) duplicateCandidates.push({ reason: "same-size+text-hash", size: Number(sizeKey), count: g.length, files: g.map((f) => f.path) });
}
if (otherGroup.length > 1) {
duplicateCandidates.push({ reason: "same-size", size: Number(sizeKey), count: otherGroup.length, files: otherGroup.map((f) => f.path) });
}
}
return duplicateCandidates;
}
function estimateCompressibility(textFiles) {
let compSampleBytes = 0;
let compCompressedBytes = 0;
for (const tf of textFiles) {
try {
const sampleLen = Math.min(256 * 1024, tf.size || 0);
if (sampleLen <= 0) continue;
const sample = tf.content.slice(0, sampleLen);
const gz = zlib.gzipSync(Buffer.from(sample, "utf8"));
compSampleBytes += sampleLen;
compCompressedBytes += gz.length;
} catch (_) { /* ignore compression errors during sampling */ }
}
return compSampleBytes > 0 ? compCompressedBytes / compSampleBytes : null;
}
function computeGitInfo(allFiles, rootDir, largeThreshold) {
const info = {
isRepo: false,
trackedCount: 0,
trackedBytes: 0,
untrackedCount: 0,
untrackedBytes: 0,
lfsCandidates: [],
};
try {
if (!rootDir) return info;
const top = cp.execFileSync("git", ["rev-parse", "--show-toplevel"], { cwd: rootDir, stdio: ["ignore", "pipe", "ignore"] }).toString().trim();
if (!top) return info;
info.isRepo = true;
const out = cp.execFileSync("git", ["ls-files", "-z"], { cwd: rootDir, stdio: ["ignore", "pipe", "ignore"] });
const tracked = new Set(out.toString().split("\0").filter(Boolean));
let trackedBytes = 0, trackedCount = 0, untrackedBytes = 0, untrackedCount = 0;
const lfsCandidates = [];
for (const f of allFiles) {
const isTracked = tracked.has(f.path);
if (isTracked) {
trackedCount++; trackedBytes += f.size;
if (f.size >= largeThreshold) lfsCandidates.push({ path: f.path, size: f.size });
} else {
untrackedCount++; untrackedBytes += f.size;
}
}
info.trackedCount = trackedCount;
info.trackedBytes = trackedBytes;
info.untrackedCount = untrackedCount;
info.untrackedBytes = untrackedBytes;
info.lfsCandidates = lfsCandidates.sort((a, b) => b.size - a.size).slice(0, 50);
} catch (_) { /* git not available or not a repo, ignore */ }
return info;
}
function computeLargestFiles(allFiles, totalBytes) {
const toPct = (num, den) => (den === 0 ? 0 : (num / den) * 100);
return [...allFiles]
.sort((a, b) => b.size - a.size)
.slice(0, 50)
.map((f) => ({
path: f.path,
size: f.size,
sizeFormatted: formatSize(f.size),
percentOfTotal: toPct(f.size, totalBytes),
ext: f.ext || "",
isBinary: f.isBinary,
mtime: f.mtimeMs ? new Date(f.mtimeMs).toISOString() : null,
}));
}
function mdTable(rows, headers) {
const header = `| ${headers.join(" | ")} |`;
const sep = `| ${headers.map(() => "---").join(" | ")} |`;
const body = rows.map((r) => `| ${r.join(" | ")} |`).join("\n");
return `${header}\n${sep}\n${body}`;
}
function buildMarkdownReport(largestFiles, byExtensionArr, byDirectoryArr, totalBytes) {
const toPct = (num, den) => (den === 0 ? 0 : (num / den) * 100);
const md = [];
md.push("\n### Top Largest Files (Top 50)\n");
md.push(mdTable(
largestFiles.map((f) => [f.path, f.sizeFormatted, `${f.percentOfTotal.toFixed(2)}%`, f.ext || "", f.isBinary ? "binary" : "text"]),
["Path", "Size", "% of total", "Ext", "Type"],
));
md.push("\n\n### Top Extensions by Bytes (Top 20)\n");
const topExtRows = byExtensionArr.slice(0, 20).map((e) => [e.ext, String(e.count), formatSize(e.bytes), `${toPct(e.bytes, totalBytes).toFixed(2)}%`]);
md.push(mdTable(topExtRows, ["Ext", "Count", "Bytes", "% of total"]));
md.push("\n\n### Top Directories by Bytes (Top 20)\n");
const topDirRows = byDirectoryArr.slice(0, 20).map((d) => [d.dir, String(d.count), formatSize(d.bytes), `${toPct(d.bytes, totalBytes).toFixed(2)}%`]);
md.push(mdTable(topDirRows, ["Directory", "Files", "Bytes", "% of total"]));
return md.join("\n");
}
module.exports = {
KB,
MB,
formatSize,
percentile,
processWithLimit,
enrichAllFiles,
buildHistogram,
aggregateByExtension,
aggregateByDirectory,
computeDepthAndLongest,
computeTemporal,
computeQuality,
computeDuplicates,
estimateCompressibility,
computeGitInfo,
computeLargestFiles,
buildMarkdownReport,
};
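A quick sketch of how the exported helpers compose (the values in the comments follow from the definitions above):

```js
const H = require("./stats.helpers.js");

const sizes = [120, 4096, 1024 * 1024].sort((a, b) => a - b); // percentile() expects ascending order
console.log(H.formatSize(sizes[2])); // "1.0 MB"
console.log(H.percentile(sizes, 90)); // 1048576 (nearest-rank percentile)
```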

View File

@@ -1,29 +1,79 @@
function calculateStatistics(aggregatedContent, xmlFileSize) {
const H = require("./stats.helpers.js");
async function calculateStatistics(aggregatedContent, xmlFileSize, rootDir) {
const { textFiles, binaryFiles, errors } = aggregatedContent;
const totalTextSize = textFiles.reduce((sum, file) => sum + file.size, 0);
const totalBinarySize = binaryFiles.reduce((sum, file) => sum + file.size, 0);
const totalSize = totalTextSize + totalBinarySize;
const totalLines = textFiles.reduce((sum, file) => sum + file.lines, 0);
const totalLines = textFiles.reduce((sum, f) => sum + (f.lines || 0), 0);
const estimatedTokens = Math.ceil(xmlFileSize / 4);
const formatSize = (bytes) => {
if (bytes < 1024) return `${bytes} B`;
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
};
// Build enriched file list
const allFiles = await H.enrichAllFiles(textFiles, binaryFiles);
const totalBytes = allFiles.reduce((s, f) => s + f.size, 0);
const sizes = allFiles.map((f) => f.size).sort((a, b) => a - b);
const avgSize = sizes.length ? totalBytes / sizes.length : 0;
const medianSize = sizes.length ? H.percentile(sizes, 50) : 0;
const p90 = H.percentile(sizes, 90);
const p95 = H.percentile(sizes, 95);
const p99 = H.percentile(sizes, 99);
const histogram = H.buildHistogram(allFiles);
const byExtensionArr = H.aggregateByExtension(allFiles);
const byDirectoryArr = H.aggregateByDirectory(allFiles);
const { depthDist, longestPaths } = H.computeDepthAndLongest(allFiles);
const temporal = H.computeTemporal(allFiles, Date.now());
const quality = H.computeQuality(allFiles, textFiles);
const duplicateCandidates = H.computeDuplicates(allFiles, textFiles);
const compressibilityRatio = H.estimateCompressibility(textFiles);
const git = H.computeGitInfo(allFiles, rootDir, quality.largeThreshold);
const largestFiles = H.computeLargestFiles(allFiles, totalBytes);
const markdownReport = H.buildMarkdownReport(
largestFiles,
byExtensionArr,
byDirectoryArr,
totalBytes,
);
return {
// Back-compat summary
totalFiles: textFiles.length + binaryFiles.length,
textFiles: textFiles.length,
binaryFiles: binaryFiles.length,
errorFiles: errors.length,
totalSize: formatSize(totalSize),
xmlSize: formatSize(xmlFileSize),
totalSize: H.formatSize(totalBytes),
totalBytes,
xmlSize: H.formatSize(xmlFileSize),
totalLines,
estimatedTokens: estimatedTokens.toLocaleString(),
// Distributions and percentiles
avgFileSize: avgSize,
medianFileSize: medianSize,
p90,
p95,
p99,
histogram,
// Extensions and directories
byExtension: byExtensionArr,
byDirectory: byDirectoryArr,
depthDistribution: depthDist,
longestPaths,
// Temporal
temporal,
// Quality signals
quality,
// Duplicates and compressibility
duplicateCandidates,
compressibilityRatio,
// Git-aware
git,
largestFiles,
markdownReport,
};
}
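Note the signature change: calculateStatistics is now async and takes the project root, so call sites must await it (the CLI hunk earlier does exactly this). A minimal call-site sketch:

```js
const stats = await calculateStatistics(aggregatedContent, xmlFileSize, rootDir);
console.log(stats.totalSize); // human-readable total, e.g. "12.3 MB" (illustrative)
console.log(stats.p95); // 95th-percentile file size in bytes
console.log(stats.git.isRepo); // true when rootDir resolves to a git repository
```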

View File

@@ -0,0 +1,405 @@
#!/usr/bin/env node
/* deno-lint-ignore-file */
/*
Automatic test matrix for project root detection.
Creates temporary fixtures for various ecosystems and validates findProjectRoot().
No external options or flags required. Safe to run multiple times.
*/
const os = require("node:os");
const path = require("node:path");
const fs = require("fs-extra");
const { promisify } = require("node:util");
const { execFile } = require("node:child_process");
const process = require("node:process");
const execFileAsync = promisify(execFile);
const { findProjectRoot } = require("./projectRoot.js");
async function cmdAvailable(cmd) {
try {
await execFileAsync(cmd, ["--version"], { timeout: 500, windowsHide: true });
return true;
} catch {
return false;
}
}
async function testSvnMarker() {
const root = await mkTmpDir("svn");
const nested = path.join(root, "proj", "code");
await fs.ensureDir(nested);
await fs.ensureDir(path.join(root, ".svn"));
const found = await findProjectRoot(nested);
assertEqual(found, root, ".svn marker should be detected");
return { name: "svn-marker", ok: true };
}
async function testSymlinkStart() {
const root = await mkTmpDir("symlink-start");
const nested = path.join(root, "a", "b");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, ".project-root"), "\n");
const tmp = await mkTmpDir("symlink-tmp");
const link = path.join(tmp, "link-to-b");
try {
await fs.symlink(nested, link);
} catch {
// symlink may not be permitted on some systems; skip
return { name: "symlink-start", ok: true, skipped: true };
}
const found = await findProjectRoot(link);
assertEqual(found, root, "should resolve symlinked start to real root");
return { name: "symlink-start", ok: true };
}
async function testSubmoduleLikeInnerGitFile() {
const root = await mkTmpDir("submodule-like");
const mid = path.join(root, "mid");
const leaf = path.join(mid, "leaf");
await fs.ensureDir(leaf);
// outer repo
await fs.ensureDir(path.join(root, ".git"));
// inner submodule-like .git file
await fs.writeFile(path.join(mid, ".git"), "gitdir: ../.git/modules/mid\n");
const found = await findProjectRoot(leaf);
assertEqual(found, root, "outermost .git should win on tie weight");
return { name: "submodule-like-gitfile", ok: true };
}
async function mkTmpDir(name) {
const base = await fs.realpath(os.tmpdir());
const dir = await fs.mkdtemp(path.join(base, `flattener-${name}-`));
return dir;
}
function assertEqual(actual, expected, msg) {
if (actual !== expected) {
throw new Error(`${msg}: expected=\"${expected}\" actual=\"${actual}\"`);
}
}
async function testSentinel() {
const root = await mkTmpDir("sentinel");
const nested = path.join(root, "a", "b", "c");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, ".project-root"), "\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "sentinel .project-root should win");
return { name: "sentinel", ok: true };
}
async function testOtherSentinels() {
const root = await mkTmpDir("other-sentinels");
const nested = path.join(root, "x", "y");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, ".workspace-root"), "\n");
const found1 = await findProjectRoot(nested);
assertEqual(found1, root, "sentinel .workspace-root should win");
await fs.remove(path.join(root, ".workspace-root"));
await fs.writeFile(path.join(root, ".repo-root"), "\n");
const found2 = await findProjectRoot(nested);
assertEqual(found2, root, "sentinel .repo-root should win");
return { name: "other-sentinels", ok: true };
}
async function testGitCliAndMarker() {
const hasGit = await cmdAvailable("git");
if (!hasGit) return { name: "git-cli", ok: true, skipped: true };
const root = await mkTmpDir("git");
const nested = path.join(root, "pkg", "src");
await fs.ensureDir(nested);
await execFileAsync("git", ["init"], { cwd: root, timeout: 2000 });
const found = await findProjectRoot(nested);
await assertEqual(found, root, "git toplevel should be detected");
return { name: "git-cli", ok: true };
}
async function testHgMarkerOrCli() {
// Prefer simple marker test to avoid requiring Mercurial install
const root = await mkTmpDir("hg");
const nested = path.join(root, "lib");
await fs.ensureDir(nested);
await fs.ensureDir(path.join(root, ".hg"));
const found = await findProjectRoot(nested);
await assertEqual(found, root, ".hg marker should be detected");
return { name: "hg-marker", ok: true };
}
async function testWorkspacePnpm() {
const root = await mkTmpDir("pnpm-workspace");
const pkgA = path.join(root, "packages", "a");
await fs.ensureDir(pkgA);
await fs.writeFile(path.join(root, "pnpm-workspace.yaml"), "packages:\n - packages/*\n");
const found = await findProjectRoot(pkgA);
await assertEqual(found, root, "pnpm-workspace.yaml should be detected");
return { name: "pnpm-workspace", ok: true };
}
async function testPackageJsonWorkspaces() {
const root = await mkTmpDir("package-workspaces");
const pkgA = path.join(root, "packages", "a");
await fs.ensureDir(pkgA);
await fs.writeJson(path.join(root, "package.json"), { private: true, workspaces: ["packages/*"] }, { spaces: 2 });
const found = await findProjectRoot(pkgA);
await assertEqual(found, root, "package.json workspaces should be detected");
return { name: "package.json-workspaces", ok: true };
}
async function testLockfiles() {
const root = await mkTmpDir("lockfiles");
const nested = path.join(root, "src");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "yarn.lock"), "\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "yarn.lock should be detected");
return { name: "lockfiles", ok: true };
}
async function testLanguageConfigs() {
const root = await mkTmpDir("lang-configs");
const nested = path.join(root, "x", "y");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "pyproject.toml"), "[tool.poetry]\nname='tmp'\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "pyproject.toml should be detected");
return { name: "language-configs", ok: true };
}
async function testPreferOuterOnTie() {
const root = await mkTmpDir("tie");
const mid = path.join(root, "mid");
const leaf = path.join(mid, "leaf");
await fs.ensureDir(leaf);
// same weight marker at two levels
await fs.writeFile(path.join(root, "requirements.txt"), "\n");
await fs.writeFile(path.join(mid, "requirements.txt"), "\n");
const found = await findProjectRoot(leaf);
await assertEqual(found, root, "outermost directory should win on equal weight");
return { name: "prefer-outermost-tie", ok: true };
}
// Additional coverage: Bazel, Nx/Turbo/Rush, Go workspaces, Deno, Java/Scala, PHP, Rust, Nix, Changesets, env markers,
// and priority interaction between package.json and lockfiles.
async function testBazelWorkspace() {
const root = await mkTmpDir("bazel");
const nested = path.join(root, "apps", "svc");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "WORKSPACE"), "workspace(name=\"tmp\")\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "Bazel WORKSPACE should be detected");
return { name: "bazel-workspace", ok: true };
}
async function testNx() {
const root = await mkTmpDir("nx");
const nested = path.join(root, "apps", "web");
await fs.ensureDir(nested);
await fs.writeJson(path.join(root, "nx.json"), { npmScope: "tmp" }, { spaces: 2 });
const found = await findProjectRoot(nested);
await assertEqual(found, root, "nx.json should be detected");
return { name: "nx", ok: true };
}
async function testTurbo() {
const root = await mkTmpDir("turbo");
const nested = path.join(root, "packages", "x");
await fs.ensureDir(nested);
await fs.writeJson(path.join(root, "turbo.json"), { pipeline: {} }, { spaces: 2 });
const found = await findProjectRoot(nested);
await assertEqual(found, root, "turbo.json should be detected");
return { name: "turbo", ok: true };
}
async function testRush() {
const root = await mkTmpDir("rush");
const nested = path.join(root, "apps", "a");
await fs.ensureDir(nested);
await fs.writeJson(path.join(root, "rush.json"), { projectFolderMinDepth: 1 }, { spaces: 2 });
const found = await findProjectRoot(nested);
await assertEqual(found, root, "rush.json should be detected");
return { name: "rush", ok: true };
}
async function testGoWorkAndMod() {
const root = await mkTmpDir("gowork");
const mod = path.join(root, "modA");
const nested = path.join(mod, "pkg");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "go.work"), "go 1.22\nuse ./modA\n");
await fs.writeFile(path.join(mod, "go.mod"), "module example.com/a\ngo 1.22\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "go.work should define the workspace root");
return { name: "go-work", ok: true };
}
async function testDenoJson() {
const root = await mkTmpDir("deno");
const nested = path.join(root, "src");
await fs.ensureDir(nested);
await fs.writeJson(path.join(root, "deno.json"), { tasks: {} }, { spaces: 2 });
const found = await findProjectRoot(nested);
await assertEqual(found, root, "deno.json should be detected");
return { name: "deno-json", ok: true };
}
async function testGradleSettings() {
const root = await mkTmpDir("gradle");
const nested = path.join(root, "app");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "settings.gradle"), "rootProject.name='tmp'\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "settings.gradle should be detected");
return { name: "gradle-settings", ok: true };
}
async function testMavenPom() {
const root = await mkTmpDir("maven");
const nested = path.join(root, "module");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "pom.xml"), "<project></project>\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "pom.xml should be detected");
return { name: "maven-pom", ok: true };
}
async function testSbtBuild() {
const root = await mkTmpDir("sbt");
const nested = path.join(root, "sub");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "build.sbt"), "name := \"tmp\"\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "build.sbt should be detected");
return { name: "sbt-build", ok: true };
}
async function testComposer() {
const root = await mkTmpDir("composer");
const nested = path.join(root, "src");
await fs.ensureDir(nested);
await fs.writeJson(path.join(root, "composer.json"), { name: "tmp/pkg" }, { spaces: 2 });
await fs.writeFile(path.join(root, "composer.lock"), "{}\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "composer.{json,lock} should be detected");
return { name: "composer", ok: true };
}
async function testCargo() {
const root = await mkTmpDir("cargo");
const nested = path.join(root, "src");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "Cargo.toml"), "[package]\nname='tmp'\nversion='0.0.0'\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "Cargo.toml should be detected");
return { name: "cargo", ok: true };
}
async function testNixFlake() {
const root = await mkTmpDir("nix");
const nested = path.join(root, "work");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "flake.nix"), "{ }\n");
const found = await findProjectRoot(nested);
await assertEqual(found, root, "flake.nix should be detected");
return { name: "nix-flake", ok: true };
}
async function testChangesetConfig() {
const root = await mkTmpDir("changeset");
const nested = path.join(root, "pkg");
await fs.ensureDir(nested);
await fs.ensureDir(path.join(root, ".changeset"));
await fs.writeJson(path.join(root, ".changeset", "config.json"), { $schema: "https://unpkg.com/@changesets/config@2.3.1/schema.json" }, { spaces: 2 });
const found = await findProjectRoot(nested);
await assertEqual(found, root, ".changeset/config.json should be detected");
return { name: "changesets", ok: true };
}
async function testEnvCustomMarker() {
const root = await mkTmpDir("env-marker");
const nested = path.join(root, "dir");
await fs.ensureDir(nested);
await fs.writeFile(path.join(root, "MY_ROOT"), "\n");
const prev = process.env.PROJECT_ROOT_MARKERS;
process.env.PROJECT_ROOT_MARKERS = "MY_ROOT";
try {
const found = await findProjectRoot(nested);
await assertEqual(found, root, "custom env marker should be honored");
} finally {
if (prev === undefined) delete process.env.PROJECT_ROOT_MARKERS; else process.env.PROJECT_ROOT_MARKERS = prev;
}
return { name: "env-custom-marker", ok: true };
}
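
A note on the override contract: testEnvCustomMarker only pins the single-name case. A minimal sketch of how PROJECT_ROOT_MARKERS could be parsed (comma separation is an assumption here, not something this suite verifies):

// Hypothetical parsing sketch, not findProjectRoot's verified internals:
// treat PROJECT_ROOT_MARKERS as a comma-separated list of extra marker names.
const extraMarkers = (process.env.PROJECT_ROOT_MARKERS || "")
  .split(",")
  .map((name) => name.trim())
  .filter(Boolean); // e.g. "MY_ROOT" -> ["MY_ROOT"]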
async function testPackageLowPriorityVsLock() {
const root = await mkTmpDir("pkg-vs-lock");
const nested = path.join(root, "nested");
await fs.ensureDir(path.join(nested, "deep"));
await fs.writeJson(path.join(nested, "package.json"), { name: "nested" }, { spaces: 2 });
await fs.writeFile(path.join(root, "yarn.lock"), "\n");
const found = await findProjectRoot(path.join(nested, "deep"));
await assertEqual(found, root, "lockfile at root should outrank nested package.json");
return { name: "package-vs-lock-priority", ok: true };
}
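
testPackageLowPriorityVsLock pins a ranking rule: a lockfile found higher up outranks a package.json found closer to the start directory. A minimal sketch of that idea (the weights and the pickRoot helper are illustrative, not the actual findProjectRoot internals):

// Lockfiles are strong evidence of the real root; a bare package.json is
// weak evidence, since nested workspace packages carry one too.
const MARKER_WEIGHT = { "yarn.lock": 2, "package-lock.json": 2, "package.json": 1 };

// candidates: [{ dir, marker }] gathered while walking up from the start dir
function pickRoot(candidates) {
  const best = [...candidates].sort(
    (a, b) =>
      (MARKER_WEIGHT[b.marker] || 0) - (MARKER_WEIGHT[a.marker] || 0) ||
      a.dir.length - b.dir.length // on a tie, prefer the outer (shorter) path
  )[0];
  return best ? best.dir : null;
}

Under these weights the root-level yarn.lock (2) beats the nested package.json (1), which is exactly what the assertion above demands; the tie-break also matches testPreferOuterOnTie.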
async function run() {
const tests = [
testSentinel,
testOtherSentinels,
testGitCliAndMarker,
testHgMarkerOrCli,
testWorkspacePnpm,
testPackageJsonWorkspaces,
testLockfiles,
testLanguageConfigs,
testPreferOuterOnTie,
testBazelWorkspace,
testNx,
testTurbo,
testRush,
testGoWorkAndMod,
testDenoJson,
testGradleSettings,
testMavenPom,
testSbtBuild,
testComposer,
testCargo,
testNixFlake,
testChangesetConfig,
testEnvCustomMarker,
testPackageLowPriorityVsLock,
testSvnMarker,
testSymlinkStart,
testSubmoduleLikeInnerGitFile,
];
const results = [];
for (const t of tests) {
try {
const r = await t();
results.push({ ...r, ok: true });
console.log(`${r.name}${r.skipped ? " (skipped)" : ""}`);
} catch (err) {
console.error(`${t.name}:`, err && err.message ? err.message : err);
results.push({ name: t.name, ok: false, error: String(err) });
}
}
const failed = results.filter((r) => !r.ok);
console.log("\nSummary:");
for (const r of results) {
console.log(`- ${r.name}: ${r.ok ? "ok" : "FAIL"}${r.skipped ? " (skipped)" : ""}`);
}
if (failed.length) {
process.exitCode = 1;
}
}
run().catch((e) => {
console.error("Fatal error:", e);
process.exit(1);
});
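
The exit status is the suite's only failure signal: run() sets process.exitCode = 1 when any test fails, and the catch above hard-exits on unexpected errors, so a CI step can simply invoke the file with Node (path illustrative, e.g. node tools/flattener/test-find-project-root.js) and check for a non-zero exit.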

View File

@@ -45,7 +45,7 @@ program
.option('-f, --full', 'Install complete BMad Method')
.option('-x, --expansion-only', 'Install only expansion packs (no bmad-core)')
.option('-d, --directory <path>', 'Installation directory')
.option('-i, --ide <ide...>', 'Configure for specific IDE(s) - can specify multiple (cursor, claude-code, windsurf, trae, roo, kilo, cline, gemini, qwen-code, github-copilot, other)')
.option('-i, --ide <ide...>', 'Configure for specific IDE(s) - can specify multiple (cursor, claude-code, windsurf, trae, roo, kilo, cline, gemini, qwen-code, github-copilot, crush, other)')
.option('-e, --expansion-packs <packs...>', 'Install specific expansion packs (can specify multiple)')
.action(async (options) => {
try {
@@ -183,17 +183,17 @@ program
});
async function promptInstallation() {
// Display ASCII logo
console.log(chalk.bold.cyan(`
██████╗ ███╗ ███╗ █████╗ ██████╗ ███╗ ███╗███████╗████████╗██╗ ██╗ ██████╗ ██████╗
██╔══██╗████╗ ████║██╔══██╗██╔══██╗ ████╗ ████║██╔════╝╚══██╔══╝██║ ██║██╔═══██╗██╔══██╗
██████╔╝██╔████╔██║███████║██║ ██║█████╗██╔████╔██║█████╗ ██║ ███████║██║ ██║██║ ██║
██╔══██╗██║╚██╔╝██║██╔══██║██║ ██║╚════╝██║╚██╔╝██║██╔══╝ ██║ ██╔══██║██║ ██║██║ ██║
██████╔╝██║ ╚═╝ ██║██║ ██║██████╔╝ ██║ ╚═╝ ██║███████╗ ██║ ██║ ██║╚██████╔╝██████╔╝
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝
`));
console.log(chalk.bold.magenta('🚀 Universal AI Agent Framework for Any Domain'));
console.log(chalk.bold.blue(`✨ Installer v${version}\n`));
@@ -218,63 +218,63 @@ async function promptInstallation() {
// Detect existing installations
const installDir = path.resolve(directory);
const state = await installer.detectInstallationState(installDir);
// Check for existing expansion packs
const existingExpansionPacks = state.expansionPacks || {};
// Get available expansion packs
const availableExpansionPacks = await installer.getAvailableExpansionPacks();
// Build choices list
const choices = [];
// Load core config to get short-title
const coreConfigPath = path.join(__dirname, '..', '..', '..', 'bmad-core', 'core-config.yaml');
const coreConfig = yaml.load(await fs.readFile(coreConfigPath, 'utf8'));
const coreShortTitle = coreConfig['short-title'] || 'BMad Agile Core System';
// Add BMad core option
let bmadOptionText;
if (state.type === 'v4_existing') {
const currentVersion = state.manifest?.version || 'unknown';
const newVersion = version; // Always use package.json version
const versionInfo = currentVersion === newVersion
? `(v${currentVersion} - reinstall)`
: `(v${currentVersion} → v${newVersion})`;
bmadOptionText = `Update ${coreShortTitle} ${versionInfo} .bmad-core`;
} else {
bmadOptionText = `${coreShortTitle} (v${version}) .bmad-core`;
}
choices.push({
name: bmadOptionText,
value: 'bmad-core',
checked: true
});
// Add expansion pack options
for (const pack of availableExpansionPacks) {
const existing = existingExpansionPacks[pack.id];
let packOptionText;
if (existing) {
const currentVersion = existing.manifest?.version || 'unknown';
const newVersion = pack.version;
const versionInfo = currentVersion === newVersion
? `(v${currentVersion} - reinstall)`
: `(v${currentVersion} → v${newVersion})`;
packOptionText = `Update ${pack.shortTitle} ${versionInfo} .${pack.id}`;
} else {
packOptionText = `${pack.shortTitle} (v${pack.version}) .${pack.id}`;
}
choices.push({
name: packOptionText,
value: pack.id,
checked: false
});
}
// Ask what to install
const { selectedItems } = await inquirer.prompt([
{
@@ -290,7 +290,7 @@ async function promptInstallation() {
}
}
]);
// Process selections
answers.installType = selectedItems.includes('bmad-core') ? 'full' : 'expansion-only';
answers.expansionPacks = selectedItems.filter(item => item !== 'bmad-core');
@@ -299,7 +299,7 @@ async function promptInstallation() {
if (selectedItems.includes('bmad-core')) {
console.log(chalk.cyan('\n📋 Document Organization Settings'));
console.log(chalk.dim('Configure how your project documentation should be organized.\n'));
// Ask about PRD sharding
const { prdSharded } = await inquirer.prompt([
{
@@ -310,7 +310,7 @@ async function promptInstallation() {
}
]);
answers.prdSharded = prdSharded;
// Ask about architecture sharding
const { architectureSharded } = await inquirer.prompt([
{
@@ -321,7 +321,7 @@ async function promptInstallation() {
}
]);
answers.architectureSharded = architectureSharded;
// Show warning if architecture sharding is disabled
if (!architectureSharded) {
console.log(chalk.yellow.bold('\n⚠ IMPORTANT: Architecture Sharding Disabled'));
@@ -330,7 +330,7 @@ async function promptInstallation() {
console.log(chalk.yellow('as these are used by the dev agent at runtime.'));
console.log(chalk.yellow('\nAlternatively, you can remove these files from the devLoadAlwaysFiles list'));
console.log(chalk.yellow('in your core-config.yaml after installation.'));
const { acknowledge } = await inquirer.prompt([
{
type: 'confirm',
@@ -339,7 +339,7 @@ async function promptInstallation() {
default: false
}
]);
if (!acknowledge) {
console.log(chalk.red('Installation cancelled.'));
process.exit(0);
@@ -350,14 +350,14 @@ async function promptInstallation() {
// Ask for IDE configuration
let ides = [];
let ideSelectionComplete = false;
while (!ideSelectionComplete) {
console.log(chalk.cyan('\n🛠 IDE Configuration'));
console.log(chalk.bold.yellow.bgRed(' ⚠️ IMPORTANT: This is a MULTISELECT! Use SPACEBAR to toggle each IDE! '));
console.log(chalk.bold.magenta('🔸 Use arrow keys to navigate'));
console.log(chalk.bold.magenta('🔸 Use SPACEBAR to select/deselect IDEs'));
console.log(chalk.bold.magenta('🔸 Press ENTER when finished selecting\n'));
const ideResponse = await inquirer.prompt([
{
type: 'checkbox',
@@ -373,11 +373,12 @@ async function promptInstallation() {
{ name: 'Cline', value: 'cline' },
{ name: 'Gemini CLI', value: 'gemini' },
{ name: 'Qwen Code', value: 'qwen-code' },
{ name: 'Crush', value: 'crush' },
{ name: 'GitHub Copilot', value: 'github-copilot' }
]
}
]);
ides = ideResponse.ides;
// Confirm the user really wants to skip IDE configuration when none is selected
@@ -390,13 +391,13 @@ async function promptInstallation() {
default: false
}
]);
if (!confirmNoIde) {
console.log(chalk.bold.red('\n🔄 Returning to IDE selection. Remember to use SPACEBAR to select IDEs!\n'));
continue; // Go back to IDE selection only
}
}
ideSelectionComplete = true;
}
@@ -407,7 +408,7 @@ async function promptInstallation() {
if (ides.includes('github-copilot')) {
console.log(chalk.cyan('\n🔧 GitHub Copilot Configuration'));
console.log(chalk.dim('BMad works best with specific VS Code settings for optimal agent experience.\n'));
const { configChoice } = await inquirer.prompt([
{
type: 'list',
@@ -430,7 +431,7 @@ async function promptInstallation() {
default: 'defaults'
}
]);
answers.githubCopilotConfig = { configChoice };
}

View File

@@ -28,6 +28,16 @@ ide-configurations:
# To use BMad agents in Claude Code:
# 1. Type /agent-name (e.g., "/dev", "/pm", "/architect")
# 2. Claude will switch to that agent's persona
crush:
name: Crush
rule-dir: .crush/commands/BMad/
format: multi-file
command-suffix: .md
instructions: |
# To use BMad agents in Crush:
# 1. Press CTRL + P and press TAB
# 2. Select agent or task
# 3. Crush will switch to that agent's persona / task
windsurf:
name: Windsurf
rule-dir: .windsurf/rules/
@@ -110,4 +120,4 @@ ide-configurations:
# 1. The installer creates a .qwen/bmad-method/ directory in your project.
# 2. It concatenates all agent files into a single QWEN.md file.
# 3. Simply mention the agent in your prompt (e.g., "As *dev, ...").
# 4. The Qwen Code CLI will automatically have the context for that agent.
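
Combined with the setupCrushForPackage code in the next file, the Crush entry added above (rule-dir .crush/commands/BMad/, multi-file format, .md command suffix) yields one command file per agent and per task. A representative layout, with illustrative agent and task names:

.crush/commands/BMad/
├── agents/
│   ├── dev.md
│   └── pm.md
└── tasks/
    └── create-doc.md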

View File

@@ -17,7 +17,7 @@ class IdeSetup extends BaseIdeSetup {
async loadIdeAgentConfig() {
if (this.ideAgentConfig) return this.ideAgentConfig;
try {
const configPath = path.join(__dirname, '..', 'config', 'ide-agent-config.yaml');
const configContent = await fs.readFile(configPath, 'utf8');
@@ -45,6 +45,8 @@ class IdeSetup extends BaseIdeSetup {
return this.setupCursor(installDir, selectedAgent);
case "claude-code":
return this.setupClaudeCode(installDir, selectedAgent);
case "crush":
return this.setupCrush(installDir, selectedAgent);
case "windsurf":
return this.setupWindsurf(installDir, selectedAgent);
case "trae":
@@ -88,6 +90,30 @@ class IdeSetup extends BaseIdeSetup {
return true;
}
async setupCrush(installDir, selectedAgent) {
// Setup bmad-core commands
const coreSlashPrefix = await this.getCoreSlashPrefix(installDir);
const coreAgents = selectedAgent ? [selectedAgent] : await this.getCoreAgentIds(installDir);
const coreTasks = await this.getCoreTaskIds(installDir);
await this.setupCrushForPackage(installDir, "core", coreSlashPrefix, coreAgents, coreTasks, ".bmad-core");
// Setup expansion pack commands
const expansionPacks = await this.getInstalledExpansionPacks(installDir);
for (const packInfo of expansionPacks) {
const packSlashPrefix = await this.getExpansionPackSlashPrefix(packInfo.path);
const packAgents = await this.getExpansionPackAgents(packInfo.path);
const packTasks = await this.getExpansionPackTasks(packInfo.path);
if (packAgents.length > 0 || packTasks.length > 0) {
// Use the actual directory name where the expansion pack is installed
const rootPath = path.relative(installDir, packInfo.path);
await this.setupCrushForPackage(installDir, packInfo.name, packSlashPrefix, packAgents, packTasks, rootPath);
}
}
return true;
}
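
Design note: setupCrush deliberately mirrors the setupClaudeCode flow below (same core slash-prefix lookup, same expansion-pack resolution), routing output through setupCrushForPackage into the .crush/commands tree so Crush stays behaviorally consistent with the Claude Code installer path.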
async setupClaudeCode(installDir, selectedAgent) {
// Setup bmad-core commands
const coreSlashPrefix = await this.getCoreSlashPrefix(installDir);
@@ -101,7 +127,7 @@ class IdeSetup extends BaseIdeSetup {
const packSlashPrefix = await this.getExpansionPackSlashPrefix(packInfo.path);
const packAgents = await this.getExpansionPackAgents(packInfo.path);
const packTasks = await this.getExpansionPackTasks(packInfo.path);
if (packAgents.length > 0 || packTasks.length > 0) {
// Use the actual directory name where the expansion pack is installed
const rootPath = path.relative(installDir, packInfo.path);
@@ -138,13 +164,13 @@ class IdeSetup extends BaseIdeSetup {
// For core, use the normal search
agentPath = await this.findAgentPath(agentId, installDir);
}
const commandPath = path.join(agentsDir, `${agentId}.md`);
if (agentPath) {
// Create command file with agent content
let agentContent = await fileManager.readFile(agentPath);
// Replace {root} placeholder with the appropriate root path for this context
agentContent = agentContent.replace(/{root}/g, rootPath);
@@ -175,13 +201,13 @@ class IdeSetup extends BaseIdeSetup {
// For core, use the normal search
taskPath = await this.findTaskPath(taskId, installDir);
}
const commandPath = path.join(tasksDir, `${taskId}.md`);
if (taskPath) {
// Create command file with task content
let taskContent = await fileManager.readFile(taskPath);
// Replace {root} placeholder with the appropriate root path for this context
taskContent = taskContent.replace(/{root}/g, rootPath);
@@ -200,6 +226,94 @@ class IdeSetup extends BaseIdeSetup {
console.log(chalk.dim(` - Tasks in: ${tasksDir}`));
}
async setupCrushForPackage(installDir, packageName, slashPrefix, agentIds, taskIds, rootPath) {
const commandsBaseDir = path.join(installDir, ".crush", "commands", slashPrefix);
const agentsDir = path.join(commandsBaseDir, "agents");
const tasksDir = path.join(commandsBaseDir, "tasks");
// Ensure directories exist
await fileManager.ensureDirectory(agentsDir);
await fileManager.ensureDirectory(tasksDir);
// Setup agents
for (const agentId of agentIds) {
// Find the agent file - for expansion packs, prefer the expansion pack version
let agentPath;
if (packageName !== "core") {
// For expansion packs, first try to find the agent in the expansion pack directory
const expansionPackPath = path.join(installDir, rootPath, "agents", `${agentId}.md`);
if (await fileManager.pathExists(expansionPackPath)) {
agentPath = expansionPackPath;
} else {
// Fall back to core if not found in expansion pack
agentPath = await this.findAgentPath(agentId, installDir);
}
} else {
// For core, use the normal search
agentPath = await this.findAgentPath(agentId, installDir);
}
const commandPath = path.join(agentsDir, `${agentId}.md`);
if (agentPath) {
// Create command file with agent content
let agentContent = await fileManager.readFile(agentPath);
// Replace {root} placeholder with the appropriate root path for this context
agentContent = agentContent.replace(/{root}/g, rootPath);
// Add command header
let commandContent = `# /${agentId} Command\n\n`;
commandContent += `When this command is used, adopt the following agent persona:\n\n`;
commandContent += agentContent;
await fileManager.writeFile(commandPath, commandContent);
console.log(chalk.green(`✓ Created agent command: /${agentId}`));
}
}
// Setup tasks
for (const taskId of taskIds) {
// Find the task file - for expansion packs, prefer the expansion pack version
let taskPath;
if (packageName !== "core") {
// For expansion packs, first try to find the task in the expansion pack directory
const expansionPackPath = path.join(installDir, rootPath, "tasks", `${taskId}.md`);
if (await fileManager.pathExists(expansionPackPath)) {
taskPath = expansionPackPath;
} else {
// Fall back to core if not found in expansion pack
taskPath = await this.findTaskPath(taskId, installDir);
}
} else {
// For core, use the normal search
taskPath = await this.findTaskPath(taskId, installDir);
}
const commandPath = path.join(tasksDir, `${taskId}.md`);
if (taskPath) {
// Create command file with task content
let taskContent = await fileManager.readFile(taskPath);
// Replace {root} placeholder with the appropriate root path for this context
taskContent = taskContent.replace(/{root}/g, rootPath);
// Add command header
let commandContent = `# /${taskId} Task\n\n`;
commandContent += `When this command is used, execute the following task:\n\n`;
commandContent += taskContent;
await fileManager.writeFile(commandPath, commandContent);
console.log(chalk.green(`✓ Created task command: /${taskId}`));
}
}
console.log(chalk.green(`\n✓ Created Crush commands for ${packageName} in ${commandsBaseDir}`));
console.log(chalk.dim(` - Agents in: ${agentsDir}`));
console.log(chalk.dim(` - Tasks in: ${tasksDir}`));
}
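
For a hypothetical agent id dev under the default BMad slash prefix, the agent loop above writes .crush/commands/BMad/agents/dev.md beginning:

# /dev Command

When this command is used, adopt the following agent persona:

followed by the agent's own markdown with every {root} placeholder rewritten to the package's install path (e.g. .bmad-core). The task loop mirrors this with a "# /<taskId> Task" header and the "execute the following task" lead-in.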
async setupWindsurf(installDir, selectedAgent) {
const windsurfRulesDir = path.join(installDir, ".windsurf", "rules");
const agents = selectedAgent ? [selectedAgent] : await this.getAllAgentIds(installDir);
@@ -255,17 +369,17 @@ class IdeSetup extends BaseIdeSetup {
async setupTrae(installDir, selectedAgent) {
const traeRulesDir = path.join(installDir, ".trae", "rules");
const agents = selectedAgent ? [selectedAgent] : await this.getAllAgentIds(installDir);
await fileManager.ensureDirectory(traeRulesDir);
for (const agentId of agents) {
// Find the agent file
const agentPath = await this.findAgentPath(agentId, installDir);
if (agentPath) {
const agentContent = await fileManager.readFile(agentPath);
const mdPath = path.join(traeRulesDir, `${agentId}.md`);
// Create MD content (similar to Cursor but without frontmatter)
let mdContent = `# ${agentId.toUpperCase()} Agent Rule\n\n`;
mdContent += `This rule is triggered when the user types \`@${agentId}\` and activates the ${await this.getAgentTitle(
@@ -294,7 +408,7 @@ class IdeSetup extends BaseIdeSetup {
agentId,
installDir
)} persona and follow all instructions defined in the YAML configuration above.\n`;
await fileManager.writeFile(mdPath, mdContent);
console.log(chalk.green(`✓ Created rule: ${agentId}.md`));
}
@@ -307,38 +421,38 @@ class IdeSetup extends BaseIdeSetup {
path.join(installDir, ".bmad-core", "agents", `${agentId}.md`),
path.join(installDir, "agents", `${agentId}.md`)
];
// Also check expansion pack directories
const glob = require("glob");
const expansionDirs = glob.sync(".*/agents", { cwd: installDir });
for (const expDir of expansionDirs) {
possiblePaths.push(path.join(installDir, expDir, `${agentId}.md`));
}
for (const agentPath of possiblePaths) {
if (await fileManager.pathExists(agentPath)) {
return agentPath;
}
}
return null;
}
async getAllAgentIds(installDir) {
const glob = require("glob");
const allAgentIds = [];
// Check core agents in .bmad-core or root
let agentsDir = path.join(installDir, ".bmad-core", "agents");
if (!(await fileManager.pathExists(agentsDir))) {
agentsDir = path.join(installDir, "agents");
}
if (await fileManager.pathExists(agentsDir)) {
const agentFiles = glob.sync("*.md", { cwd: agentsDir });
allAgentIds.push(...agentFiles.map((file) => path.basename(file, ".md")));
}
// Also check for expansion pack agents in dot folders
const expansionDirs = glob.sync(".*/agents", { cwd: installDir });
for (const expDir of expansionDirs) {
@@ -346,51 +460,51 @@ class IdeSetup extends BaseIdeSetup {
const expAgentFiles = glob.sync("*.md", { cwd: fullExpDir });
allAgentIds.push(...expAgentFiles.map((file) => path.basename(file, ".md")));
}
// Remove duplicates
return [...new Set(allAgentIds)];
}
async getCoreAgentIds(installDir) {
const allAgentIds = [];
// Check core agents in .bmad-core or root only
let agentsDir = path.join(installDir, ".bmad-core", "agents");
if (!(await fileManager.pathExists(agentsDir))) {
agentsDir = path.join(installDir, "bmad-core", "agents");
}
if (await fileManager.pathExists(agentsDir)) {
const glob = require("glob");
const agentFiles = glob.sync("*.md", { cwd: agentsDir });
allAgentIds.push(...agentFiles.map((file) => path.basename(file, ".md")));
}
return [...new Set(allAgentIds)];
}
async getCoreTaskIds(installDir) {
const glob = require("glob"); // function scope: the common-tasks block below also uses glob
const allTaskIds = [];
// Check core tasks in .bmad-core or root only
let tasksDir = path.join(installDir, ".bmad-core", "tasks");
if (!(await fileManager.pathExists(tasksDir))) {
tasksDir = path.join(installDir, "bmad-core", "tasks");
}
if (await fileManager.pathExists(tasksDir)) {
const taskFiles = glob.sync("*.md", { cwd: tasksDir });
allTaskIds.push(...taskFiles.map((file) => path.basename(file, ".md")));
}
// Check common tasks
const commonTasksDir = path.join(installDir, "common", "tasks");
if (await fileManager.pathExists(commonTasksDir)) {
const commonTaskFiles = glob.sync("*.md", { cwd: commonTasksDir });
allTaskIds.push(...commonTaskFiles.map((file) => path.basename(file, ".md")));
}
return [...new Set(allTaskIds)];
}
@@ -400,20 +514,20 @@ class IdeSetup extends BaseIdeSetup {
path.join(installDir, ".bmad-core", "agents", `${agentId}.md`),
path.join(installDir, "agents", `${agentId}.md`)
];
// Also check expansion pack directories
const glob = require("glob");
const expansionDirs = glob.sync(".*/agents", { cwd: installDir });
for (const expDir of expansionDirs) {
possiblePaths.push(path.join(installDir, expDir, `${agentId}.md`));
}
for (const agentPath of possiblePaths) {
if (await fileManager.pathExists(agentPath)) {
try {
const agentContent = await fileManager.readFile(agentPath);
const yamlMatch = agentContent.match(/```ya?ml\r?\n([\s\S]*?)```/);
if (yamlMatch) {
const yaml = yamlMatch[1];
const titleMatch = yaml.match(/title:\s*(.+)/);
@@ -426,9 +540,9 @@ class IdeSetup extends BaseIdeSetup {
}
}
}
// Fallback to formatted agent ID
return agentId.split('-').map(word =>
word.charAt(0).toUpperCase() + word.slice(1)
).join(' ');
}
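
The final fallback title-cases the raw id segment by segment; for a hypothetical id:

'qa-engineer'
  .split('-')
  .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
  .join(' ');
// => 'Qa Engineer' (acronyms are not special-cased, so not 'QA Engineer')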
@@ -436,25 +550,25 @@ class IdeSetup extends BaseIdeSetup {
async getAllTaskIds(installDir) {
const glob = require("glob");
const allTaskIds = [];
// Check core tasks in .bmad-core or root
let tasksDir = path.join(installDir, ".bmad-core", "tasks");
if (!(await fileManager.pathExists(tasksDir))) {
tasksDir = path.join(installDir, "bmad-core", "tasks");
}
if (await fileManager.pathExists(tasksDir)) {
const taskFiles = glob.sync("*.md", { cwd: tasksDir });
allTaskIds.push(...taskFiles.map((file) => path.basename(file, ".md")));
}
// Check common tasks
const commonTasksDir = path.join(installDir, "common", "tasks");
if (await fileManager.pathExists(commonTasksDir)) {
const commonTaskFiles = glob.sync("*.md", { cwd: commonTasksDir });
allTaskIds.push(...commonTaskFiles.map((file) => path.basename(file, ".md")));
}
// Also check for expansion pack tasks in dot folders
const expansionDirs = glob.sync(".*/tasks", { cwd: installDir });
for (const expDir of expansionDirs) {
@@ -462,7 +576,7 @@ class IdeSetup extends BaseIdeSetup {
const expTaskFiles = glob.sync("*.md", { cwd: fullExpDir });
allTaskIds.push(...expTaskFiles.map((file) => path.basename(file, ".md")));
}
// Check expansion-packs folder tasks
const expansionPacksDir = path.join(installDir, "expansion-packs");
if (await fileManager.pathExists(expansionPacksDir)) {
@@ -473,7 +587,7 @@ class IdeSetup extends BaseIdeSetup {
allTaskIds.push(...expTaskFiles.map((file) => path.basename(file, ".md")));
}
}
// Remove duplicates
return [...new Set(allTaskIds)];
}
@@ -485,16 +599,16 @@ class IdeSetup extends BaseIdeSetup {
path.join(installDir, "bmad-core", "tasks", `${taskId}.md`),
path.join(installDir, "common", "tasks", `${taskId}.md`)
];
// Also check expansion pack directories
const glob = require("glob");
// Check dot folder expansion packs
const expansionDirs = glob.sync(".*/tasks", { cwd: installDir });
for (const expDir of expansionDirs) {
possiblePaths.push(path.join(installDir, expDir, `${taskId}.md`));
}
// Check expansion-packs folder
const expansionPacksDir = path.join(installDir, "expansion-packs");
if (await fileManager.pathExists(expansionPacksDir)) {
@@ -503,13 +617,13 @@ class IdeSetup extends BaseIdeSetup {
possiblePaths.push(path.join(expansionPacksDir, expDir, `${taskId}.md`));
}
}
for (const taskPath of possiblePaths) {
if (await fileManager.pathExists(taskPath)) {
return taskPath;
}
}
return null;
}
@@ -526,7 +640,7 @@ class IdeSetup extends BaseIdeSetup {
}
return "BMad"; // fallback
}
const configContent = await fileManager.readFile(coreConfigPath);
const config = yaml.load(configContent);
return config.slashPrefix || "BMad";
@@ -538,11 +652,11 @@ class IdeSetup extends BaseIdeSetup {
async getInstalledExpansionPacks(installDir) {
const expansionPacks = [];
// Check for dot-prefixed expansion packs in install directory
const glob = require("glob");
const dotExpansions = glob.sync(".bmad-*", { cwd: installDir });
for (const dotExpansion of dotExpansions) {
if (dotExpansion !== ".bmad-core") {
const packPath = path.join(installDir, dotExpansion);
@@ -553,15 +667,15 @@ class IdeSetup extends BaseIdeSetup {
});
}
}
// Check for expansion-packs directory style
const expansionPacksDir = path.join(installDir, "expansion-packs");
if (await fileManager.pathExists(expansionPacksDir)) {
const packDirs = glob.sync("*", { cwd: expansionPacksDir });
for (const packDir of packDirs) {
const packPath = path.join(expansionPacksDir, packDir);
if ((await fileManager.pathExists(packPath)) &&
(await fileManager.pathExists(path.join(packPath, "config.yaml")))) {
expansionPacks.push({
name: packDir,
@@ -570,7 +684,7 @@ class IdeSetup extends BaseIdeSetup {
}
}
}
return expansionPacks;
}
@@ -585,7 +699,7 @@ class IdeSetup extends BaseIdeSetup {
} catch (error) {
console.warn(`Failed to read expansion pack slashPrefix from ${packPath}: ${error.message}`);
}
return path.basename(packPath); // fallback to directory name
}
@@ -594,7 +708,7 @@ class IdeSetup extends BaseIdeSetup {
if (!(await fileManager.pathExists(agentsDir))) {
return [];
}
try {
const glob = require("glob");
const agentFiles = glob.sync("*.md", { cwd: agentsDir });
@@ -610,7 +724,7 @@ class IdeSetup extends BaseIdeSetup {
if (!(await fileManager.pathExists(tasksDir))) {
return [];
}
try {
const glob = require("glob");
const taskFiles = glob.sync("*.md", { cwd: tasksDir });
@@ -688,7 +802,7 @@ class IdeSetup extends BaseIdeSetup {
newModesContent += ` - slug: ${slug}\n`;
newModesContent += ` name: '${icon} ${title}'\n`;
if (permissions) {
newModesContent += ` description: '${permissions.description}'\n`;
}
newModesContent += ` roleDefinition: ${roleDefinition}\n`;
newModesContent += ` whenToUse: ${whenToUse}\n`;
@@ -730,7 +844,7 @@ class IdeSetup extends BaseIdeSetup {
return true;
}
async setupKilocode(installDir, selectedAgent) {
const filePath = path.join(installDir, ".kilocodemodes");
const agents = selectedAgent ? [selectedAgent] : await this.getAllAgentIds(installDir);
@@ -788,7 +902,7 @@ class IdeSetup extends BaseIdeSetup {
newContent += ` - slug: ${slug}\n`;
newContent += ` name: '${icon} ${title}'\n`;
if (agentPermission) {
newContent += ` description: '${agentPermission.description}'\n`;
}
newContent += ` roleDefinition: ${roleDefinition}\n`;
@@ -821,7 +935,7 @@ class IdeSetup extends BaseIdeSetup {
return true;
}
async setupCline(installDir, selectedAgent) {
const clineRulesDir = path.join(installDir, ".clinerules");
const agents = selectedAgent ? [selectedAgent] : await this.getAllAgentIds(installDir);
@@ -891,7 +1005,7 @@ class IdeSetup extends BaseIdeSetup {
const settingsContent = await fileManager.readFile(settingsPath);
const settings = JSON.parse(settingsContent);
let updated = false;
// Handle contextFileName property
if (settings.contextFileName && Array.isArray(settings.contextFileName)) {
const originalLength = settings.contextFileName.length;
@@ -902,7 +1016,7 @@ class IdeSetup extends BaseIdeSetup {
updated = true;
}
}
if (updated) {
await fileManager.writeFile(
settingsPath,
@@ -935,7 +1049,7 @@ class IdeSetup extends BaseIdeSetup {
if (agentPath) {
const agentContent = await fileManager.readFile(agentPath);
// Create properly formatted agent rule content (similar to trae)
let agentRuleContent = `# ${agentId.toUpperCase()} Agent Rule\n\n`;
agentRuleContent += `This rule is triggered when the user types \`*${agentId}\` and activates the ${await this.getAgentTitle(
@@ -964,7 +1078,7 @@ class IdeSetup extends BaseIdeSetup {
agentId,
installDir
)} persona and follow all instructions defined in the YAML configuration above.\n`;
// Add to concatenated content with separator
concatenatedContent += agentRuleContent + "\n\n---\n\n";
console.log(chalk.green(`✓ Added context for @${agentId}`));
@@ -991,7 +1105,7 @@ class IdeSetup extends BaseIdeSetup {
const settingsContent = await fileManager.readFile(settingsPath);
const settings = JSON.parse(settingsContent);
let updated = false;
// Handle contextFileName property
if (settings.contextFileName && Array.isArray(settings.contextFileName)) {
const originalLength = settings.contextFileName.length;
@@ -1002,7 +1116,7 @@ class IdeSetup extends BaseIdeSetup {
updated = true;
}
}
if (updated) {
await fileManager.writeFile(
settingsPath,
@@ -1035,7 +1149,7 @@ class IdeSetup extends BaseIdeSetup {
if (agentPath) {
const agentContent = await fileManager.readFile(agentPath);
// Create properly formatted agent rule content (similar to gemini)
let agentRuleContent = `# ${agentId.toUpperCase()} Agent Rule\n\n`;
agentRuleContent += `This rule is triggered when the user types \`*${agentId}\` and activates the ${await this.getAgentTitle(
@@ -1064,7 +1178,7 @@ class IdeSetup extends BaseIdeSetup {
agentId,
installDir
)} persona and follow all instructions defined in the YAML configuration above.\n`;
// Add to concatenated content with separator
concatenatedContent += agentRuleContent + "\n\n---\n\n";
console.log(chalk.green(`✓ Added context for *${agentId}`));
@@ -1082,10 +1196,10 @@ class IdeSetup extends BaseIdeSetup {
async setupGitHubCopilot(installDir, selectedAgent, spinner = null, preConfiguredSettings = null) {
// Configure VS Code workspace settings first to avoid UI conflicts with loading spinners
await this.configureVsCodeSettings(installDir, spinner, preConfiguredSettings);
const chatmodesDir = path.join(installDir, ".github", "chatmodes");
const agents = selectedAgent ? [selectedAgent] : await this.getAllAgentIds(installDir);
await fileManager.ensureDirectory(chatmodesDir);
for (const agentId of agents) {
@@ -1097,7 +1211,7 @@ class IdeSetup extends BaseIdeSetup {
// Create chat mode file with agent content
const agentContent = await fileManager.readFile(agentPath);
const agentTitle = await this.getAgentTitle(agentId, installDir);
// Extract whenToUse for the description
const yamlMatch = agentContent.match(/```ya?ml\r?\n([\s\S]*?)```/);
let description = `Activates the ${agentTitle} agent persona.`;
@@ -1107,7 +1221,7 @@ class IdeSetup extends BaseIdeSetup {
description = whenToUseMatch[1];
}
}
let chatmodeContent = `---
description: "${description.replace(/"/g, '\\"')}"
tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems', 'usages', 'editFiles', 'runCommands', 'runTasks', 'runTests', 'search', 'searchResults', 'terminalLastCommand', 'terminalSelection', 'testFailure']
@@ -1130,9 +1244,9 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
async configureVsCodeSettings(installDir, spinner, preConfiguredSettings = null) {
const vscodeDir = path.join(installDir, ".vscode");
const settingsPath = path.join(vscodeDir, "settings.json");
await fileManager.ensureDirectory(vscodeDir);
// Read existing settings if they exist
let existingSettings = {};
if (await fileManager.pathExists(settingsPath)) {
@@ -1145,7 +1259,7 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
existingSettings = {};
}
}
// Use pre-configured settings if provided, otherwise prompt
let configChoice;
if (preConfiguredSettings && preConfiguredSettings.configChoice) {
@@ -1157,7 +1271,7 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
console.log(chalk.blue("🔧 Github Copilot Agent Settings Configuration"));
console.log(chalk.dim("BMad works best with specific VS Code settings for optimal agent experience."));
console.log(''); // Add extra spacing
const response = await inquirer.prompt([
{
type: 'list',
@@ -1182,9 +1296,9 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
]);
configChoice = response.configChoice;
}
let bmadSettings = {};
if (configChoice === 'skip') {
console.log(chalk.yellow("⚠️ Skipping VS Code settings configuration."));
console.log(chalk.dim("You can manually configure these settings in .vscode/settings.json:"));
@@ -1196,7 +1310,7 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
console.log(chalk.dim(" • chat.tools.autoApprove: false"));
return true;
}
if (configChoice === 'defaults') {
// Use recommended defaults
bmadSettings = {
@@ -1211,14 +1325,14 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
} else {
// Manual configuration
console.log(chalk.blue("\n📋 Let's configure each setting for your preferences:"));
// Pause spinner during manual configuration prompts
let spinnerWasActive = false;
if (spinner && spinner.isSpinning) {
spinner.stop();
spinnerWasActive = true;
}
const manualSettings = await inquirer.prompt([
{
type: 'input',
@@ -1263,7 +1377,7 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
if (spinner && spinnerWasActive) {
spinner.start();
}
bmadSettings = {
"chat.agent.enabled": true, // Always enabled - required for BMad agents
"chat.agent.maxRequests": parseInt(manualSettings.maxRequests),
@@ -1272,16 +1386,16 @@ tools: ['changes', 'codebase', 'fetch', 'findTestFiles', 'githubRepo', 'problems
"github.copilot.chat.agent.autoFix": manualSettings.autoFix,
"chat.tools.autoApprove": manualSettings.autoApprove
};
console.log(chalk.green("✓ Custom settings configured"));
}
// Merge settings (existing settings take precedence to avoid overriding user preferences)
const mergedSettings = { ...bmadSettings, ...existingSettings };
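// Worked example with hypothetical values: if bmadSettings sets
// "chat.agent.maxRequests": 15 but existingSettings already contains
// "chat.agent.maxRequests": 50, the merge keeps 50, because
// existingSettings is spread last and therefore wins.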
// Write the updated settings
await fileManager.writeFile(settingsPath, JSON.stringify(mergedSettings, null, 2));
console.log(chalk.green("✓ VS Code workspace settings configured successfully"));
console.log(chalk.dim(" Settings written to .vscode/settings.json:"));
Object.entries(bmadSettings).forEach(([key, value]) => {

View File

@@ -1,6 +1,6 @@
{
"name": "bmad-method",
"version": "4.37.0-beta.6",
"version": "5.0.0",
"description": "BMad Method installer - AI-powered Agile development framework",
"main": "lib/installer.js",
"bin": {