chore: standardize ESLint/Prettier formatting across codebase
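The new style is consistent with a Prettier and ESLint (eslint-plugin-unicorn) setup along the following lines. This is a sketch inferred from the reformatted output below, not a config taken from the repository: single quotes and a roughly 100-character print width follow from the rewrapped lines, and the mechanical renames (str to string_, ext to extension, i to index) together with the replace-to-replaceAll and catch-binding changes are characteristic of the named unicorn rules.

.prettierrc (hypothetical, inferred from this diff):

{
  "singleQuote": true,
  "printWidth": 100,
  "trailingComma": "all"
}

ESLint rules fragment (hypothetical, inferred from this diff):

{
  "plugins": ["unicorn"],
  "rules": {
    "unicorn/prevent-abbreviations": "error",
    "unicorn/prefer-string-replace-all": "error",
    "unicorn/prefer-optional-catch-binding": "error"
  }
}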
@@ -1,7 +1,7 @@
-const fs = require("fs-extra");
-const path = require("node:path");
-const os = require("node:os");
-const { isBinaryFile } = require("./binary.js");
+const fs = require('fs-extra');
+const path = require('node:path');
+const os = require('node:os');
+const { isBinaryFile } = require('./binary.js');
 
 /**
  * Aggregate file contents with bounded concurrency.
@@ -22,7 +22,7 @@ async function aggregateFileContents(files, rootDir, spinner = null) {
   // Automatic concurrency selection based on CPU count and workload size.
   // - Base on 2x logical CPUs, clamped to [2, 64]
   // - For very small workloads, avoid excessive parallelism
-  const cpuCount = (os.cpus && Array.isArray(os.cpus()) ? os.cpus().length : (os.cpus?.length || 4));
+  const cpuCount = os.cpus && Array.isArray(os.cpus()) ? os.cpus().length : os.cpus?.length || 4;
   let concurrency = Math.min(64, Math.max(2, (Number(cpuCount) || 4) * 2));
   if (files.length > 0 && files.length < concurrency) {
     concurrency = Math.max(1, Math.min(concurrency, Math.ceil(files.length / 2)));
@@ -37,16 +37,16 @@ async function aggregateFileContents(files, rootDir, spinner = null) {
 
       const binary = await isBinaryFile(filePath);
       if (binary) {
-        const size = (await fs.stat(filePath)).size;
+        const { size } = await fs.stat(filePath);
         results.binaryFiles.push({ path: relativePath, absolutePath: filePath, size });
       } else {
-        const content = await fs.readFile(filePath, "utf8");
+        const content = await fs.readFile(filePath, 'utf8');
         results.textFiles.push({
           path: relativePath,
           absolutePath: filePath,
           content,
           size: content.length,
-          lines: content.split("\n").length,
+          lines: content.split('\n').length,
         });
       }
     } catch (error) {
@@ -63,8 +63,8 @@ async function aggregateFileContents(files, rootDir, spinner = null) {
     }
   }
 
-  for (let i = 0; i < files.length; i += concurrency) {
-    const slice = files.slice(i, i + concurrency);
+  for (let index = 0; index < files.length; index += concurrency) {
+    const slice = files.slice(index, index + concurrency);
     await Promise.all(slice.map(processOne));
   }
 
@@ -1,6 +1,6 @@
-const fsp = require("node:fs/promises");
-const path = require("node:path");
-const { Buffer } = require("node:buffer");
+const fsp = require('node:fs/promises');
+const path = require('node:path');
+const { Buffer } = require('node:buffer');
 
 /**
  * Efficiently determine if a file is binary without reading the whole file.
@@ -13,25 +13,54 @@ async function isBinaryFile(filePath) {
   try {
     const stats = await fsp.stat(filePath);
     if (stats.isDirectory()) {
-      throw new Error("EISDIR: illegal operation on a directory");
+      throw new Error('EISDIR: illegal operation on a directory');
     }
 
     const binaryExtensions = new Set([
-      ".jpg", ".jpeg", ".png", ".gif", ".bmp", ".ico", ".svg",
-      ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx",
-      ".zip", ".tar", ".gz", ".rar", ".7z",
-      ".exe", ".dll", ".so", ".dylib",
-      ".mp3", ".mp4", ".avi", ".mov", ".wav",
-      ".ttf", ".otf", ".woff", ".woff2",
-      ".bin", ".dat", ".db", ".sqlite",
+      '.jpg',
+      '.jpeg',
+      '.png',
+      '.gif',
+      '.bmp',
+      '.ico',
+      '.svg',
+      '.pdf',
+      '.doc',
+      '.docx',
+      '.xls',
+      '.xlsx',
+      '.ppt',
+      '.pptx',
+      '.zip',
+      '.tar',
+      '.gz',
+      '.rar',
+      '.7z',
+      '.exe',
+      '.dll',
+      '.so',
+      '.dylib',
+      '.mp3',
+      '.mp4',
+      '.avi',
+      '.mov',
+      '.wav',
+      '.ttf',
+      '.otf',
+      '.woff',
+      '.woff2',
+      '.bin',
+      '.dat',
+      '.db',
+      '.sqlite',
     ]);
 
-    const ext = path.extname(filePath).toLowerCase();
-    if (binaryExtensions.has(ext)) return true;
+    const extension = path.extname(filePath).toLowerCase();
+    if (binaryExtensions.has(extension)) return true;
     if (stats.size === 0) return false;
 
     const sampleSize = Math.min(4096, stats.size);
-    const fd = await fsp.open(filePath, "r");
+    const fd = await fsp.open(filePath, 'r');
     try {
       const buffer = Buffer.allocUnsafe(sampleSize);
       const { bytesRead } = await fd.read(buffer, 0, sampleSize, 0);
@@ -41,9 +70,7 @@ async function isBinaryFile(filePath) {
       await fd.close();
     }
   } catch (error) {
-    console.warn(
-      `Warning: Could not determine if file is binary: ${filePath} - ${error.message}`,
-    );
+    console.warn(`Warning: Could not determine if file is binary: ${filePath} - ${error.message}`);
     return false;
   }
 }
@@ -1,18 +1,21 @@
-const path = require("node:path");
-const { execFile } = require("node:child_process");
-const { promisify } = require("node:util");
-const { glob } = require("glob");
-const { loadIgnore } = require("./ignoreRules.js");
+const path = require('node:path');
+const { execFile } = require('node:child_process');
+const { promisify } = require('node:util');
+const { glob } = require('glob');
+const { loadIgnore } = require('./ignoreRules.js');
 
 const pExecFile = promisify(execFile);
 
 async function isGitRepo(rootDir) {
   try {
-    const { stdout } = await pExecFile("git", [
-      "rev-parse",
-      "--is-inside-work-tree",
-    ], { cwd: rootDir });
-    return String(stdout || "").toString().trim() === "true";
+    const { stdout } = await pExecFile('git', ['rev-parse', '--is-inside-work-tree'], {
+      cwd: rootDir,
+    });
+    return (
+      String(stdout || '')
+        .toString()
+        .trim() === 'true'
+    );
   } catch {
     return false;
   }
@@ -20,12 +23,10 @@ async function isGitRepo(rootDir) {
 
 async function gitListFiles(rootDir) {
   try {
-    const { stdout } = await pExecFile("git", [
-      "ls-files",
-      "-co",
-      "--exclude-standard",
-    ], { cwd: rootDir });
-    return String(stdout || "")
+    const { stdout } = await pExecFile('git', ['ls-files', '-co', '--exclude-standard'], {
+      cwd: rootDir,
+    });
+    return String(stdout || '')
       .split(/\r?\n/)
       .map((s) => s.trim())
      .filter(Boolean);
@@ -48,14 +49,14 @@ async function discoverFiles(rootDir, options = {}) {
   const { filter } = await loadIgnore(rootDir);
 
   // Try git first
-  if (preferGit && await isGitRepo(rootDir)) {
+  if (preferGit && (await isGitRepo(rootDir))) {
     const relFiles = await gitListFiles(rootDir);
     const filteredRel = relFiles.filter((p) => filter(p));
     return filteredRel.map((p) => path.resolve(rootDir, p));
   }
 
   // Glob fallback
-  const globbed = await glob("**/*", {
+  const globbed = await glob('**/*', {
     cwd: rootDir,
     nodir: true,
     dot: true,
@@ -1,8 +1,8 @@
-const path = require("node:path");
-const discovery = require("./discovery.js");
-const ignoreRules = require("./ignoreRules.js");
-const { isBinaryFile } = require("./binary.js");
-const { aggregateFileContents } = require("./aggregate.js");
+const path = require('node:path');
+const discovery = require('./discovery.js');
+const ignoreRules = require('./ignoreRules.js');
+const { isBinaryFile } = require('./binary.js');
+const { aggregateFileContents } = require('./aggregate.js');
 
 // Backward-compatible signature; delegate to central loader
 async function parseGitignore(gitignorePath) {
@@ -14,7 +14,7 @@ async function discoverFiles(rootDir) {
     // Delegate to discovery module which respects .gitignore and defaults
     return await discovery.discoverFiles(rootDir, { preferGit: true });
   } catch (error) {
-    console.error("Error discovering files:", error.message);
+    console.error('Error discovering files:', error.message);
     return [];
   }
 }
@@ -1,147 +1,147 @@
-const fs = require("fs-extra");
-const path = require("node:path");
-const ignore = require("ignore");
+const fs = require('fs-extra');
+const path = require('node:path');
+const ignore = require('ignore');
 
 // Central default ignore patterns for discovery and filtering.
 // These complement .gitignore and are applied regardless of VCS presence.
 const DEFAULT_PATTERNS = [
   // Project/VCS
-  "**/.bmad-core/**",
-  "**/.git/**",
-  "**/.svn/**",
-  "**/.hg/**",
-  "**/.bzr/**",
+  '**/.bmad-core/**',
+  '**/.git/**',
+  '**/.svn/**',
+  '**/.hg/**',
+  '**/.bzr/**',
   // Package/build outputs
-  "**/node_modules/**",
-  "**/bower_components/**",
-  "**/vendor/**",
-  "**/packages/**",
-  "**/build/**",
-  "**/dist/**",
-  "**/out/**",
-  "**/target/**",
-  "**/bin/**",
-  "**/obj/**",
-  "**/release/**",
-  "**/debug/**",
+  '**/node_modules/**',
+  '**/bower_components/**',
+  '**/vendor/**',
+  '**/packages/**',
+  '**/build/**',
+  '**/dist/**',
+  '**/out/**',
+  '**/target/**',
+  '**/bin/**',
+  '**/obj/**',
+  '**/release/**',
+  '**/debug/**',
   // Environments
-  "**/.venv/**",
-  "**/venv/**",
-  "**/.virtualenv/**",
-  "**/virtualenv/**",
-  "**/env/**",
+  '**/.venv/**',
+  '**/venv/**',
+  '**/.virtualenv/**',
+  '**/virtualenv/**',
+  '**/env/**',
   // Logs & coverage
-  "**/*.log",
-  "**/npm-debug.log*",
-  "**/yarn-debug.log*",
-  "**/yarn-error.log*",
-  "**/lerna-debug.log*",
-  "**/coverage/**",
-  "**/.nyc_output/**",
-  "**/.coverage/**",
-  "**/test-results/**",
+  '**/*.log',
+  '**/npm-debug.log*',
+  '**/yarn-debug.log*',
+  '**/yarn-error.log*',
+  '**/lerna-debug.log*',
+  '**/coverage/**',
+  '**/.nyc_output/**',
+  '**/.coverage/**',
+  '**/test-results/**',
   // Caches & temp
-  "**/.cache/**",
-  "**/.tmp/**",
-  "**/.temp/**",
-  "**/tmp/**",
-  "**/temp/**",
-  "**/.sass-cache/**",
+  '**/.cache/**',
+  '**/.tmp/**',
+  '**/.temp/**',
+  '**/tmp/**',
+  '**/temp/**',
+  '**/.sass-cache/**',
   // IDE/editor
-  "**/.vscode/**",
-  "**/.idea/**",
-  "**/*.swp",
-  "**/*.swo",
-  "**/*~",
-  "**/.project",
-  "**/.classpath",
-  "**/.settings/**",
-  "**/*.sublime-project",
-  "**/*.sublime-workspace",
+  '**/.vscode/**',
+  '**/.idea/**',
+  '**/*.swp',
+  '**/*.swo',
+  '**/*~',
+  '**/.project',
+  '**/.classpath',
+  '**/.settings/**',
+  '**/*.sublime-project',
+  '**/*.sublime-workspace',
   // Lockfiles
-  "**/package-lock.json",
-  "**/yarn.lock",
-  "**/pnpm-lock.yaml",
-  "**/composer.lock",
-  "**/Pipfile.lock",
+  '**/package-lock.json',
+  '**/yarn.lock',
+  '**/pnpm-lock.yaml',
+  '**/composer.lock',
+  '**/Pipfile.lock',
   // Python/Java/compiled artifacts
-  "**/*.pyc",
-  "**/*.pyo",
-  "**/*.pyd",
-  "**/__pycache__/**",
-  "**/*.class",
-  "**/*.jar",
-  "**/*.war",
-  "**/*.ear",
-  "**/*.o",
-  "**/*.so",
-  "**/*.dll",
-  "**/*.exe",
+  '**/*.pyc',
+  '**/*.pyo',
+  '**/*.pyd',
+  '**/__pycache__/**',
+  '**/*.class',
+  '**/*.jar',
+  '**/*.war',
+  '**/*.ear',
+  '**/*.o',
+  '**/*.so',
+  '**/*.dll',
+  '**/*.exe',
   // System junk
-  "**/lib64/**",
-  "**/.venv/lib64/**",
-  "**/venv/lib64/**",
-  "**/_site/**",
-  "**/.jekyll-cache/**",
-  "**/.jekyll-metadata",
-  "**/.DS_Store",
-  "**/.DS_Store?",
-  "**/._*",
-  "**/.Spotlight-V100/**",
-  "**/.Trashes/**",
-  "**/ehthumbs.db",
-  "**/Thumbs.db",
-  "**/desktop.ini",
+  '**/lib64/**',
+  '**/.venv/lib64/**',
+  '**/venv/lib64/**',
+  '**/_site/**',
+  '**/.jekyll-cache/**',
+  '**/.jekyll-metadata',
+  '**/.DS_Store',
+  '**/.DS_Store?',
+  '**/._*',
+  '**/.Spotlight-V100/**',
+  '**/.Trashes/**',
+  '**/ehthumbs.db',
+  '**/Thumbs.db',
+  '**/desktop.ini',
   // XML outputs
-  "**/flattened-codebase.xml",
-  "**/repomix-output.xml",
+  '**/flattened-codebase.xml',
+  '**/repomix-output.xml',
   // Images, media, fonts, archives, docs, dylibs
-  "**/*.jpg",
-  "**/*.jpeg",
-  "**/*.png",
-  "**/*.gif",
-  "**/*.bmp",
-  "**/*.ico",
-  "**/*.svg",
-  "**/*.pdf",
-  "**/*.doc",
-  "**/*.docx",
-  "**/*.xls",
-  "**/*.xlsx",
-  "**/*.ppt",
-  "**/*.pptx",
-  "**/*.zip",
-  "**/*.tar",
-  "**/*.gz",
-  "**/*.rar",
-  "**/*.7z",
-  "**/*.dylib",
-  "**/*.mp3",
-  "**/*.mp4",
-  "**/*.avi",
-  "**/*.mov",
-  "**/*.wav",
-  "**/*.ttf",
-  "**/*.otf",
-  "**/*.woff",
-  "**/*.woff2",
+  '**/*.jpg',
+  '**/*.jpeg',
+  '**/*.png',
+  '**/*.gif',
+  '**/*.bmp',
+  '**/*.ico',
+  '**/*.svg',
+  '**/*.pdf',
+  '**/*.doc',
+  '**/*.docx',
+  '**/*.xls',
+  '**/*.xlsx',
+  '**/*.ppt',
+  '**/*.pptx',
+  '**/*.zip',
+  '**/*.tar',
+  '**/*.gz',
+  '**/*.rar',
+  '**/*.7z',
+  '**/*.dylib',
+  '**/*.mp3',
+  '**/*.mp4',
+  '**/*.avi',
+  '**/*.mov',
+  '**/*.wav',
+  '**/*.ttf',
+  '**/*.otf',
+  '**/*.woff',
+  '**/*.woff2',
   // Env files
-  "**/.env",
-  "**/.env.*",
-  "**/*.env",
+  '**/.env',
+  '**/.env.*',
+  '**/*.env',
   // Misc
-  "**/junit.xml",
+  '**/junit.xml',
 ];
 
 async function readIgnoreFile(filePath) {
   try {
-    if (!await fs.pathExists(filePath)) return [];
-    const content = await fs.readFile(filePath, "utf8");
+    if (!(await fs.pathExists(filePath))) return [];
+    const content = await fs.readFile(filePath, 'utf8');
     return content
-      .split("\n")
+      .split('\n')
       .map((l) => l.trim())
-      .filter((l) => l && !l.startsWith("#"));
-  } catch (err) {
+      .filter((l) => l && !l.startsWith('#'));
+  } catch {
     return [];
   }
 }
@@ -153,18 +153,18 @@ async function parseGitignore(gitignorePath) {
 
 async function loadIgnore(rootDir, extraPatterns = []) {
   const ig = ignore();
-  const gitignorePath = path.join(rootDir, ".gitignore");
+  const gitignorePath = path.join(rootDir, '.gitignore');
   const patterns = [
-    ...await readIgnoreFile(gitignorePath),
+    ...(await readIgnoreFile(gitignorePath)),
     ...DEFAULT_PATTERNS,
     ...extraPatterns,
   ];
   // De-duplicate
-  const unique = Array.from(new Set(patterns.map((p) => String(p))));
+  const unique = [...new Set(patterns.map(String))];
   ig.add(unique);
 
   // Include-only filter: return true if path should be included
-  const filter = (relativePath) => !ig.ignores(relativePath.replace(/\\/g, "/"));
+  const filter = (relativePath) => !ig.ignores(relativePath.replaceAll('\\', '/'));
 
   return { ig, filter, patterns: unique };
 }
@@ -1,20 +1,14 @@
 #!/usr/bin/env node
 
-const { Command } = require("commander");
-const fs = require("fs-extra");
-const path = require("node:path");
-const process = require("node:process");
+const { Command } = require('commander');
+const fs = require('fs-extra');
+const path = require('node:path');
+const process = require('node:process');
 
 // Modularized components
-const { findProjectRoot } = require("./projectRoot.js");
-const { promptYesNo, promptPath } = require("./prompts.js");
-const {
-  discoverFiles,
-  filterFiles,
-  aggregateFileContents,
-} = require("./files.js");
-const { generateXMLOutput } = require("./xml.js");
-const { calculateStatistics } = require("./stats.js");
+const { findProjectRoot } = require('./projectRoot.js');
+const { promptYesNo, promptPath } = require('./prompts.js');
+const { discoverFiles, filterFiles, aggregateFileContents } = require('./files.js');
+const { generateXMLOutput } = require('./xml.js');
+const { calculateStatistics } = require('./stats.js');
 
-/**
- * Recursively discover all files in a directory
@@ -73,30 +67,30 @@ const { calculateStatistics } = require("./stats.js");
 const program = new Command();
 
 program
-  .name("bmad-flatten")
-  .description("BMad-Method codebase flattener tool")
-  .version("1.0.0")
-  .option("-i, --input <path>", "Input directory to flatten", process.cwd())
-  .option("-o, --output <path>", "Output file path", "flattened-codebase.xml")
+  .name('bmad-flatten')
+  .description('BMad-Method codebase flattener tool')
+  .version('1.0.0')
+  .option('-i, --input <path>', 'Input directory to flatten', process.cwd())
+  .option('-o, --output <path>', 'Output file path', 'flattened-codebase.xml')
   .action(async (options) => {
     let inputDir = path.resolve(options.input);
     let outputPath = path.resolve(options.output);
 
     // Detect if user explicitly provided -i/--input or -o/--output
     const argv = process.argv.slice(2);
-    const userSpecifiedInput = argv.some((a) =>
-      a === "-i" || a === "--input" || a.startsWith("--input=")
+    const userSpecifiedInput = argv.some(
+      (a) => a === '-i' || a === '--input' || a.startsWith('--input='),
     );
-    const userSpecifiedOutput = argv.some((a) =>
-      a === "-o" || a === "--output" || a.startsWith("--output=")
+    const userSpecifiedOutput = argv.some(
+      (a) => a === '-o' || a === '--output' || a.startsWith('--output='),
     );
-    const noPathArgs = !userSpecifiedInput && !userSpecifiedOutput;
+    const noPathArguments = !userSpecifiedInput && !userSpecifiedOutput;
 
-    if (noPathArgs) {
+    if (noPathArguments) {
       const detectedRoot = await findProjectRoot(process.cwd());
       const suggestedOutput = detectedRoot
-        ? path.join(detectedRoot, "flattened-codebase.xml")
-        : path.resolve("flattened-codebase.xml");
+        ? path.join(detectedRoot, 'flattened-codebase.xml')
+        : path.resolve('flattened-codebase.xml');
 
       if (detectedRoot) {
         const useDefaults = await promptYesNo(
@@ -107,29 +101,23 @@ program
           inputDir = detectedRoot;
           outputPath = suggestedOutput;
         } else {
-          inputDir = await promptPath(
-            "Enter input directory path",
-            process.cwd(),
-          );
+          inputDir = await promptPath('Enter input directory path', process.cwd());
           outputPath = await promptPath(
-            "Enter output file path",
-            path.join(inputDir, "flattened-codebase.xml"),
+            'Enter output file path',
+            path.join(inputDir, 'flattened-codebase.xml'),
           );
         }
       } else {
-        console.log("Could not auto-detect a project root.");
-        inputDir = await promptPath(
-          "Enter input directory path",
-          process.cwd(),
-        );
+        console.log('Could not auto-detect a project root.');
+        inputDir = await promptPath('Enter input directory path', process.cwd());
         outputPath = await promptPath(
-          "Enter output file path",
-          path.join(inputDir, "flattened-codebase.xml"),
+          'Enter output file path',
+          path.join(inputDir, 'flattened-codebase.xml'),
        );
      }
    } else {
      console.error(
-        "Could not auto-detect a project root and no arguments were provided. Please specify -i/--input and -o/--output.",
+        'Could not auto-detect a project root and no arguments were provided. Please specify -i/--input and -o/--output.',
      );
      process.exit(1);
    }
@@ -142,25 +130,23 @@ program
 
    try {
      // Verify input directory exists
-      if (!await fs.pathExists(inputDir)) {
+      if (!(await fs.pathExists(inputDir))) {
        console.error(`❌ Error: Input directory does not exist: ${inputDir}`);
        process.exit(1);
      }
 
      // Import ora dynamically
-      const { default: ora } = await import("ora");
+      const { default: ora } = await import('ora');
 
      // Start file discovery with spinner
-      const discoverySpinner = ora("🔍 Discovering files...").start();
+      const discoverySpinner = ora('🔍 Discovering files...').start();
      const files = await discoverFiles(inputDir);
      const filteredFiles = await filterFiles(files, inputDir);
-      discoverySpinner.succeed(
-        `📁 Found ${filteredFiles.length} files to include`,
-      );
+      discoverySpinner.succeed(`📁 Found ${filteredFiles.length} files to include`);
 
      // Process files with progress tracking
-      console.log("Reading file contents");
-      const processingSpinner = ora("📄 Processing files...").start();
+      console.log('Reading file contents');
+      const processingSpinner = ora('📄 Processing files...').start();
      const aggregatedContent = await aggregateFileContents(
        filteredFiles,
        inputDir,
@@ -178,34 +164,30 @@ program
      }
 
      // Generate XML output using streaming
-      const xmlSpinner = ora("🔧 Generating XML output...").start();
+      const xmlSpinner = ora('🔧 Generating XML output...').start();
      await generateXMLOutput(aggregatedContent, outputPath);
-      xmlSpinner.succeed("📝 XML generation completed");
+      xmlSpinner.succeed('📝 XML generation completed');
 
      // Calculate and display statistics
      const outputStats = await fs.stat(outputPath);
      const stats = calculateStatistics(aggregatedContent, outputStats.size);
 
      // Display completion summary
-      console.log("\n📊 Completion Summary:");
+      console.log('\n📊 Completion Summary:');
      console.log(
-        `✅ Successfully processed ${filteredFiles.length} files into ${
-          path.basename(outputPath)
-        }`,
+        `✅ Successfully processed ${filteredFiles.length} files into ${path.basename(outputPath)}`,
      );
      console.log(`📁 Output file: ${outputPath}`);
      console.log(`📏 Total source size: ${stats.totalSize}`);
      console.log(`📄 Generated XML size: ${stats.xmlSize}`);
-      console.log(
-        `📝 Total lines of code: ${stats.totalLines.toLocaleString()}`,
-      );
+      console.log(`📝 Total lines of code: ${stats.totalLines.toLocaleString()}`);
      console.log(`🔢 Estimated tokens: ${stats.estimatedTokens}`);
      console.log(
        `📊 File breakdown: ${stats.textFiles} text, ${stats.binaryFiles} binary, ${stats.errorFiles} errors`,
      );
    } catch (error) {
-      console.error("❌ Critical error:", error.message);
-      console.error("An unexpected error occurred.");
+      console.error('❌ Critical error:', error.message);
+      console.error('An unexpected error occurred.');
      process.exit(1);
    }
  });
@@ -1,5 +1,5 @@
-const fs = require("fs-extra");
-const path = require("node:path");
+const fs = require('fs-extra');
+const path = require('node:path');
 
 /**
  * Attempt to find the project root by walking up from startDir
@@ -12,24 +12,22 @@ async function findProjectRoot(startDir) {
   let dir = path.resolve(startDir);
   const root = path.parse(dir).root;
   const markers = [
-    ".git",
-    "package.json",
-    "pnpm-workspace.yaml",
-    "yarn.lock",
-    "pnpm-lock.yaml",
-    "pyproject.toml",
-    "requirements.txt",
-    "go.mod",
-    "Cargo.toml",
-    "composer.json",
-    ".hg",
-    ".svn",
+    '.git',
+    'package.json',
+    'pnpm-workspace.yaml',
+    'yarn.lock',
+    'pnpm-lock.yaml',
+    'pyproject.toml',
+    'requirements.txt',
+    'go.mod',
+    'Cargo.toml',
+    'composer.json',
+    '.hg',
+    '.svn',
   ];
 
   while (true) {
-    const exists = await Promise.all(
-      markers.map((m) => fs.pathExists(path.join(dir, m))),
-    );
+    const exists = await Promise.all(markers.map((m) => fs.pathExists(path.join(dir, m))));
     if (exists.some(Boolean)) {
       return dir;
     }
@@ -1,11 +1,11 @@
-const os = require("node:os");
-const path = require("node:path");
-const readline = require("node:readline");
-const process = require("node:process");
+const os = require('node:os');
+const path = require('node:path');
+const readline = require('node:readline');
+const process = require('node:process');
 
 function expandHome(p) {
   if (!p) return p;
-  if (p.startsWith("~")) return path.join(os.homedir(), p.slice(1));
+  if (p.startsWith('~')) return path.join(os.homedir(), p.slice(1));
   return p;
 }
 
@@ -27,16 +27,16 @@ function promptQuestion(question) {
 }
 
 async function promptYesNo(question, defaultYes = true) {
-  const suffix = defaultYes ? " [Y/n] " : " [y/N] ";
+  const suffix = defaultYes ? ' [Y/n] ' : ' [y/N] ';
   const ans = (await promptQuestion(`${question}${suffix}`)).trim().toLowerCase();
   if (!ans) return defaultYes;
-  if (["y", "yes"].includes(ans)) return true;
-  if (["n", "no"].includes(ans)) return false;
+  if (['y', 'yes'].includes(ans)) return true;
+  if (['n', 'no'].includes(ans)) return false;
   return promptYesNo(question, defaultYes);
 }
 
 async function promptPath(question, defaultValue) {
-  const prompt = `${question}${defaultValue ? ` (default: ${defaultValue})` : ""}: `;
+  const prompt = `${question}${defaultValue ? ` (default: ${defaultValue})` : ''}: `;
   const ans = (await promptQuestion(prompt)).trim();
   return expandHome(ans || defaultValue);
 }
@@ -1,49 +1,44 @@
-const fs = require("fs-extra");
+const fs = require('fs-extra');
 
-function escapeXml(str) {
-  if (typeof str !== "string") {
-    return String(str);
+function escapeXml(string_) {
+  if (typeof string_ !== 'string') {
+    return String(string_);
   }
-  return str
-    .replace(/&/g, "&amp;")
-    .replace(/</g, "&lt;")
-    .replace(/'/g, "&apos;");
+  return string_.replaceAll('&', '&amp;').replaceAll('<', '&lt;').replaceAll("'", '&apos;');
 }
 
 function indentFileContent(content) {
-  if (typeof content !== "string") {
+  if (typeof content !== 'string') {
    return String(content);
  }
-  return content.split("\n").map((line) => `  ${line}`);
+  return content.split('\n').map((line) => `  ${line}`);
 }
 
 function generateXMLOutput(aggregatedContent, outputPath) {
   const { textFiles } = aggregatedContent;
-  const writeStream = fs.createWriteStream(outputPath, { encoding: "utf8" });
+  const writeStream = fs.createWriteStream(outputPath, { encoding: 'utf8' });
 
   return new Promise((resolve, reject) => {
-    writeStream.on("error", reject);
-    writeStream.on("finish", resolve);
+    writeStream.on('error', reject);
+    writeStream.on('finish', resolve);
 
     writeStream.write('<?xml version="1.0" encoding="UTF-8"?>\n');
-    writeStream.write("<files>\n");
+    writeStream.write('<files>\n');
 
     // Sort files by path for deterministic order
-    const filesSorted = [...textFiles].sort((a, b) =>
-      a.path.localeCompare(b.path)
-    );
+    const filesSorted = [...textFiles].sort((a, b) => a.path.localeCompare(b.path));
    let index = 0;
 
    const writeNext = () => {
      if (index >= filesSorted.length) {
-        writeStream.write("</files>\n");
+        writeStream.write('</files>\n');
        writeStream.end();
        return;
      }
 
      const file = filesSorted[index++];
      const p = escapeXml(file.path);
-      const content = typeof file.content === "string" ? file.content : "";
+      const content = typeof file.content === 'string' ? file.content : '';
 
      if (content.length === 0) {
        writeStream.write(`\t<file path='${p}'/>\n`);
@@ -51,27 +46,34 @@ function generateXMLOutput(aggregatedContent, outputPath) {
        return;
      }
 
-      const needsCdata = content.includes("<") || content.includes("&") ||
-        content.includes("]]>");
+      const needsCdata = content.includes('<') || content.includes('&') || content.includes(']]>');
      if (needsCdata) {
        // Open tag and CDATA on their own line with tab indent; content lines indented with two tabs
        writeStream.write(`\t<file path='${p}'><![CDATA[\n`);
        // Safely split any occurrences of "]]>" inside content, trim trailing newlines, indent each line with two tabs
-        const safe = content.replace(/]]>/g, "]]]]><![CDATA[>");
-        const trimmed = safe.replace(/[\r\n]+$/, "");
-        const indented = trimmed.length > 0
-          ? trimmed.split("\n").map((line) => `\t\t${line}`).join("\n")
-          : "";
+        const safe = content.replaceAll(']]>', ']]]]><![CDATA[>');
+        const trimmed = safe.replace(/[\r\n]+$/, '');
+        const indented =
+          trimmed.length > 0
+            ? trimmed
+                .split('\n')
+                .map((line) => `\t\t${line}`)
+                .join('\n')
+            : '';
        writeStream.write(indented);
        // Close CDATA and attach closing tag directly after the last content line
-        writeStream.write("]]></file>\n");
+        writeStream.write(']]></file>\n');
      } else {
        // Write opening tag then newline; indent content with two tabs; attach closing tag directly after last content char
        writeStream.write(`\t<file path='${p}'>\n`);
-        const trimmed = content.replace(/[\r\n]+$/, "");
-        const indented = trimmed.length > 0
-          ? trimmed.split("\n").map((line) => `\t\t${line}`).join("\n")
-          : "";
+        const trimmed = content.replace(/[\r\n]+$/, '');
+        const indented =
+          trimmed.length > 0
+            ? trimmed
+                .split('\n')
+                .map((line) => `\t\t${line}`)
+                .join('\n')
+            : '';
        writeStream.write(indented);
        writeStream.write(`</file>\n`);
      }