chore: add code formatting config and pre-commit hooks

manjaroblack
2025-08-16 19:00:08 -05:00
parent 51284d6ecf
commit 2a9e07d4fd
130 changed files with 11886 additions and 10939 deletions


@@ -1,7 +1,7 @@
const fs = require("fs-extra");
const path = require("node:path");
const os = require("node:os");
const { isBinaryFile } = require("./binary.js");
const fs = require('fs-extra');
const path = require('node:path');
const os = require('node:os');
const { isBinaryFile } = require('./binary.js');
/**
* Aggregate file contents with bounded concurrency.
@@ -22,7 +22,7 @@ async function aggregateFileContents(files, rootDir, spinner = null) {
// Automatic concurrency selection based on CPU count and workload size.
// - Base on 2x logical CPUs, clamped to [2, 64]
// - For very small workloads, avoid excessive parallelism
-const cpuCount = (os.cpus && Array.isArray(os.cpus()) ? os.cpus().length : (os.cpus?.length || 4));
+const cpuCount = os.cpus && Array.isArray(os.cpus()) ? os.cpus().length : os.cpus?.length || 4;
let concurrency = Math.min(64, Math.max(2, (Number(cpuCount) || 4) * 2));
if (files.length > 0 && files.length < concurrency) {
concurrency = Math.max(1, Math.min(concurrency, Math.ceil(files.length / 2)));
@@ -37,16 +37,16 @@ async function aggregateFileContents(files, rootDir, spinner = null) {
const binary = await isBinaryFile(filePath);
if (binary) {
-const size = (await fs.stat(filePath)).size;
+const { size } = await fs.stat(filePath);
results.binaryFiles.push({ path: relativePath, absolutePath: filePath, size });
} else {
const content = await fs.readFile(filePath, "utf8");
const content = await fs.readFile(filePath, 'utf8');
results.textFiles.push({
path: relativePath,
absolutePath: filePath,
content,
size: content.length,
lines: content.split("\n").length,
lines: content.split('\n').length,
});
}
} catch (error) {
@@ -63,8 +63,8 @@ async function aggregateFileContents(files, rootDir, spinner = null) {
}
}
-for (let i = 0; i < files.length; i += concurrency) {
-const slice = files.slice(i, i + concurrency);
+for (let index = 0; index < files.length; index += concurrency) {
+const slice = files.slice(index, index + concurrency);
await Promise.all(slice.map(processOne));
}
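For reference, a minimal standalone sketch of the bounded-concurrency pattern the hunks above touch: concurrency is derived from the logical CPU count (2x CPUs, clamped to [2, 64], reduced for small workloads), and items are then processed in fixed-size batches. The pickConcurrency/processAll helper names, the processOne callback, and the usage comment are hypothetical illustrations, not part of this commit.

const os = require('node:os');

function pickConcurrency(itemCount) {
  // 2x logical CPUs, clamped to [2, 64]; fall back to 4 if os.cpus() yields nothing useful.
  const cpus = os.cpus();
  const cpuCount = Array.isArray(cpus) && cpus.length > 0 ? cpus.length : 4;
  let concurrency = Math.min(64, Math.max(2, cpuCount * 2));
  // For very small workloads, avoid more parallelism than the items warrant.
  if (itemCount > 0 && itemCount < concurrency) {
    concurrency = Math.max(1, Math.min(concurrency, Math.ceil(itemCount / 2)));
  }
  return concurrency;
}

async function processAll(items, processOne) {
  const concurrency = pickConcurrency(items.length);
  // Work through the items in slices of `concurrency`, awaiting each batch
  // before starting the next, the same loop shape as in the final hunk above.
  for (let index = 0; index < items.length; index += concurrency) {
    const slice = items.slice(index, index + concurrency);
    await Promise.all(slice.map(processOne));
  }
}

// Hypothetical usage: process each path with at most `concurrency` operations in flight.
// await processAll(filePaths, async (filePath) => { /* read or stat filePath here */ });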