chore: standardize ESLint/Prettier formatting across codebase
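The conventions applied throughout the diffs below can be summarized with a small before/after sketch (an illustrative, hypothetical snippet, not code from this repository): double quotes become single quotes, Node built-ins are required via the node: protocol, multi-line literals gain trailing commas, arrow-function parameters are parenthesized, and unused catch bindings are dropped.

// Before (hypothetical example)
const path = require("path");
const names = files.map(f => f.trim());
const exts = [
  ".md",
  ".yaml"
];
try { load(); } catch (error) { return null; }

// After (same example, formatted to the new conventions)
const path = require('node:path');
const names = files.map((f) => f.trim());
const exts = [
  '.md',
  '.yaml',
];
try {
  load();
} catch {
  return null;
}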
@@ -1,5 +1,5 @@
const fs = require('fs-extra');
const path = require('path');
const path = require('node:path');
const yaml = require('js-yaml');
const { extractYamlFromAgent } = require('../../lib/yaml-utils');

@@ -11,7 +11,7 @@ class ConfigLoader {

async load() {
if (this.config) return this.config;

try {
const configContent = await fs.readFile(this.configPath, 'utf8');
this.config = yaml.load(configContent);
@@ -28,30 +28,30 @@ class ConfigLoader {

async getAvailableAgents() {
const agentsDir = path.join(this.getBmadCorePath(), 'agents');

try {
const entries = await fs.readdir(agentsDir, { withFileTypes: true });
const agents = [];

for (const entry of entries) {
if (entry.isFile() && entry.name.endsWith('.md')) {
const agentPath = path.join(agentsDir, entry.name);
const agentId = path.basename(entry.name, '.md');

try {
const agentContent = await fs.readFile(agentPath, 'utf8');

// Extract YAML block from agent file
const yamlContentText = extractYamlFromAgent(agentContent);
if (yamlContentText) {
const yamlContent = yaml.load(yamlContentText);
const agentConfig = yamlContent.agent || {};

agents.push({
id: agentId,
name: agentConfig.title || agentConfig.name || agentId,
file: `bmad-core/agents/${entry.name}`,
description: agentConfig.whenToUse || 'No description available'
description: agentConfig.whenToUse || 'No description available',
});
}
} catch (error) {
@@ -59,10 +59,10 @@ class ConfigLoader {
}
}
}

// Sort agents by name for consistent display
agents.sort((a, b) => a.name.localeCompare(b.name));

return agents;
} catch (error) {
console.warn(`Failed to read agents directory: ${error.message}`);
@@ -72,41 +72,45 @@ class ConfigLoader {

async getAvailableExpansionPacks() {
const expansionPacksDir = path.join(this.getBmadCorePath(), '..', 'expansion-packs');

try {
const entries = await fs.readdir(expansionPacksDir, { withFileTypes: true });
const expansionPacks = [];

for (const entry of entries) {
if (entry.isDirectory() && !entry.name.startsWith('.')) {
const packPath = path.join(expansionPacksDir, entry.name);
const configPath = path.join(packPath, 'config.yaml');

try {
// Read config.yaml
const configContent = await fs.readFile(configPath, 'utf8');
const config = yaml.load(configContent);

expansionPacks.push({
id: entry.name,
name: config.name || entry.name,
description: config['short-title'] || config.description || 'No description available',
fullDescription: config.description || config['short-title'] || 'No description available',
description:
config['short-title'] || config.description || 'No description available',
fullDescription:
config.description || config['short-title'] || 'No description available',
version: config.version || '1.0.0',
author: config.author || 'BMad Team',
packPath: packPath,
dependencies: config.dependencies?.agents || []
dependencies: config.dependencies?.agents || [],
});
} catch (error) {
// Fallback if config.yaml doesn't exist or can't be read
console.warn(`Failed to read config for expansion pack ${entry.name}: ${error.message}`);

console.warn(
`Failed to read config for expansion pack ${entry.name}: ${error.message}`,
);

// Try to derive info from directory name as fallback
const name = entry.name
.split('-')
.map(word => word.charAt(0).toUpperCase() + word.slice(1))
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(' ');

expansionPacks.push({
id: entry.name,
name: name,
@@ -115,12 +119,12 @@ class ConfigLoader {
version: '1.0.0',
author: 'BMad Team',
packPath: packPath,
dependencies: []
dependencies: [],
});
}
}
}

return expansionPacks;
} catch (error) {
console.warn(`Failed to read expansion packs directory: ${error.message}`);
@@ -132,16 +136,16 @@ class ConfigLoader {
// Use DependencyResolver to dynamically parse agent dependencies
const DependencyResolver = require('../../lib/dependency-resolver');
const resolver = new DependencyResolver(path.join(__dirname, '..', '..', '..'));

const agentDeps = await resolver.resolveAgentDependencies(agentId);

// Convert to flat list of file paths
const depPaths = [];

// Core files and utilities are included automatically by DependencyResolver

// Add agent file itself is already handled by installer

// Add all resolved resources
for (const resource of agentDeps.resources) {
const filePath = `.bmad-core/${resource.type}/${resource.id}.md`;
@@ -149,7 +153,7 @@ class ConfigLoader {
depPaths.push(filePath);
}
}

return depPaths;
}

@@ -175,25 +179,25 @@ class ConfigLoader {

async getAvailableTeams() {
const teamsDir = path.join(this.getBmadCorePath(), 'agent-teams');

try {
const entries = await fs.readdir(teamsDir, { withFileTypes: true });
const teams = [];

for (const entry of entries) {
if (entry.isFile() && entry.name.endsWith('.yaml')) {
const teamPath = path.join(teamsDir, entry.name);

try {
const teamContent = await fs.readFile(teamPath, 'utf8');
const teamConfig = yaml.load(teamContent);

if (teamConfig.bundle) {
teams.push({
id: path.basename(entry.name, '.yaml'),
name: teamConfig.bundle.name || entry.name,
description: teamConfig.bundle.description || 'Team configuration',
icon: teamConfig.bundle.icon || '📋'
icon: teamConfig.bundle.icon || '📋',
});
}
} catch (error) {
@@ -201,7 +205,7 @@ class ConfigLoader {
}
}
}

return teams;
} catch (error) {
console.warn(`Warning: Could not scan teams directory: ${error.message}`);
@@ -217,16 +221,16 @@ class ConfigLoader {
// Use DependencyResolver to dynamically parse team dependencies
const DependencyResolver = require('../../lib/dependency-resolver');
const resolver = new DependencyResolver(path.join(__dirname, '..', '..', '..'));

try {
const teamDeps = await resolver.resolveTeamDependencies(teamId);

// Convert to flat list of file paths
const depPaths = [];

// Add team config file
depPaths.push(`.bmad-core/agent-teams/${teamId}.yaml`);

// Add all agents
for (const agent of teamDeps.agents) {
const filePath = `.bmad-core/agents/${agent.id}.md`;
@@ -234,7 +238,7 @@ class ConfigLoader {
depPaths.push(filePath);
}
}

// Add all resolved resources
for (const resource of teamDeps.resources) {
const filePath = `.bmad-core/${resource.type}/${resource.id}.${resource.type === 'workflows' ? 'yaml' : 'md'}`;
@@ -242,7 +246,7 @@ class ConfigLoader {
depPaths.push(filePath);
}
}

return depPaths;
} catch (error) {
throw new Error(`Failed to resolve team dependencies for ${teamId}: ${error.message}`);
@@ -250,4 +254,4 @@ class ConfigLoader {
}
}

module.exports = new ConfigLoader();
module.exports = new ConfigLoader();

@@ -1,32 +1,24 @@
const fs = require("fs-extra");
const path = require("path");
const crypto = require("crypto");
const yaml = require("js-yaml");
const chalk = require("chalk").default || require("chalk");
const { createReadStream, createWriteStream, promises: fsPromises } = require('fs');
const { pipeline } = require('stream/promises');
const fs = require('fs-extra');
const path = require('node:path');
const crypto = require('node:crypto');
const yaml = require('js-yaml');
const chalk = require('chalk');
const { createReadStream, createWriteStream, promises: fsPromises } = require('node:fs');
const { pipeline } = require('node:stream/promises');
const resourceLocator = require('./resource-locator');

class FileManager {
constructor() {
this.manifestDir = ".bmad-core";
this.manifestFile = "install-manifest.yaml";
}
constructor() {}

async copyFile(source, destination) {
try {
await fs.ensureDir(path.dirname(destination));

// Use streaming for large files (> 10MB)
const stats = await fs.stat(source);
if (stats.size > 10 * 1024 * 1024) {
await pipeline(
createReadStream(source),
createWriteStream(destination)
);
} else {
await fs.copy(source, destination);
}
await (stats.size > 10 * 1024 * 1024
? pipeline(createReadStream(source), createWriteStream(destination))
: fs.copy(source, destination));
return true;
} catch (error) {
console.error(chalk.red(`Failed to copy ${source}:`), error.message);
@@ -37,32 +29,24 @@ class FileManager {
async copyDirectory(source, destination) {
try {
await fs.ensureDir(destination);

// Use streaming copy for large directories
const files = await resourceLocator.findFiles('**/*', {
cwd: source,
nodir: true
nodir: true,
});

// Process files in batches to avoid memory issues
const batchSize = 50;
for (let i = 0; i < files.length; i += batchSize) {
const batch = files.slice(i, i + batchSize);
for (let index = 0; index < files.length; index += batchSize) {
const batch = files.slice(index, index + batchSize);
await Promise.all(
batch.map(file =>
this.copyFile(
path.join(source, file),
path.join(destination, file)
)
)
batch.map((file) => this.copyFile(path.join(source, file), path.join(destination, file))),
);
}
return true;
} catch (error) {
console.error(
chalk.red(`Failed to copy directory ${source}:`),
error.message
);
console.error(chalk.red(`Failed to copy directory ${source}:`), error.message);
return false;
}
}
@@ -73,17 +57,16 @@ class FileManager {

for (const file of files) {
const sourcePath = path.join(sourceDir, file);
const destPath = path.join(destDir, file);
const destinationPath = path.join(destDir, file);

// Use root replacement if rootValue is provided and file needs it
const needsRootReplacement = rootValue && (file.endsWith('.md') || file.endsWith('.yaml') || file.endsWith('.yml'));

const needsRootReplacement =
rootValue && (file.endsWith('.md') || file.endsWith('.yaml') || file.endsWith('.yml'));

let success = false;
if (needsRootReplacement) {
success = await this.copyFileWithRootReplacement(sourcePath, destPath, rootValue);
} else {
success = await this.copyFile(sourcePath, destPath);
}
success = await (needsRootReplacement
? this.copyFileWithRootReplacement(sourcePath, destinationPath, rootValue)
: this.copyFile(sourcePath, destinationPath));

if (success) {
copied.push(file);
@@ -97,32 +80,28 @@ class FileManager {
try {
// Use streaming for hash calculation to reduce memory usage
const stream = createReadStream(filePath);
const hash = crypto.createHash("sha256");

const hash = crypto.createHash('sha256');

for await (const chunk of stream) {
hash.update(chunk);
}

return hash.digest("hex").slice(0, 16);
} catch (error) {

return hash.digest('hex').slice(0, 16);
} catch {
return null;
}
}

async createManifest(installDir, config, files) {
const manifestPath = path.join(
installDir,
this.manifestDir,
this.manifestFile
);
const manifestPath = path.join(installDir, this.manifestDir, this.manifestFile);

// Read version from package.json
let coreVersion = "unknown";
let coreVersion = 'unknown';
try {
const packagePath = path.join(__dirname, '..', '..', '..', 'package.json');
const packageJson = require(packagePath);
coreVersion = packageJson.version;
} catch (error) {
} catch {
console.warn("Could not read version from package.json, using 'unknown'");
}

@@ -156,31 +135,23 @@ class FileManager {
}

async readManifest(installDir) {
const manifestPath = path.join(
installDir,
this.manifestDir,
this.manifestFile
);
const manifestPath = path.join(installDir, this.manifestDir, this.manifestFile);

try {
const content = await fs.readFile(manifestPath, "utf8");
const content = await fs.readFile(manifestPath, 'utf8');
return yaml.load(content);
} catch (error) {
} catch {
return null;
}
}

async readExpansionPackManifest(installDir, packId) {
const manifestPath = path.join(
installDir,
`.${packId}`,
this.manifestFile
);
const manifestPath = path.join(installDir, `.${packId}`, this.manifestFile);

try {
const content = await fs.readFile(manifestPath, "utf8");
const content = await fs.readFile(manifestPath, 'utf8');
return yaml.load(content);
} catch (error) {
} catch {
return null;
}
}
@@ -203,24 +174,24 @@ class FileManager {
async checkFileIntegrity(installDir, manifest) {
const result = {
missing: [],
modified: []
modified: [],
};

for (const file of manifest.files) {
const filePath = path.join(installDir, file.path);

// Skip checking the manifest file itself - it will always be different due to timestamps
if (file.path.endsWith('install-manifest.yaml')) {
continue;
}

if (!(await this.pathExists(filePath))) {
result.missing.push(file.path);
} else {

if (await this.pathExists(filePath)) {
const currentHash = await this.calculateFileHash(filePath);
if (currentHash && currentHash !== file.hash) {
result.modified.push(file.path);
}
} else {
result.missing.push(file.path);
}
}

@@ -228,7 +199,7 @@ class FileManager {
}

async backupFile(filePath) {
const backupPath = filePath + ".bak";
const backupPath = filePath + '.bak';
let counter = 1;
let finalBackupPath = backupPath;

@@ -256,7 +227,7 @@ class FileManager {
}

async readFile(filePath) {
return fs.readFile(filePath, "utf8");
return fs.readFile(filePath, 'utf8');
}

async writeFile(filePath, content) {
@@ -269,14 +240,10 @@ class FileManager {
}

async createExpansionPackManifest(installDir, packId, config, files) {
const manifestPath = path.join(
installDir,
`.${packId}`,
this.manifestFile
);
const manifestPath = path.join(installDir, `.${packId}`, this.manifestFile);

const manifest = {
version: config.expansionPackVersion || require("../../../package.json").version,
version: config.expansionPackVersion || require('../../../package.json').version,
installed_at: new Date().toISOString(),
install_type: config.installType,
expansion_pack_id: config.expansionPackId,
@@ -306,24 +273,24 @@ class FileManager {

async modifyCoreConfig(installDir, config) {
const coreConfigPath = path.join(installDir, '.bmad-core', 'core-config.yaml');

try {
// Read the existing core-config.yaml
const coreConfigContent = await fs.readFile(coreConfigPath, 'utf8');
const coreConfig = yaml.load(coreConfigContent);

// Modify sharding settings if provided
if (config.prdSharded !== undefined) {
coreConfig.prd.prdSharded = config.prdSharded;
}

if (config.architectureSharded !== undefined) {
coreConfig.architecture.architectureSharded = config.architectureSharded;
}

// Write back the modified config
await fs.writeFile(coreConfigPath, yaml.dump(coreConfig, { indent: 2 }));

return true;
} catch (error) {
console.error(chalk.red(`Failed to modify core-config.yaml:`), error.message);
@@ -335,31 +302,32 @@ class FileManager {
try {
// Check file size to determine if we should stream
const stats = await fs.stat(source);

if (stats.size > 5 * 1024 * 1024) { // 5MB threshold

if (stats.size > 5 * 1024 * 1024) {
// 5MB threshold
// Use streaming for large files
const { Transform } = require('stream');
const { Transform } = require('node:stream');
const replaceStream = new Transform({
transform(chunk, encoding, callback) {
const modified = chunk.toString().replace(/\{root\}/g, rootValue);
const modified = chunk.toString().replaceAll('{root}', rootValue);
callback(null, modified);
}
},
});

await this.ensureDirectory(path.dirname(destination));
await pipeline(
createReadStream(source, { encoding: 'utf8' }),
replaceStream,
createWriteStream(destination, { encoding: 'utf8' })
createWriteStream(destination, { encoding: 'utf8' }),
);
} else {
// Regular approach for smaller files
const content = await fsPromises.readFile(source, 'utf8');
const updatedContent = content.replace(/\{root\}/g, rootValue);
const updatedContent = content.replaceAll('{root}', rootValue);
await this.ensureDirectory(path.dirname(destination));
await fsPromises.writeFile(destination, updatedContent, 'utf8');
}

return true;
} catch (error) {
console.error(chalk.red(`Failed to copy ${source} with root replacement:`), error.message);
@@ -367,45 +335,55 @@ class FileManager {
}
}

async copyDirectoryWithRootReplacement(source, destination, rootValue, fileExtensions = ['.md', '.yaml', '.yml']) {
async copyDirectoryWithRootReplacement(
source,
destination,
rootValue,
fileExtensions = ['.md', '.yaml', '.yml'],
) {
try {
await this.ensureDirectory(destination);

// Get all files in source directory
const files = await resourceLocator.findFiles('**/*', {
cwd: source,
nodir: true
const files = await resourceLocator.findFiles('**/*', {
cwd: source,
nodir: true,
});

let replacedCount = 0;

for (const file of files) {
const sourcePath = path.join(source, file);
const destPath = path.join(destination, file);

const destinationPath = path.join(destination, file);

// Check if this file type should have {root} replacement
const shouldReplace = fileExtensions.some(ext => file.endsWith(ext));

const shouldReplace = fileExtensions.some((extension) => file.endsWith(extension));

if (shouldReplace) {
if (await this.copyFileWithRootReplacement(sourcePath, destPath, rootValue)) {
if (await this.copyFileWithRootReplacement(sourcePath, destinationPath, rootValue)) {
replacedCount++;
}
} else {
// Regular copy for files that don't need replacement
await this.copyFile(sourcePath, destPath);
await this.copyFile(sourcePath, destinationPath);
}
}

if (replacedCount > 0) {
console.log(chalk.dim(` Processed ${replacedCount} files with {root} replacement`));
}

return true;
} catch (error) {
console.error(chalk.red(`Failed to copy directory ${source} with root replacement:`), error.message);
console.error(
chalk.red(`Failed to copy directory ${source} with root replacement:`),
error.message,
);
return false;
}
}
manifestDir = '.bmad-core';
manifestFile = 'install-manifest.yaml';
}

module.exports = new FileManager();

@@ -3,13 +3,13 @@
* Reduces duplication and provides shared methods
*/

const path = require("path");
const fs = require("fs-extra");
const yaml = require("js-yaml");
const chalk = require("chalk").default || require("chalk");
const fileManager = require("./file-manager");
const resourceLocator = require("./resource-locator");
const { extractYamlFromAgent } = require("../../lib/yaml-utils");
const path = require('node:path');
const fs = require('fs-extra');
const yaml = require('js-yaml');
const chalk = require('chalk').default || require('chalk');
const fileManager = require('./file-manager');
const resourceLocator = require('./resource-locator');
const { extractYamlFromAgent } = require('../../lib/yaml-utils');

class BaseIdeSetup {
constructor() {
@@ -27,19 +27,19 @@ class BaseIdeSetup {
}

const allAgents = new Set();

// Get core agents
const coreAgents = await this.getCoreAgentIds(installDir);
coreAgents.forEach(id => allAgents.add(id));

for (const id of coreAgents) allAgents.add(id);

// Get expansion pack agents
const expansionPacks = await this.getInstalledExpansionPacks(installDir);
for (const pack of expansionPacks) {
const packAgents = await this.getExpansionPackAgents(pack.path);
packAgents.forEach(id => allAgents.add(id));
for (const id of packAgents) allAgents.add(id);
}

const result = Array.from(allAgents);

const result = [...allAgents];
this._agentCache.set(cacheKey, result);
return result;
}
@@ -50,14 +50,14 @@ class BaseIdeSetup {
async getCoreAgentIds(installDir) {
const coreAgents = [];
const corePaths = [
path.join(installDir, ".bmad-core", "agents"),
path.join(installDir, "bmad-core", "agents")
path.join(installDir, '.bmad-core', 'agents'),
path.join(installDir, 'bmad-core', 'agents'),
];

for (const agentsDir of corePaths) {
if (await fileManager.pathExists(agentsDir)) {
const files = await resourceLocator.findFiles("*.md", { cwd: agentsDir });
coreAgents.push(...files.map(file => path.basename(file, ".md")));
const files = await resourceLocator.findFiles('*.md', { cwd: agentsDir });
coreAgents.push(...files.map((file) => path.basename(file, '.md')));
break; // Use first found
}
}
@@ -76,13 +76,13 @@ class BaseIdeSetup {

// Use resource locator for efficient path finding
let agentPath = await resourceLocator.getAgentPath(agentId);

if (!agentPath) {
// Check installation-specific paths
const possiblePaths = [
path.join(installDir, ".bmad-core", "agents", `${agentId}.md`),
path.join(installDir, "bmad-core", "agents", `${agentId}.md`),
path.join(installDir, "common", "agents", `${agentId}.md`)
path.join(installDir, '.bmad-core', 'agents', `${agentId}.md`),
path.join(installDir, 'bmad-core', 'agents', `${agentId}.md`),
path.join(installDir, 'common', 'agents', `${agentId}.md`),
];

for (const testPath of possiblePaths) {
@@ -113,7 +113,7 @@ class BaseIdeSetup {
const metadata = yaml.load(yamlContent);
return metadata.agent_name || agentId;
}
} catch (error) {
} catch {
// Fallback to agent ID
}
return agentId;
@@ -129,31 +129,31 @@ class BaseIdeSetup {
}

const expansionPacks = [];

// Check for dot-prefixed expansion packs
const dotExpansions = await resourceLocator.findFiles(".bmad-*", { cwd: installDir });

const dotExpansions = await resourceLocator.findFiles('.bmad-*', { cwd: installDir });

for (const dotExpansion of dotExpansions) {
if (dotExpansion !== ".bmad-core") {
if (dotExpansion !== '.bmad-core') {
const packPath = path.join(installDir, dotExpansion);
const packName = dotExpansion.substring(1); // remove the dot
const packName = dotExpansion.slice(1); // remove the dot
expansionPacks.push({
name: packName,
path: packPath
path: packPath,
});
}
}

// Check other dot folders that have config.yaml
const allDotFolders = await resourceLocator.findFiles(".*", { cwd: installDir });
const allDotFolders = await resourceLocator.findFiles('.*', { cwd: installDir });
for (const folder of allDotFolders) {
if (!folder.startsWith(".bmad-") && folder !== ".bmad-core") {
if (!folder.startsWith('.bmad-') && folder !== '.bmad-core') {
const packPath = path.join(installDir, folder);
const configPath = path.join(packPath, "config.yaml");
const configPath = path.join(packPath, 'config.yaml');
if (await fileManager.pathExists(configPath)) {
expansionPacks.push({
name: folder.substring(1), // remove the dot
path: packPath
name: folder.slice(1), // remove the dot
path: packPath,
});
}
}
@@ -167,13 +167,13 @@ class BaseIdeSetup {
* Get expansion pack agents
*/
async getExpansionPackAgents(packPath) {
const agentsDir = path.join(packPath, "agents");
const agentsDir = path.join(packPath, 'agents');
if (!(await fileManager.pathExists(agentsDir))) {
return [];
}

const agentFiles = await resourceLocator.findFiles("*.md", { cwd: agentsDir });
return agentFiles.map(file => path.basename(file, ".md"));

const agentFiles = await resourceLocator.findFiles('*.md', { cwd: agentsDir });
return agentFiles.map((file) => path.basename(file, '.md'));
}

/**
@@ -183,27 +183,28 @@ class BaseIdeSetup {
const agentContent = await fileManager.readFile(agentPath);
const agentTitle = await this.getAgentTitle(agentId, installDir);
const yamlContent = extractYamlFromAgent(agentContent);

let content = "";

let content = '';

if (format === 'mdc') {
// MDC format for Cursor
content = "---\n";
content += "description: \n";
content += "globs: []\n";
content += "alwaysApply: false\n";
content += "---\n\n";
content = '---\n';
content += 'description: \n';
content += 'globs: []\n';
content += 'alwaysApply: false\n';
content += '---\n\n';
content += `# ${agentId.toUpperCase()} Agent Rule\n\n`;
content += `This rule is triggered when the user types \`@${agentId}\` and activates the ${agentTitle} agent persona.\n\n`;
content += "## Agent Activation\n\n";
content += "CRITICAL: Read the full YAML, start activation to alter your state of being, follow startup section instructions, stay in this being until told to exit this mode:\n\n";
content += "```yaml\n";
content += yamlContent || agentContent.replace(/^#.*$/m, "").trim();
content += "\n```\n\n";
content += "## File Reference\n\n";
const relativePath = path.relative(installDir, agentPath).replace(/\\/g, '/');
content += '## Agent Activation\n\n';
content +=
'CRITICAL: Read the full YAML, start activation to alter your state of being, follow startup section instructions, stay in this being until told to exit this mode:\n\n';
content += '```yaml\n';
content += yamlContent || agentContent.replace(/^#.*$/m, '').trim();
content += '\n```\n\n';
content += '## File Reference\n\n';
const relativePath = path.relative(installDir, agentPath).replaceAll('\\', '/');
content += `The complete agent definition is available in [${relativePath}](mdc:${relativePath}).\n\n`;
content += "## Usage\n\n";
content += '## Usage\n\n';
content += `When the user types \`@${agentId}\`, activate this ${agentTitle} persona and follow all instructions defined in the YAML configuration above.\n`;
} else if (format === 'claude') {
// Claude Code format
@@ -211,7 +212,7 @@ class BaseIdeSetup {
content += `When this command is used, adopt the following agent persona:\n\n`;
content += agentContent;
}

return content;
}

@@ -224,4 +225,4 @@ class BaseIdeSetup {
}
}

module.exports = BaseIdeSetup;
module.exports = BaseIdeSetup;

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -3,7 +3,7 @@
* Helps identify memory leaks and optimize resource usage
*/

const v8 = require('v8');
const v8 = require('node:v8');

class MemoryProfiler {
constructor() {
@@ -19,7 +19,7 @@ class MemoryProfiler {
checkpoint(label) {
const memUsage = process.memoryUsage();
const heapStats = v8.getHeapStatistics();

const checkpoint = {
label,
timestamp: Date.now() - this.startTime,
@@ -28,18 +28,18 @@ class MemoryProfiler {
heapTotal: this.formatBytes(memUsage.heapTotal),
heapUsed: this.formatBytes(memUsage.heapUsed),
external: this.formatBytes(memUsage.external),
arrayBuffers: this.formatBytes(memUsage.arrayBuffers || 0)
arrayBuffers: this.formatBytes(memUsage.arrayBuffers || 0),
},
heap: {
totalHeapSize: this.formatBytes(heapStats.total_heap_size),
usedHeapSize: this.formatBytes(heapStats.used_heap_size),
heapSizeLimit: this.formatBytes(heapStats.heap_size_limit),
mallocedMemory: this.formatBytes(heapStats.malloced_memory),
externalMemory: this.formatBytes(heapStats.external_memory)
externalMemory: this.formatBytes(heapStats.external_memory),
},
raw: {
heapUsed: memUsage.heapUsed
}
heapUsed: memUsage.heapUsed,
},
};

// Track peak memory
@@ -55,8 +55,8 @@ class MemoryProfiler {
* Force garbage collection (requires --expose-gc flag)
*/
forceGC() {
if (global.gc) {
global.gc();
if (globalThis.gc) {
globalThis.gc();
return true;
}
return false;
@@ -67,16 +67,16 @@ class MemoryProfiler {
*/
getSummary() {
const currentMemory = process.memoryUsage();

return {
currentUsage: {
rss: this.formatBytes(currentMemory.rss),
heapTotal: this.formatBytes(currentMemory.heapTotal),
heapUsed: this.formatBytes(currentMemory.heapUsed)
heapUsed: this.formatBytes(currentMemory.heapUsed),
},
peakMemory: this.formatBytes(this.peakMemory),
totalCheckpoints: this.checkpoints.length,
runTime: `${((Date.now() - this.startTime) / 1000).toFixed(2)}s`
runTime: `${((Date.now() - this.startTime) / 1000).toFixed(2)}s`,
};
}

@@ -86,12 +86,12 @@ class MemoryProfiler {
getDetailedReport() {
const summary = this.getSummary();
const memoryGrowth = this.calculateMemoryGrowth();

return {
summary,
memoryGrowth,
checkpoints: this.checkpoints,
recommendations: this.getRecommendations(memoryGrowth)
recommendations: this.getRecommendations(memoryGrowth),
};
}

@@ -100,23 +100,23 @@ class MemoryProfiler {
*/
calculateMemoryGrowth() {
if (this.checkpoints.length < 2) return [];

const growth = [];
for (let i = 1; i < this.checkpoints.length; i++) {
const prev = this.checkpoints[i - 1];
const curr = this.checkpoints[i];

const heapDiff = curr.raw.heapUsed - prev.raw.heapUsed;

for (let index = 1; index < this.checkpoints.length; index++) {
const previous = this.checkpoints[index - 1];
const current = this.checkpoints[index];

const heapDiff = current.raw.heapUsed - previous.raw.heapUsed;

growth.push({
from: prev.label,
to: curr.label,
from: previous.label,
to: current.label,
heapGrowth: this.formatBytes(Math.abs(heapDiff)),
isIncrease: heapDiff > 0,
timeDiff: `${((curr.timestamp - prev.timestamp) / 1000).toFixed(2)}s`
timeDiff: `${((current.timestamp - previous.timestamp) / 1000).toFixed(2)}s`,
});
}

return growth;
}

@@ -125,40 +125,41 @@ class MemoryProfiler {
*/
getRecommendations(memoryGrowth) {
const recommendations = [];

// Check for large memory growth
const largeGrowths = memoryGrowth.filter(g => {
const largeGrowths = memoryGrowth.filter((g) => {
const bytes = this.parseBytes(g.heapGrowth);
return bytes > 50 * 1024 * 1024; // 50MB
});

if (largeGrowths.length > 0) {
recommendations.push({
type: 'warning',
message: `Large memory growth detected in ${largeGrowths.length} operations`,
details: largeGrowths.map(g => `${g.from} → ${g.to}: ${g.heapGrowth}`)
details: largeGrowths.map((g) => `${g.from} → ${g.to}: ${g.heapGrowth}`),
});
}

// Check peak memory
if (this.peakMemory > 500 * 1024 * 1024) { // 500MB
if (this.peakMemory > 500 * 1024 * 1024) {
// 500MB
recommendations.push({
type: 'warning',
message: `High peak memory usage: ${this.formatBytes(this.peakMemory)}`,
suggestion: 'Consider processing files in smaller batches'
suggestion: 'Consider processing files in smaller batches',
});
}

// Check for potential memory leaks
const continuousGrowth = this.checkContinuousGrowth();
if (continuousGrowth) {
recommendations.push({
type: 'error',
message: 'Potential memory leak detected',
details: 'Memory usage continuously increases without significant decreases'
details: 'Memory usage continuously increases without significant decreases',
});
}

return recommendations;
}

@@ -167,14 +168,14 @@ class MemoryProfiler {
*/
checkContinuousGrowth() {
if (this.checkpoints.length < 5) return false;

let increasingCount = 0;
for (let i = 1; i < this.checkpoints.length; i++) {
if (this.checkpoints[i].raw.heapUsed > this.checkpoints[i - 1].raw.heapUsed) {
for (let index = 1; index < this.checkpoints.length; index++) {
if (this.checkpoints[index].raw.heapUsed > this.checkpoints[index - 1].raw.heapUsed) {
increasingCount++;
}
}

// If memory increases in more than 80% of checkpoints, might be a leak
return increasingCount / (this.checkpoints.length - 1) > 0.8;
}
@@ -184,31 +185,31 @@ class MemoryProfiler {
*/
formatBytes(bytes) {
if (bytes === 0) return '0 B';

const k = 1024;
const sizes = ['B', 'KB', 'MB', 'GB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));

return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
const index = Math.floor(Math.log(bytes) / Math.log(k));

return Number.parseFloat((bytes / Math.pow(k, index)).toFixed(2)) + ' ' + sizes[index];
}

/**
* Parse human-readable bytes back to number
*/
parseBytes(str) {
const match = str.match(/^([\d.]+)\s*([KMGT]?B?)$/i);
parseBytes(string_) {
const match = string_.match(/^([\d.]+)\s*([KMGT]?B?)$/i);
if (!match) return 0;

const value = parseFloat(match[1]);

const value = Number.parseFloat(match[1]);
const unit = match[2].toUpperCase();

const multipliers = {
'B': 1,
'KB': 1024,
'MB': 1024 * 1024,
'GB': 1024 * 1024 * 1024
B: 1,
KB: 1024,
MB: 1024 * 1024,
GB: 1024 * 1024 * 1024,
};

return value * (multipliers[unit] || 1);
}

@@ -221,4 +222,4 @@ class MemoryProfiler {
}

// Export singleton instance
module.exports = new MemoryProfiler();
module.exports = new MemoryProfiler();

@@ -17,13 +17,13 @@ class ModuleManager {
const modules = await Promise.all([
this.getModule('chalk'),
this.getModule('ora'),
this.getModule('inquirer')
this.getModule('inquirer'),
]);

return {
chalk: modules[0],
ora: modules[1],
inquirer: modules[2]
inquirer: modules[2],
};
}

@@ -64,18 +64,24 @@ class ModuleManager {
*/
async _loadModule(moduleName) {
switch (moduleName) {
case 'chalk':
case 'chalk': {
return (await import('chalk')).default;
case 'ora':
}
case 'ora': {
return (await import('ora')).default;
case 'inquirer':
}
case 'inquirer': {
return (await import('inquirer')).default;
case 'glob':
}
case 'glob': {
return (await import('glob')).glob;
case 'globSync':
}
case 'globSync': {
return (await import('glob')).globSync;
default:
}
default: {
throw new Error(`Unknown module: ${moduleName}`);
}
}
}

@@ -93,13 +99,11 @@ class ModuleManager {
* @returns {Promise<Object>} Object with module names as keys
*/
async getModules(moduleNames) {
const modules = await Promise.all(
moduleNames.map(name => this.getModule(name))
);
const modules = await Promise.all(moduleNames.map((name) => this.getModule(name)));

return moduleNames.reduce((acc, name, index) => {
acc[name] = modules[index];
return acc;
return moduleNames.reduce((accumulator, name, index) => {
accumulator[name] = modules[index];
return accumulator;
}, {});
}
}
@@ -107,4 +111,4 @@ class ModuleManager {
// Singleton instance
const moduleManager = new ModuleManager();

module.exports = moduleManager;
module.exports = moduleManager;

@@ -43,18 +43,18 @@ class ResourceLocator {
*/
async findFiles(pattern, options = {}) {
const cacheKey = `${pattern}:${JSON.stringify(options)}`;

if (this._globCache.has(cacheKey)) {
return this._globCache.get(cacheKey);
}

const { glob } = await moduleManager.getModules(['glob']);
const files = await glob(pattern, options);

// Cache for 5 minutes
this._globCache.set(cacheKey, files);
setTimeout(() => this._globCache.delete(cacheKey), 5 * 60 * 1000);

return files;
}

@@ -65,7 +65,7 @@ class ResourceLocator {
*/
async getAgentPath(agentId) {
const cacheKey = `agent:${agentId}`;

if (this._pathCache.has(cacheKey)) {
return this._pathCache.get(cacheKey);
}
@@ -96,7 +96,7 @@ class ResourceLocator {
*/
async getAvailableAgents() {
const cacheKey = 'all-agents';

if (this._pathCache.has(cacheKey)) {
return this._pathCache.get(cacheKey);
}
@@ -107,14 +107,11 @@ class ResourceLocator {

// Get agents from bmad-core
const coreAgents = await this.findFiles('agents/*.md', {
cwd: this.getBmadCorePath()
cwd: this.getBmadCorePath(),
});

for (const agentFile of coreAgents) {
const content = await fs.readFile(
path.join(this.getBmadCorePath(), agentFile),
'utf8'
);
const content = await fs.readFile(path.join(this.getBmadCorePath(), agentFile), 'utf8');
const yamlContent = extractYamlFromAgent(content);
if (yamlContent) {
try {
@@ -123,9 +120,9 @@ class ResourceLocator {
id: path.basename(agentFile, '.md'),
name: metadata.agent_name || path.basename(agentFile, '.md'),
description: metadata.description || 'No description available',
source: 'core'
source: 'core',
});
} catch (e) {
} catch {
// Skip invalid agents
}
}
@@ -144,7 +141,7 @@ class ResourceLocator {
*/
async getExpansionPacks() {
const cacheKey = 'expansion-packs';

if (this._pathCache.has(cacheKey)) {
return this._pathCache.get(cacheKey);
}
@@ -154,7 +151,7 @@ class ResourceLocator {

if (await fs.pathExists(expansionPacksPath)) {
const entries = await fs.readdir(expansionPacksPath, { withFileTypes: true });

for (const entry of entries) {
if (entry.isDirectory()) {
const configPath = path.join(expansionPacksPath, entry.name, 'config.yaml');
@@ -167,11 +164,12 @@ class ResourceLocator {
name: config.name || entry.name,
version: config.version || '1.0.0',
description: config.description || 'No description available',
shortTitle: config['short-title'] || config.description || 'No description available',
shortTitle:
config['short-title'] || config.description || 'No description available',
author: config.author || 'Unknown',
path: path.join(expansionPacksPath, entry.name)
path: path.join(expansionPacksPath, entry.name),
});
} catch (e) {
} catch {
// Skip invalid packs
}
}
@@ -193,13 +191,13 @@ class ResourceLocator {
*/
async getTeamConfig(teamId) {
const cacheKey = `team:${teamId}`;

if (this._pathCache.has(cacheKey)) {
return this._pathCache.get(cacheKey);
}

const teamPath = path.join(this.getBmadCorePath(), 'agent-teams', `${teamId}.yaml`);

if (await fs.pathExists(teamPath)) {
try {
const yaml = require('js-yaml');
@@ -207,7 +205,7 @@ class ResourceLocator {
const config = yaml.load(content);
this._pathCache.set(cacheKey, config);
return config;
} catch (e) {
} catch {
return null;
}
}
@@ -222,7 +220,7 @@ class ResourceLocator {
*/
async getAgentDependencies(agentId) {
const cacheKey = `deps:${agentId}`;

if (this._pathCache.has(cacheKey)) {
return this._pathCache.get(cacheKey);
}
@@ -244,11 +242,11 @@ class ResourceLocator {
const yaml = require('js-yaml');
const metadata = yaml.load(yamlContent);
const dependencies = metadata.dependencies || {};

// Flatten dependencies
const allDeps = [];
const byType = {};

for (const [type, deps] of Object.entries(dependencies)) {
if (Array.isArray(deps)) {
byType[type] = deps;
@@ -261,7 +259,7 @@ class ResourceLocator {
const result = { all: allDeps, byType };
this._pathCache.set(cacheKey, result);
return result;
} catch (e) {
} catch {
return { all: [], byType: {} };
}
}
@@ -281,13 +279,13 @@ class ResourceLocator {
*/
async getIdeConfig(ideId) {
const cacheKey = `ide:${ideId}`;

if (this._pathCache.has(cacheKey)) {
return this._pathCache.get(cacheKey);
}

const idePath = path.join(this.getBmadCorePath(), 'ide-rules', `${ideId}.yaml`);

if (await fs.pathExists(idePath)) {
try {
const yaml = require('js-yaml');
@@ -295,7 +293,7 @@ class ResourceLocator {
const config = yaml.load(content);
this._pathCache.set(cacheKey, config);
return config;
} catch (e) {
} catch {
return null;
}
}
@@ -307,4 +305,4 @@ class ResourceLocator {
// Singleton instance
const resourceLocator = new ResourceLocator();

module.exports = resourceLocator;
module.exports = resourceLocator;