refactor: replace fs with secureFs for improved file handling

This commit updates various modules to use the secure file-system operations from the secureFs module instead of the native fs module. Key changes include:

- Replaced fs imports with secureFs in multiple route handlers and services to enhance security and consistency in file operations.
- Added centralized validation for working directories in the sdk-options module so that every AI model invocation runs against a vetted working directory (sketched below).

These changes aim to improve the security and maintainability of file handling across the application.
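
The sdk-options change is not part of the hunks shown below. As a rough sketch of the centralized working-directory validation described above, assuming a single validator that all SDK-option builders call (validateWorkingDirectory, ALLOWED_ROOTS, and PROJECTS_DIR are illustrative names, not taken from the repository):

// Hypothetical sketch only: centralize working-directory validation so every
// AI model invocation is built through one checked path.
import path from 'path';

const ALLOWED_ROOTS = [path.resolve(process.env.PROJECTS_DIR ?? '/workspace')];

export function validateWorkingDirectory(cwd: string): string {
  const resolved = path.resolve(cwd);
  const inside = ALLOWED_ROOTS.some(
    (root) => resolved === root || resolved.startsWith(root + path.sep)
  );
  if (!inside) {
    throw new Error(`Working directory is outside the allowed roots: ${resolved}`);
  }
  return resolved;
}

// Every caller that builds SDK options funnels its cwd through the validator.
export function buildSdkOptions(opts: { cwd: string; model: string }) {
  return { ...opts, cwd: validateWorkingDirectory(opts.cwd) };
}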
Test User
2025-12-21 01:32:26 -05:00
parent 2b5479ae0d
commit 077a63b03b
62 changed files with 4866 additions and 3350 deletions


@@ -2,71 +2,71 @@
  * Parse agent response and create feature files
  */
-import path from "path";
-import fs from "fs/promises";
-import type { EventEmitter } from "../../lib/events.js";
-import { createLogger } from "@automaker/utils";
-import { getFeaturesDir } from "@automaker/platform";
+import path from 'path';
+import * as secureFs from '../../lib/secure-fs.js';
+import type { EventEmitter } from '../../lib/events.js';
+import { createLogger } from '@automaker/utils';
+import { getFeaturesDir } from '@automaker/platform';
-const logger = createLogger("SpecRegeneration");
+const logger = createLogger('SpecRegeneration');
 export async function parseAndCreateFeatures(
   projectPath: string,
   content: string,
   events: EventEmitter
 ): Promise<void> {
-  logger.info("========== parseAndCreateFeatures() started ==========");
+  logger.info('========== parseAndCreateFeatures() started ==========');
   logger.info(`Content length: ${content.length} chars`);
-  logger.info("========== CONTENT RECEIVED FOR PARSING ==========");
+  logger.info('========== CONTENT RECEIVED FOR PARSING ==========');
   logger.info(content);
-  logger.info("========== END CONTENT ==========");
+  logger.info('========== END CONTENT ==========');
   try {
     // Extract JSON from response
-    logger.info("Extracting JSON from response...");
+    logger.info('Extracting JSON from response...');
     logger.info(`Looking for pattern: /{[\\s\\S]*"features"[\\s\\S]*}/`);
     const jsonMatch = content.match(/\{[\s\S]*"features"[\s\S]*\}/);
     if (!jsonMatch) {
-      logger.error("❌ No valid JSON found in response");
-      logger.error("Full content received:");
+      logger.error('❌ No valid JSON found in response');
+      logger.error('Full content received:');
       logger.error(content);
-      throw new Error("No valid JSON found in response");
+      throw new Error('No valid JSON found in response');
     }
     logger.info(`JSON match found (${jsonMatch[0].length} chars)`);
-    logger.info("========== MATCHED JSON ==========");
+    logger.info('========== MATCHED JSON ==========');
     logger.info(jsonMatch[0]);
-    logger.info("========== END MATCHED JSON ==========");
+    logger.info('========== END MATCHED JSON ==========');
     const parsed = JSON.parse(jsonMatch[0]);
     logger.info(`Parsed ${parsed.features?.length || 0} features`);
-    logger.info("Parsed features:", JSON.stringify(parsed.features, null, 2));
+    logger.info('Parsed features:', JSON.stringify(parsed.features, null, 2));
     const featuresDir = getFeaturesDir(projectPath);
-    await fs.mkdir(featuresDir, { recursive: true });
+    await secureFs.mkdir(featuresDir, { recursive: true });
     const createdFeatures: Array<{ id: string; title: string }> = [];
     for (const feature of parsed.features) {
-      logger.debug("Creating feature:", feature.id);
+      logger.debug('Creating feature:', feature.id);
       const featureDir = path.join(featuresDir, feature.id);
-      await fs.mkdir(featureDir, { recursive: true });
+      await secureFs.mkdir(featureDir, { recursive: true });
       const featureData = {
         id: feature.id,
-        category: feature.category || "Uncategorized",
+        category: feature.category || 'Uncategorized',
         title: feature.title,
         description: feature.description,
-        status: "backlog", // Features go to backlog - user must manually start them
+        status: 'backlog', // Features go to backlog - user must manually start them
         priority: feature.priority || 2,
-        complexity: feature.complexity || "moderate",
+        complexity: feature.complexity || 'moderate',
         dependencies: feature.dependencies || [],
         createdAt: new Date().toISOString(),
         updatedAt: new Date().toISOString(),
       };
-      await fs.writeFile(
-        path.join(featureDir, "feature.json"),
+      await secureFs.writeFile(
+        path.join(featureDir, 'feature.json'),
         JSON.stringify(featureData, null, 2)
       );
@@ -75,20 +75,20 @@ export async function parseAndCreateFeatures(
     logger.info(`✓ Created ${createdFeatures.length} features successfully`);
-    events.emit("spec-regeneration:event", {
-      type: "spec_regeneration_complete",
+    events.emit('spec-regeneration:event', {
+      type: 'spec_regeneration_complete',
       message: `Spec regeneration complete! Created ${createdFeatures.length} features.`,
       projectPath: projectPath,
     });
   } catch (error) {
-    logger.error("❌ parseAndCreateFeatures() failed:");
-    logger.error("Error:", error);
-    events.emit("spec-regeneration:event", {
-      type: "spec_regeneration_error",
+    logger.error('❌ parseAndCreateFeatures() failed:');
+    logger.error('Error:', error);
+    events.emit('spec-regeneration:event', {
+      type: 'spec_regeneration_error',
       error: (error as Error).message,
       projectPath: projectPath,
     });
   }
-  logger.debug("========== parseAndCreateFeatures() completed ==========");
+  logger.debug('========== parseAndCreateFeatures() completed ==========');
 }
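
The lib/secure-fs.js wrapper itself is not part of this file's hunks. A minimal sketch of what such a wrapper might look like, assuming it re-exports fs/promises operations behind a path-containment check (BASE_DIR, AUTOMAKER_DATA_DIR, and assertInsideBase are illustrative names, not taken from the repository):

// Hypothetical sketch only: confirm that every path stays inside an allowed
// base directory before delegating to the native fs/promises implementation.
import fs from 'fs/promises';
import path from 'path';

const BASE_DIR = path.resolve(process.env.AUTOMAKER_DATA_DIR ?? process.cwd());

function assertInsideBase(target: string): string {
  const resolved = path.resolve(target);
  if (resolved !== BASE_DIR && !resolved.startsWith(BASE_DIR + path.sep)) {
    throw new Error(`Path escapes the allowed base directory: ${resolved}`);
  }
  return resolved;
}

export async function mkdir(dir: string, options?: { recursive?: boolean }) {
  return fs.mkdir(assertInsideBase(dir), options);
}

export async function writeFile(file: string, data: string): Promise<void> {
  return fs.writeFile(assertInsideBase(file), data);
}

export async function readFile(file: string): Promise<string> {
  return fs.readFile(assertInsideBase(file), 'utf-8');
}

Because a wrapper of this shape keeps the fs/promises call signatures, call sites such as the ones in the diff above only need the import swap (fs.mkdir to secureFs.mkdir, fs.writeFile to secureFs.writeFile) rather than any new arguments.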