mirror of
https://github.com/AutoMaker-Org/automaker.git
synced 2026-02-02 08:33:36 +00:00
feat(ui): add feature export and import functionality
- Introduced new routes for exporting and importing features, enhancing project management capabilities. - Added UI components for export and import dialogs, allowing users to easily manage feature data. - Updated HTTP API client to support export and import operations with appropriate options and responses. - Enhanced board view with controls for triggering export and import actions, improving user experience. - Defined new types for feature export and import, ensuring type safety and clarity in data handling.
This commit is contained in:
@@ -40,7 +40,8 @@
|
||||
"express": "5.2.1",
|
||||
"morgan": "1.10.1",
|
||||
"node-pty": "1.1.0-beta41",
|
||||
"ws": "8.18.3"
|
||||
"ws": "8.18.3",
|
||||
"yaml": "2.7.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cookie": "0.6.0",
|
||||
|
||||
@@ -16,6 +16,8 @@ import { createBulkDeleteHandler } from './routes/bulk-delete.js';
|
||||
import { createDeleteHandler } from './routes/delete.js';
|
||||
import { createAgentOutputHandler, createRawOutputHandler } from './routes/agent-output.js';
|
||||
import { createGenerateTitleHandler } from './routes/generate-title.js';
|
||||
import { createExportHandler } from './routes/export.js';
|
||||
import { createImportHandler, createConflictCheckHandler } from './routes/import.js';
|
||||
|
||||
export function createFeaturesRoutes(
|
||||
featureLoader: FeatureLoader,
|
||||
@@ -46,6 +48,13 @@ export function createFeaturesRoutes(
|
||||
router.post('/agent-output', createAgentOutputHandler(featureLoader));
|
||||
router.post('/raw-output', createRawOutputHandler(featureLoader));
|
||||
router.post('/generate-title', createGenerateTitleHandler(settingsService));
|
||||
router.post('/export', validatePathParams('projectPath'), createExportHandler(featureLoader));
|
||||
router.post('/import', validatePathParams('projectPath'), createImportHandler(featureLoader));
|
||||
router.post(
|
||||
'/check-conflicts',
|
||||
validatePathParams('projectPath'),
|
||||
createConflictCheckHandler(featureLoader)
|
||||
);
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
96
apps/server/src/routes/features/routes/export.ts
Normal file
96
apps/server/src/routes/features/routes/export.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
/**
|
||||
* POST /export endpoint - Export features to JSON or YAML format
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import {
|
||||
getFeatureExportService,
|
||||
type ExportFormat,
|
||||
type BulkExportOptions,
|
||||
} from '../../../services/feature-export-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
interface ExportRequest {
|
||||
projectPath: string;
|
||||
/** Feature IDs to export. If empty/undefined, exports all features */
|
||||
featureIds?: string[];
|
||||
/** Export format: 'json' or 'yaml' */
|
||||
format?: ExportFormat;
|
||||
/** Whether to include description history */
|
||||
includeHistory?: boolean;
|
||||
/** Whether to include plan spec */
|
||||
includePlanSpec?: boolean;
|
||||
/** Filter by category */
|
||||
category?: string;
|
||||
/** Filter by status */
|
||||
status?: string;
|
||||
/** Pretty print output */
|
||||
prettyPrint?: boolean;
|
||||
/** Optional metadata to include */
|
||||
metadata?: {
|
||||
projectName?: string;
|
||||
projectPath?: string;
|
||||
branch?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
}
|
||||
|
||||
export function createExportHandler(featureLoader: FeatureLoader) {
|
||||
const exportService = getFeatureExportService();
|
||||
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const {
|
||||
projectPath,
|
||||
featureIds,
|
||||
format = 'json',
|
||||
includeHistory = true,
|
||||
includePlanSpec = true,
|
||||
category,
|
||||
status,
|
||||
prettyPrint = true,
|
||||
metadata,
|
||||
} = req.body as ExportRequest;
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate format
|
||||
if (format !== 'json' && format !== 'yaml') {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'format must be "json" or "yaml"',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const options: BulkExportOptions = {
|
||||
format,
|
||||
includeHistory,
|
||||
includePlanSpec,
|
||||
category,
|
||||
status,
|
||||
featureIds,
|
||||
prettyPrint,
|
||||
metadata,
|
||||
};
|
||||
|
||||
const exportData = await exportService.exportFeatures(projectPath, options);
|
||||
|
||||
// Return the export data as a string in the response
|
||||
res.json({
|
||||
success: true,
|
||||
data: exportData,
|
||||
format,
|
||||
contentType: format === 'json' ? 'application/json' : 'application/x-yaml',
|
||||
filename: `features-export.${format === 'json' ? 'json' : 'yaml'}`,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Export features failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
215
apps/server/src/routes/features/routes/import.ts
Normal file
215
apps/server/src/routes/features/routes/import.ts
Normal file
@@ -0,0 +1,215 @@
|
||||
/**
|
||||
* POST /import endpoint - Import features from JSON or YAML format
|
||||
*/
|
||||
|
||||
import type { Request, Response } from 'express';
|
||||
import type { FeatureLoader } from '../../../services/feature-loader.js';
|
||||
import type { FeatureImportResult, Feature, FeatureExport } from '@automaker/types';
|
||||
import { getFeatureExportService } from '../../../services/feature-export-service.js';
|
||||
import { getErrorMessage, logError } from '../common.js';
|
||||
|
||||
interface ImportRequest {
|
||||
projectPath: string;
|
||||
/** Raw JSON or YAML string containing feature data */
|
||||
data: string;
|
||||
/** Whether to overwrite existing features with same ID */
|
||||
overwrite?: boolean;
|
||||
/** Whether to preserve branch info from imported features */
|
||||
preserveBranchInfo?: boolean;
|
||||
/** Optional category to assign to all imported features */
|
||||
targetCategory?: string;
|
||||
}
|
||||
|
||||
interface ConflictCheckRequest {
|
||||
projectPath: string;
|
||||
/** Raw JSON or YAML string containing feature data */
|
||||
data: string;
|
||||
}
|
||||
|
||||
interface ConflictInfo {
|
||||
featureId: string;
|
||||
title?: string;
|
||||
existingTitle?: string;
|
||||
hasConflict: boolean;
|
||||
}
|
||||
|
||||
export function createImportHandler(featureLoader: FeatureLoader) {
|
||||
const exportService = getFeatureExportService();
|
||||
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const {
|
||||
projectPath,
|
||||
data,
|
||||
overwrite = false,
|
||||
preserveBranchInfo = false,
|
||||
targetCategory,
|
||||
} = req.body as ImportRequest;
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
res.status(400).json({ success: false, error: 'data is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Detect format and parse the data
|
||||
const format = exportService.detectFormat(data);
|
||||
if (!format) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid data format. Expected valid JSON or YAML.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const parsed = exportService.parseImportData(data);
|
||||
if (!parsed) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Failed to parse import data. Ensure it is valid JSON or YAML.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine if this is a single feature or bulk import
|
||||
const isBulkImport =
|
||||
'features' in parsed && Array.isArray((parsed as { features: unknown }).features);
|
||||
|
||||
let results: FeatureImportResult[];
|
||||
|
||||
if (isBulkImport) {
|
||||
// Bulk import
|
||||
results = await exportService.importFeatures(projectPath, data, {
|
||||
overwrite,
|
||||
preserveBranchInfo,
|
||||
targetCategory,
|
||||
});
|
||||
} else {
|
||||
// Single feature import - we know it's not a bulk export at this point
|
||||
// It must be either a Feature or FeatureExport
|
||||
const singleData = parsed as Feature | FeatureExport;
|
||||
|
||||
const result = await exportService.importFeature(projectPath, {
|
||||
data: singleData,
|
||||
overwrite,
|
||||
preserveBranchInfo,
|
||||
targetCategory,
|
||||
});
|
||||
results = [result];
|
||||
}
|
||||
|
||||
const successCount = results.filter((r) => r.success).length;
|
||||
const failureCount = results.filter((r) => !r.success).length;
|
||||
const allSuccessful = failureCount === 0;
|
||||
|
||||
res.json({
|
||||
success: allSuccessful,
|
||||
importedCount: successCount,
|
||||
failedCount: failureCount,
|
||||
results,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Import features failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create handler for checking conflicts before import
|
||||
*/
|
||||
export function createConflictCheckHandler(featureLoader: FeatureLoader) {
|
||||
const exportService = getFeatureExportService();
|
||||
|
||||
return async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const { projectPath, data } = req.body as ConflictCheckRequest;
|
||||
|
||||
if (!projectPath) {
|
||||
res.status(400).json({ success: false, error: 'projectPath is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
res.status(400).json({ success: false, error: 'data is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse the import data
|
||||
const format = exportService.detectFormat(data);
|
||||
if (!format) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid data format. Expected valid JSON or YAML.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const parsed = exportService.parseImportData(data);
|
||||
if (!parsed) {
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
error: 'Failed to parse import data.',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract features from the data
|
||||
type FeatureExportType = { feature: { id: string; title?: string } };
|
||||
type BulkExportType = { features: FeatureExportType[] };
|
||||
type RawFeatureType = { id: string; title?: string };
|
||||
|
||||
let featuresToCheck: Array<{ id: string; title?: string }> = [];
|
||||
|
||||
if ('features' in parsed && Array.isArray((parsed as BulkExportType).features)) {
|
||||
// Bulk export format
|
||||
featuresToCheck = (parsed as BulkExportType).features.map((f) => ({
|
||||
id: f.feature.id,
|
||||
title: f.feature.title,
|
||||
}));
|
||||
} else if ('feature' in parsed) {
|
||||
// Single FeatureExport format
|
||||
const featureExport = parsed as FeatureExportType;
|
||||
featuresToCheck = [
|
||||
{
|
||||
id: featureExport.feature.id,
|
||||
title: featureExport.feature.title,
|
||||
},
|
||||
];
|
||||
} else if ('id' in parsed) {
|
||||
// Raw Feature format
|
||||
const rawFeature = parsed as RawFeatureType;
|
||||
featuresToCheck = [{ id: rawFeature.id, title: rawFeature.title }];
|
||||
}
|
||||
|
||||
// Check each feature for conflicts
|
||||
const conflicts: ConflictInfo[] = [];
|
||||
for (const feature of featuresToCheck) {
|
||||
const existing = await featureLoader.get(projectPath, feature.id);
|
||||
conflicts.push({
|
||||
featureId: feature.id,
|
||||
title: feature.title,
|
||||
existingTitle: existing?.title,
|
||||
hasConflict: !!existing,
|
||||
});
|
||||
}
|
||||
|
||||
const hasConflicts = conflicts.some((c) => c.hasConflict);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
hasConflicts,
|
||||
conflicts,
|
||||
totalFeatures: featuresToCheck.length,
|
||||
conflictCount: conflicts.filter((c) => c.hasConflict).length,
|
||||
});
|
||||
} catch (error) {
|
||||
logError(error, 'Conflict check failed');
|
||||
res.status(500).json({ success: false, error: getErrorMessage(error) });
|
||||
}
|
||||
};
|
||||
}
|
||||
521
apps/server/src/services/feature-export-service.ts
Normal file
521
apps/server/src/services/feature-export-service.ts
Normal file
@@ -0,0 +1,521 @@
|
||||
/**
|
||||
* Feature Export Service - Handles exporting and importing features in JSON/YAML formats
|
||||
*
|
||||
* Provides functionality to:
|
||||
* - Export single features to JSON or YAML format
|
||||
* - Export multiple features (bulk export)
|
||||
* - Import features from JSON or YAML data
|
||||
* - Validate import data for compatibility
|
||||
*/
|
||||
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { stringify as yamlStringify, parse as yamlParse } from 'yaml';
|
||||
import type { Feature, FeatureExport, FeatureImport, FeatureImportResult } from '@automaker/types';
|
||||
import { FeatureLoader } from './feature-loader.js';
|
||||
|
||||
const logger = createLogger('FeatureExportService');
|
||||
|
||||
/** Current export format version */
|
||||
export const FEATURE_EXPORT_VERSION = '1.0.0';
|
||||
|
||||
/** Supported export formats */
|
||||
export type ExportFormat = 'json' | 'yaml';
|
||||
|
||||
/** Options for exporting features */
|
||||
export interface ExportOptions {
|
||||
/** Format to export in (default: 'json') */
|
||||
format?: ExportFormat;
|
||||
/** Whether to include description history (default: true) */
|
||||
includeHistory?: boolean;
|
||||
/** Whether to include plan spec (default: true) */
|
||||
includePlanSpec?: boolean;
|
||||
/** Optional metadata to include */
|
||||
metadata?: {
|
||||
projectName?: string;
|
||||
projectPath?: string;
|
||||
branch?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
/** Who/what is performing the export */
|
||||
exportedBy?: string;
|
||||
/** Pretty print output (default: true) */
|
||||
prettyPrint?: boolean;
|
||||
}
|
||||
|
||||
/** Options for bulk export */
|
||||
export interface BulkExportOptions extends ExportOptions {
|
||||
/** Filter by category */
|
||||
category?: string;
|
||||
/** Filter by status */
|
||||
status?: string;
|
||||
/** Feature IDs to include (if not specified, exports all) */
|
||||
featureIds?: string[];
|
||||
}
|
||||
|
||||
/** Result of a bulk export */
|
||||
export interface BulkExportResult {
|
||||
/** Export format version */
|
||||
version: string;
|
||||
/** ISO date string when the export was created */
|
||||
exportedAt: string;
|
||||
/** Number of features exported */
|
||||
count: number;
|
||||
/** The exported features */
|
||||
features: FeatureExport[];
|
||||
/** Export metadata */
|
||||
metadata?: {
|
||||
projectName?: string;
|
||||
projectPath?: string;
|
||||
branch?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* FeatureExportService - Manages feature export and import operations
|
||||
*/
|
||||
export class FeatureExportService {
|
||||
private featureLoader: FeatureLoader;
|
||||
|
||||
constructor(featureLoader?: FeatureLoader) {
|
||||
this.featureLoader = featureLoader || new FeatureLoader();
|
||||
}
|
||||
|
||||
/**
|
||||
* Export a single feature to the specified format
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param featureId - ID of the feature to export
|
||||
* @param options - Export options
|
||||
* @returns Promise resolving to the exported feature string
|
||||
*/
|
||||
async exportFeature(
|
||||
projectPath: string,
|
||||
featureId: string,
|
||||
options: ExportOptions = {}
|
||||
): Promise<string> {
|
||||
const feature = await this.featureLoader.get(projectPath, featureId);
|
||||
if (!feature) {
|
||||
throw new Error(`Feature ${featureId} not found`);
|
||||
}
|
||||
|
||||
return this.exportFeatureData(feature, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Export feature data to the specified format (without fetching from disk)
|
||||
*
|
||||
* @param feature - The feature to export
|
||||
* @param options - Export options
|
||||
* @returns The exported feature string
|
||||
*/
|
||||
exportFeatureData(feature: Feature, options: ExportOptions = {}): string {
|
||||
const {
|
||||
format = 'json',
|
||||
includeHistory = true,
|
||||
includePlanSpec = true,
|
||||
metadata,
|
||||
exportedBy,
|
||||
prettyPrint = true,
|
||||
} = options;
|
||||
|
||||
// Prepare feature data, optionally excluding some fields
|
||||
const featureData = this.prepareFeatureForExport(feature, {
|
||||
includeHistory,
|
||||
includePlanSpec,
|
||||
});
|
||||
|
||||
const exportData: FeatureExport = {
|
||||
version: FEATURE_EXPORT_VERSION,
|
||||
feature: featureData,
|
||||
exportedAt: new Date().toISOString(),
|
||||
...(exportedBy ? { exportedBy } : {}),
|
||||
...(metadata ? { metadata } : {}),
|
||||
};
|
||||
|
||||
return this.serializeExport(exportData, format, prettyPrint);
|
||||
}
|
||||
|
||||
/**
|
||||
* Export multiple features to the specified format
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param options - Bulk export options
|
||||
* @returns Promise resolving to the exported features string
|
||||
*/
|
||||
async exportFeatures(projectPath: string, options: BulkExportOptions = {}): Promise<string> {
|
||||
const {
|
||||
format = 'json',
|
||||
category,
|
||||
status,
|
||||
featureIds,
|
||||
includeHistory = true,
|
||||
includePlanSpec = true,
|
||||
metadata,
|
||||
prettyPrint = true,
|
||||
} = options;
|
||||
|
||||
// Get all features
|
||||
let features = await this.featureLoader.getAll(projectPath);
|
||||
|
||||
// Apply filters
|
||||
if (featureIds && featureIds.length > 0) {
|
||||
const idSet = new Set(featureIds);
|
||||
features = features.filter((f) => idSet.has(f.id));
|
||||
}
|
||||
if (category) {
|
||||
features = features.filter((f) => f.category === category);
|
||||
}
|
||||
if (status) {
|
||||
features = features.filter((f) => f.status === status);
|
||||
}
|
||||
|
||||
// Prepare feature exports
|
||||
const featureExports: FeatureExport[] = features.map((feature) => ({
|
||||
version: FEATURE_EXPORT_VERSION,
|
||||
feature: this.prepareFeatureForExport(feature, { includeHistory, includePlanSpec }),
|
||||
exportedAt: new Date().toISOString(),
|
||||
}));
|
||||
|
||||
const bulkExport: BulkExportResult = {
|
||||
version: FEATURE_EXPORT_VERSION,
|
||||
exportedAt: new Date().toISOString(),
|
||||
count: featureExports.length,
|
||||
features: featureExports,
|
||||
...(metadata ? { metadata } : {}),
|
||||
};
|
||||
|
||||
logger.info(`Exported ${featureExports.length} features from ${projectPath}`);
|
||||
|
||||
return this.serializeBulkExport(bulkExport, format, prettyPrint);
|
||||
}
|
||||
|
||||
/**
|
||||
* Import a feature from JSON or YAML data
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param importData - Import configuration
|
||||
* @returns Promise resolving to the import result
|
||||
*/
|
||||
async importFeature(
|
||||
projectPath: string,
|
||||
importData: FeatureImport
|
||||
): Promise<FeatureImportResult> {
|
||||
const warnings: string[] = [];
|
||||
const errors: string[] = [];
|
||||
|
||||
try {
|
||||
// Extract feature from data (handle both raw Feature and wrapped FeatureExport)
|
||||
const feature = this.extractFeatureFromImport(importData.data);
|
||||
if (!feature) {
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: ['Invalid import data: could not extract feature'],
|
||||
};
|
||||
}
|
||||
|
||||
// Validate required fields
|
||||
const validationErrors = this.validateFeature(feature);
|
||||
if (validationErrors.length > 0) {
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: validationErrors,
|
||||
};
|
||||
}
|
||||
|
||||
// Determine the feature ID to use
|
||||
const featureId = importData.newId || feature.id || this.featureLoader.generateFeatureId();
|
||||
|
||||
// Check for existing feature
|
||||
const existingFeature = await this.featureLoader.get(projectPath, featureId);
|
||||
if (existingFeature && !importData.overwrite) {
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: [`Feature with ID ${featureId} already exists. Set overwrite: true to replace.`],
|
||||
};
|
||||
}
|
||||
|
||||
// Prepare feature for import
|
||||
const featureToImport: Feature = {
|
||||
...feature,
|
||||
id: featureId,
|
||||
// Optionally override category
|
||||
...(importData.targetCategory ? { category: importData.targetCategory } : {}),
|
||||
// Clear branch info if not preserving
|
||||
...(importData.preserveBranchInfo ? {} : { branchName: undefined }),
|
||||
};
|
||||
|
||||
// Clear runtime-specific fields that shouldn't be imported
|
||||
delete featureToImport.titleGenerating;
|
||||
delete featureToImport.error;
|
||||
|
||||
// Handle image paths - they won't be valid after import
|
||||
if (featureToImport.imagePaths && featureToImport.imagePaths.length > 0) {
|
||||
warnings.push(
|
||||
`Feature had ${featureToImport.imagePaths.length} image path(s) that were cleared during import. Images must be re-attached.`
|
||||
);
|
||||
featureToImport.imagePaths = [];
|
||||
}
|
||||
|
||||
// Handle text file paths - they won't be valid after import
|
||||
if (featureToImport.textFilePaths && featureToImport.textFilePaths.length > 0) {
|
||||
warnings.push(
|
||||
`Feature had ${featureToImport.textFilePaths.length} text file path(s) that were cleared during import. Files must be re-attached.`
|
||||
);
|
||||
featureToImport.textFilePaths = [];
|
||||
}
|
||||
|
||||
// Create or update the feature
|
||||
if (existingFeature) {
|
||||
await this.featureLoader.update(projectPath, featureId, featureToImport);
|
||||
logger.info(`Updated feature ${featureId} via import`);
|
||||
} else {
|
||||
await this.featureLoader.create(projectPath, featureToImport);
|
||||
logger.info(`Created feature ${featureId} via import`);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
featureId,
|
||||
importedAt: new Date().toISOString(),
|
||||
warnings: warnings.length > 0 ? warnings : undefined,
|
||||
wasOverwritten: !!existingFeature,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Failed to import feature:', error);
|
||||
return {
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: [`Import failed: ${error instanceof Error ? error.message : String(error)}`],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Import multiple features from JSON or YAML data
|
||||
*
|
||||
* @param projectPath - Path to the project
|
||||
* @param data - Raw JSON or YAML string, or parsed data
|
||||
* @param options - Import options applied to all features
|
||||
* @returns Promise resolving to array of import results
|
||||
*/
|
||||
async importFeatures(
|
||||
projectPath: string,
|
||||
data: string | BulkExportResult,
|
||||
options: Omit<FeatureImport, 'data'> = {}
|
||||
): Promise<FeatureImportResult[]> {
|
||||
let bulkData: BulkExportResult;
|
||||
|
||||
// Parse if string
|
||||
if (typeof data === 'string') {
|
||||
const parsed = this.parseImportData(data);
|
||||
if (!parsed || !this.isBulkExport(parsed)) {
|
||||
return [
|
||||
{
|
||||
success: false,
|
||||
importedAt: new Date().toISOString(),
|
||||
errors: ['Invalid bulk import data: expected BulkExportResult format'],
|
||||
},
|
||||
];
|
||||
}
|
||||
bulkData = parsed as BulkExportResult;
|
||||
} else {
|
||||
bulkData = data;
|
||||
}
|
||||
|
||||
// Import each feature
|
||||
const results: FeatureImportResult[] = [];
|
||||
for (const featureExport of bulkData.features) {
|
||||
const result = await this.importFeature(projectPath, {
|
||||
data: featureExport,
|
||||
...options,
|
||||
});
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
const successCount = results.filter((r) => r.success).length;
|
||||
logger.info(`Bulk import complete: ${successCount}/${results.length} features imported`);
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse import data from JSON or YAML string
|
||||
*
|
||||
* @param data - Raw JSON or YAML string
|
||||
* @returns Parsed data or null if parsing fails
|
||||
*/
|
||||
parseImportData(data: string): Feature | FeatureExport | BulkExportResult | null {
|
||||
const trimmed = data.trim();
|
||||
|
||||
// Try JSON first
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
return JSON.parse(trimmed);
|
||||
} catch {
|
||||
// Fall through to YAML
|
||||
}
|
||||
}
|
||||
|
||||
// Try YAML
|
||||
try {
|
||||
return yamlParse(trimmed);
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse import data:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect the format of import data
|
||||
*
|
||||
* @param data - Raw string data
|
||||
* @returns Detected format or null if unknown
|
||||
*/
|
||||
detectFormat(data: string): ExportFormat | null {
|
||||
const trimmed = data.trim();
|
||||
|
||||
// JSON detection
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
JSON.parse(trimmed);
|
||||
return 'json';
|
||||
} catch {
|
||||
// Not valid JSON
|
||||
}
|
||||
}
|
||||
|
||||
// YAML detection (if it parses and wasn't JSON)
|
||||
try {
|
||||
yamlParse(trimmed);
|
||||
return 'yaml';
|
||||
} catch {
|
||||
// Not valid YAML either
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare a feature for export by optionally removing fields
|
||||
*/
|
||||
private prepareFeatureForExport(
|
||||
feature: Feature,
|
||||
options: { includeHistory?: boolean; includePlanSpec?: boolean }
|
||||
): Feature {
|
||||
const { includeHistory = true, includePlanSpec = true } = options;
|
||||
|
||||
// Clone to avoid modifying original
|
||||
const exported: Feature = { ...feature };
|
||||
|
||||
// Remove transient fields that shouldn't be exported
|
||||
delete exported.titleGenerating;
|
||||
delete exported.error;
|
||||
|
||||
// Optionally exclude history
|
||||
if (!includeHistory) {
|
||||
delete exported.descriptionHistory;
|
||||
}
|
||||
|
||||
// Optionally exclude plan spec
|
||||
if (!includePlanSpec) {
|
||||
delete exported.planSpec;
|
||||
}
|
||||
|
||||
return exported;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a Feature from import data (handles both raw and wrapped formats)
|
||||
*/
|
||||
private extractFeatureFromImport(data: Feature | FeatureExport): Feature | null {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check if it's a FeatureExport wrapper
|
||||
if ('version' in data && 'feature' in data && 'exportedAt' in data) {
|
||||
const exportData = data as FeatureExport;
|
||||
return exportData.feature;
|
||||
}
|
||||
|
||||
// Assume it's a raw Feature
|
||||
return data as Feature;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if parsed data is a bulk export
|
||||
*/
|
||||
private isBulkExport(data: unknown): data is BulkExportResult {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const obj = data as Record<string, unknown>;
|
||||
return 'version' in obj && 'features' in obj && Array.isArray(obj.features);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a feature has required fields
|
||||
*/
|
||||
private validateFeature(feature: Feature): string[] {
|
||||
const errors: string[] = [];
|
||||
|
||||
if (!feature.description && !feature.title) {
|
||||
errors.push('Feature must have at least a title or description');
|
||||
}
|
||||
|
||||
if (!feature.category) {
|
||||
errors.push('Feature must have a category');
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize export data to string
|
||||
*/
|
||||
private serializeExport(data: FeatureExport, format: ExportFormat, prettyPrint: boolean): string {
|
||||
if (format === 'yaml') {
|
||||
return yamlStringify(data, {
|
||||
indent: 2,
|
||||
lineWidth: 120,
|
||||
});
|
||||
}
|
||||
|
||||
return prettyPrint ? JSON.stringify(data, null, 2) : JSON.stringify(data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize bulk export data to string
|
||||
*/
|
||||
private serializeBulkExport(
|
||||
data: BulkExportResult,
|
||||
format: ExportFormat,
|
||||
prettyPrint: boolean
|
||||
): string {
|
||||
if (format === 'yaml') {
|
||||
return yamlStringify(data, {
|
||||
indent: 2,
|
||||
lineWidth: 120,
|
||||
});
|
||||
}
|
||||
|
||||
return prettyPrint ? JSON.stringify(data, null, 2) : JSON.stringify(data);
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
let featureExportServiceInstance: FeatureExportService | null = null;
|
||||
|
||||
/**
|
||||
* Get the singleton feature export service instance
|
||||
*/
|
||||
export function getFeatureExportService(): FeatureExportService {
|
||||
if (!featureExportServiceInstance) {
|
||||
featureExportServiceInstance = new FeatureExportService();
|
||||
}
|
||||
return featureExportServiceInstance;
|
||||
}
|
||||
623
apps/server/tests/unit/services/feature-export-service.test.ts
Normal file
623
apps/server/tests/unit/services/feature-export-service.test.ts
Normal file
@@ -0,0 +1,623 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { FeatureExportService, FEATURE_EXPORT_VERSION } from '@/services/feature-export-service.js';
|
||||
import type { Feature, FeatureExport } from '@automaker/types';
|
||||
import type { FeatureLoader } from '@/services/feature-loader.js';
|
||||
|
||||
describe('feature-export-service.ts', () => {
|
||||
let exportService: FeatureExportService;
|
||||
let mockFeatureLoader: {
|
||||
get: ReturnType<typeof vi.fn>;
|
||||
getAll: ReturnType<typeof vi.fn>;
|
||||
create: ReturnType<typeof vi.fn>;
|
||||
update: ReturnType<typeof vi.fn>;
|
||||
generateFeatureId: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
const testProjectPath = '/test/project';
|
||||
|
||||
const sampleFeature: Feature = {
|
||||
id: 'feature-123-abc',
|
||||
title: 'Test Feature',
|
||||
category: 'UI',
|
||||
description: 'A test feature description',
|
||||
status: 'pending',
|
||||
priority: 1,
|
||||
dependencies: ['feature-456'],
|
||||
descriptionHistory: [
|
||||
{
|
||||
description: 'Initial description',
|
||||
timestamp: '2024-01-01T00:00:00.000Z',
|
||||
source: 'initial',
|
||||
},
|
||||
],
|
||||
planSpec: {
|
||||
status: 'generated',
|
||||
content: 'Plan content',
|
||||
version: 1,
|
||||
reviewedByUser: false,
|
||||
},
|
||||
imagePaths: ['/tmp/image1.png', '/tmp/image2.jpg'],
|
||||
textFilePaths: [
|
||||
{
|
||||
id: 'file-1',
|
||||
path: '/tmp/doc.txt',
|
||||
filename: 'doc.txt',
|
||||
mimeType: 'text/plain',
|
||||
content: 'Some content',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Create mock FeatureLoader instance
|
||||
mockFeatureLoader = {
|
||||
get: vi.fn(),
|
||||
getAll: vi.fn(),
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
generateFeatureId: vi.fn().mockReturnValue('feature-mock-id'),
|
||||
};
|
||||
|
||||
// Inject mock via constructor
|
||||
exportService = new FeatureExportService(mockFeatureLoader as unknown as FeatureLoader);
|
||||
});
|
||||
|
||||
describe('exportFeatureData', () => {
|
||||
it('should export feature to JSON format', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, { format: 'json' });
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.version).toBe(FEATURE_EXPORT_VERSION);
|
||||
expect(parsed.feature.id).toBe(sampleFeature.id);
|
||||
expect(parsed.feature.title).toBe(sampleFeature.title);
|
||||
expect(parsed.exportedAt).toBeDefined();
|
||||
});
|
||||
|
||||
it('should export feature to YAML format', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, { format: 'yaml' });
|
||||
|
||||
expect(result).toContain('version:');
|
||||
expect(result).toContain('feature:');
|
||||
expect(result).toContain('Test Feature');
|
||||
expect(result).toContain('exportedAt:');
|
||||
});
|
||||
|
||||
it('should exclude description history when option is false', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, {
|
||||
format: 'json',
|
||||
includeHistory: false,
|
||||
});
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.feature.descriptionHistory).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should include description history by default', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, { format: 'json' });
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.feature.descriptionHistory).toBeDefined();
|
||||
expect(parsed.feature.descriptionHistory).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should exclude plan spec when option is false', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, {
|
||||
format: 'json',
|
||||
includePlanSpec: false,
|
||||
});
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.feature.planSpec).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should include plan spec by default', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, { format: 'json' });
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.feature.planSpec).toBeDefined();
|
||||
});
|
||||
|
||||
it('should include metadata when provided', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, {
|
||||
format: 'json',
|
||||
metadata: { projectName: 'TestProject', branch: 'main' },
|
||||
});
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.metadata).toEqual({ projectName: 'TestProject', branch: 'main' });
|
||||
});
|
||||
|
||||
it('should include exportedBy when provided', () => {
|
||||
const result = exportService.exportFeatureData(sampleFeature, {
|
||||
format: 'json',
|
||||
exportedBy: 'test-user',
|
||||
});
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.exportedBy).toBe('test-user');
|
||||
});
|
||||
|
||||
it('should remove transient fields (titleGenerating, error)', () => {
|
||||
const featureWithTransient: Feature = {
|
||||
...sampleFeature,
|
||||
titleGenerating: true,
|
||||
error: 'Some error',
|
||||
};
|
||||
|
||||
const result = exportService.exportFeatureData(featureWithTransient, { format: 'json' });
|
||||
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.feature.titleGenerating).toBeUndefined();
|
||||
expect(parsed.feature.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should support compact JSON (prettyPrint: false)', () => {
|
||||
const prettyResult = exportService.exportFeatureData(sampleFeature, {
|
||||
format: 'json',
|
||||
prettyPrint: true,
|
||||
});
|
||||
const compactResult = exportService.exportFeatureData(sampleFeature, {
|
||||
format: 'json',
|
||||
prettyPrint: false,
|
||||
});
|
||||
|
||||
// Compact should have no newlines/indentation
|
||||
expect(compactResult).not.toContain('\n');
|
||||
// Pretty should have newlines
|
||||
expect(prettyResult).toContain('\n');
|
||||
});
|
||||
});
|
||||
|
||||
describe('exportFeature', () => {
|
||||
it('should fetch and export feature by ID', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(sampleFeature);
|
||||
|
||||
const result = await exportService.exportFeature(testProjectPath, 'feature-123-abc');
|
||||
|
||||
expect(mockFeatureLoader.get).toHaveBeenCalledWith(testProjectPath, 'feature-123-abc');
|
||||
const parsed = JSON.parse(result) as FeatureExport;
|
||||
expect(parsed.feature.id).toBe(sampleFeature.id);
|
||||
});
|
||||
|
||||
it('should throw when feature not found', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
|
||||
await expect(exportService.exportFeature(testProjectPath, 'nonexistent')).rejects.toThrow(
|
||||
'Feature nonexistent not found'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('exportFeatures', () => {
|
||||
const features: Feature[] = [
|
||||
{ ...sampleFeature, id: 'feature-1', category: 'UI' },
|
||||
{ ...sampleFeature, id: 'feature-2', category: 'Backend', status: 'completed' },
|
||||
{ ...sampleFeature, id: 'feature-3', category: 'UI', status: 'pending' },
|
||||
];
|
||||
|
||||
it('should export all features', async () => {
|
||||
mockFeatureLoader.getAll.mockResolvedValue(features);
|
||||
|
||||
const result = await exportService.exportFeatures(testProjectPath);
|
||||
|
||||
const parsed = JSON.parse(result);
|
||||
expect(parsed.count).toBe(3);
|
||||
expect(parsed.features).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should filter by category', async () => {
|
||||
mockFeatureLoader.getAll.mockResolvedValue(features);
|
||||
|
||||
const result = await exportService.exportFeatures(testProjectPath, { category: 'UI' });
|
||||
|
||||
const parsed = JSON.parse(result);
|
||||
expect(parsed.count).toBe(2);
|
||||
expect(parsed.features.every((f: FeatureExport) => f.feature.category === 'UI')).toBe(true);
|
||||
});
|
||||
|
||||
it('should filter by status', async () => {
|
||||
mockFeatureLoader.getAll.mockResolvedValue(features);
|
||||
|
||||
const result = await exportService.exportFeatures(testProjectPath, { status: 'completed' });
|
||||
|
||||
const parsed = JSON.parse(result);
|
||||
expect(parsed.count).toBe(1);
|
||||
expect(parsed.features[0].feature.status).toBe('completed');
|
||||
});
|
||||
|
||||
it('should filter by feature IDs', async () => {
|
||||
mockFeatureLoader.getAll.mockResolvedValue(features);
|
||||
|
||||
const result = await exportService.exportFeatures(testProjectPath, {
|
||||
featureIds: ['feature-1', 'feature-3'],
|
||||
});
|
||||
|
||||
const parsed = JSON.parse(result);
|
||||
expect(parsed.count).toBe(2);
|
||||
const ids = parsed.features.map((f: FeatureExport) => f.feature.id);
|
||||
expect(ids).toContain('feature-1');
|
||||
expect(ids).toContain('feature-3');
|
||||
expect(ids).not.toContain('feature-2');
|
||||
});
|
||||
|
||||
it('should export to YAML format', async () => {
|
||||
mockFeatureLoader.getAll.mockResolvedValue(features);
|
||||
|
||||
const result = await exportService.exportFeatures(testProjectPath, { format: 'yaml' });
|
||||
|
||||
expect(result).toContain('version:');
|
||||
expect(result).toContain('count:');
|
||||
expect(result).toContain('features:');
|
||||
});
|
||||
|
||||
it('should include metadata when provided', async () => {
|
||||
mockFeatureLoader.getAll.mockResolvedValue(features);
|
||||
|
||||
const result = await exportService.exportFeatures(testProjectPath, {
|
||||
metadata: { projectName: 'TestProject' },
|
||||
});
|
||||
|
||||
const parsed = JSON.parse(result);
|
||||
expect(parsed.metadata).toEqual({ projectName: 'TestProject' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseImportData', () => {
|
||||
it('should parse valid JSON', () => {
|
||||
const json = JSON.stringify(sampleFeature);
|
||||
const result = exportService.parseImportData(json);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect((result as Feature).id).toBe(sampleFeature.id);
|
||||
});
|
||||
|
||||
it('should parse valid YAML', () => {
|
||||
const yaml = `
|
||||
id: feature-yaml-123
|
||||
title: YAML Feature
|
||||
category: Testing
|
||||
description: A YAML feature
|
||||
`;
|
||||
const result = exportService.parseImportData(yaml);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect((result as Feature).id).toBe('feature-yaml-123');
|
||||
expect((result as Feature).title).toBe('YAML Feature');
|
||||
});
|
||||
|
||||
it('should return null for invalid data', () => {
|
||||
const result = exportService.parseImportData('not valid {json} or yaml: [');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should parse FeatureExport wrapper', () => {
|
||||
const exportData: FeatureExport = {
|
||||
version: '1.0.0',
|
||||
feature: sampleFeature,
|
||||
exportedAt: new Date().toISOString(),
|
||||
};
|
||||
const json = JSON.stringify(exportData);
|
||||
|
||||
const result = exportService.parseImportData(json) as FeatureExport;
|
||||
|
||||
expect(result.version).toBe('1.0.0');
|
||||
expect(result.feature.id).toBe(sampleFeature.id);
|
||||
});
|
||||
});
|
||||
|
||||
describe('detectFormat', () => {
|
||||
it('should detect JSON format', () => {
|
||||
const json = JSON.stringify({ id: 'test' });
|
||||
expect(exportService.detectFormat(json)).toBe('json');
|
||||
});
|
||||
|
||||
it('should detect YAML format', () => {
|
||||
const yaml = `
|
||||
id: test
|
||||
title: Test
|
||||
`;
|
||||
expect(exportService.detectFormat(yaml)).toBe('yaml');
|
||||
});
|
||||
|
||||
it('should detect YAML for plain text (YAML is very permissive)', () => {
|
||||
// YAML parses any plain text as a string, so this is detected as valid YAML
|
||||
// The actual validation happens in parseImportData which checks for required fields
|
||||
expect(exportService.detectFormat('not valid {[')).toBe('yaml');
|
||||
});
|
||||
|
||||
it('should handle whitespace', () => {
|
||||
const json = ' { "id": "test" } ';
|
||||
expect(exportService.detectFormat(json)).toBe('json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('importFeature', () => {
|
||||
it('should import feature from raw Feature data', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockResolvedValue(sampleFeature);
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.featureId).toBe(sampleFeature.id);
|
||||
expect(mockFeatureLoader.create).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should import feature from FeatureExport wrapper', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockResolvedValue(sampleFeature);
|
||||
|
||||
const exportData: FeatureExport = {
|
||||
version: '1.0.0',
|
||||
feature: sampleFeature,
|
||||
exportedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: exportData,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.featureId).toBe(sampleFeature.id);
|
||||
});
|
||||
|
||||
it('should use custom ID when provided', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...sampleFeature,
|
||||
id: data.id!,
|
||||
}));
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
newId: 'custom-id-123',
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.featureId).toBe('custom-id-123');
|
||||
});
|
||||
|
||||
it('should fail when feature exists and overwrite is false', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(sampleFeature);
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
overwrite: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors).toContain(
|
||||
`Feature with ID ${sampleFeature.id} already exists. Set overwrite: true to replace.`
|
||||
);
|
||||
});
|
||||
|
||||
it('should overwrite when overwrite is true', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(sampleFeature);
|
||||
mockFeatureLoader.update.mockResolvedValue(sampleFeature);
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
overwrite: true,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.wasOverwritten).toBe(true);
|
||||
expect(mockFeatureLoader.update).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should apply target category override', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...sampleFeature,
|
||||
...data,
|
||||
}));
|
||||
|
||||
await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
targetCategory: 'NewCategory',
|
||||
});
|
||||
|
||||
const createCall = mockFeatureLoader.create.mock.calls[0];
|
||||
expect(createCall[1].category).toBe('NewCategory');
|
||||
});
|
||||
|
||||
it('should clear branch info when preserveBranchInfo is false', async () => {
|
||||
const featureWithBranch: Feature = {
|
||||
...sampleFeature,
|
||||
branchName: 'feature/test-branch',
|
||||
};
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...featureWithBranch,
|
||||
...data,
|
||||
}));
|
||||
|
||||
await exportService.importFeature(testProjectPath, {
|
||||
data: featureWithBranch,
|
||||
preserveBranchInfo: false,
|
||||
});
|
||||
|
||||
const createCall = mockFeatureLoader.create.mock.calls[0];
|
||||
expect(createCall[1].branchName).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should preserve branch info when preserveBranchInfo is true', async () => {
|
||||
const featureWithBranch: Feature = {
|
||||
...sampleFeature,
|
||||
branchName: 'feature/test-branch',
|
||||
};
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...featureWithBranch,
|
||||
...data,
|
||||
}));
|
||||
|
||||
await exportService.importFeature(testProjectPath, {
|
||||
data: featureWithBranch,
|
||||
preserveBranchInfo: true,
|
||||
});
|
||||
|
||||
const createCall = mockFeatureLoader.create.mock.calls[0];
|
||||
expect(createCall[1].branchName).toBe('feature/test-branch');
|
||||
});
|
||||
|
||||
it('should warn and clear image paths', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockResolvedValue(sampleFeature);
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
});
|
||||
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings).toContainEqual(expect.stringContaining('image path'));
|
||||
const createCall = mockFeatureLoader.create.mock.calls[0];
|
||||
expect(createCall[1].imagePaths).toEqual([]);
|
||||
});
|
||||
|
||||
it('should warn and clear text file paths', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockResolvedValue(sampleFeature);
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: sampleFeature,
|
||||
});
|
||||
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(result.warnings).toContainEqual(expect.stringContaining('text file path'));
|
||||
const createCall = mockFeatureLoader.create.mock.calls[0];
|
||||
expect(createCall[1].textFilePaths).toEqual([]);
|
||||
});
|
||||
|
||||
it('should fail with validation error for missing required fields', async () => {
|
||||
const invalidFeature = {
|
||||
id: 'feature-invalid',
|
||||
// Missing description, title, and category
|
||||
} as Feature;
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: invalidFeature,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors).toBeDefined();
|
||||
expect(result.errors!.some((e) => e.includes('title or description'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should generate ID when none provided', async () => {
|
||||
const featureWithoutId = {
|
||||
title: 'No ID Feature',
|
||||
category: 'Testing',
|
||||
description: 'Feature without ID',
|
||||
} as Feature;
|
||||
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...featureWithoutId,
|
||||
id: data.id!,
|
||||
}));
|
||||
|
||||
const result = await exportService.importFeature(testProjectPath, {
|
||||
data: featureWithoutId,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.featureId).toBe('feature-mock-id');
|
||||
});
|
||||
});
|
||||
|
||||
describe('importFeatures', () => {
|
||||
const bulkExport = {
|
||||
version: '1.0.0',
|
||||
exportedAt: new Date().toISOString(),
|
||||
count: 2,
|
||||
features: [
|
||||
{
|
||||
version: '1.0.0',
|
||||
feature: { ...sampleFeature, id: 'feature-1' },
|
||||
exportedAt: new Date().toISOString(),
|
||||
},
|
||||
{
|
||||
version: '1.0.0',
|
||||
feature: { ...sampleFeature, id: 'feature-2' },
|
||||
exportedAt: new Date().toISOString(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
it('should import multiple features from JSON string', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...sampleFeature,
|
||||
id: data.id!,
|
||||
}));
|
||||
|
||||
const results = await exportService.importFeatures(
|
||||
testProjectPath,
|
||||
JSON.stringify(bulkExport)
|
||||
);
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results[0].success).toBe(true);
|
||||
expect(results[1].success).toBe(true);
|
||||
});
|
||||
|
||||
it('should import multiple features from parsed data', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...sampleFeature,
|
||||
id: data.id!,
|
||||
}));
|
||||
|
||||
const results = await exportService.importFeatures(testProjectPath, bulkExport);
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results.every((r) => r.success)).toBe(true);
|
||||
});
|
||||
|
||||
it('should apply options to all features', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValue(null);
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...sampleFeature,
|
||||
...data,
|
||||
}));
|
||||
|
||||
await exportService.importFeatures(testProjectPath, bulkExport, {
|
||||
targetCategory: 'ImportedCategory',
|
||||
});
|
||||
|
||||
const createCalls = mockFeatureLoader.create.mock.calls;
|
||||
expect(createCalls[0][1].category).toBe('ImportedCategory');
|
||||
expect(createCalls[1][1].category).toBe('ImportedCategory');
|
||||
});
|
||||
|
||||
it('should return error for invalid bulk format', async () => {
|
||||
const results = await exportService.importFeatures(testProjectPath, '{ "invalid": "data" }');
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0].success).toBe(false);
|
||||
expect(results[0].errors).toContainEqual(expect.stringContaining('Invalid bulk import data'));
|
||||
});
|
||||
|
||||
it('should handle partial failures', async () => {
|
||||
mockFeatureLoader.get.mockResolvedValueOnce(null).mockResolvedValueOnce(sampleFeature); // Second feature exists
|
||||
|
||||
mockFeatureLoader.create.mockImplementation(async (_, data) => ({
|
||||
...sampleFeature,
|
||||
id: data.id!,
|
||||
}));
|
||||
|
||||
const results = await exportService.importFeatures(testProjectPath, bulkExport, {
|
||||
overwrite: false,
|
||||
});
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results[0].success).toBe(true);
|
||||
expect(results[1].success).toBe(false); // Exists without overwrite
|
||||
});
|
||||
});
|
||||
});