feat: implement config-manager inside tm-core package
Next up: connect it to everything else and verify that tm list works and loads what it needs to load.
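
For context, a minimal usage sketch of the new API (illustrative only; the package import path is an assumption and is not part of this commit):

import { ConfigManager } from '@task-master/tm-core/config'; // hypothetical import path

// ConfigManager.create() is the only way to obtain an instance; it merges
// defaults, the local .taskmaster/config.json, and TASKMASTER_* env overrides.
const manager = await ConfigManager.create(process.cwd());
const config = manager.getConfig();          // merged configuration
const storage = manager.getStorageConfig();  // { type: 'file' } unless API storage is configured
const activeTag = manager.getActiveTag();    // runtime state, defaults to 'master'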
.vscode/settings.json (vendored, 3 changes)
@@ -10,5 +10,6 @@
 	},
 	"json.format.enable": true,
-	"json.validate.enable": true
+	"json.validate.enable": true,
+	"typescript.tsdk": "node_modules/typescript/lib"
 }

packages/tm-core/src/config/config-manager.spec.ts (new file, 370 lines)
@@ -0,0 +1,370 @@
|
||||
/**
|
||||
* @fileoverview Integration tests for ConfigManager
|
||||
* Tests the orchestration of all configuration services
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
|
||||
import { ConfigManager } from './config-manager.js';
|
||||
import { ConfigLoader } from './services/config-loader.service.js';
|
||||
import { ConfigMerger } from './services/config-merger.service.js';
|
||||
import { RuntimeStateManager } from './services/runtime-state-manager.service.js';
|
||||
import { ConfigPersistence } from './services/config-persistence.service.js';
|
||||
import { EnvironmentConfigProvider } from './services/environment-config-provider.service.js';
|
||||
|
||||
// Mock all services
|
||||
vi.mock('./services/config-loader.service.js');
|
||||
vi.mock('./services/config-merger.service.js');
|
||||
vi.mock('./services/runtime-state-manager.service.js');
|
||||
vi.mock('./services/config-persistence.service.js');
|
||||
vi.mock('./services/environment-config-provider.service.js');
|
||||
|
||||
describe('ConfigManager', () => {
|
||||
let manager: ConfigManager;
|
||||
const testProjectRoot = '/test/project';
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Clear environment variables
|
||||
Object.keys(process.env).forEach(key => {
|
||||
if (key.startsWith('TASKMASTER_')) {
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
|
||||
// Setup default mock behaviors
|
||||
vi.mocked(ConfigLoader).mockImplementation(() => ({
|
||||
getDefaultConfig: vi.fn().mockReturnValue({
|
||||
models: { main: 'default-model', fallback: 'fallback-model' },
|
||||
storage: { type: 'file' },
|
||||
version: '1.0.0'
|
||||
}),
|
||||
loadLocalConfig: vi.fn().mockResolvedValue(null),
|
||||
loadGlobalConfig: vi.fn().mockResolvedValue(null),
|
||||
hasLocalConfig: vi.fn().mockResolvedValue(false),
|
||||
hasGlobalConfig: vi.fn().mockResolvedValue(false)
|
||||
} as any));
|
||||
|
||||
vi.mocked(ConfigMerger).mockImplementation(() => ({
|
||||
addSource: vi.fn(),
|
||||
clearSources: vi.fn(),
|
||||
merge: vi.fn().mockReturnValue({
|
||||
models: { main: 'merged-model', fallback: 'fallback-model' },
|
||||
storage: { type: 'file' }
|
||||
}),
|
||||
getSources: vi.fn().mockReturnValue([]),
|
||||
hasSource: vi.fn().mockReturnValue(false),
|
||||
removeSource: vi.fn().mockReturnValue(false)
|
||||
} as any));
|
||||
|
||||
vi.mocked(RuntimeStateManager).mockImplementation(() => ({
|
||||
loadState: vi.fn().mockResolvedValue({ activeTag: 'master' }),
|
||||
saveState: vi.fn().mockResolvedValue(undefined),
|
||||
getActiveTag: vi.fn().mockReturnValue('master'),
|
||||
setActiveTag: vi.fn().mockResolvedValue(undefined),
|
||||
getState: vi.fn().mockReturnValue({ activeTag: 'master' }),
|
||||
updateMetadata: vi.fn().mockResolvedValue(undefined),
|
||||
clearState: vi.fn().mockResolvedValue(undefined)
|
||||
} as any));
|
||||
|
||||
vi.mocked(ConfigPersistence).mockImplementation(() => ({
|
||||
saveConfig: vi.fn().mockResolvedValue(undefined),
|
||||
configExists: vi.fn().mockResolvedValue(false),
|
||||
deleteConfig: vi.fn().mockResolvedValue(undefined),
|
||||
getBackups: vi.fn().mockResolvedValue([]),
|
||||
restoreFromBackup: vi.fn().mockResolvedValue(undefined)
|
||||
} as any));
|
||||
|
||||
vi.mocked(EnvironmentConfigProvider).mockImplementation(() => ({
|
||||
loadConfig: vi.fn().mockReturnValue({}),
|
||||
getRuntimeState: vi.fn().mockReturnValue({}),
|
||||
hasEnvVar: vi.fn().mockReturnValue(false),
|
||||
getAllTaskmasterEnvVars: vi.fn().mockReturnValue({}),
|
||||
addMapping: vi.fn(),
|
||||
getMappings: vi.fn().mockReturnValue([])
|
||||
} as any));
|
||||
|
||||
// Since constructor is private, we need to use the factory method
|
||||
// But for testing, we'll create a test instance using create()
|
||||
manager = await ConfigManager.create(testProjectRoot);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
describe('creation', () => {
|
||||
it('should initialize all services when created', () => {
|
||||
// Services should have been initialized during beforeEach
|
||||
expect(ConfigLoader).toHaveBeenCalledWith(testProjectRoot);
|
||||
expect(ConfigMerger).toHaveBeenCalled();
|
||||
expect(RuntimeStateManager).toHaveBeenCalledWith(testProjectRoot);
|
||||
expect(ConfigPersistence).toHaveBeenCalledWith(testProjectRoot);
|
||||
expect(EnvironmentConfigProvider).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('create (factory method)', () => {
|
||||
it('should create and initialize manager', async () => {
|
||||
const createdManager = await ConfigManager.create(testProjectRoot);
|
||||
|
||||
expect(createdManager).toBeInstanceOf(ConfigManager);
|
||||
expect(createdManager.getConfig()).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('initialization (via create)', () => {
|
||||
it('should load and merge all configuration sources', () => {
|
||||
// Manager was created in beforeEach, so initialization already happened
|
||||
const loader = (manager as any).loader;
|
||||
const merger = (manager as any).merger;
|
||||
const stateManager = (manager as any).stateManager;
|
||||
const envProvider = (manager as any).envProvider;
|
||||
|
||||
// Verify loading sequence
|
||||
expect(merger.clearSources).toHaveBeenCalled();
|
||||
expect(loader.getDefaultConfig).toHaveBeenCalled();
|
||||
expect(loader.loadGlobalConfig).toHaveBeenCalled();
|
||||
expect(loader.loadLocalConfig).toHaveBeenCalled();
|
||||
expect(envProvider.loadConfig).toHaveBeenCalled();
|
||||
expect(merger.merge).toHaveBeenCalled();
|
||||
expect(stateManager.loadState).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should add sources with correct precedence during creation', () => {
|
||||
const merger = (manager as any).merger;
|
||||
|
||||
// Check that sources were added with correct precedence
|
||||
expect(merger.addSource).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
name: 'defaults',
|
||||
precedence: 0
|
||||
})
|
||||
);
|
||||
|
||||
// Note: local and env sources may not be added if they don't exist
|
||||
// The mock setup determines what gets called
|
||||
});
|
||||
});
|
||||
|
||||
describe('configuration access', () => {
|
||||
// Manager is already initialized in the main beforeEach
|
||||
|
||||
it('should return merged configuration', () => {
|
||||
const config = manager.getConfig();
|
||||
expect(config).toEqual({
|
||||
models: { main: 'merged-model', fallback: 'fallback-model' },
|
||||
storage: { type: 'file' }
|
||||
});
|
||||
});
|
||||
|
||||
it('should return storage configuration', () => {
|
||||
const storage = manager.getStorageConfig();
|
||||
expect(storage).toEqual({ type: 'file' });
|
||||
});
|
||||
|
||||
it('should return API storage configuration when configured', async () => {
|
||||
// Create a new instance with API storage config
|
||||
vi.mocked(ConfigMerger).mockImplementationOnce(() => ({
|
||||
addSource: vi.fn(),
|
||||
clearSources: vi.fn(),
|
||||
merge: vi.fn().mockReturnValue({
|
||||
storage: {
|
||||
type: 'api',
|
||||
apiEndpoint: 'https://api.example.com',
|
||||
apiAccessToken: 'token123'
|
||||
}
|
||||
}),
|
||||
getSources: vi.fn().mockReturnValue([]),
|
||||
hasSource: vi.fn().mockReturnValue(false),
|
||||
removeSource: vi.fn().mockReturnValue(false)
|
||||
} as any));
|
||||
|
||||
const apiManager = await ConfigManager.create(testProjectRoot);
|
||||
|
||||
const storage = apiManager.getStorageConfig();
|
||||
expect(storage).toEqual({
|
||||
type: 'api',
|
||||
apiEndpoint: 'https://api.example.com',
|
||||
apiAccessToken: 'token123'
|
||||
});
|
||||
});
|
||||
|
||||
it('should return model configuration', () => {
|
||||
const models = manager.getModelConfig();
|
||||
expect(models).toEqual({
|
||||
main: 'merged-model',
|
||||
fallback: 'fallback-model'
|
||||
});
|
||||
});
|
||||
|
||||
it('should return default models when not configured', () => {
|
||||
// Update the mock for current instance
|
||||
const merger = (manager as any).merger;
|
||||
merger.merge.mockReturnValue({});
|
||||
// Force re-merge
|
||||
(manager as any).config = merger.merge();
|
||||
|
||||
const models = manager.getModelConfig();
|
||||
expect(models).toEqual({
|
||||
main: 'claude-3-5-sonnet-20241022',
|
||||
fallback: 'gpt-4o-mini'
|
||||
});
|
||||
});
|
||||
|
||||
it('should return response language', () => {
|
||||
const language = manager.getResponseLanguage();
|
||||
expect(language).toBe('English');
|
||||
});
|
||||
|
||||
it('should return custom response language', () => {
|
||||
// Update config for current instance
|
||||
(manager as any).config = {
|
||||
custom: { responseLanguage: 'Spanish' }
|
||||
};
|
||||
|
||||
const language = manager.getResponseLanguage();
|
||||
expect(language).toBe('Spanish');
|
||||
});
|
||||
|
||||
it('should return project root', () => {
|
||||
expect(manager.getProjectRoot()).toBe(testProjectRoot);
|
||||
});
|
||||
|
||||
it('should check if using API storage', () => {
|
||||
expect(manager.isUsingApiStorage()).toBe(false);
|
||||
});
|
||||
|
||||
it('should detect API storage', () => {
|
||||
// Update config for current instance
|
||||
(manager as any).config = {
|
||||
storage: {
|
||||
type: 'api',
|
||||
apiEndpoint: 'https://api.example.com',
|
||||
apiAccessToken: 'token'
|
||||
}
|
||||
};
|
||||
|
||||
expect(manager.isUsingApiStorage()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('runtime state', () => {
|
||||
// Manager is already initialized in the main beforeEach
|
||||
|
||||
it('should get active tag from state manager', () => {
|
||||
const tag = manager.getActiveTag();
|
||||
expect(tag).toBe('master');
|
||||
});
|
||||
|
||||
it('should set active tag through state manager', async () => {
|
||||
await manager.setActiveTag('feature-branch');
|
||||
|
||||
const stateManager = (manager as any).stateManager;
|
||||
expect(stateManager.setActiveTag).toHaveBeenCalledWith('feature-branch');
|
||||
});
|
||||
});
|
||||
|
||||
describe('configuration updates', () => {
|
||||
// Manager is already initialized in the main beforeEach
|
||||
|
||||
it('should update configuration and save', async () => {
|
||||
const updates = { models: { main: 'new-model', fallback: 'fallback-model' } };
|
||||
await manager.updateConfig(updates);
|
||||
|
||||
const persistence = (manager as any).persistence;
|
||||
expect(persistence.saveConfig).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-initialize after update to maintain precedence', async () => {
|
||||
const merger = (manager as any).merger;
|
||||
merger.clearSources.mockClear();
|
||||
|
||||
await manager.updateConfig({ custom: { test: 'value' } });
|
||||
|
||||
expect(merger.clearSources).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should set response language', async () => {
|
||||
await manager.setResponseLanguage('French');
|
||||
|
||||
const persistence = (manager as any).persistence;
|
||||
expect(persistence.saveConfig).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
custom: { responseLanguage: 'French' }
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should save configuration with options', async () => {
|
||||
await manager.saveConfig();
|
||||
|
||||
const persistence = (manager as any).persistence;
|
||||
expect(persistence.saveConfig).toHaveBeenCalledWith(
|
||||
expect.any(Object),
|
||||
{ createBackup: true, atomic: true }
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('utilities', () => {
|
||||
// Manager is already initialized in the main beforeEach
|
||||
|
||||
it('should reset configuration to defaults', async () => {
|
||||
await manager.reset();
|
||||
|
||||
const persistence = (manager as any).persistence;
|
||||
const stateManager = (manager as any).stateManager;
|
||||
|
||||
expect(persistence.deleteConfig).toHaveBeenCalled();
|
||||
expect(stateManager.clearState).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-initialize after reset', async () => {
|
||||
const merger = (manager as any).merger;
|
||||
merger.clearSources.mockClear();
|
||||
|
||||
await manager.reset();
|
||||
|
||||
expect(merger.clearSources).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should get configuration sources for debugging', () => {
|
||||
const merger = (manager as any).merger;
|
||||
const mockSources = [{ name: 'test', config: {}, precedence: 1 }];
|
||||
merger.getSources.mockReturnValue(mockSources);
|
||||
|
||||
const sources = manager.getConfigSources();
|
||||
|
||||
expect(sources).toEqual(mockSources);
|
||||
});
|
||||
|
||||
it('should return no-op function for watch (not implemented)', () => {
|
||||
const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
const callback = vi.fn();
|
||||
|
||||
const unsubscribe = manager.watch(callback);
|
||||
|
||||
expect(warnSpy).toHaveBeenCalledWith('Configuration watching not yet implemented');
|
||||
expect(unsubscribe).toBeInstanceOf(Function);
|
||||
|
||||
// Calling unsubscribe should not throw
|
||||
expect(() => unsubscribe()).not.toThrow();
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should handle missing services gracefully', async () => {
|
||||
// Even if a service fails, manager should still work
|
||||
const loader = (manager as any).loader;
|
||||
loader.loadLocalConfig.mockRejectedValue(new Error('File error'));
|
||||
|
||||
// Creating a new manager should not throw even if service fails
|
||||
await expect(ConfigManager.create(testProjectRoot)).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/tm-core/src/config/config-manager.ts
@@ -1,135 +1,134 @@
|
||||
/**
|
||||
* @fileoverview Configuration Manager
|
||||
* Handles loading, caching, and accessing configuration including active tag
|
||||
* Orchestrates configuration services following clean architecture principles
|
||||
*
|
||||
* This ConfigManager delegates responsibilities to specialized services for better
|
||||
* maintainability, testability, and separation of concerns.
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import type { IConfiguration } from '../interfaces/configuration.interface.js';
|
||||
import { ERROR_CODES, TaskMasterError } from '../errors/task-master-error.js';
|
||||
import type { PartialConfiguration } from '../interfaces/configuration.interface.js';
|
||||
import { ConfigLoader } from './services/config-loader.service.js';
|
||||
import {
|
||||
ConfigMerger,
|
||||
CONFIG_PRECEDENCE
|
||||
} from './services/config-merger.service.js';
|
||||
import { RuntimeStateManager } from './services/runtime-state-manager.service.js';
|
||||
import { ConfigPersistence } from './services/config-persistence.service.js';
|
||||
import { EnvironmentConfigProvider } from './services/environment-config-provider.service.js';
|
||||
|
||||
/**
|
||||
* Configuration state including runtime settings
|
||||
*/
|
||||
interface ConfigState {
|
||||
/** The loaded configuration */
|
||||
config: Partial<IConfiguration>;
|
||||
/** Currently active tag (defaults to 'master') */
|
||||
activeTag: string;
|
||||
/** Project root path */
|
||||
projectRoot: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* ConfigManager handles all configuration-related operations
|
||||
* Single source of truth for configuration and active context
|
||||
* ConfigManager orchestrates all configuration services
|
||||
*
|
||||
* This class delegates responsibilities to specialized services:
|
||||
* - ConfigLoader: Loads configuration from files
|
||||
* - ConfigMerger: Merges configurations with precedence
|
||||
* - RuntimeStateManager: Manages runtime state
|
||||
* - ConfigPersistence: Handles file persistence
|
||||
* - EnvironmentConfigProvider: Extracts env var configuration
|
||||
*/
|
||||
export class ConfigManager {
|
||||
private state: ConfigState;
|
||||
private configPath: string;
|
||||
private projectRoot: string;
|
||||
private config: PartialConfiguration = {};
|
||||
private initialized = false;
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.state = {
|
||||
config: {},
|
||||
activeTag: 'master',
|
||||
projectRoot
|
||||
};
|
||||
this.configPath = path.join(projectRoot, '.taskmaster', 'config.json');
|
||||
// Services
|
||||
private loader: ConfigLoader;
|
||||
private merger: ConfigMerger;
|
||||
private stateManager: RuntimeStateManager;
|
||||
private persistence: ConfigPersistence;
|
||||
private envProvider: EnvironmentConfigProvider;
|
||||
|
||||
/**
|
||||
* Create and initialize a new ConfigManager instance
|
||||
* This is the ONLY way to create a ConfigManager
|
||||
*
|
||||
* @param projectRoot - The root directory of the project
|
||||
* @returns Fully initialized ConfigManager instance
|
||||
*/
|
||||
static async create(projectRoot: string): Promise<ConfigManager> {
|
||||
const manager = new ConfigManager(projectRoot);
|
||||
await manager.initialize();
|
||||
return manager;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize by loading configuration from disk
|
||||
* Private constructor - use ConfigManager.create() instead
|
||||
* This ensures the ConfigManager is always properly initialized
|
||||
*/
|
||||
async initialize(): Promise<void> {
|
||||
private constructor(projectRoot: string) {
|
||||
this.projectRoot = projectRoot;
|
||||
|
||||
// Initialize services
|
||||
this.loader = new ConfigLoader(projectRoot);
|
||||
this.merger = new ConfigMerger();
|
||||
this.stateManager = new RuntimeStateManager(projectRoot);
|
||||
this.persistence = new ConfigPersistence(projectRoot);
|
||||
this.envProvider = new EnvironmentConfigProvider();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize by loading configuration from all sources
|
||||
* Private - only called by the factory method
|
||||
*/
|
||||
private async initialize(): Promise<void> {
|
||||
if (this.initialized) return;
|
||||
|
||||
try {
|
||||
await this.loadConfig();
|
||||
this.initialized = true;
|
||||
} catch (error) {
|
||||
// If config doesn't exist, use defaults
|
||||
console.debug('No config.json found, using defaults');
|
||||
this.initialized = true;
|
||||
// Clear any existing configuration sources
|
||||
this.merger.clearSources();
|
||||
|
||||
// 1. Load default configuration (lowest precedence)
|
||||
this.merger.addSource({
|
||||
name: 'defaults',
|
||||
config: this.loader.getDefaultConfig(),
|
||||
precedence: CONFIG_PRECEDENCE.DEFAULTS
|
||||
});
|
||||
|
||||
// 2. Load global configuration (if exists)
|
||||
const globalConfig = await this.loader.loadGlobalConfig();
|
||||
if (globalConfig) {
|
||||
this.merger.addSource({
|
||||
name: 'global',
|
||||
config: globalConfig,
|
||||
precedence: CONFIG_PRECEDENCE.GLOBAL
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load configuration from config.json
|
||||
*/
|
||||
private async loadConfig(): Promise<void> {
|
||||
try {
|
||||
const configData = await fs.readFile(this.configPath, 'utf-8');
|
||||
const config = JSON.parse(configData);
|
||||
|
||||
this.state.config = config;
|
||||
|
||||
// Load active tag from config if present
|
||||
if (config.activeTag) {
|
||||
this.state.activeTag = config.activeTag;
|
||||
}
|
||||
|
||||
// Check for environment variable override
|
||||
if (process.env.TASKMASTER_TAG) {
|
||||
this.state.activeTag = process.env.TASKMASTER_TAG;
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw new TaskMasterError(
|
||||
'Failed to load configuration',
|
||||
ERROR_CODES.CONFIG_ERROR,
|
||||
{ configPath: this.configPath },
|
||||
error
|
||||
);
|
||||
}
|
||||
// File doesn't exist, will use defaults
|
||||
// 3. Load local project configuration
|
||||
const localConfig = await this.loader.loadLocalConfig();
|
||||
if (localConfig) {
|
||||
this.merger.addSource({
|
||||
name: 'local',
|
||||
config: localConfig,
|
||||
precedence: CONFIG_PRECEDENCE.LOCAL
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save current configuration to disk
|
||||
*/
|
||||
async saveConfig(): Promise<void> {
|
||||
const configDir = path.dirname(this.configPath);
|
||||
|
||||
try {
|
||||
// Ensure directory exists
|
||||
await fs.mkdir(configDir, { recursive: true });
|
||||
|
||||
// Save config with active tag
|
||||
const configToSave = {
|
||||
...this.state.config,
|
||||
activeTag: this.state.activeTag
|
||||
};
|
||||
|
||||
await fs.writeFile(
|
||||
this.configPath,
|
||||
JSON.stringify(configToSave, null, 2),
|
||||
'utf-8'
|
||||
);
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to save configuration',
|
||||
ERROR_CODES.CONFIG_ERROR,
|
||||
{ configPath: this.configPath },
|
||||
error as Error
|
||||
);
|
||||
// 4. Load environment variables (highest precedence)
|
||||
const envConfig = this.envProvider.loadConfig();
|
||||
if (Object.keys(envConfig).length > 0) {
|
||||
this.merger.addSource({
|
||||
name: 'environment',
|
||||
config: envConfig,
|
||||
precedence: CONFIG_PRECEDENCE.ENVIRONMENT
|
||||
});
|
||||
}
|
||||
|
||||
// 5. Merge all configurations
|
||||
this.config = this.merger.merge();
|
||||
|
||||
// 6. Load runtime state
|
||||
await this.stateManager.loadState();
|
||||
|
||||
this.initialized = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the currently active tag
|
||||
*/
|
||||
getActiveTag(): string {
|
||||
return this.state.activeTag;
|
||||
}
|
||||
// ==================== Configuration Access ====================
|
||||
|
||||
/**
|
||||
* Set the active tag
|
||||
* Get full configuration
|
||||
*/
|
||||
async setActiveTag(tag: string): Promise<void> {
|
||||
this.state.activeTag = tag;
|
||||
await this.saveConfig();
|
||||
getConfig(): PartialConfiguration {
|
||||
return this.config;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -140,9 +139,8 @@ export class ConfigManager {
|
||||
apiEndpoint?: string;
|
||||
apiAccessToken?: string;
|
||||
} {
|
||||
const storage = this.state.config.storage;
|
||||
const storage = this.config.storage;
|
||||
|
||||
// Check for Hamster/API configuration
|
||||
if (
|
||||
storage?.type === 'api' &&
|
||||
storage.apiEndpoint &&
|
||||
@@ -155,51 +153,128 @@ export class ConfigManager {
|
||||
};
|
||||
}
|
||||
|
||||
// Default to file storage
|
||||
return { type: 'file' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get model configuration
|
||||
*/
|
||||
getModelConfig() {
|
||||
return (
|
||||
this.config.models || {
|
||||
main: 'claude-3-5-sonnet-20241022',
|
||||
fallback: 'gpt-4o-mini'
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get response language setting
|
||||
*/
|
||||
getResponseLanguage(): string {
|
||||
const customConfig = this.config.custom as any;
|
||||
return customConfig?.responseLanguage || 'English';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get project root path
|
||||
*/
|
||||
getProjectRoot(): string {
|
||||
return this.state.projectRoot;
|
||||
return this.projectRoot;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get full configuration
|
||||
*/
|
||||
getConfig(): Partial<IConfiguration> {
|
||||
return this.state.config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update configuration
|
||||
*/
|
||||
async updateConfig(updates: Partial<IConfiguration>): Promise<void> {
|
||||
this.state.config = {
|
||||
...this.state.config,
|
||||
...updates
|
||||
};
|
||||
await this.saveConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if using API storage (Hamster)
|
||||
* Check if using API storage
|
||||
*/
|
||||
isUsingApiStorage(): boolean {
|
||||
return this.getStorageConfig().type === 'api';
|
||||
}
|
||||
|
||||
// ==================== Runtime State ====================
|
||||
|
||||
/**
|
||||
* Get model configuration for AI providers
|
||||
* Get the currently active tag
|
||||
*/
|
||||
getModelConfig() {
|
||||
return (
|
||||
this.state.config.models || {
|
||||
main: 'claude-3-5-sonnet-20241022',
|
||||
fallback: 'gpt-4o-mini'
|
||||
}
|
||||
);
|
||||
getActiveTag(): string {
|
||||
return this.stateManager.getActiveTag();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the active tag
|
||||
*/
|
||||
async setActiveTag(tag: string): Promise<void> {
|
||||
await this.stateManager.setActiveTag(tag);
|
||||
}
|
||||
|
||||
// ==================== Configuration Updates ====================
|
||||
|
||||
/**
|
||||
* Update configuration
|
||||
*/
|
||||
async updateConfig(updates: PartialConfiguration): Promise<void> {
|
||||
// Merge updates into current config
|
||||
Object.assign(this.config, updates);
|
||||
|
||||
// Save to persistence
|
||||
await this.persistence.saveConfig(this.config);
|
||||
|
||||
// Re-initialize to respect precedence
|
||||
await this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set response language
|
||||
*/
|
||||
async setResponseLanguage(language: string): Promise<void> {
|
||||
if (!this.config.custom) {
|
||||
this.config.custom = {};
|
||||
}
|
||||
(this.config.custom as any).responseLanguage = language;
|
||||
await this.persistence.saveConfig(this.config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Save current configuration
|
||||
*/
|
||||
async saveConfig(): Promise<void> {
|
||||
await this.persistence.saveConfig(this.config, {
|
||||
createBackup: true,
|
||||
atomic: true
|
||||
});
|
||||
}
|
||||
|
||||
// ==================== Utilities ====================
|
||||
|
||||
/**
|
||||
* Reset configuration to defaults
|
||||
*/
|
||||
async reset(): Promise<void> {
|
||||
// Clear configuration file
|
||||
await this.persistence.deleteConfig();
|
||||
|
||||
// Clear runtime state
|
||||
await this.stateManager.clearState();
|
||||
|
||||
// Reset internal state
|
||||
this.initialized = false;
|
||||
this.config = {};
|
||||
|
||||
// Re-initialize with defaults
|
||||
await this.initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get configuration sources for debugging
|
||||
*/
|
||||
getConfigSources() {
|
||||
return this.merger.getSources();
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch for configuration changes (placeholder for future)
|
||||
*/
|
||||
watch(_callback: (config: PartialConfiguration) => void): () => void {
|
||||
console.warn('Configuration watching not yet implemented');
|
||||
return () => {}; // Return no-op unsubscribe function
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Zod validation schema for IConfiguration interface
|
||||
* This file provides the main schema export for configuration validation
|
||||
*/
|
||||
|
||||
export {
|
||||
configurationSchema,
|
||||
partialConfigurationSchema,
|
||||
modelConfigSchema,
|
||||
providerConfigSchema,
|
||||
taskSettingsSchema,
|
||||
tagSettingsSchema,
|
||||
storageSettingsSchema,
|
||||
retrySettingsSchema,
|
||||
loggingSettingsSchema,
|
||||
loggingConfigSchema, // Legacy alias
|
||||
cacheConfigSchema,
|
||||
securitySettingsSchema,
|
||||
taskPrioritySchema,
|
||||
taskComplexitySchema,
|
||||
logLevelSchema,
|
||||
storageTypeSchema,
|
||||
tagNamingConventionSchema,
|
||||
bufferEncodingSchema,
|
||||
type ConfigurationSchema,
|
||||
type PartialConfigurationSchema
|
||||
} from './validation.js';
|
||||
|
||||
// Re-export the main schema as the default export for convenience
|
||||
export { configurationSchema as default } from './validation.js';
packages/tm-core/src/config/index.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
/**
 * @fileoverview Configuration module exports
 * Exports the main ConfigManager and all configuration services
 */

// Export the main ConfigManager
export { ConfigManager } from './config-manager.js';

// Export all configuration services for advanced usage
export {
	ConfigLoader,
	ConfigMerger,
	CONFIG_PRECEDENCE,
	RuntimeStateManager,
	ConfigPersistence,
	EnvironmentConfigProvider,
	type ConfigSource,
	type RuntimeState,
	type PersistenceOptions
} from './services/index.js';

// Re-export configuration interfaces
export type {
	IConfiguration,
	PartialConfiguration,
	ModelConfig,
	ProviderConfig,
	TaskSettings,
	TagSettings,
	StorageSettings,
	RetrySettings,
	LoggingSettings,
	SecuritySettings,
	ConfigValidationResult,
	EnvironmentConfig,
	ConfigSchema,
	ConfigProperty,
	IConfigurationFactory,
	IConfigurationManager
} from '../interfaces/configuration.interface.js';

// Re-export default values
export { DEFAULT_CONFIG_VALUES } from '../interfaces/configuration.interface.js';
@@ -0,0 +1,144 @@
|
||||
/**
|
||||
* @fileoverview Unit tests for ConfigLoader service
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import { ConfigLoader } from './config-loader.service.js';
|
||||
import { DEFAULT_CONFIG_VALUES } from '../../interfaces/configuration.interface.js';
|
||||
|
||||
vi.mock('node:fs', () => ({
|
||||
promises: {
|
||||
readFile: vi.fn(),
|
||||
access: vi.fn()
|
||||
}
|
||||
}));
|
||||
|
||||
describe('ConfigLoader', () => {
|
||||
let configLoader: ConfigLoader;
|
||||
const testProjectRoot = '/test/project';
|
||||
|
||||
beforeEach(() => {
|
||||
configLoader = new ConfigLoader(testProjectRoot);
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('getDefaultConfig', () => {
|
||||
it('should return default configuration values', () => {
|
||||
const config = configLoader.getDefaultConfig();
|
||||
|
||||
expect(config.models).toEqual({
|
||||
main: DEFAULT_CONFIG_VALUES.MODELS.MAIN,
|
||||
fallback: DEFAULT_CONFIG_VALUES.MODELS.FALLBACK
|
||||
});
|
||||
|
||||
expect(config.storage).toEqual({
|
||||
type: DEFAULT_CONFIG_VALUES.STORAGE.TYPE,
|
||||
encoding: DEFAULT_CONFIG_VALUES.STORAGE.ENCODING,
|
||||
enableBackup: false,
|
||||
maxBackups: DEFAULT_CONFIG_VALUES.STORAGE.MAX_BACKUPS,
|
||||
enableCompression: false,
|
||||
atomicOperations: true
|
||||
});
|
||||
|
||||
expect(config.version).toBe(DEFAULT_CONFIG_VALUES.VERSION);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadLocalConfig', () => {
|
||||
it('should load and parse local configuration file', async () => {
|
||||
const mockConfig = {
|
||||
models: { main: 'test-model' },
|
||||
storage: { type: 'api' as const }
|
||||
};
|
||||
|
||||
vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(mockConfig));
|
||||
|
||||
const result = await configLoader.loadLocalConfig();
|
||||
|
||||
expect(fs.readFile).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json',
|
||||
'utf-8'
|
||||
);
|
||||
expect(result).toEqual(mockConfig);
|
||||
});
|
||||
|
||||
it('should return null when config file does not exist', async () => {
|
||||
const error = new Error('File not found') as any;
|
||||
error.code = 'ENOENT';
|
||||
vi.mocked(fs.readFile).mockRejectedValue(error);
|
||||
|
||||
const result = await configLoader.loadLocalConfig();
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should throw TaskMasterError for other file errors', async () => {
|
||||
const error = new Error('Permission denied');
|
||||
vi.mocked(fs.readFile).mockRejectedValue(error);
|
||||
|
||||
await expect(configLoader.loadLocalConfig()).rejects.toThrow(
|
||||
'Failed to load local configuration'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw error for invalid JSON', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValue('invalid json');
|
||||
|
||||
await expect(configLoader.loadLocalConfig()).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadGlobalConfig', () => {
|
||||
it('should return null (not implemented yet)', async () => {
|
||||
const result = await configLoader.loadGlobalConfig();
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasLocalConfig', () => {
|
||||
it('should return true when local config exists', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
|
||||
const result = await configLoader.hasLocalConfig();
|
||||
|
||||
expect(fs.access).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json'
|
||||
);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when local config does not exist', async () => {
|
||||
vi.mocked(fs.access).mockRejectedValue(new Error('Not found'));
|
||||
|
||||
const result = await configLoader.hasLocalConfig();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasGlobalConfig', () => {
|
||||
it('should check global config path', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
|
||||
const result = await configLoader.hasGlobalConfig();
|
||||
|
||||
expect(fs.access).toHaveBeenCalledWith(
|
||||
expect.stringContaining('.taskmaster/config.json')
|
||||
);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when global config does not exist', async () => {
|
||||
vi.mocked(fs.access).mockRejectedValue(new Error('Not found'));
|
||||
|
||||
const result = await configLoader.hasGlobalConfig();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
});
packages/tm-core/src/config/services/config-loader.service.ts (new file, 124 lines)
@@ -0,0 +1,124 @@
/**
 * @fileoverview Configuration Loader Service
 * Responsible for loading configuration from various file sources
 */

import { promises as fs } from 'node:fs';
import path from 'node:path';
import type { PartialConfiguration } from '../../interfaces/configuration.interface.js';
import { DEFAULT_CONFIG_VALUES } from '../../interfaces/configuration.interface.js';
import {
	ERROR_CODES,
	TaskMasterError
} from '../../errors/task-master-error.js';

/**
 * ConfigLoader handles loading configuration from files
 * Single responsibility: File-based configuration loading
 */
export class ConfigLoader {
	private localConfigPath: string;
	private globalConfigPath: string;

	constructor(projectRoot: string) {
		this.localConfigPath = path.join(projectRoot, '.taskmaster', 'config.json');
		this.globalConfigPath = path.join(
			process.env.HOME || '',
			'.taskmaster',
			'config.json'
		);
	}

	/**
	 * Get default configuration values
	 */
	getDefaultConfig(): PartialConfiguration {
		return {
			models: {
				main: DEFAULT_CONFIG_VALUES.MODELS.MAIN,
				fallback: DEFAULT_CONFIG_VALUES.MODELS.FALLBACK
			},
			storage: {
				type: DEFAULT_CONFIG_VALUES.STORAGE.TYPE,
				encoding: DEFAULT_CONFIG_VALUES.STORAGE.ENCODING,
				enableBackup: false,
				maxBackups: DEFAULT_CONFIG_VALUES.STORAGE.MAX_BACKUPS,
				enableCompression: false,
				atomicOperations: true
			},
			version: DEFAULT_CONFIG_VALUES.VERSION
		};
	}

	/**
	 * Load local project configuration
	 */
	async loadLocalConfig(): Promise<PartialConfiguration | null> {
		try {
			const configData = await fs.readFile(this.localConfigPath, 'utf-8');
			return JSON.parse(configData);
		} catch (error: any) {
			if (error.code === 'ENOENT') {
				// File doesn't exist, return null
				console.debug('No local config.json found, using defaults');
				return null;
			}
			throw new TaskMasterError(
				'Failed to load local configuration',
				ERROR_CODES.CONFIG_ERROR,
				{ configPath: this.localConfigPath },
				error
			);
		}
	}

	/**
	 * Load global user configuration
	 * @future-implementation Full implementation pending
	 */
	async loadGlobalConfig(): Promise<PartialConfiguration | null> {
		// TODO: Implement in future PR
		// For now, return null to indicate no global config
		return null;

		// Future implementation:
		// try {
		//   const configData = await fs.readFile(this.globalConfigPath, 'utf-8');
		//   return JSON.parse(configData);
		// } catch (error: any) {
		//   if (error.code === 'ENOENT') {
		//     return null;
		//   }
		//   throw new TaskMasterError(
		//     'Failed to load global configuration',
		//     ERROR_CODES.CONFIG_ERROR,
		//     { configPath: this.globalConfigPath },
		//     error
		//   );
		// }
	}

	/**
	 * Check if local config exists
	 */
	async hasLocalConfig(): Promise<boolean> {
		try {
			await fs.access(this.localConfigPath);
			return true;
		} catch {
			return false;
		}
	}

	/**
	 * Check if global config exists
	 */
	async hasGlobalConfig(): Promise<boolean> {
		try {
			await fs.access(this.globalConfigPath);
			return true;
		} catch {
			return false;
		}
	}
}
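
A quick sketch of exercising the loader on its own (the project path shown is illustrative):

const loader = new ConfigLoader('/path/to/project');
const defaults = loader.getDefaultConfig();      // built-in model/storage defaults
const local = await loader.loadLocalConfig();    // null when .taskmaster/config.json is absent
const hasLocal = await loader.hasLocalConfig();  // true only if the local file exists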
@@ -0,0 +1,237 @@
|
||||
/**
|
||||
* @fileoverview Unit tests for ConfigMerger service
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { ConfigMerger, CONFIG_PRECEDENCE } from './config-merger.service.js';
|
||||
|
||||
describe('ConfigMerger', () => {
|
||||
let merger: ConfigMerger;
|
||||
|
||||
beforeEach(() => {
|
||||
merger = new ConfigMerger();
|
||||
});
|
||||
|
||||
describe('addSource', () => {
|
||||
it('should add configuration source', () => {
|
||||
const source = {
|
||||
name: 'test',
|
||||
config: { test: true },
|
||||
precedence: 1
|
||||
};
|
||||
|
||||
merger.addSource(source);
|
||||
const sources = merger.getSources();
|
||||
|
||||
expect(sources).toHaveLength(1);
|
||||
expect(sources[0]).toEqual(source);
|
||||
});
|
||||
|
||||
it('should add multiple sources', () => {
|
||||
merger.addSource({ name: 'source1', config: {}, precedence: 1 });
|
||||
merger.addSource({ name: 'source2', config: {}, precedence: 2 });
|
||||
|
||||
expect(merger.getSources()).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('clearSources', () => {
|
||||
it('should remove all configuration sources', () => {
|
||||
merger.addSource({ name: 'test', config: {}, precedence: 1 });
|
||||
merger.clearSources();
|
||||
|
||||
expect(merger.getSources()).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('merge', () => {
|
||||
it('should merge configurations based on precedence', () => {
|
||||
merger.addSource({
|
||||
name: 'low',
|
||||
config: { a: 1, b: 2 },
|
||||
precedence: 1
|
||||
});
|
||||
|
||||
merger.addSource({
|
||||
name: 'high',
|
||||
config: { a: 3, c: 4 },
|
||||
precedence: 2
|
||||
});
|
||||
|
||||
const result = merger.merge();
|
||||
|
||||
expect(result).toEqual({
|
||||
a: 3, // High precedence wins
|
||||
b: 2, // Only in low
|
||||
c: 4 // Only in high
|
||||
});
|
||||
});
|
||||
|
||||
it('should deep merge nested objects', () => {
|
||||
merger.addSource({
|
||||
name: 'base',
|
||||
config: {
|
||||
models: { main: 'model1', fallback: 'model2' },
|
||||
storage: { type: 'file' as const }
|
||||
},
|
||||
precedence: 1
|
||||
});
|
||||
|
||||
merger.addSource({
|
||||
name: 'override',
|
||||
config: {
|
||||
models: { main: 'model3' },
|
||||
storage: { encoding: 'utf8' as const }
|
||||
},
|
||||
precedence: 2
|
||||
});
|
||||
|
||||
const result = merger.merge();
|
||||
|
||||
expect(result).toEqual({
|
||||
models: {
|
||||
main: 'model3', // Overridden
|
||||
fallback: 'model2' // Preserved
|
||||
},
|
||||
storage: {
|
||||
type: 'file', // Preserved
|
||||
encoding: 'utf8' // Added
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle arrays by replacement', () => {
|
||||
merger.addSource({
|
||||
name: 'base',
|
||||
config: { items: [1, 2, 3] },
|
||||
precedence: 1
|
||||
});
|
||||
|
||||
merger.addSource({
|
||||
name: 'override',
|
||||
config: { items: [4, 5] },
|
||||
precedence: 2
|
||||
});
|
||||
|
||||
const result = merger.merge();
|
||||
|
||||
expect(result.items).toEqual([4, 5]); // Arrays are replaced, not merged
|
||||
});
|
||||
|
||||
it('should ignore null and undefined values', () => {
|
||||
merger.addSource({
|
||||
name: 'base',
|
||||
config: { a: 1, b: 2 },
|
||||
precedence: 1
|
||||
});
|
||||
|
||||
merger.addSource({
|
||||
name: 'override',
|
||||
config: { a: null, b: undefined, c: 3 } as any,
|
||||
precedence: 2
|
||||
});
|
||||
|
||||
const result = merger.merge();
|
||||
|
||||
expect(result).toEqual({
|
||||
a: 1, // null ignored
|
||||
b: 2, // undefined ignored
|
||||
c: 3 // new value added
|
||||
});
|
||||
});
|
||||
|
||||
it('should return empty object when no sources', () => {
|
||||
const result = merger.merge();
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
|
||||
it('should use CONFIG_PRECEDENCE constants correctly', () => {
|
||||
merger.addSource({
|
||||
name: 'defaults',
|
||||
config: { level: 'default' },
|
||||
precedence: CONFIG_PRECEDENCE.DEFAULTS
|
||||
});
|
||||
|
||||
merger.addSource({
|
||||
name: 'local',
|
||||
config: { level: 'local' },
|
||||
precedence: CONFIG_PRECEDENCE.LOCAL
|
||||
});
|
||||
|
||||
merger.addSource({
|
||||
name: 'environment',
|
||||
config: { level: 'env' },
|
||||
precedence: CONFIG_PRECEDENCE.ENVIRONMENT
|
||||
});
|
||||
|
||||
const result = merger.merge();
|
||||
|
||||
expect(result.level).toBe('env'); // Highest precedence wins
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSources', () => {
|
||||
it('should return sources sorted by precedence (highest first)', () => {
|
||||
merger.addSource({ name: 'low', config: {}, precedence: 1 });
|
||||
merger.addSource({ name: 'high', config: {}, precedence: 3 });
|
||||
merger.addSource({ name: 'medium', config: {}, precedence: 2 });
|
||||
|
||||
const sources = merger.getSources();
|
||||
|
||||
expect(sources[0].name).toBe('high');
|
||||
expect(sources[1].name).toBe('medium');
|
||||
expect(sources[2].name).toBe('low');
|
||||
});
|
||||
|
||||
it('should return a copy of sources array', () => {
|
||||
merger.addSource({ name: 'test', config: {}, precedence: 1 });
|
||||
|
||||
const sources1 = merger.getSources();
|
||||
const sources2 = merger.getSources();
|
||||
|
||||
expect(sources1).not.toBe(sources2); // Different array instances
|
||||
expect(sources1).toEqual(sources2); // Same content
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasSource', () => {
|
||||
it('should return true when source exists', () => {
|
||||
merger.addSource({ name: 'test', config: {}, precedence: 1 });
|
||||
|
||||
expect(merger.hasSource('test')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when source does not exist', () => {
|
||||
expect(merger.hasSource('nonexistent')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeSource', () => {
|
||||
it('should remove source by name and return true', () => {
|
||||
merger.addSource({ name: 'test', config: {}, precedence: 1 });
|
||||
merger.addSource({ name: 'keep', config: {}, precedence: 2 });
|
||||
|
||||
const removed = merger.removeSource('test');
|
||||
|
||||
expect(removed).toBe(true);
|
||||
expect(merger.hasSource('test')).toBe(false);
|
||||
expect(merger.hasSource('keep')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when source does not exist', () => {
|
||||
const removed = merger.removeSource('nonexistent');
|
||||
|
||||
expect(removed).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle removing all sources', () => {
|
||||
merger.addSource({ name: 'test1', config: {}, precedence: 1 });
|
||||
merger.addSource({ name: 'test2', config: {}, precedence: 2 });
|
||||
|
||||
merger.removeSource('test1');
|
||||
merger.removeSource('test2');
|
||||
|
||||
expect(merger.getSources()).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
packages/tm-core/src/config/services/config-merger.service.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
/**
 * @fileoverview Configuration Merger Service
 * Responsible for merging configurations from multiple sources with precedence
 */

import type { PartialConfiguration } from '../../interfaces/configuration.interface.js';

/**
 * Configuration source with precedence
 */
export interface ConfigSource {
	/** Source name for debugging */
	name: string;
	/** Configuration data from this source */
	config: PartialConfiguration;
	/** Precedence level (higher = more important) */
	precedence: number;
}

/**
 * Configuration precedence levels (higher number = higher priority)
 */
export const CONFIG_PRECEDENCE = {
	DEFAULTS: 0,
	GLOBAL: 1, // Reserved for future implementation
	LOCAL: 2,
	ENVIRONMENT: 3
} as const;

/**
 * ConfigMerger handles merging configurations with precedence rules
 * Single responsibility: Configuration merging logic
 */
export class ConfigMerger {
	private configSources: ConfigSource[] = [];

	/**
	 * Add a configuration source
	 */
	addSource(source: ConfigSource): void {
		this.configSources.push(source);
	}

	/**
	 * Clear all configuration sources
	 */
	clearSources(): void {
		this.configSources = [];
	}

	/**
	 * Merge all configuration sources based on precedence
	 */
	merge(): PartialConfiguration {
		// Sort sources by precedence (lowest first)
		const sortedSources = [...this.configSources].sort(
			(a, b) => a.precedence - b.precedence
		);

		// Merge from lowest to highest precedence
		let merged: PartialConfiguration = {};
		for (const source of sortedSources) {
			merged = this.deepMerge(merged, source.config);
		}

		return merged;
	}

	/**
	 * Deep merge two configuration objects
	 * Higher precedence values override lower ones
	 */
	private deepMerge(target: any, source: any): any {
		if (!source) return target;
		if (!target) return source;

		const result = { ...target };

		for (const key in source) {
			if (source[key] === null || source[key] === undefined) {
				continue;
			}

			if (typeof source[key] === 'object' && !Array.isArray(source[key])) {
				result[key] = this.deepMerge(result[key] || {}, source[key]);
			} else {
				result[key] = source[key];
			}
		}

		return result;
	}

	/**
	 * Get configuration sources for debugging
	 */
	getSources(): ConfigSource[] {
		return [...this.configSources].sort((a, b) => b.precedence - a.precedence);
	}

	/**
	 * Check if a source exists
	 */
	hasSource(name: string): boolean {
		return this.configSources.some((source) => source.name === name);
	}

	/**
	 * Remove a source by name
	 */
	removeSource(name: string): boolean {
		const initialLength = this.configSources.length;
		this.configSources = this.configSources.filter(
			(source) => source.name !== name
		);
		return this.configSources.length < initialLength;
	}
}
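
For reference, a minimal sketch (using only the exports above) of how deep-merge precedence resolves when defaults and a local project config are combined:

const merger = new ConfigMerger();
merger.addSource({
	name: 'defaults',
	config: { models: { main: 'default-model', fallback: 'fallback-model' } },
	precedence: CONFIG_PRECEDENCE.DEFAULTS
});
merger.addSource({
	name: 'local',
	config: { models: { main: 'local-model' } },
	precedence: CONFIG_PRECEDENCE.LOCAL
});

// Nested keys the higher-precedence source does not set are preserved:
// merger.merge() → { models: { main: 'local-model', fallback: 'fallback-model' } }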
@@ -0,0 +1,316 @@
|
||||
/**
|
||||
* @fileoverview Unit tests for ConfigPersistence service
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import { ConfigPersistence } from './config-persistence.service.js';
|
||||
|
||||
vi.mock('node:fs', () => ({
|
||||
promises: {
|
||||
readFile: vi.fn(),
|
||||
writeFile: vi.fn(),
|
||||
mkdir: vi.fn(),
|
||||
unlink: vi.fn(),
|
||||
access: vi.fn(),
|
||||
readdir: vi.fn(),
|
||||
rename: vi.fn()
|
||||
}
|
||||
}));
|
||||
|
||||
describe('ConfigPersistence', () => {
|
||||
let persistence: ConfigPersistence;
|
||||
const testProjectRoot = '/test/project';
|
||||
|
||||
beforeEach(() => {
|
||||
persistence = new ConfigPersistence(testProjectRoot);
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('saveConfig', () => {
|
||||
const mockConfig = {
|
||||
models: { main: 'test-model' },
|
||||
storage: { type: 'file' as const }
|
||||
};
|
||||
|
||||
it('should save configuration to file', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await persistence.saveConfig(mockConfig);
|
||||
|
||||
expect(fs.mkdir).toHaveBeenCalledWith('/test/project/.taskmaster', {
|
||||
recursive: true
|
||||
});
|
||||
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json',
|
||||
JSON.stringify(mockConfig, null, 2),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should use atomic write when specified', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.rename).mockResolvedValue(undefined);
|
||||
|
||||
await persistence.saveConfig(mockConfig, { atomic: true });
|
||||
|
||||
// Should write to temp file first
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json.tmp',
|
||||
JSON.stringify(mockConfig, null, 2),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
// Then rename to final location
|
||||
expect(fs.rename).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json.tmp',
|
||||
'/test/project/.taskmaster/config.json'
|
||||
);
|
||||
});
|
||||
|
||||
it('should create backup when requested', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined); // Config exists
|
||||
vi.mocked(fs.readFile).mockResolvedValue('{"old": "config"}');
|
||||
vi.mocked(fs.readdir).mockResolvedValue([]);
|
||||
|
||||
await persistence.saveConfig(mockConfig, { createBackup: true });
|
||||
|
||||
// Should create backup directory
|
||||
expect(fs.mkdir).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/backups',
|
||||
{ recursive: true }
|
||||
);
|
||||
|
||||
// Should read existing config for backup
|
||||
expect(fs.readFile).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json',
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
// Should write backup file
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringContaining('/test/project/.taskmaster/backups/config-'),
|
||||
'{"old": "config"}',
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should not create backup if config does not exist', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.access).mockRejectedValue(new Error('Not found'));
|
||||
|
||||
await persistence.saveConfig(mockConfig, { createBackup: true });
|
||||
|
||||
// Should not read or create backup
|
||||
expect(fs.readFile).not.toHaveBeenCalled();
|
||||
expect(fs.writeFile).toHaveBeenCalledTimes(1); // Only the main config
|
||||
});
|
||||
|
||||
it('should throw TaskMasterError on save failure', async () => {
|
||||
vi.mocked(fs.mkdir).mockRejectedValue(new Error('Disk full'));
|
||||
|
||||
await expect(persistence.saveConfig(mockConfig)).rejects.toThrow(
|
||||
'Failed to save configuration'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('configExists', () => {
|
||||
it('should return true when config exists', async () => {
|
||||
vi.mocked(fs.access).mockResolvedValue(undefined);
|
||||
|
||||
const exists = await persistence.configExists();
|
||||
|
||||
expect(fs.access).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/config.json'
|
||||
);
|
||||
expect(exists).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when config does not exist', async () => {
|
||||
vi.mocked(fs.access).mockRejectedValue(new Error('Not found'));
|
||||
|
||||
			const exists = await persistence.configExists();

			expect(exists).toBe(false);
		});
	});

	describe('deleteConfig', () => {
		it('should delete configuration file', async () => {
			vi.mocked(fs.unlink).mockResolvedValue(undefined);

			await persistence.deleteConfig();

			expect(fs.unlink).toHaveBeenCalledWith(
				'/test/project/.taskmaster/config.json'
			);
		});

		it('should not throw when file does not exist', async () => {
			const error = new Error('File not found') as any;
			error.code = 'ENOENT';
			vi.mocked(fs.unlink).mockRejectedValue(error);

			await expect(persistence.deleteConfig()).resolves.not.toThrow();
		});

		it('should throw TaskMasterError for other errors', async () => {
			vi.mocked(fs.unlink).mockRejectedValue(new Error('Permission denied'));

			await expect(persistence.deleteConfig()).rejects.toThrow(
				'Failed to delete configuration'
			);
		});
	});

	describe('getBackups', () => {
		it('should return list of backup files sorted newest first', async () => {
			vi.mocked(fs.readdir).mockResolvedValue([
				'config-2024-01-01T10-00-00-000Z.json',
				'config-2024-01-02T10-00-00-000Z.json',
				'config-2024-01-03T10-00-00-000Z.json',
				'other-file.txt'
			] as any);

			const backups = await persistence.getBackups();

			expect(fs.readdir).toHaveBeenCalledWith(
				'/test/project/.taskmaster/backups'
			);
			expect(backups).toEqual([
				'config-2024-01-03T10-00-00-000Z.json',
				'config-2024-01-02T10-00-00-000Z.json',
				'config-2024-01-01T10-00-00-000Z.json'
			]);
		});

		it('should return empty array when backup directory does not exist', async () => {
			vi.mocked(fs.readdir).mockRejectedValue(new Error('Not found'));

			const backups = await persistence.getBackups();

			expect(backups).toEqual([]);
		});

		it('should filter out non-backup files', async () => {
			vi.mocked(fs.readdir).mockResolvedValue([
				'config-2024-01-01T10-00-00-000Z.json',
				'README.md',
				'.DS_Store',
				'config.json',
				'config-backup.json' // Wrong format
			] as any);

			const backups = await persistence.getBackups();

			expect(backups).toEqual(['config-2024-01-01T10-00-00-000Z.json']);
		});
	});

	describe('restoreFromBackup', () => {
		const backupFile = 'config-2024-01-01T10-00-00-000Z.json';
		const backupContent = '{"restored": "config"}';

		it('should restore configuration from backup', async () => {
			vi.mocked(fs.readFile).mockResolvedValue(backupContent);
			vi.mocked(fs.writeFile).mockResolvedValue(undefined);

			await persistence.restoreFromBackup(backupFile);

			expect(fs.readFile).toHaveBeenCalledWith(
				`/test/project/.taskmaster/backups/${backupFile}`,
				'utf-8'
			);
			expect(fs.writeFile).toHaveBeenCalledWith(
				'/test/project/.taskmaster/config.json',
				backupContent,
				'utf-8'
			);
		});

		it('should throw TaskMasterError when backup file not found', async () => {
			vi.mocked(fs.readFile).mockRejectedValue(new Error('File not found'));

			await expect(
				persistence.restoreFromBackup('nonexistent.json')
			).rejects.toThrow('Failed to restore from backup');
		});

		it('should throw TaskMasterError on write failure', async () => {
			vi.mocked(fs.readFile).mockResolvedValue(backupContent);
			vi.mocked(fs.writeFile).mockRejectedValue(new Error('Disk full'));

			await expect(persistence.restoreFromBackup(backupFile)).rejects.toThrow(
				'Failed to restore from backup'
			);
		});
	});

	describe('backup management', () => {
		it('should clean old backups when limit exceeded', async () => {
			vi.mocked(fs.mkdir).mockResolvedValue(undefined);
			vi.mocked(fs.writeFile).mockResolvedValue(undefined);
			vi.mocked(fs.access).mockResolvedValue(undefined);
			vi.mocked(fs.readFile).mockResolvedValue('{"old": "config"}');
			vi.mocked(fs.unlink).mockResolvedValue(undefined);

			// Mock 7 existing backups
			vi.mocked(fs.readdir).mockResolvedValue([
				'config-2024-01-01T10-00-00-000Z.json',
				'config-2024-01-02T10-00-00-000Z.json',
				'config-2024-01-03T10-00-00-000Z.json',
				'config-2024-01-04T10-00-00-000Z.json',
				'config-2024-01-05T10-00-00-000Z.json',
				'config-2024-01-06T10-00-00-000Z.json',
				'config-2024-01-07T10-00-00-000Z.json'
			] as any);

			await persistence.saveConfig({}, { createBackup: true });

			// Should delete oldest backups (keeping 5)
			expect(fs.unlink).toHaveBeenCalledWith(
				'/test/project/.taskmaster/backups/config-2024-01-01T10-00-00-000Z.json'
			);
			expect(fs.unlink).toHaveBeenCalledWith(
				'/test/project/.taskmaster/backups/config-2024-01-02T10-00-00-000Z.json'
			);
		});

		it('should handle backup cleanup errors gracefully', async () => {
			vi.mocked(fs.mkdir).mockResolvedValue(undefined);
			vi.mocked(fs.writeFile).mockResolvedValue(undefined);
			vi.mocked(fs.access).mockResolvedValue(undefined);
			vi.mocked(fs.readFile).mockResolvedValue('{"old": "config"}');
			vi.mocked(fs.readdir).mockResolvedValue(['config-old.json'] as any);
			vi.mocked(fs.unlink).mockRejectedValue(new Error('Permission denied'));

			// Mock console.warn to verify it's called
			const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});

			// Should not throw even if cleanup fails
			await expect(
				persistence.saveConfig({}, { createBackup: true })
			).resolves.not.toThrow();

			expect(warnSpy).toHaveBeenCalledWith(
				'Failed to clean old backups:',
				expect.any(Error)
			);

			warnSpy.mockRestore();
		});
	});
});
@@ -0,0 +1,186 @@
|
||||
/**
|
||||
* @fileoverview Configuration Persistence Service
|
||||
* Handles saving and backup of configuration files
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import type { PartialConfiguration } from '../../interfaces/configuration.interface.js';
|
||||
import {
|
||||
ERROR_CODES,
|
||||
TaskMasterError
|
||||
} from '../../errors/task-master-error.js';
|
||||
|
||||
/**
|
||||
* Persistence options
|
||||
*/
|
||||
export interface PersistenceOptions {
|
||||
/** Enable backup before saving */
|
||||
createBackup?: boolean;
|
||||
/** Maximum number of backups to keep */
|
||||
maxBackups?: number;
|
||||
/** Use atomic write operations */
|
||||
atomic?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* ConfigPersistence handles all configuration file I/O operations
|
||||
* Single responsibility: Configuration persistence
|
||||
*/
|
||||
export class ConfigPersistence {
|
||||
private localConfigPath: string;
|
||||
private backupDir: string;
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.localConfigPath = path.join(projectRoot, '.taskmaster', 'config.json');
|
||||
this.backupDir = path.join(projectRoot, '.taskmaster', 'backups');
|
||||
}
|
||||
|
||||
/**
|
||||
* Save configuration to file
|
||||
*/
|
||||
async saveConfig(
|
||||
config: PartialConfiguration,
|
||||
options: PersistenceOptions = {}
|
||||
): Promise<void> {
|
||||
const { createBackup = false, atomic = true } = options;
|
||||
|
||||
try {
|
||||
// Create backup if requested
|
||||
if (createBackup && (await this.configExists())) {
|
||||
await this.createBackup();
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
const configDir = path.dirname(this.localConfigPath);
|
||||
await fs.mkdir(configDir, { recursive: true });
|
||||
|
||||
const jsonContent = JSON.stringify(config, null, 2);
|
||||
|
||||
if (atomic) {
|
||||
// Atomic write: write to temp file then rename
|
||||
const tempPath = `${this.localConfigPath}.tmp`;
|
||||
await fs.writeFile(tempPath, jsonContent, 'utf-8');
|
||||
await fs.rename(tempPath, this.localConfigPath);
|
||||
} else {
|
||||
// Direct write
|
||||
await fs.writeFile(this.localConfigPath, jsonContent, 'utf-8');
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to save configuration',
|
||||
ERROR_CODES.CONFIG_ERROR,
|
||||
{ configPath: this.localConfigPath },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a backup of the current configuration
|
||||
*/
|
||||
private async createBackup(): Promise<string> {
|
||||
try {
|
||||
await fs.mkdir(this.backupDir, { recursive: true });
|
||||
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const backupPath = path.join(this.backupDir, `config-${timestamp}.json`);
|
||||
|
||||
const configContent = await fs.readFile(this.localConfigPath, 'utf-8');
|
||||
await fs.writeFile(backupPath, configContent, 'utf-8');
|
||||
|
||||
// Clean old backups
|
||||
await this.cleanOldBackups();
|
||||
|
||||
return backupPath;
|
||||
} catch (error) {
|
||||
console.warn('Failed to create backup:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean old backup files
|
||||
*/
|
||||
private async cleanOldBackups(maxBackups = 5): Promise<void> {
|
||||
try {
|
||||
const files = await fs.readdir(this.backupDir);
|
||||
const backupFiles = files
|
||||
.filter((f) => f.startsWith('config-') && f.endsWith('.json'))
|
||||
.sort()
|
||||
.reverse();
|
||||
|
||||
// Remove old backups
|
||||
const toDelete = backupFiles.slice(maxBackups);
|
||||
for (const file of toDelete) {
|
||||
await fs.unlink(path.join(this.backupDir, file));
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to clean old backups:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if config file exists
|
||||
*/
|
||||
async configExists(): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(this.localConfigPath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete configuration file
|
||||
*/
|
||||
async deleteConfig(): Promise<void> {
|
||||
try {
|
||||
await fs.unlink(this.localConfigPath);
|
||||
} catch (error: any) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw new TaskMasterError(
|
||||
'Failed to delete configuration',
|
||||
ERROR_CODES.CONFIG_ERROR,
|
||||
{ configPath: this.localConfigPath },
|
||||
error
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of available backups
|
||||
*/
|
||||
async getBackups(): Promise<string[]> {
|
||||
try {
|
||||
const files = await fs.readdir(this.backupDir);
|
||||
return files
|
||||
.filter((f) => f.startsWith('config-') && f.endsWith('.json'))
|
||||
.sort()
|
||||
.reverse();
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore from a backup
|
||||
*/
|
||||
async restoreFromBackup(backupFile: string): Promise<void> {
|
||||
const backupPath = path.join(this.backupDir, backupFile);
|
||||
|
||||
try {
|
||||
const backupContent = await fs.readFile(backupPath, 'utf-8');
|
||||
await fs.writeFile(this.localConfigPath, backupContent, 'utf-8');
|
||||
} catch (error) {
|
||||
throw new TaskMasterError(
|
||||
'Failed to restore from backup',
|
||||
ERROR_CODES.CONFIG_ERROR,
|
||||
{ backupPath },
|
||||
error as Error
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
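Editor's note: for orientation, this is a minimal sketch of how ConfigPersistence is intended to be driven. It is not part of the commit; the project root, the model names, and the relative import path are placeholders, and only methods defined in the file above are used.

import { ConfigPersistence } from './services/config-persistence.service.js';

async function persistExample() {
	// Hypothetical project root; in tm-core this would come from the caller.
	const persistence = new ConfigPersistence('/path/to/project');

	// Atomic write, taking a timestamped backup of the previous config.json first.
	await persistence.saveConfig(
		{ models: { main: 'main-model', fallback: 'fallback-model' } },
		{ createBackup: true }
	);

	// Backups come back newest first; roll back to the most recent one.
	const backups = await persistence.getBackups();
	if (backups.length > 0) {
		await persistence.restoreFromBackup(backups[0]);
	}
}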
@@ -0,0 +1,343 @@
|
||||
/**
|
||||
* @fileoverview Unit tests for EnvironmentConfigProvider service
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
||||
import { EnvironmentConfigProvider } from './environment-config-provider.service.js';
|
||||
|
||||
describe('EnvironmentConfigProvider', () => {
|
||||
let provider: EnvironmentConfigProvider;
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear all TASKMASTER_ env vars
|
||||
Object.keys(process.env).forEach((key) => {
|
||||
if (key.startsWith('TASKMASTER_')) {
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
provider = new EnvironmentConfigProvider();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
describe('loadConfig', () => {
|
||||
it('should load configuration from environment variables', () => {
|
||||
process.env.TASKMASTER_STORAGE_TYPE = 'api';
|
||||
process.env.TASKMASTER_API_ENDPOINT = 'https://api.example.com';
|
||||
process.env.TASKMASTER_MODEL_MAIN = 'gpt-4';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({
|
||||
storage: {
|
||||
type: 'api',
|
||||
apiEndpoint: 'https://api.example.com'
|
||||
},
|
||||
models: {
|
||||
main: 'gpt-4'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should return empty object when no env vars are set', () => {
|
||||
const config = provider.loadConfig();
|
||||
expect(config).toEqual({});
|
||||
});
|
||||
|
||||
it('should skip runtime state variables', () => {
|
||||
process.env.TASKMASTER_TAG = 'feature-branch';
|
||||
process.env.TASKMASTER_MODEL_MAIN = 'claude-3';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({
|
||||
models: { main: 'claude-3' }
|
||||
});
|
||||
expect(config).not.toHaveProperty('activeTag');
|
||||
});
|
||||
|
||||
it('should validate storage type values', () => {
|
||||
// Mock console.warn to check validation
|
||||
const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
||||
process.env.TASKMASTER_STORAGE_TYPE = 'invalid';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({});
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
'Invalid value for TASKMASTER_STORAGE_TYPE: invalid'
|
||||
);
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should accept valid storage type values', () => {
|
||||
process.env.TASKMASTER_STORAGE_TYPE = 'file';
|
||||
let config = provider.loadConfig();
|
||||
expect(config.storage?.type).toBe('file');
|
||||
|
||||
process.env.TASKMASTER_STORAGE_TYPE = 'api';
|
||||
provider = new EnvironmentConfigProvider(); // Reset provider
|
||||
config = provider.loadConfig();
|
||||
expect(config.storage?.type).toBe('api');
|
||||
});
|
||||
|
||||
it('should handle nested configuration paths', () => {
|
||||
process.env.TASKMASTER_MODEL_MAIN = 'model1';
|
||||
process.env.TASKMASTER_MODEL_RESEARCH = 'model2';
|
||||
process.env.TASKMASTER_MODEL_FALLBACK = 'model3';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({
|
||||
models: {
|
||||
main: 'model1',
|
||||
research: 'model2',
|
||||
fallback: 'model3'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle custom response language', () => {
|
||||
process.env.TASKMASTER_RESPONSE_LANGUAGE = 'Spanish';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({
|
||||
custom: {
|
||||
responseLanguage: 'Spanish'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore empty string values', () => {
|
||||
process.env.TASKMASTER_MODEL_MAIN = '';
|
||||
process.env.TASKMASTER_MODEL_FALLBACK = 'fallback-model';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({
|
||||
models: {
|
||||
fallback: 'fallback-model'
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRuntimeState', () => {
|
||||
it('should extract runtime state variables', () => {
|
||||
process.env.TASKMASTER_TAG = 'develop';
|
||||
process.env.TASKMASTER_MODEL_MAIN = 'model'; // Should not be included
|
||||
|
||||
const state = provider.getRuntimeState();
|
||||
|
||||
expect(state).toEqual({
|
||||
activeTag: 'develop'
|
||||
});
|
||||
});
|
||||
|
||||
it('should return empty object when no runtime state vars', () => {
|
||||
process.env.TASKMASTER_MODEL_MAIN = 'model';
|
||||
|
||||
const state = provider.getRuntimeState();
|
||||
|
||||
expect(state).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasEnvVar', () => {
|
||||
it('should return true when env var exists', () => {
|
||||
process.env.TASKMASTER_MODEL_MAIN = 'test';
|
||||
|
||||
expect(provider.hasEnvVar('TASKMASTER_MODEL_MAIN')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when env var does not exist', () => {
|
||||
expect(provider.hasEnvVar('TASKMASTER_NONEXISTENT')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for undefined values', () => {
|
||||
process.env.TASKMASTER_TEST = undefined as any;
|
||||
|
||||
expect(provider.hasEnvVar('TASKMASTER_TEST')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllTaskmasterEnvVars', () => {
|
||||
it('should return all TASKMASTER_ prefixed variables', () => {
|
||||
process.env.TASKMASTER_VAR1 = 'value1';
|
||||
process.env.TASKMASTER_VAR2 = 'value2';
|
||||
process.env.OTHER_VAR = 'other';
|
||||
process.env.TASK_MASTER = 'wrong-prefix';
|
||||
|
||||
const vars = provider.getAllTaskmasterEnvVars();
|
||||
|
||||
expect(vars).toEqual({
|
||||
TASKMASTER_VAR1: 'value1',
|
||||
TASKMASTER_VAR2: 'value2'
|
||||
});
|
||||
});
|
||||
|
||||
it('should return empty object when no TASKMASTER_ vars', () => {
|
||||
process.env.OTHER_VAR = 'value';
|
||||
|
||||
const vars = provider.getAllTaskmasterEnvVars();
|
||||
|
||||
expect(vars).toEqual({});
|
||||
});
|
||||
|
||||
it('should filter out undefined values', () => {
|
||||
process.env.TASKMASTER_DEFINED = 'value';
|
||||
process.env.TASKMASTER_UNDEFINED = undefined as any;
|
||||
|
||||
const vars = provider.getAllTaskmasterEnvVars();
|
||||
|
||||
expect(vars).toEqual({
|
||||
TASKMASTER_DEFINED: 'value'
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('custom mappings', () => {
|
||||
it('should use custom mappings when provided', () => {
|
||||
const customMappings = [{ env: 'CUSTOM_VAR', path: ['custom', 'value'] }];
|
||||
|
||||
const customProvider = new EnvironmentConfigProvider(customMappings);
|
||||
process.env.CUSTOM_VAR = 'test-value';
|
||||
|
||||
const config = customProvider.loadConfig();
|
||||
|
||||
expect(config).toEqual({
|
||||
custom: {
|
||||
value: 'test-value'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should add new mapping with addMapping', () => {
|
||||
process.env.NEW_MAPPING = 'new-value';
|
||||
|
||||
provider.addMapping({
|
||||
env: 'NEW_MAPPING',
|
||||
path: ['new', 'mapping']
|
||||
});
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toHaveProperty('new.mapping', 'new-value');
|
||||
});
|
||||
|
||||
it('should return current mappings with getMappings', () => {
|
||||
const mappings = provider.getMappings();
|
||||
|
||||
expect(mappings).toBeInstanceOf(Array);
|
||||
expect(mappings.length).toBeGreaterThan(0);
|
||||
|
||||
// Check for some expected mappings
|
||||
const envNames = mappings.map((m) => m.env);
|
||||
expect(envNames).toContain('TASKMASTER_STORAGE_TYPE');
|
||||
expect(envNames).toContain('TASKMASTER_MODEL_MAIN');
|
||||
expect(envNames).toContain('TASKMASTER_TAG');
|
||||
});
|
||||
|
||||
it('should return copy of mappings array', () => {
|
||||
const mappings1 = provider.getMappings();
|
||||
const mappings2 = provider.getMappings();
|
||||
|
||||
expect(mappings1).not.toBe(mappings2); // Different instances
|
||||
expect(mappings1).toEqual(mappings2); // Same content
|
||||
});
|
||||
});
|
||||
|
||||
describe('validation', () => {
|
||||
it('should validate values when validator is provided', () => {
|
||||
const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
||||
process.env.TASKMASTER_STORAGE_TYPE = 'database'; // Invalid
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config).toEqual({});
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
'Invalid value for TASKMASTER_STORAGE_TYPE: database'
|
||||
);
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should accept values that pass validation', () => {
|
||||
process.env.TASKMASTER_STORAGE_TYPE = 'file';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config.storage?.type).toBe('file');
|
||||
});
|
||||
|
||||
		it('should work with custom validators', () => {
			// Declared with `let` because the provider is re-created below after
			// CUSTOM_NUMBER changes (a `const` here would not compile).
			let customProvider = new EnvironmentConfigProvider([
				{
					env: 'CUSTOM_NUMBER',
					path: ['custom', 'number'],
					validate: (v) => !isNaN(Number(v))
				}
			]);

			process.env.CUSTOM_NUMBER = '123';
			let config = customProvider.loadConfig();
			expect(config.custom?.number).toBe('123');

			process.env.CUSTOM_NUMBER = 'not-a-number';
			const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
			customProvider = new EnvironmentConfigProvider([
				{
					env: 'CUSTOM_NUMBER',
					path: ['custom', 'number'],
					validate: (v) => !isNaN(Number(v))
				}
			]);
			config = customProvider.loadConfig();
			expect(config).toEqual({});
			expect(warnSpy).toHaveBeenCalled();

			warnSpy.mockRestore();
		});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle special characters in values', () => {
|
||||
process.env.TASKMASTER_API_ENDPOINT =
|
||||
'https://api.example.com/v1?key=abc&token=xyz';
|
||||
process.env.TASKMASTER_API_TOKEN = 'Bearer abc123!@#$%^&*()';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config.storage?.apiEndpoint).toBe(
|
||||
'https://api.example.com/v1?key=abc&token=xyz'
|
||||
);
|
||||
expect(config.storage?.apiAccessToken).toBe('Bearer abc123!@#$%^&*()');
|
||||
});
|
||||
|
||||
it('should handle whitespace in values', () => {
|
||||
process.env.TASKMASTER_MODEL_MAIN = ' claude-3 ';
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
// Note: We're not trimming, preserving the value as-is
|
||||
expect(config.models?.main).toBe(' claude-3 ');
|
||||
});
|
||||
|
||||
it('should handle very long values', () => {
|
||||
const longValue = 'a'.repeat(10000);
|
||||
process.env.TASKMASTER_API_TOKEN = longValue;
|
||||
|
||||
const config = provider.loadConfig();
|
||||
|
||||
expect(config.storage?.apiAccessToken).toBe(longValue);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,166 @@
|
||||
/**
 * @fileoverview Environment Configuration Provider
 * Extracts configuration from environment variables
 */

import type { PartialConfiguration } from '../../interfaces/configuration.interface.js';

/**
 * Environment variable mapping definition
 */
interface EnvMapping {
	/** Environment variable name */
	env: string;
	/** Path in configuration object */
	path: readonly string[];
	/** Optional validator function */
	validate?: (value: string) => boolean;
	/** Whether this is runtime state (not configuration) */
	isRuntimeState?: boolean;
}

/**
 * EnvironmentConfigProvider extracts configuration from environment variables
 * Single responsibility: Environment variable configuration extraction
 */
export class EnvironmentConfigProvider {
	/**
	 * Default environment variable mappings
	 */
	private static readonly DEFAULT_MAPPINGS: EnvMapping[] = [
		{
			env: 'TASKMASTER_STORAGE_TYPE',
			path: ['storage', 'type'],
			validate: (v: string) => ['file', 'api'].includes(v)
		},
		{ env: 'TASKMASTER_API_ENDPOINT', path: ['storage', 'apiEndpoint'] },
		{ env: 'TASKMASTER_API_TOKEN', path: ['storage', 'apiAccessToken'] },
		{ env: 'TASKMASTER_MODEL_MAIN', path: ['models', 'main'] },
		{ env: 'TASKMASTER_MODEL_RESEARCH', path: ['models', 'research'] },
		{ env: 'TASKMASTER_MODEL_FALLBACK', path: ['models', 'fallback'] },
		{
			env: 'TASKMASTER_RESPONSE_LANGUAGE',
			path: ['custom', 'responseLanguage']
		}
	];

	/**
	 * Runtime state mappings (separate from configuration)
	 */
	private static readonly RUNTIME_STATE_MAPPINGS: EnvMapping[] = [
		{ env: 'TASKMASTER_TAG', path: ['activeTag'], isRuntimeState: true }
	];

	private mappings: EnvMapping[];

	constructor(customMappings?: EnvMapping[]) {
		this.mappings = customMappings || [
			...EnvironmentConfigProvider.DEFAULT_MAPPINGS,
			...EnvironmentConfigProvider.RUNTIME_STATE_MAPPINGS
		];
	}

	/**
	 * Load configuration from environment variables
	 */
	loadConfig(): PartialConfiguration {
		const config: PartialConfiguration = {};

		for (const mapping of this.mappings) {
			// Skip runtime state variables
			if (mapping.isRuntimeState) continue;

			const value = process.env[mapping.env];
			if (!value) continue;

			// Validate value if validator is provided
			if (mapping.validate && !mapping.validate(value)) {
				console.warn(`Invalid value for ${mapping.env}: ${value}`);
				continue;
			}

			// Set the value in the config object
			this.setNestedProperty(config, mapping.path, value);
		}

		return config;
	}

	/**
	 * Get runtime state from environment variables
	 */
	getRuntimeState(): Record<string, string> {
		const state: Record<string, string> = {};

		for (const mapping of this.mappings) {
			if (!mapping.isRuntimeState) continue;

			const value = process.env[mapping.env];
			if (value) {
				const key = mapping.path[mapping.path.length - 1];
				state[key] = value;
			}
		}

		return state;
	}

	/**
	 * Helper to set a nested property in an object
	 */
	private setNestedProperty(
		obj: any,
		path: readonly string[],
		value: any
	): void {
		const lastKey = path[path.length - 1];
		const keys = path.slice(0, -1);

		let current = obj;
		for (const key of keys) {
			if (!current[key]) {
				current[key] = {};
			}
			current = current[key];
		}

		current[lastKey] = value;
	}

	/**
	 * Check if an environment variable is set
	 */
	hasEnvVar(envName: string): boolean {
		return envName in process.env && process.env[envName] !== undefined;
	}

	/**
	 * Get all environment variables that match our prefix
	 */
	getAllTaskmasterEnvVars(): Record<string, string> {
		const vars: Record<string, string> = {};
		const prefix = 'TASKMASTER_';

		for (const [key, value] of Object.entries(process.env)) {
			if (key.startsWith(prefix) && value !== undefined) {
				vars[key] = value;
			}
		}

		return vars;
	}

	/**
	 * Add a custom mapping
	 */
	addMapping(mapping: EnvMapping): void {
		this.mappings.push(mapping);
	}

	/**
	 * Get current mappings
	 */
	getMappings(): EnvMapping[] {
		return [...this.mappings];
	}
}
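Editor's note: a short sketch of how the provider resolves TASKMASTER_ variables. The environment values below are made up for illustration; the expected outputs follow directly from the default mappings defined above.

import { EnvironmentConfigProvider } from './services/environment-config-provider.service.js';

// Hypothetical environment for the example
process.env.TASKMASTER_STORAGE_TYPE = 'api';
process.env.TASKMASTER_MODEL_MAIN = 'claude-3';
process.env.TASKMASTER_TAG = 'feature-x';

const provider = new EnvironmentConfigProvider();

// Configuration values only; TASKMASTER_TAG is runtime state and is skipped here.
console.log(provider.loadConfig());
// -> { storage: { type: 'api' }, models: { main: 'claude-3' } }

// Runtime state is extracted separately.
console.log(provider.getRuntimeState());
// -> { activeTag: 'feature-x' }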
20
packages/tm-core/src/config/services/index.ts
Normal file
@@ -0,0 +1,20 @@
/**
 * @fileoverview Configuration services exports
 * Export all configuration-related services
 */

export { ConfigLoader } from './config-loader.service.js';
export {
	ConfigMerger,
	CONFIG_PRECEDENCE,
	type ConfigSource
} from './config-merger.service.js';
export {
	RuntimeStateManager,
	type RuntimeState
} from './runtime-state-manager.service.js';
export {
	ConfigPersistence,
	type PersistenceOptions
} from './config-persistence.service.js';
export { EnvironmentConfigProvider } from './environment-config-provider.service.js';
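Editor's note: a rough composition sketch using the barrel export, not part of the commit. It only touches services whose implementations appear in this diff; ConfigLoader and ConfigMerger are deliberately omitted because their source is not in this excerpt, so calling them here would be guesswork. The merge step is therefore elided.

import {
	ConfigPersistence,
	EnvironmentConfigProvider,
	RuntimeStateManager
} from './services/index.js';

async function bootstrap(projectRoot: string) {
	const envProvider = new EnvironmentConfigProvider();
	const stateManager = new RuntimeStateManager(projectRoot);
	const persistence = new ConfigPersistence(projectRoot);

	const envConfig = envProvider.loadConfig();
	const state = await stateManager.loadState();

	// Persist whatever config the caller decides is effective (merging elided here).
	await persistence.saveConfig(envConfig, { atomic: true });

	return { envConfig, activeTag: state.activeTag };
}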
@@ -0,0 +1,272 @@
|
||||
/**
|
||||
* @fileoverview Unit tests for RuntimeStateManager service
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import { RuntimeStateManager } from './runtime-state-manager.service.js';
|
||||
import { DEFAULT_CONFIG_VALUES } from '../../interfaces/configuration.interface.js';
|
||||
|
||||
vi.mock('node:fs', () => ({
|
||||
promises: {
|
||||
readFile: vi.fn(),
|
||||
writeFile: vi.fn(),
|
||||
mkdir: vi.fn(),
|
||||
unlink: vi.fn()
|
||||
}
|
||||
}));
|
||||
|
||||
describe('RuntimeStateManager', () => {
|
||||
let stateManager: RuntimeStateManager;
|
||||
const testProjectRoot = '/test/project';
|
||||
|
||||
beforeEach(() => {
|
||||
stateManager = new RuntimeStateManager(testProjectRoot);
|
||||
vi.clearAllMocks();
|
||||
// Clear environment variables
|
||||
delete process.env.TASKMASTER_TAG;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
delete process.env.TASKMASTER_TAG;
|
||||
});
|
||||
|
||||
describe('loadState', () => {
|
||||
it('should load state from file', async () => {
|
||||
const mockState = {
|
||||
activeTag: 'feature-branch',
|
||||
lastUpdated: '2024-01-01T00:00:00.000Z',
|
||||
metadata: { test: 'data' }
|
||||
};
|
||||
|
||||
vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(mockState));
|
||||
|
||||
const state = await stateManager.loadState();
|
||||
|
||||
expect(fs.readFile).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/state.json',
|
||||
'utf-8'
|
||||
);
|
||||
expect(state.activeTag).toBe('feature-branch');
|
||||
expect(state.metadata).toEqual({ test: 'data' });
|
||||
});
|
||||
|
||||
it('should override with environment variable if set', async () => {
|
||||
const mockState = { activeTag: 'file-tag' };
|
||||
vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(mockState));
|
||||
|
||||
process.env.TASKMASTER_TAG = 'env-tag';
|
||||
|
||||
const state = await stateManager.loadState();
|
||||
|
||||
expect(state.activeTag).toBe('env-tag');
|
||||
});
|
||||
|
||||
it('should use default state when file does not exist', async () => {
|
||||
const error = new Error('File not found') as any;
|
||||
error.code = 'ENOENT';
|
||||
vi.mocked(fs.readFile).mockRejectedValue(error);
|
||||
|
||||
const state = await stateManager.loadState();
|
||||
|
||||
expect(state.activeTag).toBe(DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG);
|
||||
});
|
||||
|
||||
it('should use environment variable when file does not exist', async () => {
|
||||
const error = new Error('File not found') as any;
|
||||
error.code = 'ENOENT';
|
||||
vi.mocked(fs.readFile).mockRejectedValue(error);
|
||||
|
||||
process.env.TASKMASTER_TAG = 'env-tag';
|
||||
|
||||
const state = await stateManager.loadState();
|
||||
|
||||
expect(state.activeTag).toBe('env-tag');
|
||||
});
|
||||
|
||||
it('should handle file read errors gracefully', async () => {
|
||||
vi.mocked(fs.readFile).mockRejectedValue(new Error('Permission denied'));
|
||||
|
||||
const state = await stateManager.loadState();
|
||||
|
||||
expect(state.activeTag).toBe(DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG);
|
||||
});
|
||||
|
||||
it('should handle invalid JSON gracefully', async () => {
|
||||
vi.mocked(fs.readFile).mockResolvedValue('invalid json');
|
||||
|
||||
// Mock console.warn to avoid noise in tests
|
||||
const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
||||
const state = await stateManager.loadState();
|
||||
|
||||
expect(state.activeTag).toBe(DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG);
|
||||
expect(warnSpy).toHaveBeenCalled();
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveState', () => {
|
||||
it('should save state to file with timestamp', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
// Set a specific state
|
||||
await stateManager.setActiveTag('test-tag');
|
||||
|
||||
// Verify mkdir was called
|
||||
expect(fs.mkdir).toHaveBeenCalledWith('/test/project/.taskmaster', {
|
||||
recursive: true
|
||||
});
|
||||
|
||||
// Verify writeFile was called with correct data
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/state.json',
|
||||
expect.stringContaining('"activeTag":"test-tag"'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
// Verify timestamp is included
|
||||
expect(fs.writeFile).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.stringContaining('"lastUpdated"'),
|
||||
'utf-8'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw TaskMasterError on save failure', async () => {
|
||||
vi.mocked(fs.mkdir).mockRejectedValue(new Error('Disk full'));
|
||||
|
||||
await expect(stateManager.saveState()).rejects.toThrow(
|
||||
'Failed to save runtime state'
|
||||
);
|
||||
});
|
||||
|
||||
it('should format JSON with proper indentation', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.saveState();
|
||||
|
||||
const writeCall = vi.mocked(fs.writeFile).mock.calls[0];
|
||||
const jsonContent = writeCall[1] as string;
|
||||
|
||||
// Check for 2-space indentation
|
||||
expect(jsonContent).toMatch(/\n /);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getActiveTag', () => {
|
||||
it('should return current active tag', () => {
|
||||
const tag = stateManager.getActiveTag();
|
||||
expect(tag).toBe(DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG);
|
||||
});
|
||||
|
||||
it('should return updated tag after setActiveTag', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.setActiveTag('new-tag');
|
||||
|
||||
expect(stateManager.getActiveTag()).toBe('new-tag');
|
||||
});
|
||||
});
|
||||
|
||||
describe('setActiveTag', () => {
|
||||
it('should update active tag and save state', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.setActiveTag('feature-xyz');
|
||||
|
||||
expect(stateManager.getActiveTag()).toBe('feature-xyz');
|
||||
expect(fs.writeFile).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getState', () => {
|
||||
it('should return copy of current state', () => {
|
||||
const state1 = stateManager.getState();
|
||||
const state2 = stateManager.getState();
|
||||
|
||||
expect(state1).not.toBe(state2); // Different instances
|
||||
expect(state1).toEqual(state2); // Same content
|
||||
expect(state1.activeTag).toBe(DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateMetadata', () => {
|
||||
it('should update metadata and save state', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.updateMetadata({ key1: 'value1' });
|
||||
|
||||
const state = stateManager.getState();
|
||||
expect(state.metadata).toEqual({ key1: 'value1' });
|
||||
expect(fs.writeFile).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should merge metadata with existing values', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.updateMetadata({ key1: 'value1' });
|
||||
await stateManager.updateMetadata({ key2: 'value2' });
|
||||
|
||||
const state = stateManager.getState();
|
||||
expect(state.metadata).toEqual({
|
||||
key1: 'value1',
|
||||
key2: 'value2'
|
||||
});
|
||||
});
|
||||
|
||||
it('should override existing metadata values', async () => {
|
||||
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
|
||||
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.updateMetadata({ key1: 'value1' });
|
||||
await stateManager.updateMetadata({ key1: 'value2' });
|
||||
|
||||
const state = stateManager.getState();
|
||||
expect(state.metadata).toEqual({ key1: 'value2' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('clearState', () => {
|
||||
it('should delete state file and reset to defaults', async () => {
|
||||
vi.mocked(fs.unlink).mockResolvedValue(undefined);
|
||||
|
||||
await stateManager.clearState();
|
||||
|
||||
expect(fs.unlink).toHaveBeenCalledWith(
|
||||
'/test/project/.taskmaster/state.json'
|
||||
);
|
||||
expect(stateManager.getActiveTag()).toBe(
|
||||
DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG
|
||||
);
|
||||
expect(stateManager.getState().metadata).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should ignore ENOENT errors when file does not exist', async () => {
|
||||
const error = new Error('File not found') as any;
|
||||
error.code = 'ENOENT';
|
||||
vi.mocked(fs.unlink).mockRejectedValue(error);
|
||||
|
||||
await expect(stateManager.clearState()).resolves.not.toThrow();
|
||||
expect(stateManager.getActiveTag()).toBe(
|
||||
DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw other errors', async () => {
|
||||
vi.mocked(fs.unlink).mockRejectedValue(new Error('Permission denied'));
|
||||
|
||||
await expect(stateManager.clearState()).rejects.toThrow(
|
||||
'Permission denied'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,151 @@
|
||||
/**
 * @fileoverview Runtime State Manager Service
 * Manages runtime state separate from configuration
 */

import { promises as fs } from 'node:fs';
import path from 'node:path';
import {
	ERROR_CODES,
	TaskMasterError
} from '../../errors/task-master-error.js';
import { DEFAULT_CONFIG_VALUES } from '../../interfaces/configuration.interface.js';

/**
 * Runtime state data structure
 */
export interface RuntimeState {
	/** Currently active tag */
	activeTag: string;
	/** Last updated timestamp */
	lastUpdated?: string;
	/** Additional metadata */
	metadata?: Record<string, unknown>;
}

/**
 * RuntimeStateManager handles runtime state persistence
 * Single responsibility: Runtime state management (separate from config)
 */
export class RuntimeStateManager {
	private stateFilePath: string;
	private currentState: RuntimeState;

	constructor(projectRoot: string) {
		this.stateFilePath = path.join(projectRoot, '.taskmaster', 'state.json');
		this.currentState = {
			activeTag: DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG
		};
	}

	/**
	 * Load runtime state from disk
	 */
	async loadState(): Promise<RuntimeState> {
		try {
			const stateData = await fs.readFile(this.stateFilePath, 'utf-8');
			const state = JSON.parse(stateData);

			// Apply environment variable override for active tag
			if (process.env.TASKMASTER_TAG) {
				state.activeTag = process.env.TASKMASTER_TAG;
			}

			this.currentState = state;
			return state;
		} catch (error: any) {
			if (error.code === 'ENOENT') {
				// State file doesn't exist, use defaults
				console.debug('No state.json found, using default state');

				// Check environment variable
				if (process.env.TASKMASTER_TAG) {
					this.currentState.activeTag = process.env.TASKMASTER_TAG;
				}

				return this.currentState;
			}

			console.warn('Failed to load state file:', error.message);
			return this.currentState;
		}
	}

	/**
	 * Save runtime state to disk
	 */
	async saveState(): Promise<void> {
		const stateDir = path.dirname(this.stateFilePath);

		try {
			await fs.mkdir(stateDir, { recursive: true });

			const stateToSave = {
				...this.currentState,
				lastUpdated: new Date().toISOString()
			};

			await fs.writeFile(
				this.stateFilePath,
				JSON.stringify(stateToSave, null, 2),
				'utf-8'
			);
		} catch (error) {
			throw new TaskMasterError(
				'Failed to save runtime state',
				ERROR_CODES.CONFIG_ERROR,
				{ statePath: this.stateFilePath },
				error as Error
			);
		}
	}

	/**
	 * Get the currently active tag
	 */
	getActiveTag(): string {
		return this.currentState.activeTag;
	}

	/**
	 * Set the active tag
	 */
	async setActiveTag(tag: string): Promise<void> {
		this.currentState.activeTag = tag;
		await this.saveState();
	}

	/**
	 * Get current state
	 */
	getState(): RuntimeState {
		return { ...this.currentState };
	}

	/**
	 * Update metadata
	 */
	async updateMetadata(metadata: Record<string, unknown>): Promise<void> {
		this.currentState.metadata = {
			...this.currentState.metadata,
			...metadata
		};
		await this.saveState();
	}

	/**
	 * Clear state file
	 */
	async clearState(): Promise<void> {
		try {
			await fs.unlink(this.stateFilePath);
		} catch (error: any) {
			if (error.code !== 'ENOENT') {
				throw error;
			}
		}
		this.currentState = {
			activeTag: DEFAULT_CONFIG_VALUES.TAGS.DEFAULT_TAG
		};
	}
}
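Editor's note: a minimal usage sketch for the state manager, not part of the commit. The metadata key is illustrative rather than anything the service prescribes.

import { RuntimeStateManager } from './services/runtime-state-manager.service.js';

async function switchTag(projectRoot: string, tag: string) {
	const stateManager = new RuntimeStateManager(projectRoot);

	// Reads .taskmaster/state.json if present; TASKMASTER_TAG overrides the file value.
	await stateManager.loadState();

	// Persists { activeTag, lastUpdated } back to state.json.
	await stateManager.setActiveTag(tag);
	await stateManager.updateMetadata({ switchedBy: 'example-script' }); // hypothetical metadata

	return stateManager.getActiveTag();
}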
@@ -1,238 +0,0 @@
|
||||
/**
|
||||
* @fileoverview Zod validation schemas for configuration interfaces
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
|
||||
// ============================================================================
|
||||
// Enum Schemas
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Task priority validation schema
|
||||
*/
|
||||
export const taskPrioritySchema = z.enum(['low', 'medium', 'high', 'critical']);
|
||||
|
||||
/**
|
||||
* Task complexity validation schema
|
||||
*/
|
||||
export const taskComplexitySchema = z.enum([
|
||||
'simple',
|
||||
'moderate',
|
||||
'complex',
|
||||
'very-complex'
|
||||
]);
|
||||
|
||||
/**
|
||||
* Log level validation schema
|
||||
*/
|
||||
export const logLevelSchema = z.enum(['error', 'warn', 'info', 'debug']);
|
||||
|
||||
/**
|
||||
* Storage type validation schema
|
||||
* @see can add more storage types here
|
||||
*/
|
||||
export const storageTypeSchema = z.enum(['file', 'api']);
|
||||
|
||||
/**
|
||||
* Tag naming convention validation schema
|
||||
*/
|
||||
export const tagNamingConventionSchema = z.enum([
|
||||
'kebab-case',
|
||||
'camelCase',
|
||||
'snake_case'
|
||||
]);
|
||||
|
||||
/**
|
||||
* Buffer encoding validation schema
|
||||
*/
|
||||
export const bufferEncodingSchema = z.enum([
|
||||
'ascii',
|
||||
'utf8',
|
||||
'utf-8',
|
||||
'utf16le',
|
||||
'ucs2',
|
||||
'ucs-2',
|
||||
'base64',
|
||||
'base64url',
|
||||
'latin1',
|
||||
'binary',
|
||||
'hex'
|
||||
]);
|
||||
|
||||
// ============================================================================
|
||||
// Sub-interface Schemas
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Model configuration validation schema
|
||||
*/
|
||||
export const modelConfigSchema = z.object({
|
||||
main: z.string().min(1, 'Main model name is required'),
|
||||
research: z.string().min(1).optional(),
|
||||
fallback: z.string().min(1, 'Fallback model name is required')
|
||||
});
|
||||
|
||||
/**
|
||||
* Provider configuration validation schema
|
||||
*/
|
||||
export const providerConfigSchema = z.object({
|
||||
name: z.string().min(1, 'Provider name is required'),
|
||||
apiKey: z.string().optional(),
|
||||
baseUrl: z.string().url().optional(),
|
||||
options: z.record(z.unknown()).optional(),
|
||||
enabled: z.boolean().optional().default(true)
|
||||
});
|
||||
|
||||
/**
|
||||
* Task settings validation schema
|
||||
*/
|
||||
export const taskSettingsSchema = z.object({
|
||||
defaultPriority: taskPrioritySchema,
|
||||
defaultComplexity: taskComplexitySchema,
|
||||
maxSubtasks: z.number().int().min(1).max(100),
|
||||
maxConcurrentTasks: z.number().int().min(1).max(50),
|
||||
autoGenerateIds: z.boolean(),
|
||||
taskIdPrefix: z.string().optional(),
|
||||
validateDependencies: z.boolean(),
|
||||
enableTimestamps: z.boolean(),
|
||||
enableEffortTracking: z.boolean()
|
||||
});
|
||||
|
||||
/**
|
||||
* Tag settings validation schema
|
||||
*/
|
||||
export const tagSettingsSchema = z.object({
|
||||
enableTags: z.boolean(),
|
||||
defaultTag: z.string().min(1),
|
||||
maxTagsPerTask: z.number().int().min(1).max(50),
|
||||
autoCreateFromBranch: z.boolean(),
|
||||
tagNamingConvention: tagNamingConventionSchema
|
||||
});
|
||||
|
||||
/**
|
||||
* Storage settings validation schema
|
||||
*/
|
||||
export const storageSettingsSchema = z.object({
|
||||
type: storageTypeSchema,
|
||||
basePath: z.string().optional(),
|
||||
enableBackup: z.boolean(),
|
||||
maxBackups: z.number().int().min(0).max(100),
|
||||
enableCompression: z.boolean(),
|
||||
encoding: bufferEncodingSchema,
|
||||
atomicOperations: z.boolean()
|
||||
});
|
||||
|
||||
/**
|
||||
* Retry settings validation schema
|
||||
*/
|
||||
export const retrySettingsSchema = z.object({
|
||||
retryAttempts: z.number().int().min(0).max(10),
|
||||
retryDelay: z.number().int().min(100).max(60000),
|
||||
maxRetryDelay: z.number().int().min(1000).max(300000),
|
||||
backoffMultiplier: z.number().min(1).max(10),
|
||||
requestTimeout: z.number().int().min(1000).max(600000),
|
||||
retryOnNetworkError: z.boolean(),
|
||||
retryOnRateLimit: z.boolean()
|
||||
});
|
||||
|
||||
/**
|
||||
* Logging settings validation schema
|
||||
*/
|
||||
export const loggingSettingsSchema = z.object({
|
||||
enabled: z.boolean(),
|
||||
level: logLevelSchema,
|
||||
filePath: z.string().optional(),
|
||||
logRequests: z.boolean(),
|
||||
logPerformance: z.boolean(),
|
||||
logStackTraces: z.boolean(),
|
||||
maxFileSize: z.number().min(1).max(1000),
|
||||
maxFiles: z.number().int().min(1).max(100)
|
||||
});
|
||||
|
||||
/**
|
||||
* Security settings validation schema
|
||||
*/
|
||||
export const securitySettingsSchema = z.object({
|
||||
validateApiKeys: z.boolean(),
|
||||
enableRateLimit: z.boolean(),
|
||||
maxRequestsPerMinute: z.number().int().min(1).max(10000),
|
||||
sanitizeInputs: z.boolean(),
|
||||
maxPromptLength: z.number().int().min(100).max(1000000),
|
||||
allowedFileExtensions: z.array(z.string().regex(/^\.[a-zA-Z0-9]+$/)),
|
||||
enableCors: z.boolean()
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Main Configuration Schema
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Base configuration object schema (without refinements)
|
||||
*/
|
||||
const baseConfigurationSchema = z.object({
|
||||
projectPath: z.string().min(1, 'Project path is required'),
|
||||
aiProvider: z.string().min(1, 'AI provider is required'),
|
||||
apiKeys: z.record(z.string()),
|
||||
models: modelConfigSchema,
|
||||
providers: z.record(providerConfigSchema),
|
||||
tasks: taskSettingsSchema,
|
||||
tags: tagSettingsSchema,
|
||||
storage: storageSettingsSchema,
|
||||
retry: retrySettingsSchema,
|
||||
logging: loggingSettingsSchema,
|
||||
security: securitySettingsSchema,
|
||||
custom: z.record(z.unknown()).optional(),
|
||||
version: z.string().min(1, 'Version is required'),
|
||||
lastUpdated: z.string().min(1, 'Last updated timestamp is required')
|
||||
});
|
||||
|
||||
/**
|
||||
* Main configuration validation schema with custom refinements
|
||||
*/
|
||||
export const configurationSchema = baseConfigurationSchema.refine(
|
||||
(data) => {
|
||||
// Custom validation: maxRetryDelay should be >= retryDelay
|
||||
return data.retry.maxRetryDelay >= data.retry.retryDelay;
|
||||
},
|
||||
{
|
||||
message: 'maxRetryDelay must be greater than or equal to retryDelay',
|
||||
path: ['retry', 'maxRetryDelay']
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* Partial configuration validation schema for updates
|
||||
*/
|
||||
export const partialConfigurationSchema = baseConfigurationSchema.partial();
|
||||
|
||||
// ============================================================================
|
||||
// Legacy/Alias Exports (for backwards compatibility)
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Alias for loggingSettingsSchema (for backwards compatibility)
|
||||
* @deprecated Use loggingSettingsSchema instead
|
||||
*/
|
||||
export const loggingConfigSchema = loggingSettingsSchema;
|
||||
|
||||
/**
|
||||
* Cache configuration validation schema (stub - not implemented in IConfiguration yet)
|
||||
* This is exported for consistency with config-schema.ts exports
|
||||
*/
|
||||
export const cacheConfigSchema = z
|
||||
.object({
|
||||
enabled: z.boolean().optional().default(false),
|
||||
ttl: z.number().int().min(1).optional().default(300),
|
||||
maxSize: z.number().int().min(1).optional().default(1000)
|
||||
})
|
||||
.optional();
|
||||
|
||||
// ============================================================================
|
||||
// Type exports for runtime validation
|
||||
// ============================================================================
|
||||
|
||||
export type ConfigurationSchema = z.infer<typeof configurationSchema>;
|
||||
export type PartialConfigurationSchema = z.infer<
|
||||
typeof partialConfigurationSchema
|
||||
>;
|
||||
@@ -1,5 +1,5 @@
|
||||
/**
|
||||
* File-based storage implementation for Task Master
|
||||
* @fileoverview File-based storage implementation for Task Master
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
@@ -96,10 +96,19 @@ export class FileStorage implements IStorage {
|
||||
*/
|
||||
async loadTasks(tag?: string): Promise<Task[]> {
|
||||
const filePath = this.getTasksPath(tag);
|
||||
const resolvedTag = tag || 'master';
|
||||
|
||||
try {
|
||||
const data = await this.readJsonFile(filePath);
|
||||
return data?.tasks || [];
|
||||
const rawData = await this.readJsonFile(filePath);
|
||||
|
||||
// Handle legacy format where tasks are wrapped in a tag key
|
||||
if (rawData && typeof rawData === 'object' && resolvedTag in rawData) {
|
||||
const tagData = (rawData as any)[resolvedTag];
|
||||
return tagData?.tasks || [];
|
||||
}
|
||||
|
||||
// Handle standard format
|
||||
return rawData?.tasks || [];
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return []; // File doesn't exist, return empty array
|
||||
@@ -113,24 +122,63 @@ export class FileStorage implements IStorage {
|
||||
*/
|
||||
async saveTasks(tasks: Task[], tag?: string): Promise<void> {
|
||||
const filePath = this.getTasksPath(tag);
|
||||
const resolvedTag = tag || 'master';
|
||||
|
||||
// Ensure directory exists
|
||||
await this.ensureDirectoryExists();
|
||||
|
||||
// Create data structure with metadata
|
||||
const data: FileStorageData = {
|
||||
tasks,
|
||||
metadata: {
|
||||
version: '1.0.0',
|
||||
lastModified: new Date().toISOString(),
|
||||
taskCount: tasks.length,
|
||||
completedCount: tasks.filter((t) => t.status === 'done').length,
|
||||
tags: tag ? [tag] : []
|
||||
// Check if we need to use legacy format
|
||||
let dataToWrite: any;
|
||||
|
||||
try {
|
||||
const existingData = await this.readJsonFile(filePath);
|
||||
// If existing file uses legacy format, maintain it
|
||||
if (
|
||||
existingData &&
|
||||
typeof existingData === 'object' &&
|
||||
resolvedTag in existingData
|
||||
) {
|
||||
dataToWrite = {
|
||||
[resolvedTag]: {
|
||||
tasks,
|
||||
metadata: {
|
||||
version: '1.0.0',
|
||||
lastModified: new Date().toISOString(),
|
||||
taskCount: tasks.length,
|
||||
completedCount: tasks.filter((t) => t.status === 'done').length,
|
||||
tags: [resolvedTag]
|
||||
}
|
||||
}
|
||||
};
|
||||
} else {
|
||||
// Use standard format for new files
|
||||
dataToWrite = {
|
||||
tasks,
|
||||
metadata: {
|
||||
version: '1.0.0',
|
||||
lastModified: new Date().toISOString(),
|
||||
taskCount: tasks.length,
|
||||
completedCount: tasks.filter((t) => t.status === 'done').length,
|
||||
tags: tag ? [tag] : []
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
} catch (error: any) {
|
||||
// File doesn't exist, use standard format
|
||||
dataToWrite = {
|
||||
tasks,
|
||||
metadata: {
|
||||
version: '1.0.0',
|
||||
lastModified: new Date().toISOString(),
|
||||
taskCount: tasks.length,
|
||||
completedCount: tasks.filter((t) => t.status === 'done').length,
|
||||
tags: tag ? [tag] : []
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Write with file locking
|
||||
await this.writeJsonFile(filePath, data);
|
||||
await this.writeJsonFile(filePath, dataToWrite);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -182,10 +230,31 @@ export class FileStorage implements IStorage {
|
||||
*/
|
||||
async loadMetadata(tag?: string): Promise<TaskMetadata | null> {
|
||||
const filePath = this.getTasksPath(tag);
|
||||
const resolvedTag = tag || 'master';
|
||||
|
||||
try {
|
||||
const data = await this.readJsonFile(filePath);
|
||||
return data?.metadata || null;
|
||||
const rawData = await this.readJsonFile(filePath);
|
||||
|
||||
// Handle legacy format where data is wrapped in a tag key
|
||||
if (rawData && typeof rawData === 'object' && resolvedTag in rawData) {
|
||||
const tagData = (rawData as any)[resolvedTag];
|
||||
// Generate metadata if not present in legacy format
|
||||
if (!tagData?.metadata && tagData?.tasks) {
|
||||
return {
|
||||
version: '1.0.0',
|
||||
lastModified: new Date().toISOString(),
|
||||
taskCount: tagData.tasks.length,
|
||||
completedCount: tagData.tasks.filter(
|
||||
(t: any) => t.status === 'done'
|
||||
).length,
|
||||
tags: [resolvedTag]
|
||||
};
|
||||
}
|
||||
return tagData?.metadata || null;
|
||||
}
|
||||
|
||||
// Handle standard format
|
||||
return rawData?.metadata || null;
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return null;
|
||||
@@ -227,13 +296,17 @@ export class FileStorage implements IStorage {
|
||||
tag?: string
|
||||
): Promise<void> {
|
||||
const tasks = await this.loadTasks(tag);
|
||||
const taskIndex = tasks.findIndex((t) => t.id === taskId);
|
||||
const taskIndex = tasks.findIndex((t) => t.id === taskId.toString());
|
||||
|
||||
if (taskIndex === -1) {
|
||||
throw new Error(`Task ${taskId} not found`);
|
||||
}
|
||||
|
||||
tasks[taskIndex] = { ...tasks[taskIndex], ...updates, id: taskId };
|
||||
tasks[taskIndex] = {
|
||||
...tasks[taskIndex],
|
||||
...updates,
|
||||
id: taskId.toString()
|
||||
};
|
||||
await this.saveTasks(tasks, tag);
|
||||
}
|
||||
|
||||
@@ -354,7 +427,7 @@ export class FileStorage implements IStorage {
|
||||
*/
|
||||
private async writeJsonFile(
|
||||
filePath: string,
|
||||
data: FileStorageData
|
||||
data: FileStorageData | any
|
||||
): Promise<void> {
|
||||
// Use file locking to prevent concurrent writes
|
||||
const lockKey = filePath;
|
||||
@@ -379,7 +452,7 @@ export class FileStorage implements IStorage {
|
||||
*/
|
||||
private async performWrite(
|
||||
filePath: string,
|
||||
data: FileStorageData
|
||||
data: FileStorageData | any
|
||||
): Promise<void> {
|
||||
const tempPath = `${filePath}.tmp`;
|