chore: add integration tests to new cli and mcp (#1430)

This commit is contained in:
Ralph Khreish
2025-11-20 19:36:17 +01:00
committed by GitHub
parent 4049f34d5a
commit e66150e91c
22 changed files with 13419 additions and 6988 deletions

View File

@@ -35,9 +35,10 @@
"@biomejs/biome": "^1.9.4",
"@types/inquirer": "^9.0.3",
"@types/node": "^22.10.5",
"@vitest/coverage-v8": "^4.0.10",
"tsx": "^4.20.4",
"typescript": "^5.9.2",
"vitest": "^2.1.8"
"vitest": "^4.0.10"
},
"engines": {
"node": ">=18.0.0"

apps/cli/tests/fixtures/task-fixtures.ts (new file, 307 lines, vendored)
View File

@@ -0,0 +1,307 @@
/**
* @fileoverview Test fixtures for creating valid task data structures
*
* WHY FIXTURES:
* - Ensures all required fields are present (prevents validation errors)
* - Provides consistent, realistic test data
* - Easy to override specific fields for test scenarios
* - Single source of truth for valid task structures
*
* USAGE:
* ```ts
* import { createTask, createTasksFile } from '../fixtures/task-fixtures';
*
* // Create a single task with defaults
* const task = createTask({ id: 1, title: 'My Task', status: 'pending' });
*
* // Create a complete tasks.json structure
* const tasksFile = createTasksFile({
* tasks: [
* createTask({ id: 1, title: 'Task 1' }),
* createTask({ id: 2, title: 'Task 2', dependencies: ['1'] })
* ]
* });
* ```
*/
import type { Task, Subtask, TaskMetadata } from '@tm/core';
/**
 * File structure for tasks.json as written to disk by these tests.
 *
 * Note: Uses the 'master' tag as the default (and only) tag name — every
 * fixture in this module nests its tasks and metadata under `master`.
 */
export interface TasksFile {
	master: {
		// All tasks for the 'master' tag
		tasks: Task[];
		// File-level bookkeeping (version, counts, lastModified, ...)
		metadata: TaskMetadata;
	};
}
/**
 * Creates a valid task with all required fields
 *
 * DEFAULTS:
 * - id: Converted to string if number is provided
 * - status: 'pending'
 * - priority: 'medium'
 * - dependencies: []
 * - subtasks: []
 * - description: Same as title
 * - details: Empty string
 * - testStrategy: Empty string
 *
 * Optional fields are spread in only when explicitly provided. The checks
 * use `!== undefined` rather than truthiness so legitimate falsy values
 * (e.g. `complexity: 0`, `effort: 0`, `expansionPrompt: ''`) are not
 * silently dropped from the fixture.
 */
export function createTask(
	overrides: Partial<Omit<Task, 'id'>> & { id: number | string; title: string }
): Task {
	return {
		// Normalize numeric ids to strings so id comparisons are consistent
		id: String(overrides.id),
		title: overrides.title,
		description: overrides.description ?? overrides.title,
		status: overrides.status ?? 'pending',
		priority: overrides.priority ?? 'medium',
		dependencies: overrides.dependencies ?? [],
		details: overrides.details ?? '',
		testStrategy: overrides.testStrategy ?? '',
		subtasks: overrides.subtasks ?? [],
		// Spread any additional optional fields that were explicitly set
		...(overrides.createdAt !== undefined && {
			createdAt: overrides.createdAt
		}),
		...(overrides.updatedAt !== undefined && {
			updatedAt: overrides.updatedAt
		}),
		...(overrides.effort !== undefined && { effort: overrides.effort }),
		...(overrides.actualEffort !== undefined && {
			actualEffort: overrides.actualEffort
		}),
		...(overrides.tags !== undefined && { tags: overrides.tags }),
		...(overrides.assignee !== undefined && { assignee: overrides.assignee }),
		...(overrides.databaseId !== undefined && {
			databaseId: overrides.databaseId
		}),
		...(overrides.complexity !== undefined && {
			complexity: overrides.complexity
		}),
		...(overrides.recommendedSubtasks !== undefined && {
			recommendedSubtasks: overrides.recommendedSubtasks
		}),
		...(overrides.expansionPrompt !== undefined && {
			expansionPrompt: overrides.expansionPrompt
		}),
		...(overrides.complexityReasoning !== undefined && {
			complexityReasoning: overrides.complexityReasoning
		})
	};
}
/**
 * Creates a valid subtask with all required fields
 *
 * DEFAULTS:
 * - id: Can be number or string (kept as provided, NOT stringified)
 * - status: 'pending'
 * - priority: 'medium'
 * - dependencies: []
 * - description: Same as title
 * - details: Empty string
 * - testStrategy: Empty string
 * - parentId: Derived from id if not provided (e.g., '1.2' -> parentId '1')
 *
 * Optional fields are spread in only when explicitly provided. The checks
 * use `!== undefined` rather than truthiness so legitimate falsy values
 * (e.g. `complexity: 0`, `effort: 0`) are not silently dropped.
 */
export function createSubtask(
	overrides: Partial<Omit<Subtask, 'id' | 'parentId'>> & {
		id: number | string;
		title: string;
		parentId?: string;
	}
): Subtask {
	const idStr = String(overrides.id);
	// Dotted ids like '1.2' encode the parent ('1'); otherwise default to '1'
	const defaultParentId = idStr.includes('.') ? idStr.split('.')[0] : '1';
	return {
		id: overrides.id,
		parentId: overrides.parentId ?? defaultParentId,
		title: overrides.title,
		description: overrides.description ?? overrides.title,
		status: overrides.status ?? 'pending',
		priority: overrides.priority ?? 'medium',
		dependencies: overrides.dependencies ?? [],
		details: overrides.details ?? '',
		testStrategy: overrides.testStrategy ?? '',
		// Spread any additional optional fields that were explicitly set
		...(overrides.createdAt !== undefined && {
			createdAt: overrides.createdAt
		}),
		...(overrides.updatedAt !== undefined && {
			updatedAt: overrides.updatedAt
		}),
		...(overrides.effort !== undefined && { effort: overrides.effort }),
		...(overrides.actualEffort !== undefined && {
			actualEffort: overrides.actualEffort
		}),
		...(overrides.tags !== undefined && { tags: overrides.tags }),
		...(overrides.assignee !== undefined && { assignee: overrides.assignee }),
		...(overrides.databaseId !== undefined && {
			databaseId: overrides.databaseId
		}),
		...(overrides.complexity !== undefined && {
			complexity: overrides.complexity
		}),
		...(overrides.recommendedSubtasks !== undefined && {
			recommendedSubtasks: overrides.recommendedSubtasks
		}),
		...(overrides.expansionPrompt !== undefined && {
			expansionPrompt: overrides.expansionPrompt
		}),
		...(overrides.complexityReasoning !== undefined && {
			complexityReasoning: overrides.complexityReasoning
		})
	};
}
/**
 * Creates a complete tasks.json file structure
 *
 * DEFAULTS:
 * - Empty tasks array
 * - version: '1.0.0'
 * - lastModified: Current timestamp
 * - taskCount: Calculated from tasks array
 * - completedCount: Calculated from tasks array
 * - description: 'Test tasks'
 *
 * Any metadata overrides are applied last, so callers can pin specific
 * values (including the calculated counts) when a test needs them.
 */
export function createTasksFile(overrides?: {
	tasks?: Task[];
	metadata?: Partial<TaskMetadata>;
}): TasksFile {
	const taskList = overrides?.tasks ?? [];

	// Statuses counted as "completed" for the metadata summary
	const finishedStates = new Set(['done', 'completed', 'cancelled']);
	const completedCount = taskList.reduce(
		(count, task) => (finishedStates.has(task.status) ? count + 1 : count),
		0
	);

	const metadata: TaskMetadata = {
		version: '1.0.0',
		lastModified: new Date().toISOString(),
		taskCount: taskList.length,
		completedCount,
		description: 'Test tasks',
		...overrides?.metadata
	};

	return {
		master: {
			tasks: taskList,
			metadata
		}
	};
}
/**
 * Pre-built task scenarios for common test cases.
 *
 * Each property is a factory function (not a shared constant): every call
 * builds a fresh TasksFile via createTasksFile/createTask, so tests may
 * freely mutate the returned structure without affecting other tests.
 */
export const TaskScenarios = {
	/**
	 * Single pending task with no dependencies
	 */
	simplePendingTask: () =>
		createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Simple Task',
					description: 'A basic pending task'
				})
			]
		}),
	/**
	 * Linear dependency chain: 1 -> 2 -> 3 (1 and 2 done, 3 pending)
	 */
	linearDependencyChain: () =>
		createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Step 1', status: 'done' }),
				createTask({
					id: 2,
					title: 'Step 2',
					status: 'done',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Step 3',
					status: 'pending',
					dependencies: ['2']
				})
			]
		}),
	/**
	 * Tasks with mixed statuses (done / in-progress / pending / review)
	 */
	mixedStatuses: () =>
		createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done Task', status: 'done' }),
				createTask({ id: 2, title: 'In Progress Task', status: 'in-progress' }),
				createTask({ id: 3, title: 'Pending Task', status: 'pending' }),
				createTask({ id: 4, title: 'Review Task', status: 'review' })
			]
		}),
	/**
	 * Task with subtasks forming their own chain: 1.1 -> 1.2 -> 1.3
	 */
	taskWithSubtasks: () =>
		createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Parent Task',
					status: 'in-progress',
					subtasks: [
						createSubtask({ id: '1.1', title: 'Subtask 1', status: 'done' }),
						createSubtask({
							id: '1.2',
							title: 'Subtask 2',
							status: 'in-progress',
							dependencies: ['1.1']
						}),
						createSubtask({
							id: '1.3',
							title: 'Subtask 3',
							status: 'pending',
							dependencies: ['1.2']
						})
					]
				})
			]
		}),
	/**
	 * Complex dependency graph with multiple paths:
	 * 1 -> {2, 3} -> 4 (diamond; only 4 remains pending)
	 */
	complexDependencies: () =>
		createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Foundation', status: 'done' }),
				createTask({
					id: 2,
					title: 'Build A',
					status: 'done',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Build B',
					status: 'done',
					dependencies: ['1']
				}),
				createTask({
					id: 4,
					title: 'Integration',
					status: 'pending',
					dependencies: ['2', '3']
				})
			]
		}),
	/**
	 * All tasks completed (for testing "no next task" scenario)
	 */
	allCompleted: () =>
		createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done 1', status: 'done' }),
				createTask({ id: 2, title: 'Done 2', status: 'done' }),
				createTask({ id: 3, title: 'Done 3', status: 'done' })
			]
		}),
	/**
	 * Empty task list
	 */
	empty: () => createTasksFile({ tasks: [] })
};

View File

@@ -0,0 +1,19 @@
/**
* @fileoverview Shared test utilities for integration tests
*/
import path from 'node:path';
/**
 * Get the absolute path to the compiled CLI binary
 *
 * IMPORTANT: This resolves to the root dist/ directory, not apps/cli/dist/
 * The CLI is built to <repo-root>/dist/task-master.js
 *
 * @returns Absolute path to task-master.js binary
 */
export function getCliBinPath(): string {
	// apps/cli/tests/helpers -> four levels up to the repo root, then dist/
	return path.resolve(__dirname, '../../../..', 'dist', 'task-master.js');
}

View File

@@ -0,0 +1,428 @@
/**
* @fileoverview Integration tests for 'task-master list' command
*
* Tests the list command which displays all tasks with optional filtering.
*
* @integration
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import {
createSubtask,
createTask,
createTasksFile
} from '../../fixtures/task-fixtures';
import { getCliBinPath } from '../../helpers/test-utils';
// Capture initial working directory at module load time
const initialCwd = process.cwd();

describe('list command', () => {
	let testDir: string;
	let tasksPath: string;
	let binPath: string;

	beforeEach(() => {
		// Fresh temp project per test; chdir into it so the CLI operates there
		testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-list-test-'));
		process.chdir(testDir);
		// Skip the CLI's auto-update step (env var passed to every invocation)
		process.env.TASKMASTER_SKIP_AUTO_UPDATE = '1';
		binPath = getCliBinPath();
		execSync(`node "${binPath}" init --yes`, {
			stdio: 'pipe',
			env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
		});
		tasksPath = path.join(testDir, '.taskmaster', 'tasks', 'tasks.json');
		// Use fixture to create initial empty tasks file
		const initialTasks = createTasksFile();
		fs.writeFileSync(tasksPath, JSON.stringify(initialTasks, null, 2));
	});

	afterEach(() => {
		try {
			// Restore to the original working directory captured at module load
			process.chdir(initialCwd);
		} catch {
			// Fallback to home directory if initial directory no longer exists
			process.chdir(os.homedir());
		}
		if (testDir && fs.existsSync(testDir)) {
			fs.rmSync(testDir, { recursive: true, force: true });
		}
		delete process.env.TASKMASTER_SKIP_AUTO_UPDATE;
	});

	// Overwrite the test project's tasks.json with the given structure
	const writeTasks = (tasksData: any) => {
		fs.writeFileSync(tasksPath, JSON.stringify(tasksData, null, 2));
	};

	// Run `task-master list`; execSync throws on non-zero exit, so failures
	// are converted into an { output, exitCode } result instead of propagating
	const runList = (args = ''): { output: string; exitCode: number } => {
		try {
			const output = execSync(`node "${binPath}" list ${args}`, {
				encoding: 'utf-8',
				stdio: 'pipe',
				env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
			});
			return { output, exitCode: 0 };
		} catch (error: any) {
			return {
				output: error.stderr?.toString() || error.stdout?.toString() || '',
				exitCode: error.status || 1
			};
		}
	};

	it('should display message when no tasks exist', () => {
		const { output, exitCode } = runList();
		expect(exitCode).toBe(0);
		expect(output).toContain('No tasks found');
	});

	it('should list all tasks with correct information', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Setup Environment',
					description: 'Install and configure',
					status: 'done',
					priority: 'high'
				}),
				createTask({
					id: 2,
					title: 'Write Tests',
					description: 'Create test suite',
					status: 'in-progress',
					priority: 'high',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Implement Feature',
					description: 'Build the thing',
					status: 'pending',
					priority: 'medium',
					dependencies: ['2']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runList();
		expect(exitCode).toBe(0);
		expect(output).toContain('Setup Environment');
		expect(output).toContain('Write Tests');
		expect(output).toContain('Implement Feature');
	});

	it('should display task statuses', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Done Task',
					status: 'done',
					priority: 'high'
				}),
				createTask({
					id: 2,
					title: 'Pending Task',
					status: 'pending',
					priority: 'high'
				}),
				createTask({
					id: 3,
					title: 'In Progress',
					status: 'in-progress',
					priority: 'high'
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runList();
		expect(exitCode).toBe(0);
		// Should show status indicators (exact format may vary)
		expect(output.toLowerCase()).toContain('done');
		expect(output.toLowerCase()).toContain('pending');
		expect(output.toLowerCase()).toContain('progress');
	});

	it('should show subtasks when --with-subtasks flag is used', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Parent Task',
					status: 'in-progress',
					subtasks: [
						createSubtask({
							id: '1.1',
							title: 'First Subtask',
							status: 'done',
							parentId: '1'
						}),
						createSubtask({
							id: '1.2',
							title: 'Second Subtask',
							status: 'pending',
							parentId: '1'
						})
					]
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runList('--with-subtasks');
		expect(exitCode).toBe(0);
		expect(output).toContain('Parent Task');
		expect(output).toContain('First Subtask');
		expect(output).toContain('Second Subtask');
	});

	it('should handle tasks with dependencies', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Foundation', status: 'done' }),
				createTask({
					id: 2,
					title: 'Dependent Task',
					status: 'pending',
					dependencies: ['1']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runList();
		expect(exitCode).toBe(0);
		expect(output).toContain('Foundation');
		expect(output).toContain('Dependent Task');
	});

	it('should display multiple tasks in order', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Task One', status: 'pending' }),
				createTask({ id: 2, title: 'Task Two', status: 'pending' }),
				createTask({ id: 3, title: 'Task Three', status: 'pending' }),
				createTask({ id: 4, title: 'Task Four', status: 'pending' }),
				createTask({ id: 5, title: 'Task Five', status: 'pending' })
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runList();
		expect(exitCode).toBe(0);
		expect(output).toContain('Task One');
		expect(output).toContain('Task Two');
		expect(output).toContain('Task Three');
		expect(output).toContain('Task Four');
		expect(output).toContain('Task Five');
	});

	describe('error handling - validation errors should surface to CLI', () => {
		it('should display validation error when task has missing description', () => {
			// Create intentionally invalid task data bypassing fixtures
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Invalid Task',
							description: '', // ❌ Invalid - empty description
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: '',
							testStrategy: '',
							subtasks: []
						}
					],
					metadata: {
						version: '1.0.0',
						lastModified: new Date().toISOString(),
						taskCount: 1,
						completedCount: 0
					}
				}
			};
			writeTasks(testData);
			const { output, exitCode } = runList();
			expect(exitCode).toBe(1);
			expect(output.toLowerCase()).toContain('description');
			expect(output.toLowerCase()).toContain('required');
			// Should NOT contain the generic wrapped error message
			expect(output).not.toContain('Failed to get task list');
		});

		it('should display validation error when task has missing title', () => {
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: '', // ❌ Invalid - empty title
							description: 'A task without a title',
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: '',
							testStrategy: '',
							subtasks: []
						}
					],
					metadata: {
						version: '1.0.0',
						lastModified: new Date().toISOString(),
						taskCount: 1,
						completedCount: 0
					}
				}
			};
			writeTasks(testData);
			const { output, exitCode } = runList();
			expect(exitCode).toBe(1);
			expect(output.toLowerCase()).toContain('title');
			expect(output.toLowerCase()).toContain('required');
			expect(output).not.toContain('Failed to get task list');
		});

		it('should display validation error when task has only whitespace in description', () => {
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Task with whitespace description',
							description: '   ', // ❌ Invalid - only whitespace
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: '',
							testStrategy: '',
							subtasks: []
						}
					],
					metadata: {
						version: '1.0.0',
						lastModified: new Date().toISOString(),
						taskCount: 1,
						completedCount: 0
					}
				}
			};
			writeTasks(testData);
			const { output, exitCode } = runList();
			expect(exitCode).toBe(1);
			expect(output.toLowerCase()).toContain('description');
			expect(output.toLowerCase()).toContain('required');
		});

		it('should display validation error for first invalid task when multiple tasks exist', () => {
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Valid Task',
							description: 'This one is fine',
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: '',
							testStrategy: '',
							subtasks: []
						},
						{
							id: 2,
							title: 'Invalid Task',
							description: '', // ❌ Invalid - this should trigger error
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: '',
							testStrategy: '',
							subtasks: []
						},
						{
							id: 3,
							title: 'Another Valid Task',
							description: 'This would be fine too',
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: '',
							testStrategy: '',
							subtasks: []
						}
					],
					metadata: {
						version: '1.0.0',
						lastModified: new Date().toISOString(),
						taskCount: 3,
						completedCount: 0
					}
				}
			};
			writeTasks(testData);
			const { output, exitCode } = runList();
			expect(exitCode).toBe(1);
			expect(output.toLowerCase()).toContain('description');
			expect(output.toLowerCase()).toContain('required');
		});

		it('should handle all valid tasks without errors', () => {
			// This test verifies the fix doesn't break valid scenarios
			const testData = createTasksFile({
				tasks: [
					createTask({
						id: 1,
						title: 'Valid Task 1',
						description: 'This task is valid',
						status: 'pending',
						priority: 'high'
					}),
					createTask({
						id: 2,
						title: 'Valid Task 2',
						description: 'This task is also valid',
						status: 'done',
						priority: 'medium'
					})
				]
			});
			writeTasks(testData);
			const { output, exitCode } = runList();
			expect(exitCode).toBe(0);
			expect(output).toContain('Valid Task 1');
			expect(output).toContain('Valid Task 2');
		});
	});
});

View File

@@ -0,0 +1,290 @@
/**
* @fileoverview Integration tests for 'task-master next' command
*
* Tests the next command which finds the next available task based on dependencies.
*
* @integration
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { createTask, createTasksFile } from '../../fixtures/task-fixtures';
import { getCliBinPath } from '../../helpers/test-utils';
// Capture initial working directory at module load time
const initialCwd = process.cwd();

describe('next command', () => {
	let testDir: string;
	let tasksPath: string;
	let binPath: string;

	beforeEach(() => {
		// Fresh temp project per test; chdir into it so the CLI operates there
		testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-next-test-'));
		process.chdir(testDir);
		// Skip the CLI's auto-update step (env var passed to every invocation)
		process.env.TASKMASTER_SKIP_AUTO_UPDATE = '1';
		binPath = getCliBinPath();
		execSync(`node "${binPath}" init --yes`, {
			stdio: 'pipe',
			env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
		});
		tasksPath = path.join(testDir, '.taskmaster', 'tasks', 'tasks.json');
		// Use fixture to create initial empty tasks file
		const initialTasks = createTasksFile();
		fs.writeFileSync(tasksPath, JSON.stringify(initialTasks, null, 2));
	});

	afterEach(() => {
		try {
			// Restore to the original working directory captured at module load
			process.chdir(initialCwd);
		} catch {
			// Fallback to home directory if initial directory no longer exists
			process.chdir(os.homedir());
		}
		if (testDir && fs.existsSync(testDir)) {
			fs.rmSync(testDir, { recursive: true, force: true });
		}
		delete process.env.TASKMASTER_SKIP_AUTO_UPDATE;
	});

	// Overwrite the test project's tasks.json with the given structure
	const writeTasks = (tasksData: any) => {
		fs.writeFileSync(tasksPath, JSON.stringify(tasksData, null, 2));
	};

	// Run `task-master next`; execSync throws on non-zero exit, so failures
	// are converted into an { output, exitCode } result instead of propagating
	const runNext = (): { output: string; exitCode: number } => {
		try {
			const output = execSync(`node "${binPath}" next`, {
				encoding: 'utf-8',
				stdio: 'pipe',
				env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
			});
			return { output, exitCode: 0 };
		} catch (error: any) {
			// For errors, prioritize stderr (where error messages go)
			return {
				output: error.stderr?.toString() || error.stdout?.toString() || '',
				exitCode: error.status || 1
			};
		}
	};

	it('should find first pending task with no dependencies', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'First Available Task',
					description: 'No dependencies',
					status: 'pending'
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		expect(output).toContain('First Available Task');
	});

	it('should return task when dependencies are completed', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Prerequisite', status: 'done' }),
				createTask({
					id: 2,
					title: 'Ready Task',
					description: 'Dependencies met',
					status: 'pending',
					dependencies: ['1']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		expect(output).toContain('Ready Task');
	});

	it('should skip tasks with incomplete dependencies', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Foundation Task', status: 'pending' }),
				createTask({
					id: 2,
					title: 'Blocked Task',
					status: 'pending',
					dependencies: ['1']
				}),
				createTask({ id: 3, title: 'Independent Task', status: 'pending' })
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		// Should return either task 1 or task 3 (both have no dependencies)
		const hasFoundation = output.includes('Foundation Task');
		const hasIndependent = output.includes('Independent Task');
		expect(hasFoundation || hasIndependent).toBe(true);
		expect(output).not.toContain('Blocked Task');
	});

	it('should handle complex dependency chain', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Level 1', status: 'done' }),
				createTask({
					id: 2,
					title: 'Level 2',
					status: 'done',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Level 3 - Next',
					description: 'Should be next',
					status: 'pending',
					dependencies: ['1', '2']
				}),
				createTask({
					id: 4,
					title: 'Level 3 - Blocked',
					status: 'pending',
					dependencies: ['3']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		expect(output).toContain('Level 3 - Next');
		expect(output).not.toContain('Blocked');
	});

	it('should skip already completed tasks', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Already Done', status: 'done' }),
				createTask({ id: 2, title: 'Also Done', status: 'done' }),
				createTask({ id: 3, title: 'Next Up', status: 'pending' })
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		expect(output).toContain('Next Up');
		expect(output).not.toContain('Already Done');
		expect(output).not.toContain('Also Done');
	});

	it('should handle empty task list', () => {
		const testData = createTasksFile();
		writeTasks(testData);
		// Exit code is not asserted here; only the "no task" message matters
		const { output } = runNext();
		expect(output.toLowerCase()).toContain('no');
	});

	it('should handle all tasks completed', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done 1', status: 'done' }),
				createTask({ id: 2, title: 'Done 2', status: 'done' }),
				createTask({ id: 3, title: 'Done 3', status: 'done' })
			]
		});
		writeTasks(testData);
		const { output } = runNext();
		expect(output.toLowerCase()).toContain('no');
	});

	it('should find first task in linear dependency chain', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Step 1', status: 'done' }),
				createTask({
					id: 2,
					title: 'Step 2',
					status: 'done',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Step 3',
					status: 'pending',
					dependencies: ['2']
				}),
				createTask({
					id: 4,
					title: 'Step 4',
					status: 'pending',
					dependencies: ['3']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		expect(output).toContain('Step 3');
		expect(output).not.toContain('Step 4');
	});

	it('should find task among multiple ready tasks', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Foundation', status: 'done' }),
				createTask({
					id: 2,
					title: 'Ready Task A',
					status: 'pending',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Ready Task B',
					status: 'pending',
					dependencies: ['1']
				}),
				createTask({
					id: 4,
					title: 'Ready Task C',
					status: 'pending',
					dependencies: ['1']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runNext();
		expect(exitCode).toBe(0);
		// Should return one of the ready tasks
		const hasReadyA = output.includes('Ready Task A');
		const hasReadyB = output.includes('Ready Task B');
		const hasReadyC = output.includes('Ready Task C');
		expect(hasReadyA || hasReadyB || hasReadyC).toBe(true);
	});
});

View File

@@ -0,0 +1,208 @@
/**
* @fileoverview Integration tests for 'task-master set-status' command
*
* Tests the set-status command which updates task status.
* Extracted from task-lifecycle.test.ts for better organization.
*
* @integration
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { createTask, createTasksFile } from '../../fixtures/task-fixtures';
import { getCliBinPath } from '../../helpers/test-utils';
// Capture initial working directory at module load time
const initialCwd = process.cwd();

describe('set-status command', () => {
	let testDir: string;
	let tasksPath: string;
	let binPath: string;

	beforeEach(() => {
		// Fresh temp project per test; chdir into it so the CLI operates there
		testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-status-test-'));
		process.chdir(testDir);
		// Skip the CLI's auto-update step (env var passed to every invocation)
		process.env.TASKMASTER_SKIP_AUTO_UPDATE = '1';
		binPath = getCliBinPath();
		execSync(`node "${binPath}" init --yes`, {
			stdio: 'pipe',
			env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
		});
		tasksPath = path.join(testDir, '.taskmaster', 'tasks', 'tasks.json');
		// Use fixture to create initial empty tasks file
		const initialTasks = createTasksFile();
		fs.writeFileSync(tasksPath, JSON.stringify(initialTasks, null, 2));
	});

	afterEach(() => {
		try {
			// Restore to the original working directory captured at module load
			process.chdir(initialCwd);
		} catch {
			// Fallback to home directory if initial directory no longer exists
			process.chdir(os.homedir());
		}
		if (testDir && fs.existsSync(testDir)) {
			fs.rmSync(testDir, { recursive: true, force: true });
		}
		delete process.env.TASKMASTER_SKIP_AUTO_UPDATE;
	});

	// Read tasks.json back from disk to verify what the CLI persisted
	const readTasks = () => {
		const content = fs.readFileSync(tasksPath, 'utf-8');
		return JSON.parse(content);
	};

	// Overwrite the test project's tasks.json with the given structure
	const writeTasks = (tasksData: any) => {
		fs.writeFileSync(tasksPath, JSON.stringify(tasksData, null, 2));
	};

	// Run `task-master set-status`; unlike the read-only commands this is
	// NOT wrapped in try/catch — execSync throws on non-zero exit, which the
	// invalid-status test asserts via expect(...).toThrow()
	const runSetStatus = (id: number, status: string) => {
		return execSync(
			`node "${binPath}" set-status --id=${id} --status=${status}`,
			{
				stdio: 'pipe',
				env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
			}
		);
	};

	it('should update task status from pending to done', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Task to Complete',
					description: 'A task we will mark as done',
					status: 'pending',
					priority: 'high',
					details: 'Implementation details',
					testStrategy: 'Test strategy'
				})
			]
		});
		writeTasks(testData);
		runSetStatus(1, 'done');
		const tasks = readTasks();
		// '==' is deliberate: fixture ids are stored as strings ('1') while
		// the lookup uses a number
		const updatedTask = tasks.master.tasks.find((t: any) => t.id == 1);
		expect(updatedTask).toBeDefined();
		expect(updatedTask.status).toBe('done');
		expect(updatedTask.title).toBe('Task to Complete');
	});

	it('should handle multiple status changes in sequence', () => {
		const testData = createTasksFile({
			tasks: [createTask({ id: 1, title: 'Task', status: 'pending' })]
		});
		writeTasks(testData);
		runSetStatus(1, 'in-progress');
		let tasks = readTasks();
		expect(tasks.master.tasks[0].status).toBe('in-progress');
		runSetStatus(1, 'review');
		tasks = readTasks();
		expect(tasks.master.tasks[0].status).toBe('review');
		runSetStatus(1, 'done');
		tasks = readTasks();
		expect(tasks.master.tasks[0].status).toBe('done');
	});

	it('should reject invalid status values', () => {
		const testData = createTasksFile({
			tasks: [createTask({ id: 1, title: 'Task', status: 'pending' })]
		});
		writeTasks(testData);
		// execSync throws on non-zero exit, so an invalid status must throw
		expect(() => {
			runSetStatus(1, 'invalid');
		}).toThrow();
		// And the file on disk must be left untouched
		const tasks = readTasks();
		expect(tasks.master.tasks[0].status).toBe('pending');
	});

	it('should update status to all valid values', () => {
		const validStatuses = [
			'pending',
			'in-progress',
			'done',
			'review',
			'deferred',
			'cancelled'
		];
		// Reset the file before each status so every transition starts from
		// 'pending'
		for (const status of validStatuses) {
			const testData = createTasksFile({
				tasks: [createTask({ id: 1, title: 'Test', status: 'pending' })]
			});
			writeTasks(testData);
			runSetStatus(1, status);
			const tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe(status);
		}
	});

	it('should preserve other task fields when updating status', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Preserve Fields',
					description: 'Original description',
					status: 'pending',
					priority: 'high',
					dependencies: ['2'],
					details: 'Original details',
					testStrategy: 'Original strategy'
				})
			]
		});
		writeTasks(testData);
		runSetStatus(1, 'done');
		const tasks = readTasks();
		const task = tasks.master.tasks[0];
		expect(task.status).toBe('done');
		expect(task.title).toBe('Preserve Fields');
		expect(task.description).toBe('Original description');
		expect(task.priority).toBe('high');
		expect(task.dependencies).toEqual(['2']);
		expect(task.details).toBe('Original details');
		expect(task.testStrategy).toBe('Original strategy');
	});

	it('should handle multiple tasks correctly', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Task 1', status: 'pending' }),
				createTask({ id: 2, title: 'Task 2', status: 'pending' }),
				createTask({ id: 3, title: 'Task 3', status: 'pending' })
			]
		});
		writeTasks(testData);
		runSetStatus(2, 'done');
		const tasks = readTasks();
		expect(tasks.master.tasks[0].status).toBe('pending'); // Task 1 unchanged
		expect(tasks.master.tasks[1].status).toBe('done'); // Task 2 updated
		expect(tasks.master.tasks[2].status).toBe('pending'); // Task 3 unchanged
	});
});

View File

@@ -0,0 +1,260 @@
/**
* @fileoverview Integration tests for 'task-master show' command
*
* Tests the show command which displays detailed information about a specific task.
*
* @integration
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import {
createTask,
createTasksFile,
createSubtask
} from '../../fixtures/task-fixtures';
import { getCliBinPath } from '../../helpers/test-utils';
// Capture initial working directory at module load time
const initialCwd = process.cwd();

/**
 * Integration tests for `task-master show <id>`.
 *
 * Each test spawns the real CLI binary against an isolated temp project and
 * asserts on the human-readable output (presence of titles/statuses), since
 * `show` is a read-only command with no file-system side effects to verify.
 */
describe('show command', () => {
	let testDir: string;
	let tasksPath: string;
	let binPath: string;

	beforeEach(() => {
		// Fresh, isolated temp project per test — no cross-test pollution
		testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-show-test-'));
		process.chdir(testDir);
		// Disable auto-update checks for deterministic test timing
		process.env.TASKMASTER_SKIP_AUTO_UPDATE = '1';
		binPath = getCliBinPath();
		execSync(`node "${binPath}" init --yes`, {
			stdio: 'pipe',
			env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
		});
		tasksPath = path.join(testDir, '.taskmaster', 'tasks', 'tasks.json');
		// Use fixture to create initial empty tasks file
		const initialTasks = createTasksFile();
		fs.writeFileSync(tasksPath, JSON.stringify(initialTasks, null, 2));
	});

	afterEach(() => {
		try {
			// Restore to the original working directory captured at module load
			process.chdir(initialCwd);
		} catch {
			// Fallback to home directory if initial directory no longer exists
			process.chdir(os.homedir());
		}
		if (testDir && fs.existsSync(testDir)) {
			fs.rmSync(testDir, { recursive: true, force: true });
		}
		// NOTE(review): this unconditionally deletes the env var even if it was
		// set before the suite ran — confirm no outer environment depends on it.
		delete process.env.TASKMASTER_SKIP_AUTO_UPDATE;
	});

	/** Overwrites tasks.json with the given structure to simulate project state. */
	const writeTasks = (tasksData: any) => {
		fs.writeFileSync(tasksPath, JSON.stringify(tasksData, null, 2));
	};

	/**
	 * Runs `task-master show <taskId>` and normalizes the result to
	 * { output, exitCode }. On failure, stderr is preferred over stdout
	 * because that is where the CLI writes error messages.
	 */
	const runShow = (taskId: string): { output: string; exitCode: number } => {
		try {
			const output = execSync(`node "${binPath}" show ${taskId}`, {
				encoding: 'utf-8',
				stdio: 'pipe',
				env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
			});
			return { output, exitCode: 0 };
		} catch (error: any) {
			// For errors, prioritize stderr (where error messages go)
			return {
				output: error.stderr?.toString() || error.stdout?.toString() || '',
				exitCode: error.status || 1
			};
		}
	};

	// Every populated field of the task should appear in the rendered output
	it('should display complete task details', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Detailed Task',
					description: 'A comprehensive task description',
					status: 'pending',
					priority: 'high',
					details: 'Implementation details go here',
					testStrategy: 'Unit tests and integration tests'
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runShow('1');
		expect(exitCode).toBe(0);
		expect(output).toContain('Detailed Task');
		expect(output).toContain('A comprehensive task description');
		expect(output).toContain('pending');
		expect(output).toContain('high');
		expect(output).toContain('Implementation details');
		expect(output).toContain('Unit tests and integration tests');
	});

	// A task that depends on another should still render without errors
	it('should show task with dependencies', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'First Task', status: 'done' }),
				createTask({
					id: 2,
					title: 'Second Task',
					description: 'Depends on task 1',
					status: 'pending',
					dependencies: ['1']
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runShow('2');
		expect(exitCode).toBe(0);
		expect(output).toContain('Second Task');
		expect(output).toContain('Depends on task 1');
	});

	// Subtask titles should be included when showing the parent task
	it('should show task with subtasks', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Parent Task',
					description: 'Task with multiple subtasks',
					status: 'in-progress',
					subtasks: [
						createSubtask({
							id: '1.1',
							title: 'Setup Phase',
							description: 'Initial setup',
							status: 'done',
							parentId: '1'
						}),
						createSubtask({
							id: '1.2',
							title: 'Implementation Phase',
							description: 'Build feature',
							status: 'in-progress',
							dependencies: ['1.1'],
							parentId: '1'
						}),
						createSubtask({
							id: '1.3',
							title: 'Testing Phase',
							description: 'Write tests',
							status: 'pending',
							dependencies: ['1.2'],
							parentId: '1'
						})
					]
				})
			]
		});
		writeTasks(testData);
		const { output, exitCode } = runShow('1');
		expect(exitCode).toBe(0);
		expect(output).toContain('Parent Task');
		expect(output).toContain('Setup Phase');
		expect(output).toContain('Implementation Phase');
		expect(output).toContain('Testing Phase');
	});

	// A task created with only required fields should still render cleanly
	it('should show minimal task information', () => {
		const testData = createTasksFile({
			tasks: [createTask({ id: 1, title: 'Simple Task', status: 'pending' })]
		});
		writeTasks(testData);
		const { output, exitCode } = runShow('1');
		expect(exitCode).toBe(0);
		expect(output).toContain('Simple Task');
		expect(output).toContain('pending');
	});

	// `show` should succeed regardless of the task's status value
	it('should show task with all status types', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done Task', status: 'done' }),
				createTask({ id: 2, title: 'Pending Task', status: 'pending' }),
				createTask({ id: 3, title: 'In Progress', status: 'in-progress' }),
				createTask({ id: 4, title: 'Review Task', status: 'review' })
			]
		});
		writeTasks(testData);
		// Test each status
		let result = runShow('1');
		expect(result.exitCode).toBe(0);
		expect(result.output).toContain('Done Task');
		result = runShow('2');
		expect(result.exitCode).toBe(0);
		expect(result.output).toContain('Pending Task');
		result = runShow('3');
		expect(result.exitCode).toBe(0);
		expect(result.output).toContain('In Progress');
		result = runShow('4');
		expect(result.exitCode).toBe(0);
		expect(result.output).toContain('Review Task');
	});

	// Each priority level should be rendered alongside the task title
	it('should show task with priority levels', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'High Priority',
					status: 'pending',
					priority: 'high'
				}),
				createTask({
					id: 2,
					title: 'Medium Priority',
					status: 'pending',
					priority: 'medium'
				}),
				createTask({
					id: 3,
					title: 'Low Priority',
					status: 'pending',
					priority: 'low'
				})
			]
		});
		writeTasks(testData);
		let result = runShow('1');
		expect(result.output).toContain('High Priority');
		expect(result.output).toContain('high');
		result = runShow('2');
		expect(result.output).toContain('Medium Priority');
		expect(result.output).toContain('medium');
		result = runShow('3');
		expect(result.output).toContain('Low Priority');
		expect(result.output).toContain('low');
	});
});

View File

@@ -0,0 +1,414 @@
/**
* @fileoverview Integration tests for basic task lifecycle operations
*
* TESTING PHILOSOPHY:
* - These are TRUE integration tests - we spawn real CLI processes
* - We use real file system operations (temp directories)
* - We verify behavior by checking file system changes
* - We avoid mocking except for AI SDK to save costs
*
* WHY TEST FILE CHANGES INSTEAD OF CLI OUTPUT:
* - CLI output is formatted for humans (colors, boxes, tables)
* - File system changes are the source of truth
* - More stable - UI can change, but data format is stable
*
* @integration
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { getCliBinPath } from '../helpers/test-utils';
// Capture initial working directory at module load time
const initialCwd = process.cwd();

describe('Task Lifecycle Integration Tests', () => {
	let testDir: string;
	let tasksPath: string;
	let binPath: string;

	/**
	 * SETUP PATTERN:
	 * Before each test, we:
	 * 1. Create an isolated temp directory (no cross-test pollution)
	 * 2. Change into it (CLI commands run in this context)
	 * 3. Initialize a fresh Task Master project
	 * 4. Skip auto-updates for deterministic timing
	 */
	beforeEach(() => {
		// Create isolated test environment in OS temp directory
		testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-test-'));
		process.chdir(testDir);

		// Disable auto-update checks for deterministic test timing
		process.env.TASKMASTER_SKIP_AUTO_UPDATE = '1';

		// Path to the compiled CLI binary we're testing
		// Binary is built to root dist/ directory, not apps/cli/dist/
		binPath = getCliBinPath();

		// Initialize a fresh Task Master project
		execSync(`node "${binPath}" init --yes`, {
			stdio: 'pipe',
			env: {
				...process.env,
				TASKMASTER_SKIP_AUTO_UPDATE: '1'
			}
		});

		// Path where tasks.json will be stored
		tasksPath = path.join(testDir, '.taskmaster', 'tasks', 'tasks.json');

		// Create initial tasks.json (init doesn't create it until first task added)
		const initialTasks = {
			master: {
				tasks: [],
				metadata: {
					created: new Date().toISOString(),
					description: 'Test tasks'
				}
			}
		};
		fs.writeFileSync(tasksPath, JSON.stringify(initialTasks, null, 2));
	});

	/**
	 * CLEANUP PATTERN:
	 * After each test:
	 * 1. Change back to original directory (can't delete current dir)
	 * 2. Delete the temp directory recursively
	 * 3. Clean up environment variables
	 *
	 * WHY: Prevents "directory in use" errors and disk space leaks
	 */
	afterEach(() => {
		try {
			// Restore to the original working directory captured at module load
			process.chdir(initialCwd);
		} catch {
			// Fallback to home directory if initial directory no longer exists.
			// (Bare `catch` — the error itself is irrelevant here, and an unused
			// binding would trip no-unused-vars; matches the sibling suites.)
			process.chdir(os.homedir());
		}
		// Remove test directory and all contents
		if (testDir && fs.existsSync(testDir)) {
			fs.rmSync(testDir, { recursive: true, force: true });
		}
		// Clean up environment
		delete process.env.TASKMASTER_SKIP_AUTO_UPDATE;
	});

	/**
	 * TEST HELPER: Read tasks from tasks.json
	 *
	 * EDUCATIONAL NOTE:
	 * We read the actual file from disk, not mocked data.
	 * This validates that the CLI actually wrote what we expect.
	 */
	const readTasks = () => {
		const content = fs.readFileSync(tasksPath, 'utf-8');
		return JSON.parse(content);
	};

	/**
	 * TEST HELPER: Write tasks to tasks.json
	 *
	 * EDUCATIONAL NOTE:
	 * We manually create test data by writing to the real file system.
	 * This simulates different project states without AI calls.
	 */
	const writeTasks = (tasksData: any) => {
		fs.writeFileSync(tasksPath, JSON.stringify(tasksData, null, 2));
	};

	/**
	 * TEST HELPER: Run a CLI command with auto-update disabled
	 *
	 * EDUCATIONAL NOTE:
	 * This helper ensures TASKMASTER_SKIP_AUTO_UPDATE is always set,
	 * avoiding repetition and ensuring consistent test behavior.
	 */
	const runCommand = (command: string, options: any = {}) => {
		return execSync(`node "${binPath}" ${command}`, {
			...options,
			env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
		});
	};

	describe('task-master init', () => {
		it('should initialize project structure', () => {
			// ASSERTION PATTERN:
			// We verify the actual directory structure was created
			expect(fs.existsSync(path.join(testDir, '.taskmaster'))).toBe(true);
			expect(fs.existsSync(path.join(testDir, '.taskmaster', 'tasks'))).toBe(
				true
			);
			expect(fs.existsSync(path.join(testDir, '.taskmaster', 'docs'))).toBe(
				true
			);
			expect(fs.existsSync(path.join(testDir, '.taskmaster', 'reports'))).toBe(
				true
			);
			expect(
				fs.existsSync(path.join(testDir, '.taskmaster', 'config.json'))
			).toBe(true);
			expect(
				fs.existsSync(path.join(testDir, '.taskmaster', 'state.json'))
			).toBe(true);
		});
	});

	describe('task-master set-status', () => {
		it('should update task status from pending to done', () => {
			// ARRANGE: Create a pending task
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Task to Complete',
							description: 'A task we will mark as done',
							status: 'pending',
							priority: 'high',
							dependencies: [],
							details: 'Implementation details',
							testStrategy: 'Test strategy',
							subtasks: []
						}
					],
					metadata: {
						created: new Date().toISOString()
					}
				}
			};
			writeTasks(testData);

			// ACT: Mark task as done via CLI
			runCommand('set-status --id=1 --status=done', { stdio: 'pipe' });

			// ASSERT: Verify status was updated in actual file
			const tasks = readTasks();
			// Note: CLI may convert id from number to string
			const updatedTask = tasks.master.tasks.find((t: any) => t.id == 1); // == handles both number and string
			expect(updatedTask).toBeDefined();
			expect(updatedTask.status).toBe('done');
			expect(updatedTask.title).toBe('Task to Complete'); // Other fields unchanged
		});

		it('should update subtask status', () => {
			// ARRANGE: Create task with subtasks
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Parent Task',
							description: 'Parent task description',
							status: 'in-progress',
							priority: 'high',
							dependencies: [],
							details: 'Parent task details',
							testStrategy: 'Test strategy',
							subtasks: [
								{
									id: '1',
									title: 'First Subtask',
									description: 'Subtask to complete',
									status: 'pending',
									priority: 'medium',
									dependencies: [],
									details: 'Subtask details'
								},
								{
									id: '2',
									title: 'Second Subtask',
									description: 'Second subtask',
									status: 'pending',
									priority: 'medium',
									dependencies: ['1.1'],
									details: 'Second subtask details'
								}
							]
						}
					],
					metadata: {
						created: new Date().toISOString(),
						description: 'Test tasks'
					}
				}
			};
			writeTasks(testData);

			// ACT: Mark subtask as done (dotted id = parent.subtask)
			runCommand('set-status --id=1.1 --status=done', { stdio: 'pipe' });

			// ASSERT: Verify subtask status updated
			const tasks = readTasks();
			const parentTask = tasks.master.tasks.find((t: any) => t.id == 1);
			expect(parentTask).toBeDefined();
			const subtask = parentTask.subtasks.find((s: any) => s.id == 1);
			expect(subtask).toBeDefined();
			expect(subtask.status).toBe('done');
			// Verify other subtask unchanged
			const otherSubtask = parentTask.subtasks.find((s: any) => s.id == 2);
			expect(otherSubtask.status).toBe('pending');
		});

		it('should handle multiple status changes in sequence', () => {
			// ARRANGE: Create task
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Task',
							status: 'pending',
							dependencies: [],
							subtasks: []
						}
					],
					metadata: {
						created: new Date().toISOString()
					}
				}
			};
			writeTasks(testData);

			// ACT & ASSERT: Change status multiple times, re-reading the file
			// after each command to confirm every transition is persisted
			runCommand('set-status --id=1 --status=in-progress', { stdio: 'pipe' });
			let tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe('in-progress');

			runCommand('set-status --id=1 --status=review', { stdio: 'pipe' });
			tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe('review');

			runCommand('set-status --id=1 --status=done', { stdio: 'pipe' });
			tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe('done');
		});

		it('should reject invalid status values', () => {
			// ARRANGE: Create task
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Task',
							status: 'pending',
							dependencies: [],
							subtasks: []
						}
					],
					metadata: {
						created: new Date().toISOString()
					}
				}
			};
			writeTasks(testData);

			// ACT & ASSERT: Should throw on invalid status
			// (execSync throws when the CLI exits non-zero)
			expect(() => {
				runCommand('set-status --id=1 --status=invalid', { stdio: 'pipe' });
			}).toThrow();

			// Verify status unchanged
			const tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe('pending');
		});
	});

	/**
	 * EDUCATIONAL NOTE: Real-World Workflow Test
	 *
	 * This test demonstrates a realistic workflow:
	 * 1. Start with pending tasks
	 * 2. Mark them as in-progress
	 * 3. Complete them one by one
	 * 4. Verify final state
	 *
	 * This is the kind of flow a real developer would follow.
	 */
	describe('Realistic Task Workflow', () => {
		it('should support typical development workflow', () => {
			// ARRANGE: Create realistic project tasks
			const testData = {
				master: {
					tasks: [
						{
							id: 1,
							title: 'Setup Environment',
							description: 'Install dependencies and configure',
							status: 'pending',
							priority: 'high',
							dependencies: [],
							subtasks: []
						},
						{
							id: 2,
							title: 'Write Tests',
							description: 'Create test suite',
							status: 'pending',
							priority: 'high',
							dependencies: [1],
							subtasks: []
						},
						{
							id: 3,
							title: 'Implement Feature',
							description: 'Write actual code',
							status: 'pending',
							priority: 'medium',
							dependencies: [2],
							subtasks: []
						}
					],
					metadata: {
						created: new Date().toISOString(),
						description: 'Sample project'
					}
				}
			};
			writeTasks(testData);

			// ACT & ASSERT: Work through tasks in realistic order

			// Developer starts task 1
			runCommand('set-status --id=1 --status=in-progress');
			let tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe('in-progress');

			// Developer completes task 1
			runCommand('set-status --id=1 --status=done');
			tasks = readTasks();
			expect(tasks.master.tasks[0].status).toBe('done');

			// Developer starts task 2
			runCommand('set-status --id=2 --status=in-progress');
			tasks = readTasks();
			expect(tasks.master.tasks[1].status).toBe('in-progress');

			// Developer completes task 2
			runCommand('set-status --id=2 --status=done');
			tasks = readTasks();
			expect(tasks.master.tasks[1].status).toBe('done');

			// Developer starts and completes task 3
			runCommand('set-status --id=3 --status=in-progress');
			runCommand('set-status --id=3 --status=done');
			tasks = readTasks();
			expect(tasks.master.tasks[2].status).toBe('done');

			// Verify final state: all tasks done
			expect(tasks.master.tasks.every((t: any) => t.status === 'done')).toBe(
				true
			);
		});
	});
});

View File

@@ -1,25 +1,21 @@
import { defineConfig } from 'vitest/config';
import { defineConfig, mergeConfig } from 'vitest/config';
import rootConfig from '../../vitest.config';
export default defineConfig({
test: {
globals: true,
environment: 'node',
include: ['tests/**/*.test.ts', 'tests/**/*.spec.ts'],
coverage: {
provider: 'v8',
reporter: ['text', 'json', 'html'],
include: ['src/**/*.ts'],
exclude: [
'node_modules/',
'dist/',
'tests/',
'**/*.test.ts',
'**/*.spec.ts',
'**/*.d.ts',
'**/mocks/**',
'**/fixtures/**',
'vitest.config.ts'
/**
* CLI package Vitest configuration
* Extends root config with CLI-specific settings
*/
export default mergeConfig(
rootConfig,
defineConfig({
test: {
// CLI-specific test patterns
include: [
'tests/**/*.test.ts',
'tests/**/*.spec.ts',
'src/**/*.test.ts',
'src/**/*.spec.ts'
]
}
}
});
})
);

View File

@@ -23,14 +23,15 @@
},
"dependencies": {
"@tm/core": "*",
"zod": "^4.1.11",
"fastmcp": "^3.23.0"
"fastmcp": "^3.23.0",
"zod": "^4.1.11"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/node": "^22.10.5",
"@vitest/coverage-v8": "^4.0.10",
"typescript": "^5.9.2",
"vitest": "^3.2.4"
"vitest": "^4.0.10"
},
"engines": {
"node": ">=18.0.0"

1
apps/mcp/tests/fixtures/task-fixtures.ts vendored Symbolic link
View File

@@ -0,0 +1 @@
../../../cli/tests/fixtures/task-fixtures.ts

View File

@@ -0,0 +1,244 @@
/**
* @fileoverview Integration tests for get_tasks MCP tool
*
* Tests the get_tasks MCP tool using the MCP inspector CLI.
* This approach is simpler than a custom JSON-RPC client.
*
* @integration
*/
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { createTask, createTasksFile } from '../../fixtures/task-fixtures';
/**
 * Integration tests for the `get_tasks` MCP tool, driven through the
 * MCP inspector CLI rather than a hand-rolled JSON-RPC client.
 * Each test gets an isolated temp project initialized by the real CLI.
 */
describe('get_tasks MCP tool', () => {
	let testDir: string;
	let tasksPath: string;
	let cliPath: string;
	let mcpServerPath: string;

	beforeEach(() => {
		// Isolated temp project per test — no cross-test pollution
		testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tm-mcp-get-tasks-'));
		process.chdir(testDir);
		// Both binaries are built to the repo-root dist/ directory
		cliPath = path.resolve(__dirname, '../../../../../dist/task-master.js');
		mcpServerPath = path.resolve(
			__dirname,
			'../../../../../dist/mcp-server.js'
		);

		// Initialize Task Master in test directory
		execSync(`node "${cliPath}" init --yes`, {
			stdio: 'pipe',
			env: { ...process.env, TASKMASTER_SKIP_AUTO_UPDATE: '1' }
		});

		tasksPath = path.join(testDir, '.taskmaster', 'tasks', 'tasks.json');

		// Create initial empty tasks file using fixtures
		const initialTasks = createTasksFile();
		fs.writeFileSync(tasksPath, JSON.stringify(initialTasks, null, 2));
	});

	afterEach(() => {
		// Change back to original directory and cleanup
		try {
			const originalDir = path.resolve(__dirname, '../../../../..');
			process.chdir(originalDir);
		} catch {
			process.chdir(os.homedir());
		}
		if (testDir && fs.existsSync(testDir)) {
			fs.rmSync(testDir, { recursive: true, force: true });
		}
	});

	/** Overwrites tasks.json with the given structure to simulate project state. */
	const writeTasks = (tasksData: any) => {
		fs.writeFileSync(tasksPath, JSON.stringify(tasksData, null, 2));
	};

	/**
	 * Call an MCP tool using the inspector CLI and return the parsed payload.
	 * The inspector returns MCP protocol format:
	 *   { content: [{ type: "text", text: "<json>" }] }
	 *
	 * Each value is double-quoted so arguments containing spaces (e.g. a
	 * `projectRoot` temp path under a user directory with spaces) survive
	 * shell word-splitting instead of being broken into separate tokens.
	 */
	const callMCPTool = (toolName: string, args: Record<string, any>): any => {
		const toolArgs = Object.entries(args)
			.map(([key, value]) => `--tool-arg ${key}="${value}"`)
			.join(' ');
		const output = execSync(
			`npx @modelcontextprotocol/inspector --cli node "${mcpServerPath}" --method tools/call --tool-name ${toolName} ${toolArgs}`,
			{ encoding: 'utf-8', stdio: 'pipe' }
		);
		// Parse the MCP protocol response: { content: [{ type: "text", text: "<json>" }] }
		const mcpResponse = JSON.parse(output);
		const resultText = mcpResponse.content[0].text;
		return JSON.parse(resultText);
	};

	// 15s timeouts throughout: each call spawns npx + the MCP server process

	it('should return empty task list when no tasks exist', () => {
		const data = callMCPTool('get_tasks', { projectRoot: testDir });
		expect(data.data.tasks).toEqual([]);
		expect(data.data.stats.total).toBe(0);
		expect(data.tag).toBe('master');
	}, 15000);

	it('should get all tasks with correct information', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Setup Environment',
					description: 'Install and configure',
					status: 'done',
					priority: 'high'
				}),
				createTask({
					id: 2,
					title: 'Write Tests',
					description: 'Create test suite',
					status: 'in-progress',
					priority: 'high',
					dependencies: ['1']
				}),
				createTask({
					id: 3,
					title: 'Implement Feature',
					description: 'Build the thing',
					status: 'pending',
					priority: 'medium',
					dependencies: ['2']
				})
			]
		});
		writeTasks(testData);

		const data = callMCPTool('get_tasks', { projectRoot: testDir });
		expect(data.data.tasks).toHaveLength(3);
		expect(data.data.tasks[0].title).toBe('Setup Environment');
		expect(data.data.tasks[1].title).toBe('Write Tests');
		expect(data.data.tasks[2].title).toBe('Implement Feature');
		expect(data.data.stats.total).toBe(3);
		expect(data.data.stats.completed).toBe(1);
		expect(data.data.stats.inProgress).toBe(1);
		expect(data.data.stats.pending).toBe(1);
	}, 15000);

	it('should filter tasks by status', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done Task', status: 'done' }),
				createTask({ id: 2, title: 'Pending Task 1', status: 'pending' }),
				createTask({ id: 3, title: 'Pending Task 2', status: 'pending' }),
				createTask({ id: 4, title: 'In Progress', status: 'in-progress' })
			]
		});
		writeTasks(testData);

		const data = callMCPTool('get_tasks', {
			projectRoot: testDir,
			status: 'pending'
		});
		expect(data.data.tasks).toHaveLength(2);
		expect(data.data.tasks.every((t: any) => t.status === 'pending')).toBe(
			true
		);
	}, 15000);

	it('should include subtasks when requested', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({
					id: 1,
					title: 'Parent Task',
					status: 'in-progress',
					subtasks: [
						{
							id: '1.1',
							parentId: '1',
							title: 'First Subtask',
							description: 'First Subtask',
							status: 'done',
							priority: 'medium',
							dependencies: [],
							details: '',
							testStrategy: ''
						},
						{
							id: '1.2',
							parentId: '1',
							title: 'Second Subtask',
							description: 'Second Subtask',
							status: 'pending',
							priority: 'medium',
							dependencies: [],
							details: '',
							testStrategy: ''
						}
					]
				})
			]
		});
		writeTasks(testData);

		const data = callMCPTool('get_tasks', {
			projectRoot: testDir,
			withSubtasks: true
		});
		expect(data.data.tasks).toHaveLength(1);
		expect(data.data.tasks[0].subtasks).toHaveLength(2);
		expect(data.data.stats.subtasks.total).toBe(2);
		expect(data.data.stats.subtasks.completed).toBe(1);
		expect(data.data.stats.subtasks.pending).toBe(1);
	}, 15000);

	it('should calculate statistics correctly', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done 1', status: 'done' }),
				createTask({ id: 2, title: 'Done 2', status: 'done' }),
				createTask({ id: 3, title: 'Done 3', status: 'done' }),
				createTask({ id: 4, title: 'Pending', status: 'pending' })
			]
		});
		writeTasks(testData);

		const data = callMCPTool('get_tasks', { projectRoot: testDir });
		expect(data.data.stats.total).toBe(4);
		expect(data.data.stats.completed).toBe(3);
		expect(data.data.stats.pending).toBe(1);
		// 3 of 4 complete
		expect(data.data.stats.completionPercentage).toBe(75);
	}, 15000);

	it('should handle multiple status filters', () => {
		const testData = createTasksFile({
			tasks: [
				createTask({ id: 1, title: 'Done Task', status: 'done' }),
				createTask({ id: 2, title: 'Pending Task', status: 'pending' }),
				createTask({ id: 3, title: 'Blocked Task', status: 'blocked' }),
				createTask({ id: 4, title: 'In Progress', status: 'in-progress' })
			]
		});
		writeTasks(testData);

		// Comma-separated list selects the union of the given statuses
		const data = callMCPTool('get_tasks', {
			projectRoot: testDir,
			status: 'blocked,pending'
		});
		expect(data.data.tasks).toHaveLength(2);
		const statuses = data.data.tasks.map((t: any) => t.status);
		expect(statuses).toContain('pending');
		expect(statuses).toContain('blocked');
	}, 15000);
});

View File

@@ -1,23 +1,21 @@
import { defineConfig } from 'vitest/config';
import { defineConfig, mergeConfig } from 'vitest/config';
import rootConfig from '../../vitest.config';
export default defineConfig({
test: {
globals: true,
environment: 'node',
coverage: {
provider: 'v8',
reporter: ['text', 'json', 'html'],
exclude: [
'node_modules/',
'dist/',
'tests/',
'**/*.test.ts',
'**/*.spec.ts',
'**/*.d.ts',
'**/mocks/**',
'**/fixtures/**',
'vitest.config.ts'
/**
* MCP package Vitest configuration
* Extends root config with MCP-specific settings
*/
export default mergeConfig(
rootConfig,
defineConfig({
test: {
// MCP-specific test patterns
include: [
'tests/**/*.test.ts',
'tests/**/*.spec.ts',
'src/**/*.test.ts',
'src/**/*.spec.ts'
]
}
}
});
})
);

17587
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -147,6 +147,7 @@
"@types/fs-extra": "^11.0.4",
"@types/jest": "^29.5.14",
"@types/marked-terminal": "^6.1.1",
"@vitest/coverage-v8": "^4.0.10",
"concurrently": "^9.2.1",
"cross-env": "^10.0.0",
"execa": "^8.0.1",

View File

@@ -22,7 +22,7 @@
"devDependencies": {
"@types/node": "^22.18.6",
"typescript": "^5.9.2",
"vitest": "^3.2.4"
"vitest": "^4.0.10"
},
"engines": {
"node": ">=18"

View File

@@ -25,7 +25,7 @@
"devDependencies": {
"@types/node": "^22.10.5",
"typescript": "^5.9.2",
"vitest": "^3.2.4"
"vitest": "^4.0.10"
},
"files": ["src", "README.md"],
"keywords": ["temporary", "bridge", "migration"],

View File

@@ -40,10 +40,10 @@
"devDependencies": {
"@types/fs-extra": "^11.0.4",
"@types/node": "^22.10.5",
"@vitest/coverage-v8": "^3.2.4",
"@vitest/coverage-v8": "^4.0.10",
"strip-literal": "3.1.0",
"typescript": "^5.9.2",
"vitest": "^3.2.4"
"vitest": "^4.0.10"
},
"files": ["src", "README.md", "CHANGELOG.md"],
"keywords": ["task-management", "typescript", "ai", "prd", "parser"],

View File

@@ -0,0 +1,385 @@
/**
* @fileoverview Unit tests for TaskEntity validation
* Tests that validation errors are properly thrown with correct error codes
*/
import { describe, expect, it } from 'vitest';
import { TaskEntity } from './task.entity.js';
import { ERROR_CODES, TaskMasterError } from '../../../common/errors/task-master-error.js';
import type { Task } from '../../../common/types/index.js';
describe('TaskEntity', () => {
	/**
	 * Builds a structurally complete Task, then applies overrides.
	 * Keeps each test focused on the single field it actually exercises
	 * instead of repeating the full ten-field literal.
	 */
	const buildTask = (overrides: Partial<Task> = {}): Task => ({
		id: '1',
		title: 'Test Task',
		description: 'A test task',
		status: 'pending',
		priority: 'high',
		dependencies: [],
		details: '',
		testStrategy: '',
		subtasks: [],
		...overrides
	});

	/**
	 * Asserts that constructing a TaskEntity from `input` throws a
	 * TaskMasterError with code VALIDATION_ERROR whose message contains
	 * `expectedMessage`. Constructs twice: once for the toThrow matcher,
	 * once to inspect the error's code and message.
	 */
	const expectValidationError = (input: any, expectedMessage: string): void => {
		expect(() => new TaskEntity(input)).toThrow(TaskMasterError);
		try {
			new TaskEntity(input);
			expect.fail('Should have thrown an error');
		} catch (error: any) {
			expect(error).toBeInstanceOf(TaskMasterError);
			expect(error.code).toBe(ERROR_CODES.VALIDATION_ERROR);
			expect(error.message).toContain(expectedMessage);
		}
	};

	describe('validation', () => {
		it('should create a valid task entity', () => {
			const validTask = buildTask({
				description: 'A valid test task',
				details: 'Some details',
				testStrategy: 'Unit tests'
			});
			const entity = new TaskEntity(validTask);
			expect(entity.id).toBe('1');
			expect(entity.title).toBe('Test Task');
			expect(entity.description).toBe('A valid test task');
			expect(entity.status).toBe('pending');
			expect(entity.priority).toBe('high');
		});

		it('should throw VALIDATION_ERROR when id is missing', () => {
			// Remove `id` entirely (not just emptied) to hit the missing-field path
			const invalidTask: any = buildTask();
			delete invalidTask.id;
			expectValidationError(invalidTask, 'Task ID is required');
		});

		it('should throw VALIDATION_ERROR when title is missing', () => {
			expectValidationError(buildTask({ title: '' }), 'Task title is required');
		});

		it('should throw VALIDATION_ERROR when description is missing', () => {
			expectValidationError(
				buildTask({ description: '' }),
				'Task description is required'
			);
		});

		it('should throw VALIDATION_ERROR when title is only whitespace', () => {
			// Whitespace-only must be rejected the same as empty (trimmed check)
			expectValidationError(
				buildTask({ title: '   ' }),
				'Task title is required'
			);
		});

		it('should throw VALIDATION_ERROR when description is only whitespace', () => {
			expectValidationError(
				buildTask({ description: '   ' }),
				'Task description is required'
			);
		});

		it('should convert numeric id to string', () => {
			const entity = new TaskEntity(buildTask({ id: 123 } as any));
			expect(entity.id).toBe('123');
			expect(typeof entity.id).toBe('string');
		});

		it('should convert dependency ids to strings', () => {
			const entity = new TaskEntity(
				buildTask({ dependencies: [1, 2, '3'] as any })
			);
			expect(entity.dependencies).toEqual(['1', '2', '3']);
			entity.dependencies.forEach((dep) => {
				expect(typeof dep).toBe('string');
			});
		});

		it('should normalize subtask ids to strings for parent and numbers for subtask', () => {
			const taskWithSubtasks = buildTask({
				title: 'Parent Task',
				description: 'A parent task',
				subtasks: [
					{
						id: '1' as any,
						parentId: '1',
						title: 'Subtask 1',
						description: 'First subtask',
						status: 'pending',
						priority: 'medium',
						dependencies: [],
						details: '',
						testStrategy: ''
					},
					{
						id: 2 as any,
						parentId: 1 as any,
						title: 'Subtask 2',
						description: 'Second subtask',
						status: 'pending',
						priority: 'medium',
						dependencies: [],
						details: '',
						testStrategy: ''
					}
				]
			});
			const entity = new TaskEntity(taskWithSubtasks);
			// Subtask ids normalize to numbers; parent references to strings
			expect(entity.subtasks[0].id).toBe(1);
			expect(typeof entity.subtasks[0].id).toBe('number');
			expect(entity.subtasks[0].parentId).toBe('1');
			expect(typeof entity.subtasks[0].parentId).toBe('string');
			expect(entity.subtasks[1].id).toBe(2);
			expect(typeof entity.subtasks[1].id).toBe('number');
			expect(entity.subtasks[1].parentId).toBe('1');
			expect(typeof entity.subtasks[1].parentId).toBe('string');
		});
	});

	describe('fromObject', () => {
		it('should create TaskEntity from plain object', () => {
			const entity = TaskEntity.fromObject(buildTask());
			expect(entity).toBeInstanceOf(TaskEntity);
			expect(entity.id).toBe('1');
			expect(entity.title).toBe('Test Task');
		});

		it('should throw validation error for invalid object', () => {
			expect(() => TaskEntity.fromObject(buildTask({ title: '' }))).toThrow(
				TaskMasterError
			);
		});
	});

	describe('fromArray', () => {
		it('should create array of TaskEntities from plain objects', () => {
			const plainTasks: Task[] = [
				buildTask({ id: '1', title: 'Task 1', description: 'First task' }),
				buildTask({
					id: '2',
					title: 'Task 2',
					description: 'Second task',
					status: 'in-progress',
					priority: 'medium'
				})
			];
			const entities = TaskEntity.fromArray(plainTasks);
			expect(entities).toHaveLength(2);
			expect(entities[0]).toBeInstanceOf(TaskEntity);
			expect(entities[1]).toBeInstanceOf(TaskEntity);
			expect(entities[0].id).toBe('1');
			expect(entities[1].id).toBe('2');
		});

		it('should throw validation error if any task is invalid', () => {
			const tasksWithInvalid: Task[] = [
				buildTask({ id: '1', title: 'Valid Task', description: 'First task' }),
				// Invalid - missing description
				buildTask({ id: '2', title: 'Invalid Task', description: '' })
			];
			expect(() => TaskEntity.fromArray(tasksWithInvalid)).toThrow(
				TaskMasterError
			);
		});
	});

	describe('toJSON', () => {
		it('should convert TaskEntity to plain object', () => {
			const taskData = buildTask({
				dependencies: ['2', '3'],
				details: 'Some details',
				testStrategy: 'Unit tests'
			});
			const entity = new TaskEntity(taskData);
			// Round-trip: serialized form matches the input shape exactly
			expect(entity.toJSON()).toEqual({
				id: '1',
				title: 'Test Task',
				description: 'A test task',
				status: 'pending',
				priority: 'high',
				dependencies: ['2', '3'],
				details: 'Some details',
				testStrategy: 'Unit tests',
				subtasks: []
			});
		});
	});
});

View File

@@ -170,16 +170,13 @@ export class TaskService {
storageType
};
} catch (error) {
// If it's a user-facing error (like NO_BRIEF_SELECTED), don't log it as an internal error
if (
error instanceof TaskMasterError &&
error.is(ERROR_CODES.NO_BRIEF_SELECTED)
) {
// Just re-throw user-facing errors without wrapping
// Re-throw all TaskMasterErrors without wrapping
// These errors are already user-friendly and have appropriate error codes
if (error instanceof TaskMasterError) {
throw error;
}
// Log internal errors
// Only wrap unknown errors
this.logger.error('Failed to get task list', error);
throw new TaskMasterError(
'Failed to get task list',

View File

@@ -1,60 +1,57 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { defineConfig } from 'vitest/config';
import { defineConfig, mergeConfig } from 'vitest/config';
import rootConfig from '../../vitest.config';
// __dirname in ESM
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
export default defineConfig({
test: {
globals: true,
environment: 'node',
include: [
'tests/**/*.test.ts',
'tests/**/*.spec.ts',
'tests/{unit,integration,e2e}/**/*.{test,spec}.ts',
'src/**/*.test.ts',
'src/**/*.spec.ts'
],
exclude: ['node_modules', 'dist', '.git', '.cache'],
coverage: {
provider: 'v8',
reporter: ['text', 'json', 'html', 'lcov'],
exclude: [
'node_modules/',
'dist/',
'tests/',
'**/*.test.ts',
'**/*.spec.ts',
'**/*.d.ts',
'**/mocks/**',
'**/fixtures/**',
'vitest.config.ts',
'src/index.ts'
/**
* Core package Vitest configuration
* Extends root config with core-specific settings including:
* - Path aliases for cleaner imports
* - Test setup file
* - Higher coverage thresholds (80%)
*/
export default mergeConfig(
rootConfig,
defineConfig({
test: {
// Core-specific test patterns
include: [
'tests/**/*.test.ts',
'tests/**/*.spec.ts',
'tests/{unit,integration,e2e}/**/*.{test,spec}.ts',
'src/**/*.test.ts',
'src/**/*.spec.ts'
],
thresholds: {
branches: 80,
functions: 80,
lines: 80,
statements: 80
// Core-specific setup
setupFiles: ['./tests/setup.ts'],
// Higher coverage thresholds for core package
coverage: {
thresholds: {
branches: 80,
functions: 80,
lines: 80,
statements: 80
}
}
},
setupFiles: ['./tests/setup.ts'],
testTimeout: 10000,
clearMocks: true,
restoreMocks: true,
mockReset: true
},
resolve: {
alias: {
'@': path.resolve(__dirname, './src'),
'@/types': path.resolve(__dirname, './src/types'),
'@/providers': path.resolve(__dirname, './src/providers'),
'@/storage': path.resolve(__dirname, './src/storage'),
'@/parser': path.resolve(__dirname, './src/parser'),
'@/utils': path.resolve(__dirname, './src/utils'),
'@/errors': path.resolve(__dirname, './src/errors')
// Path aliases for cleaner imports
resolve: {
alias: {
'@': path.resolve(__dirname, './src'),
'@/types': path.resolve(__dirname, './src/types'),
'@/providers': path.resolve(__dirname, './src/providers'),
'@/storage': path.resolve(__dirname, './src/storage'),
'@/parser': path.resolve(__dirname, './src/parser'),
'@/utils': path.resolve(__dirname, './src/utils'),
'@/errors': path.resolve(__dirname, './src/errors')
}
}
}
});
})
);

61
vitest.config.ts Normal file
View File

@@ -0,0 +1,61 @@
import { defineConfig } from 'vitest/config';
/**
 * Root Vitest configuration for the Task Master monorepo.
 * Provides shared defaults for all packages; individual packages can
 * extend this config with package-specific settings.
 */

// Glob patterns used to discover test files in every package.
const TEST_FILE_PATTERNS = [
	'tests/**/*.test.ts',
	'tests/**/*.spec.ts',
	'src/**/*.test.ts',
	'src/**/*.spec.ts'
];

// Paths that must never be instrumented for coverage.
const COVERAGE_EXCLUDES = [
	'node_modules/',
	'dist/',
	'tests/',
	'**/*.test.ts',
	'**/*.spec.ts',
	'**/*.d.ts',
	'**/mocks/**',
	'**/fixtures/**',
	'**/types/**',
	'vitest.config.ts',
	'src/index.ts'
];

export default defineConfig({
	test: {
		// Expose describe/it/expect as globals so tests need no explicit imports.
		globals: true,
		// Every package runs under Node.js unless it overrides this.
		environment: 'node',
		include: TEST_FILE_PATTERNS,
		exclude: ['node_modules', 'dist', '.git', '.cache', '**/node_modules/**'],
		coverage: {
			provider: 'v8',
			enabled: true,
			reporter: ['text', 'json', 'html'],
			include: ['src/**/*.ts'],
			exclude: COVERAGE_EXCLUDES,
			// Baseline thresholds; individual packages may raise them.
			thresholds: {
				branches: 70,
				functions: 70,
				lines: 70,
				statements: 70
			}
		},
		// Fail any single test that runs longer than 10 seconds.
		testTimeout: 10000,
		// Reset all mock state between tests so they stay independent.
		clearMocks: true,
		restoreMocks: true,
		mockReset: true
	}
});